Commit 67db2960 authored by Jan Möbius

Revert last ... too much

git-svn-id: http://www.openflipper.org/svnrepo/OpenFlipper/branches/Free@13656 383ad7c9-94d9-4d36-a494-682f7c89f535
parent 3f706d9a
// compareTool: compares a test result file against a reference file
#include <QFile>
#include <QString>
#include <QStringList>
#include <QSettings>
#include <QFileInfo>
#include <QTextStream>
#include <iostream>
#include <math.h>
#include <limits>
/* Test File specification:
Doubles can be given as:
Exact test:
VALUE_DOUBLE=1.0
Tolerance test (with tolerance 0.0001):
VALUE_DOUBLE=1.0,0.0001
*/
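// ---- Editor's sketch (illustrative, not part of this commit) -------------
// The collapsed hunk below derives `referenceData` from `_reference`.
// QSettings reports a comma-separated INI value as a string list, so the
// parsing plausibly resembles this hypothetical helper:
static void parseReference(const QVariant& _reference, double& _value, double& _tolerance) {
  QStringList referenceData = _reference.toStringList(); // "1.0,0.0001" -> ["1.0", "0.0001"]
  _value = referenceData[0].toDouble();                  // expected reference value
  if ( referenceData.size() > 1 )
    _tolerance = referenceData[1].toDouble();            // optional tolerance override
}
// ---------------------------------------------------------------------------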
bool compareDouble(QString _key , QVariant _result, QVariant _reference) {
std::cerr << "comparing Double" << std::endl;
// maximal allowed double tolerance
- double tolerance = std::numeric_limits<int>::epsilon();
+ double tolerance = 0.0;
// Check if the reference consists of two comma-separated values
// The second value specifies the maximal allowed tolerance
@@ -40,34 +28,29 @@ bool compareDouble(QString _key , QVariant _result, QVariant _reference) {
double reference = referenceData[0].toDouble();
if ( fabs(result-reference) <= tolerance ) {
std::cerr << "Compared " << result << " " << reference << " " << " " << tolerance << " ok" << std::endl;
return true;
} else {
- QTextStream cerr(stderr, QIODevice::WriteOnly);
- cerr.setRealNumberPrecision(40);
- cerr << "===================================================================\n";
- cerr << "Comparison failed for key " << _key << " :\n";
- cerr << "Result: " << result << "\n";
- cerr << "Expected: " << reference << "\n";
- cerr << "Difference: " << fabs(result-reference) << "\n";
- cerr << "Allowed tolerance was: " << tolerance << "\n";
- cerr << "===================================================================\n";
+ std::cerr << "Comparison failed for key " << _key.toStdString() << " :" << std::endl;
+ std::cerr << "Result: " << result << " ; Expected: " << reference << std::endl;
+ std::cerr << "Difference: " << fabs(result-reference) << " allowed tolerance was: " << tolerance << std::endl;
return false;
}
}
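// Editor's note (illustrative, not part of the diff): main() below feeds the
// compare functions QVariants read directly from the two QSettings files,
// e.g. compareDouble( "VALUE_DOUBLE",
//                     resultFile.value("VALUE_DOUBLE"),
//                     referenceFile.value("VALUE_DOUBLE") );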
bool compareString(QString _key ,QVariant _result, QVariant _reference) {
std::cerr << "comparing String" << std::endl;
QString resultStr = _result.toString().simplified();
QString resultRef = _reference.toString().simplified();
if (resultStr == resultRef ) {
std::cerr << "Comparison ok for key " << _key.toStdString() << " :" << resultStr.toStdString() << " ; " << resultRef.toStdString()<< std::endl;
return true;
} else {
- QTextStream cerr(stderr, QIODevice::WriteOnly);
- cerr << "Comparison failed for key " << _key << " :\n";
- cerr << "Result: " << resultStr << " ; Expected: " << resultRef << "\n";
+ std::cerr << "Comparison failed for key " << _key.toStdString() << " :" << std::endl;
+ std::cerr << "Result: " << resultStr.toStdString() << " ; Expected: " << resultRef.toStdString() << std::endl;
return false;
}
@@ -75,10 +58,6 @@ bool compareString(QString _key ,QVariant _result, QVariant _reference) {
int main(int argv, char **args)
{
- QTextStream cout(stdout, QIODevice::WriteOnly);
- QTextStream cerr(stderr, QIODevice::WriteOnly);
- cout << "Comparing results to reference:\n" ;
// Flag if everything went fine
bool ok = true;
@@ -88,44 +67,23 @@ int main(int argv, char **args)
// Return if we did not get exactly two arguments
if ( argv != 3 ) {
cerr << "Wrong number of arguments!\n";
cerr << "Usage:\n";
cerr << "compareTool ResultFile ReferenceFile\n";
std::cerr << "Wrong number of arguments!" << std::endl;
std::cerr << "Usage:" << std::endl;
std::cerr << "compareTool ResultFile ReferenceFile" << std::endl;
return(1);
}
QString file1(args[1]);
QString file2(args[2]);
- QFileInfo resultFileInfo(file1);
- if ( !resultFileInfo.exists() ) {
- cerr << "Result file: " << file1 << " does not exist!\n";
- return 1;
- }
- QFileInfo referenceFileInfo(file2);
- if ( !referenceFileInfo.exists() ) {
- cerr << "Reference file: " << file2 << " does not exist!\n";
- return 1;
- }
QSettings resultFile(file1,QSettings::IniFormat);
QSettings referenceFile(file2,QSettings::IniFormat);
- if ( resultFile.status() != QSettings::NoError) {
- cerr << "QSettings error when opening result file: " << file1 << "\n";
- return 1;
- }
- if ( referenceFile.status() != QSettings::NoError) {
- cerr << "QSettings error when opening reference file: " << file2 << "\n";
- return 1;
- }
QStringList toplevelKeys = referenceFile.childKeys();
QStringList groups = referenceFile.childGroups();
if ( groups.size() == 0 ) {
std::cerr << "One level only" << std::endl;
for ( int i = 0 ; i < toplevelKeys.size(); ++i) {
if ( resultFile.contains(toplevelKeys[i]) ) {
if ( toplevelKeys[i].endsWith("_DOUBLE") ) {
@@ -133,7 +91,7 @@ int main(int argv, char **args)
} else
compareString( toplevelKeys[i],resultFile.value(toplevelKeys[i]), referenceFile.value(toplevelKeys[i]));
} else {
cerr << "Missing key in result file: " << toplevelKeys[i] << "\n";
std::cerr << "Missing key in result file: " << toplevelKeys[i].toStdString() << std::endl;
ok = false;
}
@@ -141,13 +99,12 @@ int main(int argv, char **args)
} else {
cerr << "Multiple levels!" << "\n";
return 1;
std::cerr << "Multiple levels!" << std::endl;
}
if ( ! ok ) {
cerr << "At least one of the tests failed!\n";
std::cerr << "Test failed!" << std::endl;
return 1;
}
@@ -9,8 +9,8 @@ var componentCount = infomeshobject.componentCount(file)
var groupCount = datacontrol.groupCount();
printToFile("@OUTPUT_TEST_DATA_FILE@","TESTNAME=@TESTNAME@");
printToFile("@OUTPUT_TEST_DATA_FILE@","LOADEDFILE=@OPENFLIPPER_TEST_FILES@/@TEST_FILE@");
printToFile("@OUTPUT_TEST_DATA_FILE@","Test: @TESTNAME@");
printToFile("@OUTPUT_TEST_DATA_FILE@","Loaded File: @OPENFLIPPER_TEST_FILES@/@TEST_FILE@");
printToFile("@OUTPUT_TEST_DATA_FILE@","OBJECTNAME=",name);
printToFile("@OUTPUT_TEST_DATA_FILE@","VERTEXCOUNT=",vertexCount);
printToFile("@OUTPUT_TEST_DATA_FILE@","EDGECOUNT=",edgeCount);
@@ -7,8 +7,8 @@ var branchCount = infoskeletonobject.branchCount(file)
var leafCount = infoskeletonobject.leafCount(file);
printToFile("@OUTPUT_TEST_DATA_FILE@","TEST=@TESTNAME@");
printToFile("@OUTPUT_TEST_DATA_FILE@","LOADEDFILE=@OPENFLIPPER_TEST_FILES@/@TEST_FILE@");
printToFile("@OUTPUT_TEST_DATA_FILE@","Test: @TESTNAME@");
printToFile("@OUTPUT_TEST_DATA_FILE@","Loaded File: @OPENFLIPPER_TEST_FILES@/@TEST_FILE@");
printToFile("@OUTPUT_TEST_DATA_FILE@","OBJECTNAME=",name);
printToFile("@OUTPUT_TEST_DATA_FILE@","GROUPCOUNT=",groupCount);
printToFile("@OUTPUT_TEST_DATA_FILE@","BRANCHCOUNT=",branchCount);
@@ -11,14 +11,9 @@ endif( NOT output_test )
# output_test contains the info about the loaded file
if( NOT test_file_info )
- message( FATAL_ERROR "Variable test_file_info not defined" )
+ message( FATAL_ERROR "Variable test_file_info= not defined" )
endif( NOT test_file_info )
- if( NOT result_checker )
- message( FATAL_ERROR "Variable result_checker not defined" )
- endif( NOT result_checker )
# convert the space-separated string to a list
separate_arguments( test_args )
message( ${test_args} )
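# Editor's note (behavioral aside, not part of the diff): separate_arguments
# splits the space-separated string in place into a CMake list, e.g.
#   "-c -b /path/to/script.ofs"  becomes  "-c;-b;/path/to/script.ofs"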
@@ -40,24 +35,62 @@ if ( NOT ${PROCESSRESULT} EQUAL 0 )
endif()
# ===================================================
- # Compare the two files
+ # Collect list of what should be tested
# ===================================================
- if ( WIN32 )
- set(result_checker "${result_checker}.exe")
- endif()
+ # Read the info file
+ file (STRINGS ${test_file_info} FILEINFO)
- # Timeout after 2 minutes
- execute_process(
- COMMAND ${result_checker} ${output_test} ${test_file_info}
- TIMEOUT 60
- RESULT_VARIABLE PROCESSRESULT
- )
+ #Create a list of properties we want to test
+ foreach(loop ${FILEINFO})
+ string ( REGEX REPLACE "=.*$" "" TMP ${loop} )
+ list( APPEND CHECKS ${TMP})
+ endforeach()
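# Editor's note (worked example, not part of the diff): for an info-file line
# such as "VERTEXCOUNT=8", the REGEX REPLACE above strips the "=8" suffix, so
# the bare key "VERTEXCOUNT" is appended to CHECKS.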
- if ( NOT ${PROCESSRESULT} EQUAL 0 )
- message(SEND_ERROR "Compare Tool execution failed!")
- set(test_not_successful TRUE)
- endif()
+ # ===================================================
+ # Read the files
+ # ===================================================
+ # read the test output
+ file (READ ${output_test} TESTOUTPUT)
+ # read the reference info file
+ file (READ ${test_file_info} INFOFILE)
+ # ===================================================
+ # Compare
+ # ===================================================
+ # Now go through all checks:
+ foreach(loop ${CHECKS})
+ #Get value from results:
+ string (
+ REGEX REPLACE
+ "^.*${loop}=([^\n]*).*$" "\\1"
+ RESULT ${TESTOUTPUT}
+ )
+ # remove whitespace
+ string(STRIP ${RESULT} RESULT)
+ #Get value from Info:
+ string (
+ REGEX REPLACE
+ "^.*${loop}=([^\n]*).*$" "\\1"
+ EXPECTED ${INFOFILE}
+ )
+ # remove whitespace
+ string(STRIP ${EXPECTED} EXPECTED)
+ # Use string compare here
+ if ( NOT ${EXPECTED} STREQUAL ${RESULT} )
+ message(WARNING "Mismatching values for ${loop}: EXPECTED ${EXPECTED} but got ${RESULT}!")
+ set(test_not_successful true)
+ endif()
+ endforeach()
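# Editor's note (worked example, not part of the diff): with loop=VERTEXCOUNT,
# a TESTOUTPUT containing the line "VERTEXCOUNT=8" captures RESULT as "8"; if
# the info file carries the same line, EXPECTED is also "8", the STREQUAL
# check passes, and no warning is issued.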
if( test_not_successful )
message( SEND_ERROR "Test Failed! See messages above to see what went wrong!" )
@@ -52,7 +52,6 @@ function( run_single_object_file_mesh_test FILEPLUGIN TEST_FILE TEST_SCRIPT )
-D test_args:string=${args}
-D output_test=${OUTPUT_TEST_DATA_FILE}
-D test_file_info=${TEST_FILE_INFO}
- -D result_checker=${CMAKE_BINARY_DIR}/tests/compareTool
-P ${CMAKE_SOURCE_DIR}/tests/run_file_test.cmake
)
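# Editor's note (not part of the diff): add_test here runs CMake itself in
# script mode, so the configured variables reach run_file_test.cmake as -D
# definitions, roughly:
#   cmake -D test_cmd=<OpenFlipper> -D test_args:string="-c -b <script>"
#         -D output_test=<result file> -D test_file_info=<info file>
#         -P tests/run_file_test.cmake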
@@ -66,130 +65,4 @@ function( run_single_object_file_mesh_test FILEPLUGIN TEST_FILE TEST_SCRIPT )
PROCESSORS 1
)
endfunction()
- # This function generates a test for algorithmic plugins.
- #
- # The script name that is used is the first parameter. It has to be inside the plugin's test directory (e.g. Plugin-Smoother/tests/script.ofs)
- # It will be configured such that the following variables get replaced automatically:
- #
- # @OPENFLIPPER_TEST_INPUT_FILE@ : The file that should be loaded to run the algorithm on
- # @OPENFLIPPER_TEST_REFERENCE_FILE@ : The reference that will be used for the compare operation
- # @OUTPUT_TEST_DATA_FILE@ : The file that the results will be written to
- #
- # The second parameter is the name of the input file that will be loaded from the TestData/Plugin-<Name> directory.
- #
- # The third parameter is a reference file that is used to compare the algorithm's output with. An info file with the
- # name of this reference file has to exist, specifying the expected results of the algorithm and the compare that is
- # performed by the script.
- #
- #
- function( run_algorithm_test TEST_SCRIPT INPUT_FILE INPUT_REFERENCE )
- #Get the plugin name:
- string (TOUPPER ${_plugin_dir} PLUGIN_DIR)
- # check if we have the testData directory
- if ( NOT EXISTS ${CMAKE_SOURCE_DIR}/TestData)
- return()
- endif()
- # Check if we find the script file
- if ( NOT EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${TEST_SCRIPT} )
- message("No Script File: ${CMAKE_CURRENT_SOURCE_DIR}/${TEST_SCRIPT} ")
- return()
- endif()
- # Check if we find the input file
- if ( NOT EXISTS ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_FILE} )
- message("No Input File: ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_FILE} ")
- return()
- endif()
- set (OPENFLIPPER_TEST_INPUT_FILE "${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_FILE}")
- # Check if we find the reference file
- if ( NOT EXISTS ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE} )
- message("No Reference File ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE} ")
- return()
- endif()
- set (OPENFLIPPER_TEST_REFERENCE_FILE "${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE}")
- # Check if we find the reference result file
- if ( NOT EXISTS ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE}.info )
- message("No Result info File ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE}.info ")
- return()
- endif()
- # construct the testname from target test file and the plugin directory we are in
- set (TESTNAME "${PLUGIN_DIR}-${TEST_SCRIPT}-${INPUT_FILE}")
- # update the output file to be inside the results directory
- set (OUTPUT_TEST_DATA_FILE "${OPENFLIPPER_TEST_RESULTS}/${TESTNAME}-result.txt" )
- # set file containing the information about the file to be checked
- set (TEST_FILE_INFO ${CMAKE_SOURCE_DIR}/TestData/${_plugin_dir}/${INPUT_REFERENCE}.info )
- # Create the test scripts directory if it does not exist
- if (NOT EXISTS ${OPENFLIPPER_TEST_FILES}/${_plugin_dir})
- FILE(MAKE_DIRECTORY ${OPENFLIPPER_TEST_FILES}/${_plugin_dir} )
- endif()
- # Configure the test script from the current directory with the given filenames and variables into the test directory
- configure_file( ${CMAKE_SOURCE_DIR}/${_plugin_dir}/tests/${TEST_SCRIPT}
- ${OPENFLIPPER_TEST_FILES}/${_plugin_dir}/${TEST_FILE} )
- # Set the filename and path for the configured script
- set(TESTSCRIPTNAME "${OPENFLIPPER_TEST_FILES}/${_plugin_dir}/${TEST_SCRIPT}" )
- # Execute the script with OpenFlipper and then run the result parser, which checks for correct values.
- set( test_cmd ${OPENFLIPPER_EXECUTABLE} )
- set( args "-c -b ${TESTSCRIPTNAME}" )
- add_test( ${TESTNAME}
- ${CMAKE_COMMAND}
- -D test_cmd=${test_cmd}
- -D test_args:string=${args}
- -D output_test=${OUTPUT_TEST_DATA_FILE}
- -D test_file_info=${TEST_FILE_INFO}
- -D result_checker=${CMAKE_BINARY_DIR}/tests/compareTool
- -P ${CMAKE_SOURCE_DIR}/tests/run_file_test.cmake
- )
- # Timeout after 2 minutes if we have an endless loop
- # Should be run serially to avoid collisions with other instances
- # Only one processor required
- set_tests_properties (
- ${TESTNAME} PROPERTIES
- TIMEOUT 120
- RUN_SERIAL TRUE
- PROCESSORS 1
- )
- endfunction()
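# Editor's sketch (hypothetical usage, not part of this commit): the function
# documented above would be invoked from a plugin's tests/CMakeLists.txt,
# along the lines of
#   run_algorithm_test( smoother.ofs cube.obj cube_smoothed.obj )
# which expects TestData/<plugin-dir>/cube.obj, cube_smoothed.obj and
# cube_smoothed.obj.info to exist (all file names here are illustrative).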