Move printing sub-test information into test harness (#421)

This removes all the duplicated code from each test, and moves it to
the test harness so that we have a single place where this information
is printed.

Signed-off-by: Radek Szymanski <radek.szymanski@arm.com>
This commit is contained in:
Radek Szymanski
2019-08-05 15:16:12 +01:00
committed by Kévin Petit
parent 19951a2a14
commit 03650057bb
26 changed files with 87 additions and 184 deletions

View File

@@ -22,8 +22,6 @@
#include <malloc.h>
#endif
int gTestCount = 0;
int gTestFailure = 0;
RoundingMode gFloatToHalfRoundingMode = kDefaultRoundingMode;
static cl_ushort float2half_rte( float f );
@@ -3719,8 +3717,6 @@ bool check_minimum_supported( cl_image_format *formatList, unsigned int numForma
{
log_error( "ERROR: Format required by OpenCL 1.0 is not supported: " );
print_header( &formatsToTest[ i ], true );
gTestCount++;
gTestFailure++;
passed = false;
}
}

View File

@@ -40,8 +40,6 @@
#include "rounding_mode.h"
#include "clImageHelper.h"
extern int gTestCount;
extern int gTestFailure;
extern cl_device_type gDeviceType;
// Number of iterations per image format to test if not testing max images, rounding, or small images

View File

@@ -40,6 +40,8 @@
int gTestsPassed = 0;
int gTestsFailed = 0;
int gFailCount;
int gTestCount;
cl_uint gRandomSeed = 0;
cl_uint gReSeed = 0;
@@ -568,6 +570,37 @@ static int saveResultsToJson( const char *fileName, const char *suiteName, test_
return ret;
}
/* Print a one-line PASSED/FAILED summary for a category of results.
 *
 * failed - number of failed items
 * count  - total number of items; clamped up to 'failed' so we never
 *          report more failures than items
 * name   - singular category name (e.g. "test", "sub-test"); an 's' is
 *          appended in the plural message
 *
 * A negative 'failed' prints nothing.
 */
static void print_results( int failed, int count, const char* name )
{
    /* Never report fewer items than failures. */
    if( failed > count )
    {
        count = failed;
    }

    if( failed > 0 )
    {
        if( count > 1 )
        {
            log_error( "FAILED %d of %d %ss.\n", failed, count, name );
        }
        else
        {
            log_error( "FAILED %s.\n", name );
        }
    }
    else if( failed == 0 )
    {
        if( count > 1 )
        {
            log_info( "PASSED %d of %d %ss.\n", count, count, name );
        }
        else
        {
            log_info( "PASSED %s.\n", name );
        }
    }
}
int parseAndCallCommandLineTests( int argc, const char *argv[], cl_device_id device, int testNum,
test_definition testList[], int forceNoContextCreation,
cl_command_queue_properties queueProps, int num_elements )
@@ -617,28 +650,8 @@ int parseAndCallCommandLineTests( int argc, const char *argv[], cl_device_id dev
callTestFunctions( testList, selectedTestList, resultTestList, testNum, device,
forceNoContextCreation, num_elements, queueProps );
if( gTestsFailed == 0 )
{
if( gTestsPassed > 1 )
{
log_info("PASSED %d of %d tests.\n", gTestsPassed, gTestsPassed);
}
else if( gTestsPassed > 0 )
{
log_info("PASSED test.\n");
}
}
else if( gTestsFailed > 0 )
{
if( gTestsFailed+gTestsPassed > 1 )
{
log_error("FAILED %d of %d tests.\n", gTestsFailed, gTestsFailed+gTestsPassed);
}
else
{
log_error("FAILED test.\n");
}
}
print_results( gFailCount, gTestCount, "sub-test" );
print_results( gTestsFailed, gTestsFailed + gTestsPassed, "test" );
char *filename = getenv( "CL_CONFORMANCE_RESULTS_FILENAME" );
if( filename != NULL )

View File

@@ -69,6 +69,8 @@ typedef enum test_status
TEST_SKIP = 2,
} test_status;
extern int gFailCount;
extern int gTestCount;
extern cl_uint gReSeed;
extern cl_uint gRandomSeed;

View File

@@ -43,6 +43,8 @@
int gTestsPassed = 0;
int gTestsFailed = 0;
int gFailCount;
int gTestCount;
cl_uint gRandomSeed = 0;
cl_uint gReSeed = 0;
@@ -567,6 +569,37 @@ static int saveResultsToJson( const char *fileName, const char *suiteName, test_
return ret;
}
/* Print a one-line PASSED/FAILED summary for a category of results.
 * failed - number of failed items
 * count  - total number of items (clamped up to 'failed' below)
 * name   - singular category name, e.g. "test" or "sub-test"
 * Note: a negative 'failed' prints nothing. */
static void print_results( int failed, int count, const char* name )
{
/* Never report fewer items than failures. */
if( count < failed )
{
count = failed;
}
if( failed == 0 )
{
if( count > 1 )
{
log_info( "PASSED %d of %d %ss.\n", count, count, name );
}
else
{
/* Singular form when there is only one (or zero) item. */
log_info( "PASSED %s.\n", name );
}
}
else if( failed > 0 )
{
if( count > 1 )
{
log_error( "FAILED %d of %d %ss.\n", failed, count, name );
}
else
{
/* Singular form when there is only one item. */
log_error( "FAILED %s.\n", name );
}
}
}
int parseAndCallCommandLineTests( int argc, const char *argv[], cl_device_id device, int testNum,
test_definition testList[], int forceNoContextCreation,
cl_command_queue_properties queueProps, int num_elements )
@@ -616,28 +649,8 @@ int parseAndCallCommandLineTests( int argc, const char *argv[], cl_device_id dev
callTestFunctions( testList, selectedTestList, resultTestList, testNum, device,
forceNoContextCreation, num_elements, queueProps );
if( gTestsFailed == 0 )
{
if( gTestsPassed > 1 )
{
log_info("PASSED %d of %d tests.\n", gTestsPassed, gTestsPassed);
}
else if( gTestsPassed > 0 )
{
log_info("PASSED test.\n");
}
}
else if( gTestsFailed > 0 )
{
if( gTestsFailed+gTestsPassed > 1 )
{
log_error("FAILED %d of %d tests.\n", gTestsFailed, gTestsFailed+gTestsPassed);
}
else
{
log_error("FAILED test.\n");
}
}
print_results( gFailCount, gTestCount, "sub-test" );
print_results( gTestsFailed, gTestsFailed + gTestsPassed, "test" );
char *filename = getenv( "CL_CONFORMANCE_RESULTS_FILENAME" );
if( filename != NULL )

View File

@@ -64,6 +64,8 @@ typedef enum test_status
TEST_SKIP = 2,
} test_status;
extern int gFailCount;
extern int gTestCount;
extern cl_uint gReSeed;
extern cl_uint gRandomSeed;

View File

@@ -78,8 +78,6 @@ int argCount = 0;
cl_context gContext = NULL;
cl_command_queue gQueue = NULL;
char appName[64] = "ctest";
int gTestCount = 0;
int gFailCount = 0;
int gStartTestNumber = -1;
int gEndTestNumber = 0;
#if defined( __APPLE__ )
@@ -327,12 +325,6 @@ int main (int argc, const char **argv )
if (error)
vlog_error("clFinish failed: %d\n", error);
if (gFailCount == 0 && gTestCount >= 0) {
vlog("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
} else if (gFailCount > 0) {
vlog_error("FAILED %d of %d sub-tests.\n", gFailCount, gTestCount);
}
clReleaseMemObject(gInBuffer);
for( int i = 0; i < kCallStyleCount; i++ ) {

View File

@@ -58,7 +58,6 @@ uint32_t gDeviceFrequency = 0;
uint32_t gComputeDevices = 0;
size_t gMaxThreadGroupSize = 0;
size_t gWorkGroupSize = 0;
int gFailCount = 0;
bool gWimpyMode = false;
int gWimpyReductionFactor = 512;
int gTestDouble = 0;

View File

@@ -66,7 +66,6 @@ extern uint32_t gDeviceFrequency;
extern uint32_t gComputeDevices;
extern size_t gMaxThreadGroupSize;
extern size_t gWorkGroupSize;
extern int gFailCount;
extern int gTestDouble;
extern int gReportTimes;

View File

@@ -123,11 +123,6 @@ exit:
}
}
if(gFailCount > 0)
{
vlog_error("FAILED %d sub-tests.\n", gFailCount);
}
ReleaseCL();
return error;
}

View File

@@ -177,21 +177,7 @@ int main(int argc, const char *argv[])
int ret = runTestHarness( argCount, argList, test_num, test_list, true, false, 0 );
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
} else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
// Clean up
free(argList);
return ret;
}

View File

@@ -243,7 +243,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
test_return = test_copy_image_set_3D_2D_array( device, context, queue, &formatList[ i ], false);
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );

View File

@@ -146,19 +146,6 @@ int main(int argc, const char *argv[])
int ret = runTestHarness( argCount, argList, test_num, test_list, true, false, 0 );
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
}
else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
}

View File

@@ -204,7 +204,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
if (test_return)
{
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );
@@ -250,7 +250,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
test_return = test_fill_image_set_3D( device, context, queue, &formatList[ i ], kInt );
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );
@@ -297,7 +297,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
test_return = test_fill_image_set_3D( device, context, queue, &formatList[ i ], kUInt );
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );

View File

@@ -131,19 +131,6 @@ int main(int argc, const char *argv[])
int ret = runTestHarness( argCount, argList, test_num, test_list, true, false, 0 );
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
} else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
}

View File

@@ -199,7 +199,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_mem_object_type
}
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );

View File

@@ -138,18 +138,6 @@ int main(int argc, const char *argv[])
int ret = runTestHarness( argCount, argList, test_num, test_list, true, false, 0 );
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
} else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
}

View File

@@ -195,7 +195,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
}
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );

View File

@@ -134,20 +134,8 @@ int main(int argc, const char *argv[])
int ret = runTestHarness( argCount, argList, test_num, test_list, true, false, 0 );
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
} else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
free(argList);
return ret;
}
static void printUsage( const char *execName )

View File

@@ -181,7 +181,7 @@ int test_image_type( cl_device_id device, cl_context context, cl_command_queue q
}
if (test_return) {
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_header( &formatList[ i ], true );
log_info( "\n" );

View File

@@ -443,18 +443,6 @@ int main(int argc, const char *argv[])
// Restore FP state before leaving
RestoreFPState(&oldMode);
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
} else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
}

View File

@@ -226,7 +226,7 @@ int test_read_image_type( cl_device_id device, cl_context context, cl_command_qu
}
if( retCode != 0 )
{
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_read_header( format, imageSampler, true );
log_info( "\n" );
@@ -296,7 +296,7 @@ int test_image_set( cl_device_id device, cl_context context, cl_command_queue qu
if ( ( 0 == is_extension_available( device, "cl_khr_3d_image_writes" )) && (imageType == CL_MEM_OBJECT_IMAGE3D) && (formatTestFn == test_write_image_formats) )
{
gTestFailure++;
gFailCount++;
log_error( "-----------------------------------------------------\n" );
log_error( "FAILED: test writing CL_MEM_OBJECT_IMAGE3D images\n" );
log_error( "This device does not support the mandated extension cl_khr_3d_image_writes.\n");

View File

@@ -866,7 +866,7 @@ int test_write_image_formats( cl_device_id device, cl_context context, cl_comman
if( retCode != 0 )
{
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_write_header( &imageFormat, true );
log_info( "\n" );

View File

@@ -178,19 +178,6 @@ int main(int argc, const char *argv[])
// Restore FP state before leaving
RestoreFPState(&oldMode);
if (gTestFailure == 0) {
if (gTestCount > 1)
log_info("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
log_info("PASSED sub-test.\n");
}
else if (gTestFailure > 0) {
if (gTestCount > 1)
log_error("FAILED %d of %d sub-tests.\n", gTestFailure, gTestCount);
else
log_error("FAILED sub-test.\n");
}
free(argList);
return ret;
}

View File

@@ -171,7 +171,7 @@ int test_read_image_type( cl_device_id device, cl_context context, cl_command_qu
if ( ret != 0 )
{
gTestFailure++;
gFailCount++;
log_error( "FAILED: " );
print_read_header( format, imageSampler, true );
log_info( "\n" );

View File

@@ -54,8 +54,6 @@ char appName[ MAXPATHLEN ] = "";
cl_device_id gDevice = NULL;
cl_context gContext = NULL;
cl_command_queue gQueue = NULL;
int gTestCount = 0;
int gFailCount = 0;
static int32_t gStartTestNumber;
static int32_t gEndTestNumber;
int gSkipCorrectnessTesting = 0;
@@ -883,21 +881,6 @@ int main (int argc, const char * argv[])
if (error_code)
vlog_error("clFinish failed:%d\n", error_code);
if (gFailCount == 0)
{
if (gTestCount > 1)
vlog("PASSED %d of %d sub-tests.\n", gTestCount, gTestCount);
else
vlog("PASSED sub-test.\n");
}
else if (gFailCount > 0)
{
if (gTestCount > 1)
vlog_error("FAILED %d of %d sub-tests.\n", gFailCount, gTestCount);
else
vlog_error("FAILED sub-test.\n");
}
ReleaseCL();
#if defined( __APPLE__ )