mirror of https://github.com/asterisk/asterisk.git (synced 2025-10-31 18:55:19 +00:00)
	test.c: Fix counting of tests and add 2 new tests
The unit test XML output was counting all registered tests as "run"
even when only a subset were actually requested to be run, and the
"failures" attribute was missing.

* The "tests" attribute of the "testsuite" element in the output XML
  now reflects only the tests actually requested to be executed
  instead of all the tests registered.

* The "failures" attribute was added to the "testsuite" element.

Also added 2 new unit tests that just pass and fail to be used for
CI testing.

Change-Id: Ia137814b5aeb0e1a44c75034bd3615c26021da69
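For illustration only (all values below are hypothetical, not taken from this commit), after requesting just 3 of the registered tests the opening <testsuite> element would change roughly as follows, per the fprintf format strings in the diff below:

Before: <testsuite errors="0" time="0.250" tests="142" name="AsteriskUnitTests">
After:  <testsuite errors="0" time="0.250" tests="3" failures="1" name="AsteriskUnitTests">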
committed by Friendly Automation
parent c95f8ce69f
commit a346fa54a5

11	doc/CHANGES-staging/test.txt (new normal file)
@@ -0,0 +1,11 @@
+Subject: test.c
+
+The "tests" attribute of the "testsuite" element in the
+output XML now reflects only the tests actually requested
+to be executed instead of all the tests registered.
+
+The "failures" attribute was added to the "testsuite"
+element.
+
+Also added two new unit tests that just pass and fail
+to be used for testing CI itself.
@@ -715,8 +715,8 @@ static int test_execute_multiple(const char *name, const char *category, struct
 		/* update total counts as well during this iteration
 		 * even if the current test did not execute this time */
 		last_results.total_time += test->time;
-		last_results.total_tests++;
 		if (test->state != AST_TEST_NOT_RUN) {
+			last_results.total_tests++;
 			if (test->state == AST_TEST_PASS) {
 				last_results.total_passed++;
 			} else {
@@ -793,10 +793,10 @@ static int test_generate_results(const char *name, const char *category, const c
 		 */
 		fprintf(f_xml, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
 		fprintf(f_xml, "<testsuites>\n");
-		fprintf(f_xml, "\t<testsuite errors=\"0\" time=\"%u.%u\" tests=\"%u\" "
+		fprintf(f_xml, "\t<testsuite errors=\"0\" time=\"%u.%u\" tests=\"%u\" failures=\"%u\" "
 				"name=\"AsteriskUnitTests\">\n",
 				last_results.total_time / 1000, last_results.total_time % 1000,
-				last_results.total_tests);
+				last_results.total_tests, last_results.total_failed);
 		fprintf(f_xml, "\t\t<properties>\n");
 		fprintf(f_xml, "\t\t\t<property name=\"version\" value=\"%s\"/>\n", ast_get_version());
 		fprintf(f_xml, "\t\t</properties>\n");

@@ -383,8 +383,48 @@ AST_TEST_DEFINE(call_backtrace)
 	return AST_TEST_PASS;
 }
 
+AST_TEST_DEFINE(just_fail)
+{
+	switch (cmd) {
+	case TEST_INIT:
+		info->name = "JUST_FAIL";
+		info->category = "/TEST_PASS_FAIL/";
+		info->summary = "Just fails";
+		info->description = "Just fails. "
+			"This test is mainly used for testing CI and tool failure scenarios.";
+		info->explicit_only = 1;
+		return AST_TEST_NOT_RUN;
+	case TEST_EXECUTE:
+		break;
+	}
+	ast_test_status_update(test, "This test just forces a fail\n");
+
+	return AST_TEST_FAIL;
+}
+
+AST_TEST_DEFINE(just_pass)
+{
+	switch (cmd) {
+	case TEST_INIT:
+		info->name = "JUST_PASS";
+		info->category = "/TEST_PASS_FAIL/";
+		info->summary = "Just passes";
+		info->description = "Just passes. "
+			"This test is mainly used for testing CI and tool failure scenarios.";
+		info->explicit_only = 1;
+		return AST_TEST_NOT_RUN;
+	case TEST_EXECUTE:
+		break;
+	}
+	ast_test_status_update(test, "This test just forces a pass\n");
+
+	return AST_TEST_PASS;
+}
+
 static int unload_module(void)
 {
+	AST_TEST_UNREGISTER(just_pass);
+	AST_TEST_UNREGISTER(just_fail);
 	AST_TEST_UNREGISTER(call_backtrace);
 	AST_TEST_UNREGISTER(call_assert);
 	AST_TEST_UNREGISTER(segv);
@@ -398,6 +438,8 @@ static int load_module(void)
 	AST_TEST_REGISTER(segv);
 	AST_TEST_REGISTER(call_assert);
 	AST_TEST_REGISTER(call_backtrace);
+	AST_TEST_REGISTER(just_fail);
+	AST_TEST_REGISTER(just_pass);
 	return AST_MODULE_LOAD_SUCCESS;
 }
 
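Both new tests set info->explicit_only, which presumably keeps them out of blanket test runs until they are requested by name. A minimal usage sketch from the Asterisk CLI (the command syntax below is an assumption based on the existing "test execute" and "test generate results" CLI commands, not something introduced by this commit):

*CLI> test execute category /TEST_PASS_FAIL/ name JUST_FAIL
*CLI> test execute category /TEST_PASS_FAIL/ name JUST_PASS
*CLI> test generate results xml /tmp/asterisk_unit_tests.xml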