diff --git a/tests/test_copy_constructor2.py b/tests/test_copy_constructor2.py index 145bfe4c..a6e60fd8 100644 --- a/tests/test_copy_constructor2.py +++ b/tests/test_copy_constructor2.py @@ -5,70 +5,41 @@ import os import bz2 -import unittest from . import autoconfig -from . import parser_test_case from pygccxml import parser from pygccxml import declarations -class Test(parser_test_case.parser_test_case_t): - - def __init__(self, *args): - parser_test_case.parser_test_case_t.__init__(self, *args) - self.global_ns = None - self.xml_path = None - - def setUp(self): - if not self.global_ns: - - # Extract the xml file from the bz2 archive - bz2_path = os.path.join( - autoconfig.data_directory, - 'ogre.1.7.xml.bz2') - self.xml_path = os.path.join( - autoconfig.data_directory, - 'ogre.1.7.xml') - with open(self.xml_path, 'wb') as new_file: - # bz2.BZ2File can not be used in a with statement in python 2.6 - bz2_file = bz2.BZ2File(bz2_path, 'rb') - for data in iter(lambda: bz2_file.read(100 * 1024), b''): - new_file.write(data) - bz2_file.close() - - reader = parser.source_reader_t(autoconfig.cxx_parsers_cfg.config) - self.global_ns = declarations.get_global_namespace( - reader.read_xml_file( - self.xml_path)) - self.global_ns.init_optimizer() - - def tearDown(self): - # Delete the extracted xml file - os.remove(self.xml_path) - - def test_copy_constructor2(self): - for x in self.global_ns.typedefs('SettingsMultiMap'): - self.assertTrue(not declarations.is_noncopyable(x)) - - for x in self.global_ns.typedefs('SettingsIterator'): - self.assertTrue(not declarations.is_noncopyable(x)) - - for x in self.global_ns.typedefs('SectionIterator'): - self.assertTrue(not declarations.is_noncopyable(x)) - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() +def test_copy_constructor2(): + # Extract the xml file from the bz2 archive + bz2_path = os.path.join( + autoconfig.data_directory, + 'ogre.1.7.xml.bz2') + xml_path = os.path.join( + autoconfig.data_directory, + 'ogre.1.7.xml') + with open(xml_path, 'wb') as new_file: + # bz2.BZ2File can not be used in a with statement in python 2.6 + bz2_file = bz2.BZ2File(bz2_path, 'rb') + for data in iter(lambda: bz2_file.read(100 * 1024), b''): + new_file.write(data) + bz2_file.close() + + reader = parser.source_reader_t(autoconfig.cxx_parsers_cfg.config) + global_ns = declarations.get_global_namespace( + reader.read_xml_file(xml_path) + ) + global_ns.init_optimizer() + + for x in global_ns.typedefs('SettingsMultiMap'): + assert declarations.is_noncopyable(x) is False + + for x in global_ns.typedefs('SettingsIterator'): + assert declarations.is_noncopyable(x) is False + + for x in global_ns.typedefs('SectionIterator'): + assert declarations.is_noncopyable(x) is False + + os.remove(xml_path) diff --git a/tests/test_cpp_standards.py b/tests/test_cpp_standards.py index 1eb5a3fb..36ea3c97 100644 --- a/tests/test_cpp_standards.py +++ b/tests/test_cpp_standards.py @@ -3,62 +3,44 @@ # Distributed under the Boost Software License, Version 1.0. # See http://www.boost.org/LICENSE_1_0.txt +import pytest + import platform -import unittest -from . import parser_test_case +from . import autoconfig from pygccxml import parser -class Test(parser_test_case.parser_test_case_t): - - def test(self): - """ - Test different compilation standards by setting cflags. 
- - """ - - parser.parse(["cpp_standards.hpp"], self.config) - - if platform.system() != 'Windows': - self.config.cflags = "-std=c++98" - parser.parse(["cpp_standards.hpp"], self.config) - - self.config.cflags = "-std=c++03" - parser.parse(["cpp_standards.hpp"], self.config) +def test_cpp_standards(): + """ + Test different compilation standards by setting cflags. - self.config.cflags = "-std=c++11" - parser.parse(["cpp_standards.hpp"], self.config) + """ - # This is broken with llvm 3.6.2 (the one from homebrew) - # It should work with never llvms but I keep the test disabled - # See https://llvm.org/bugs/show_bug.cgi?id=24872 - # self.config.cflags = "-std=c++14" - # parser.parse(["cpp_standards.hpp"], self.config) + config = autoconfig.cxx_parsers_cfg.config.clone() - # Same as above - # self.config.cflags = "-std=c++1z" - # parser.parse(["cpp_standards.hpp"], self.config) + parser.parse(["cpp_standards.hpp"], config) - # Pass down a flag that does not exist. - # This should raise an exception. - self.config.cflags = "-std=c++00" - self.assertRaises( - RuntimeError, - lambda: parser.parse(["cpp_standards.hpp"], self.config)) + if platform.system() != 'Windows': + config.cflags = "-std=c++98" + parser.parse(["cpp_standards.hpp"], config) + config.cflags = "-std=c++03" + parser.parse(["cpp_standards.hpp"], config) -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite + config.cflags = "-std=c++11" + parser.parse(["cpp_standards.hpp"], config) -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) + config.cflags = "-std=c++14" + parser.parse(["cpp_standards.hpp"], config) + config.cflags = "-std=c++1z" + parser.parse(["cpp_standards.hpp"], config) -if __name__ == "__main__": - run_suite() + # Pass down a flag that does not exist. + # This should raise an exception. + config.cflags = "-std=c++00" + with pytest.raises(RuntimeError): + parser.parse(["cpp_standards.hpp"], config) diff --git a/tests/test_decl_printer.py b/tests/test_decl_printer.py index 571b0f02..0a3542ef 100644 --- a/tests/test_decl_printer.py +++ b/tests/test_decl_printer.py @@ -4,76 +4,58 @@ # See http://www.boost.org/LICENSE_1_0.txt import sys -import unittest +import pytest -from . import parser_test_case +from . import autoconfig from pygccxml import parser from pygccxml import declarations -class Test(parser_test_case.parser_test_case_t): - - def __init__(self, *args): - parser_test_case.parser_test_case_t.__init__(self, *args) - self.__files = [ - 'core_ns_join_1.hpp', - 'core_ns_join_2.hpp', - 'core_ns_join_3.hpp', - 'core_membership.hpp', - 'core_class_hierarchy.hpp', - 'core_types.hpp', - 'core_diamand_hierarchy_base.hpp', - 'core_diamand_hierarchy_derived1.hpp', - 'core_diamand_hierarchy_derived2.hpp', - 'core_diamand_hierarchy_final_derived.hpp', - 'core_overloads_1.hpp', - 'core_overloads_2.hpp', - 'typedefs_base.hpp'] - - # for i, f in enumerate(self.__files): - # f = parser.create_cached_source_fc( - # os.path.join( autoconfig.data_directory, f) - # , os.path.join( autoconfig.data_directory, f + '.xml') ) - # self.__files[i] = f - prj_reader = parser.project_reader_t(self.config) - self.decls = prj_reader.read_files( - self.__files, - compilation_mode=parser.COMPILATION_MODE.FILE_BY_FILE) - - def test_printer(self): - - # Redirect sys.stdout to a class with a writer doing nothing - # This greatly reduces the size of the test output and makes - # test log files readable. 
- # Note: flush needs to be defined; because if not this will - # result in an AttributeError on call. - class DontPrint(object): - def write(*args): - pass - - def flush(*args): - pass - sys.stdout = DontPrint() - - declarations.print_declarations(self.decls) - - def test__str__(self): - decls = declarations.make_flatten(self.decls) - for decl in decls: - str(decl) - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() +TEST_FILES = [ + 'core_ns_join_1.hpp', + 'core_ns_join_2.hpp', + 'core_ns_join_3.hpp', + 'core_membership.hpp', + 'core_class_hierarchy.hpp', + 'core_types.hpp', + 'core_diamand_hierarchy_base.hpp', + 'core_diamand_hierarchy_derived1.hpp', + 'core_diamand_hierarchy_derived2.hpp', + 'core_diamand_hierarchy_final_derived.hpp', + 'core_overloads_1.hpp', + 'core_overloads_2.hpp', + 'typedefs_base.hpp', +] + + +@pytest.fixture +def decls(): + COMPILATION_MODE = parser.COMPILATION_MODE.FILE_BY_FILE + config = autoconfig.cxx_parsers_cfg.config.clone() + config.castxml_epic_version = 1 + decls = parser.parse(TEST_FILES, config, COMPILATION_MODE) + return decls + + +def test_printer(decls): + # Redirect sys.stdout to a class with a writer doing nothing + # This greatly reduces the size of the test output and makes + # test log files readable. + # Note: flush needs to be defined; because if not this will + # result in an AttributeError on call. + class DontPrint(object): + def write(*args): + pass + + def flush(*args): + pass + sys.stdout = DontPrint() + + declarations.print_declarations(decls) + + +def test__str__(decls): + decls = declarations.make_flatten(decls) + for decl in decls: + str(decl) diff --git a/tests/test_declaration_files.py b/tests/test_declaration_files.py index 871d1181..845f8495 100644 --- a/tests/test_declaration_files.py +++ b/tests/test_declaration_files.py @@ -4,54 +4,38 @@ # See http://www.boost.org/LICENSE_1_0.txt import os -import unittest -from . import parser_test_case +from . 
import autoconfig from pygccxml import parser from pygccxml import declarations -class Test(parser_test_case.parser_test_case_t): - - def __init__(self, *args): - parser_test_case.parser_test_case_t.__init__(self, *args) - self.__files = [ - 'core_ns_join_1.hpp', - 'core_ns_join_2.hpp', - 'core_ns_join_3.hpp', - 'core_membership.hpp', - 'core_class_hierarchy.hpp', - 'core_types.hpp', - 'core_diamand_hierarchy_base.hpp', - 'core_diamand_hierarchy_derived1.hpp', - 'core_diamand_hierarchy_derived2.hpp', - 'core_diamand_hierarchy_final_derived.hpp', - 'core_overloads_1.hpp', - 'core_overloads_2.hpp'] - - def test(self): - prj_reader = parser.project_reader_t(self.config) - decls = prj_reader.read_files( - self.__files, - compilation_mode=parser.COMPILATION_MODE.ALL_AT_ONCE) - files = declarations.declaration_files(decls) - result = set() - for fn in files: - result.add(os.path.split(fn)[1]) - self.assertTrue(set(self.__files).issubset(result)) - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() +TEST_FILES = [ + 'core_ns_join_1.hpp', + 'core_ns_join_2.hpp', + 'core_ns_join_3.hpp', + 'core_membership.hpp', + 'core_class_hierarchy.hpp', + 'core_types.hpp', + 'core_diamand_hierarchy_base.hpp', + 'core_diamand_hierarchy_derived1.hpp', + 'core_diamand_hierarchy_derived2.hpp', + 'core_diamand_hierarchy_final_derived.hpp', + 'core_overloads_1.hpp', + 'core_overloads_2.hpp', + 'typedefs_base.hpp', +] + + +def test_declaration_files(): + config = autoconfig.cxx_parsers_cfg.config.clone() + prj_reader = parser.project_reader_t(config) + decls = prj_reader.read_files( + TEST_FILES, + compilation_mode=parser.COMPILATION_MODE.ALL_AT_ONCE) + files = declarations.declaration_files(decls) + result = set() + for fn in files: + result.add(os.path.split(fn)[1]) + assert set(TEST_FILES).issubset(result) is True diff --git a/tests/test_declarations_cache.py b/tests/test_declarations_cache.py index 038267f7..204cc0bc 100644 --- a/tests/test_declarations_cache.py +++ b/tests/test_declarations_cache.py @@ -4,167 +4,141 @@ # See http://www.boost.org/LICENSE_1_0.txt import os -import unittest import os.path from . import autoconfig -from . 
import parser_test_case from pygccxml.parser.config import xml_generator_configuration_t from pygccxml.parser import declarations_cache -class Test(parser_test_case.parser_test_case_t): - - def test_file_signature(self): - file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt') - file1_dup = os.path.join( - autoconfig.data_directory, - 'decl_cache_file1_duplicate.txt') - file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt') - sig1 = declarations_cache.file_signature(file1) - sig1_dup = declarations_cache.file_signature(file1_dup) - sig2 = declarations_cache.file_signature(file2) - self.assertTrue(sig1 == sig1_dup) - self.assertTrue(sig1 != sig2) - - def test_config_signature(self): - diff_cfg_list = self.build_differing_cfg_list() - def_cfg = diff_cfg_list[0] - def_sig = declarations_cache.configuration_signature(def_cfg) - - # Test changes that should cause sig changes - for cfg in diff_cfg_list[1:]: - self.assertTrue( - declarations_cache.configuration_signature(cfg) != def_sig) - - # Test changes that should not cause sig changes - no_changes = def_cfg.clone() - self.assertTrue( - declarations_cache.configuration_signature(no_changes) == def_sig) - - # start_decls_changed = def_cfg.clone() - # start_decls_changed.start_with_declarations = "test object" - # self.assertTrue( - # configuration_signature(start_decls_changed) == def_sig) - - ignore_changed = def_cfg.clone() - ignore_changed.ignore_gccxml_output = True - self.assertTrue( - declarations_cache.configuration_signature( - ignore_changed) == def_sig) - - def test_cache_interface(self): - cache_file = os.path.join( - autoconfig.build_directory, - 'decl_cache_test.test_cache_read.cache') - file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt') - file1_dup = os.path.join( - autoconfig.data_directory, - 'decl_cache_file1_duplicate.txt') - file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt') - diff_cfg_list = self.build_differing_cfg_list() - def_cfg = diff_cfg_list[0] - - if os.path.exists(cache_file): - os.remove(cache_file) - - cache = declarations_cache.file_cache_t(cache_file) - self.assertTrue(len(cache._file_cache_t__cache) == 0) - - # test creating new entries for differing files - cache.update(file1, def_cfg, 1, []) - self.assertTrue(len(cache._file_cache_t__cache) == 1) - cache.update(file1_dup, def_cfg, 2, []) - self.assertTrue(len(cache._file_cache_t__cache) == 1) - cache.update(file2, def_cfg, 3, []) - self.assertTrue(len(cache._file_cache_t__cache) == 2) - - self.assertTrue(cache.cached_value(file1, def_cfg) == 2) - self.assertTrue(cache.cached_value(file2, def_cfg) == 3) - - # Test reading again - cache.flush() - cache = declarations_cache.file_cache_t(cache_file) - self.assertTrue(len(cache._file_cache_t__cache) == 2) - self.assertTrue(cache.cached_value(file1, def_cfg) == 2) - self.assertTrue(cache.cached_value(file2, def_cfg) == 3) - - # Test flushing doesn't happen if we don't touch the cache - cache = declarations_cache.file_cache_t(cache_file) - self.assertTrue( - cache.cached_value( - file1, def_cfg) == 2) # Read from cache - cache.flush() # should not actually flush - cache = declarations_cache.file_cache_t(cache_file) - self.assertTrue(len(cache._file_cache_t__cache) == 2) - - # Test flush culling - cache = declarations_cache.file_cache_t(cache_file) - cache.update(file1_dup, def_cfg, 4, []) # Modify cache - cache.flush() # should cull off one entry - cache = declarations_cache.file_cache_t(cache_file) - 
self.assertTrue(len(cache._file_cache_t__cache) == 1) - - @staticmethod - def build_differing_cfg_list(): - """ Return a list of configurations that all differ. """ - cfg_list = [] - def_cfg = xml_generator_configuration_t( - "xml_generator_path", - '.', ['tmp'], ['sym'], ['unsym'], None, False, "") - cfg_list.append(def_cfg) - - # Test changes that should cause sig changes - gccxml_changed = def_cfg.clone() - gccxml_changed.xml_generator_path = "other_path" - cfg_list.append(gccxml_changed) - - wd_changed = def_cfg.clone() - wd_changed.working_directory = "other_dir" - cfg_list.append(wd_changed) - - # inc_changed = def_cfg.clone() - # inc_changed.include_paths = ["/var/tmp"] - # self.assertTrue(configuration_signature(inc_changed) != def_sig) - inc_changed = xml_generator_configuration_t( - "xml_generator_path", '.', ['/var/tmp'], ['sym'], ['unsym'], - None, False, "") - cfg_list.append(inc_changed) - - # def_changed = def_cfg.clone() - # def_changed.define_symbols = ["symbol"] - # self.assertTrue(configuration_signature(def_changed) != def_sig) - def_changed = xml_generator_configuration_t( - "xml_generator_path", '.', ['/var/tmp'], ['new-sym'], ['unsym'], - None, False, "") - cfg_list.append(def_changed) - - # undef_changed = def_cfg.clone() - # undef_changed.undefine_symbols = ["symbol"] - # self.assertTrue(configuration_signature(undef_changed) != def_sig) - undef_changed = xml_generator_configuration_t( - "xml_generator_path", '.', ['/var/tmp'], ['sym'], ['new-unsym'], - None, False, "") - cfg_list.append(undef_changed) - - cflags_changed = def_cfg.clone() - cflags_changed.cflags = "new flags" - cfg_list.append(cflags_changed) - - return cfg_list - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() +def test_file_signature(): + file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt') + file1_dup = os.path.join( + autoconfig.data_directory, + 'decl_cache_file1_duplicate.txt') + file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt') + sig1 = declarations_cache.file_signature(file1) + sig1_dup = declarations_cache.file_signature(file1_dup) + sig2 = declarations_cache.file_signature(file2) + assert sig1 == sig1_dup + assert sig1 != sig2 + + +def test_config_signature(): + diff_cfg_list = build_differing_cfg_list() + def_cfg = diff_cfg_list[0] + def_sig = declarations_cache.configuration_signature(def_cfg) + + # Test changes that should cause sig changes + for cfg in diff_cfg_list[1:]: + assert declarations_cache.configuration_signature(cfg) != def_sig + + # Test changes that should not cause sig changes + no_changes = def_cfg.clone() + assert declarations_cache.configuration_signature(no_changes) == def_sig + + ignore_changed = def_cfg.clone() + ignore_changed.ignore_gccxml_output = True + assert ( + declarations_cache.configuration_signature(ignore_changed) == def_sig + ) + + +def test_cache_interface(): + cache_file = os.path.join( + autoconfig.build_directory, + 'decl_cache_test.test_cache_read.cache') + file1 = os.path.join(autoconfig.data_directory, 'decl_cache_file1.txt') + file1_dup = os.path.join( + autoconfig.data_directory, + 'decl_cache_file1_duplicate.txt') + file2 = os.path.join(autoconfig.data_directory, 'decl_cache_file2.txt') + diff_cfg_list = build_differing_cfg_list() + def_cfg = diff_cfg_list[0] + + if 
os.path.exists(cache_file): + os.remove(cache_file) + + cache = declarations_cache.file_cache_t(cache_file) + assert len(cache._file_cache_t__cache) == 0 + + # test creating new entries for differing files + cache.update(file1, def_cfg, 1, []) + assert len(cache._file_cache_t__cache) == 1 + cache.update(file1_dup, def_cfg, 2, []) + assert len(cache._file_cache_t__cache) == 1 + cache.update(file2, def_cfg, 3, []) + assert len(cache._file_cache_t__cache) == 2 + + assert cache.cached_value(file1, def_cfg) == 2 + assert cache.cached_value(file2, def_cfg) == 3 + + # Test reading again + cache.flush() + cache = declarations_cache.file_cache_t(cache_file) + assert len(cache._file_cache_t__cache) == 2 + assert cache.cached_value(file1, def_cfg) == 2 + assert cache.cached_value(file2, def_cfg) == 3 + + # Test flushing doesn't happen if we don't touch the cache + cache = declarations_cache.file_cache_t(cache_file) + assert cache.cached_value(file1, def_cfg) == 2 # Read from cache + cache.flush() # should not actually flush + cache = declarations_cache.file_cache_t(cache_file) + assert len(cache._file_cache_t__cache) == 2 + + # Test flush culling + cache = declarations_cache.file_cache_t(cache_file) + cache.update(file1_dup, def_cfg, 4, []) # Modify cache + cache.flush() # should cull off one entry + cache = declarations_cache.file_cache_t(cache_file) + assert len(cache._file_cache_t__cache) == 1 + + +def build_differing_cfg_list(): + """ Return a list of configurations that all differ. """ + cfg_list = [] + def_cfg = xml_generator_configuration_t( + "xml_generator_path", + '.', ['tmp'], ['sym'], ['unsym'], None, False, "") + cfg_list.append(def_cfg) + + # Test changes that should cause sig changes + gccxml_changed = def_cfg.clone() + gccxml_changed.xml_generator_path = "other_path" + cfg_list.append(gccxml_changed) + + wd_changed = def_cfg.clone() + wd_changed.working_directory = "other_dir" + cfg_list.append(wd_changed) + + # inc_changed = def_cfg.clone() + # inc_changed.include_paths = ["/var/tmp"] + # assert configuration_signature(inc_changed) != def_sig) + inc_changed = xml_generator_configuration_t( + "xml_generator_path", '.', ['/var/tmp'], ['sym'], ['unsym'], + None, False, "") + cfg_list.append(inc_changed) + + # def_changed = def_cfg.clone() + # def_changed.define_symbols = ["symbol"] + # assert configuration_signature(def_changed) != def_sig) + def_changed = xml_generator_configuration_t( + "xml_generator_path", '.', ['/var/tmp'], ['new-sym'], ['unsym'], + None, False, "") + cfg_list.append(def_changed) + + # undef_changed = def_cfg.clone() + # undef_changed.undefine_symbols = ["symbol"] + # assert configuration_signature(undef_changed) != def_sig) + undef_changed = xml_generator_configuration_t( + "xml_generator_path", '.', ['/var/tmp'], ['sym'], ['new-unsym'], + None, False, "") + cfg_list.append(undef_changed) + + cflags_changed = def_cfg.clone() + cflags_changed.cflags = "new flags" + cfg_list.append(cflags_changed) + + return cfg_list diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py index 7edc359f..b217c90c 100644 --- a/tests/test_dependencies.py +++ b/tests/test_dependencies.py @@ -3,181 +3,168 @@ # Distributed under the Boost Software License, Version 1.0. # See http://www.boost.org/LICENSE_1_0.txt -import unittest +import pytest import warnings -from . import parser_test_case +from . 
import autoconfig from pygccxml import parser from pygccxml import declarations - -class Test(parser_test_case.parser_test_case_t): - global_ns = None - - def __init__(self, *args): - parser_test_case.parser_test_case_t.__init__(self, *args) - self.header = 'include_all.hpp' - self.global_ns = None - - def setUp(self): - if not Test.global_ns: - decls = parser.parse([self.header], self.config) - Test.xml_generator_from_xml_file = \ - self.config.xml_generator_from_xml_file - Test.global_ns = declarations.get_global_namespace(decls) - Test.global_ns.init_optimizer() - self.xml_generator_from_xml_file = Test.xml_generator_from_xml_file - self.global_ns = Test.global_ns - - def test_variable(self): - ns_vars = self.global_ns.namespace('::declarations::variables') - static_var = ns_vars.variable('static_var') - - # Legacy way of fetching dependencies. Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = static_var.i_depend_on_them() - warnings.simplefilter("error", Warning) - self.assertTrue(len(dependencies_old) == 1) - self.assertTrue(dependencies_old[0].declaration is static_var) - self.assertTrue(dependencies_old[0].depend_on_it.decl_string == 'int') - - dependencies_new = declarations.get_dependencies_from_decl(static_var) - self.assertTrue(len(dependencies_new) == 1) - self.assertTrue(dependencies_new[0].declaration is static_var) - self.assertTrue(dependencies_new[0].depend_on_it.decl_string == 'int') - - m_mutable = ns_vars.variable('m_mutable') - - # Legacy way of fetching dependencies. Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = m_mutable.i_depend_on_them() - warnings.simplefilter("error", Warning) - self.assertTrue(len(dependencies_old) == 1) - self.assertTrue(dependencies_old[0].declaration is m_mutable) - self.assertTrue(dependencies_old[0].depend_on_it.decl_string == 'int') - - dependencies_new = declarations.get_dependencies_from_decl(m_mutable) - self.assertTrue(len(dependencies_new) == 1) - self.assertTrue(dependencies_new[0].declaration is m_mutable) - self.assertTrue(dependencies_new[0].depend_on_it.decl_string == 'int') - - def test_class(self): - ns_vars = self.global_ns.namespace('::declarations::variables') - - cls = ns_vars.class_('struct_variables_t') - - # Legacy way of fetching dependencies. Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = cls.i_depend_on_them() - warnings.simplefilter("error", Warning) - dependencies_old = [ - d for d in dependencies_old if not d.declaration.is_artificial] - self.assertTrue(len(dependencies_old) == 1) - - dependencies_new = declarations.get_dependencies_from_decl(cls) - dependencies_new = [ - d for d in dependencies_new if not d.declaration.is_artificial] - self.assertTrue(len(dependencies_new) == 1) - - m_mutable = ns_vars.variable('m_mutable') - - # Legacy way of fetching dependencies. 
Is still valid but deprecated - dependencies_old = [ - dependency for dependency in dependencies_old if - dependency.declaration is m_mutable] - self.assertTrue(len(dependencies_old) == 1) - self.assertTrue(dependencies_old[0].depend_on_it.decl_string == 'int') - self.assertTrue(dependencies_old[0].access_type == 'public') - - dependencies_new = [ - dependency for dependency in dependencies_new if - dependency.declaration is m_mutable] - self.assertTrue(len(dependencies_new) == 1) - self.assertTrue(dependencies_new[0].depend_on_it.decl_string == 'int') - self.assertTrue(dependencies_new[0].access_type == 'public') - - ns_dh = self.global_ns.namespace('::core::diamand_hierarchy') - fd_cls = ns_dh.class_('final_derived_t') - derived1_cls = ns_dh.class_('derived1_t') - - # Legacy way of fetching dependencies. Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = declarations.get_dependencies_from_decl(fd_cls) - warnings.simplefilter("error", Warning) - dependencies_old = [ - dependency for dependency in dependencies_old if - dependency.depend_on_it is derived1_cls] - self.assertTrue(len(dependencies_old) == 1) - self.assertTrue(dependencies_old[0].depend_on_it is derived1_cls) - self.assertTrue(dependencies_old[0].access_type == 'public') - - dependencies_new = declarations.get_dependencies_from_decl(fd_cls) - dependencies_new = [ - dependency for dependency in dependencies_new if - dependency.depend_on_it is derived1_cls] - self.assertTrue(len(dependencies_new) == 1) - self.assertTrue(dependencies_new[0].depend_on_it is derived1_cls) - self.assertTrue(dependencies_new[0].access_type == 'public') - - def test_calldefs(self): - ns = self.global_ns.namespace('::declarations::calldef') - return_default_args = ns.calldef('return_default_args') - - # Legacy way of fetching dependencies. Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = return_default_args.i_depend_on_them() - warnings.simplefilter("error", Warning) - self.assertTrue(len(dependencies_old) == 3) - used_types = [ - dependency.depend_on_it.decl_string - for dependency in dependencies_old] - self.assertTrue(used_types == ['int', 'int', 'bool']) - - dependencies_new = declarations.get_dependencies_from_decl( - return_default_args) - self.assertTrue(len(dependencies_new) == 3) - used_types = [ - dependency.depend_on_it.decl_string - for dependency in dependencies_new] - self.assertTrue(used_types == ['int', 'int', 'bool']) - - some_exception = ns.class_('some_exception_t') - other_exception = ns.class_('other_exception_t') - calldef_with_throw = ns.calldef('calldef_with_throw') - - # Legacy way of fetching dependencies. 
Is still valid but deprecated - warnings.simplefilter("ignore", Warning) - dependencies_old = calldef_with_throw.i_depend_on_them() - warnings.simplefilter("error", Warning) - self.assertTrue(len(dependencies_old) == 3) - dependencies_old = [ - dependency for dependency in dependencies_old if - dependency.depend_on_it in (some_exception, other_exception)] - self.assertTrue(len(dependencies_old) == 2) - - dependencies_new = declarations.get_dependencies_from_decl( - calldef_with_throw) - self.assertTrue(len(dependencies_new) == 3) - dependencies_new = [ - dependency for dependency in dependencies_new if - dependency.depend_on_it in (some_exception, other_exception)] - self.assertTrue(len(dependencies_new) == 2) - - def test_coverage(self): - declarations.get_dependencies_from_decl(self.global_ns) - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() +TEST_FILES = [ + "include_all.hpp", +] + + +@pytest.fixture +def global_ns(): + COMPILATION_MODE = parser.COMPILATION_MODE.ALL_AT_ONCE + INIT_OPTIMIZER = True + config = autoconfig.cxx_parsers_cfg.config.clone() + config.castxml_epic_version = 1 + decls = parser.parse(TEST_FILES, config, COMPILATION_MODE) + global_ns = declarations.get_global_namespace(decls) + if INIT_OPTIMIZER: + global_ns.init_optimizer() + return global_ns + + +def test_variable(global_ns): + ns_vars = global_ns.namespace('::declarations::variables') + static_var = ns_vars.variable('static_var') + + # Legacy way of fetching dependencies. Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = static_var.i_depend_on_them() + warnings.simplefilter("error", Warning) + assert len(dependencies_old) == 1 + assert dependencies_old[0].declaration is static_var + assert dependencies_old[0].depend_on_it.decl_string == 'int' + + dependencies_new = declarations.get_dependencies_from_decl(static_var) + assert len(dependencies_new) == 1 + assert dependencies_new[0].declaration is static_var + assert dependencies_new[0].depend_on_it.decl_string == 'int' + + m_mutable = ns_vars.variable('m_mutable') + + # Legacy way of fetching dependencies. Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = m_mutable.i_depend_on_them() + warnings.simplefilter("error", Warning) + assert len(dependencies_old) == 1 + assert dependencies_old[0].declaration is m_mutable + assert dependencies_old[0].depend_on_it.decl_string == 'int' + + dependencies_new = declarations.get_dependencies_from_decl(m_mutable) + assert len(dependencies_new) == 1 + assert dependencies_new[0].declaration is m_mutable + assert dependencies_new[0].depend_on_it.decl_string == 'int' + + +def test_class(global_ns): + ns_vars = global_ns.namespace('::declarations::variables') + + cls = ns_vars.class_('struct_variables_t') + + # Legacy way of fetching dependencies. 
Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = cls.i_depend_on_them() + warnings.simplefilter("error", Warning) + dependencies_old = [ + d for d in dependencies_old if not d.declaration.is_artificial] + assert len(dependencies_old) == 1 + + dependencies_new = declarations.get_dependencies_from_decl(cls) + dependencies_new = [ + d for d in dependencies_new if not d.declaration.is_artificial] + assert len(dependencies_new) == 1 + + m_mutable = ns_vars.variable('m_mutable') + + # Legacy way of fetching dependencies. Is still valid but deprecated + dependencies_old = [ + dependency for dependency in dependencies_old if + dependency.declaration is m_mutable] + assert len(dependencies_old) == 1 + assert dependencies_old[0].depend_on_it.decl_string == 'int' + assert dependencies_old[0].access_type == 'public' + + dependencies_new = [ + dependency for dependency in dependencies_new if + dependency.declaration is m_mutable] + assert len(dependencies_new) == 1 + assert dependencies_new[0].depend_on_it.decl_string == 'int' + assert dependencies_new[0].access_type == 'public' + + ns_dh = global_ns.namespace('::core::diamand_hierarchy') + fd_cls = ns_dh.class_('final_derived_t') + derived1_cls = ns_dh.class_('derived1_t') + + # Legacy way of fetching dependencies. Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = declarations.get_dependencies_from_decl(fd_cls) + warnings.simplefilter("error", Warning) + dependencies_old = [ + dependency for dependency in dependencies_old if + dependency.depend_on_it is derived1_cls] + assert len(dependencies_old) == 1 + assert dependencies_old[0].depend_on_it is derived1_cls + assert dependencies_old[0].access_type == 'public' + + dependencies_new = declarations.get_dependencies_from_decl(fd_cls) + dependencies_new = [ + dependency for dependency in dependencies_new if + dependency.depend_on_it is derived1_cls] + assert len(dependencies_new) == 1 + assert dependencies_new[0].depend_on_it is derived1_cls + assert dependencies_new[0].access_type == 'public' + + +def test_calldefs(global_ns): + ns = global_ns.namespace('::declarations::calldef') + return_default_args = ns.calldef('return_default_args') + + # Legacy way of fetching dependencies. Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = return_default_args.i_depend_on_them() + warnings.simplefilter("error", Warning) + assert len(dependencies_old) == 3 + used_types = [ + dependency.depend_on_it.decl_string + for dependency in dependencies_old] + assert used_types == ['int', 'int', 'bool'] + + dependencies_new = declarations.get_dependencies_from_decl( + return_default_args) + assert len(dependencies_new) == 3 + used_types = [ + dependency.depend_on_it.decl_string + for dependency in dependencies_new] + assert used_types == ['int', 'int', 'bool'] + + some_exception = ns.class_('some_exception_t') + other_exception = ns.class_('other_exception_t') + calldef_with_throw = ns.calldef('calldef_with_throw') + + # Legacy way of fetching dependencies. 
Is still valid but deprecated + warnings.simplefilter("ignore", Warning) + dependencies_old = calldef_with_throw.i_depend_on_them() + warnings.simplefilter("error", Warning) + assert len(dependencies_old) == 3 + dependencies_old = [ + dependency for dependency in dependencies_old if + dependency.depend_on_it in (some_exception, other_exception)] + assert len(dependencies_old) == 2 + + dependencies_new = declarations.get_dependencies_from_decl( + calldef_with_throw) + assert len(dependencies_new) == 3 + dependencies_new = [ + dependency for dependency in dependencies_new if + dependency.depend_on_it in (some_exception, other_exception)] + assert len(dependencies_new) == 2 + + +def test_coverage(global_ns): + declarations.get_dependencies_from_decl(global_ns) diff --git a/tests/test_deprecation.py b/tests/test_deprecation.py index 6c3e71a6..deed1741 100644 --- a/tests/test_deprecation.py +++ b/tests/test_deprecation.py @@ -2,85 +2,69 @@ # Distributed under the Boost Software License, Version 1.0. # See http://www.boost.org/LICENSE_1_0.txt -import unittest +import pytest -from . import parser_test_case +from . import autoconfig from pygccxml import parser from pygccxml import declarations -class Test(parser_test_case.parser_test_case_t): - global_ns = None +TEST_FILES = [ + "test_deprecation.hpp", +] - def __init__(self, *args): - parser_test_case.parser_test_case_t.__init__(self, *args) - self.header = "test_deprecation.hpp" - self.global_ns = None - self.config.castxml_epic_version = 1 - def _check_text_content(self, desired_text, deprecation_string): - if deprecation_string: - self.assertEqual(desired_text, deprecation_string) - else: - print("No text in deprecation attribute to check") +@pytest.fixture +def global_ns(): + COMPILATION_MODE = parser.COMPILATION_MODE.ALL_AT_ONCE + INIT_OPTIMIZER = True + config = autoconfig.cxx_parsers_cfg.config.clone() + config.castxml_epic_version = 1 + decls = parser.parse(TEST_FILES, config, COMPILATION_MODE) + global_ns = declarations.get_global_namespace(decls) + if INIT_OPTIMIZER: + global_ns.init_optimizer() + return global_ns - def setUp(self): - if not self.global_ns: - decls = parser.parse([self.header], self.config) - Test.global_ns = declarations.get_global_namespace(decls) - Test.xml_generator_from_xml_file = \ - self.config.xml_generator_from_xml_file - self.xml_generator_from_xml_file = Test.xml_generator_from_xml_file - self.global_ns = Test.global_ns +def _check_text_content(desired_text, deprecation_string): + assert desired_text == deprecation_string - def test(self): - """ - Check the comment parsing - """ - if self.config.castxml_epic_version != 1: - # Run this test only with castxml epic version == 1 - return - tnamespace = self.global_ns.namespace("deprecation") +def test_comment_deprecation(global_ns): + """ + Check the comment parsing + """ - tenumeration = tnamespace.enumeration("com_enum") - self.assertIn("deprecation", dir(tenumeration)) - self._check_text_content('Enumeration is Deprecated', - tenumeration.deprecation) + tnamespace = global_ns.namespace("deprecation") - tclass = tnamespace.class_("test") - self.assertIn("deprecation", dir(tclass)) - self._check_text_content("Test class Deprecated", tclass.deprecation) + tenumeration = tnamespace.enumeration("com_enum") + assert "deprecation" in dir(tenumeration) + _check_text_content( + 'Enumeration is Deprecated', + tenumeration.deprecation) - tmethod = tclass.member_functions()[0] - tmethod_dep = tclass.member_functions()[1] + tclass = tnamespace.class_("test") + assert 
"deprecation" in dir(tclass) + _check_text_content( + "Test class Deprecated", + tclass.deprecation) - self.assertIn("deprecation", dir(tmethod)) - self.assertIsNone(tmethod.deprecation) - self._check_text_content("Function is deprecated", - tmethod_dep.deprecation) + tmethod = tclass.member_functions()[0] + tmethod_dep = tclass.member_functions()[1] - tconstructor = tclass.constructors()[0] - tconstructor_dep = tclass.constructors()[1] + assert "deprecation", dir(tmethod) + assert tmethod.deprecation is None + _check_text_content( + "Function is deprecated", + tmethod_dep.deprecation) - self.assertIsNone(tconstructor.deprecation) - self.assertIn("deprecation", dir(tconstructor_dep)) - self._check_text_content("One arg constructor is Deprecated", - tconstructor_dep.deprecation) + tconstructor = tclass.constructors()[0] + tconstructor_dep = tclass.constructors()[1] - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() + assert tconstructor.deprecation is None + assert "deprecation" in dir(tconstructor_dep) + _check_text_content( + "One arg constructor is Deprecated", + tconstructor_dep.deprecation) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py deleted file mode 100644 index 6d2180ef..00000000 --- a/tests/test_deprecations.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2014-2017 Insight Software Consortium. -# Copyright 2004-2009 Roman Yakovenko. -# Distributed under the Boost Software License, Version 1.0. -# See http://www.boost.org/LICENSE_1_0.txt - -import unittest - -from . import parser_test_case - - -class Test(parser_test_case.parser_test_case_t): - - """ - Used to test deprecated methods/functions. Does nothing for the moment. - """ - - @staticmethod - def _check(w): - assert len(w) == 1 - assert issubclass(w[-1].category, DeprecationWarning) - assert "deprecated" in str(w[-1].message) - - -def create_suite(): - suite = unittest.TestSuite() - suite.addTest( - unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test)) - return suite - - -def run_suite(): - unittest.TextTestRunner(verbosity=2).run(create_suite()) - - -if __name__ == "__main__": - run_suite() diff --git a/tests/test_directory_cache.py b/tests/test_directory_cache.py index ee58e64c..930d7c5b 100644 --- a/tests/test_directory_cache.py +++ b/tests/test_directory_cache.py @@ -5,84 +5,77 @@ import os import shutil -import unittest + +import pytest from . import autoconfig -from . 
import parser_test_case
 from pygccxml import parser
-
-class Test(parser_test_case.parser_test_case_t):
-
-    def __init__(self, *args):
-        parser_test_case.parser_test_case_t.__init__(self, *args)
-        self.header = "typedefs1.hpp"
-        self.cache_dir = os.path.join(
-            autoconfig.data_directory, "directory_cache_test")
-
-    def setUp(self):
-        # Clear the cache tree
-        if os.path.isdir(self.cache_dir):  # pragma: no cover
-            shutil.rmtree(self.cache_dir)
-
-    def test_directory_cache_without_compression(self):
-        """
-        Test the directory cache without compression
-
-        """
-        # Test with compression OFF
-        cache = parser.directory_cache_t(directory=self.cache_dir)
-        # Generate a cache on first read
-        parser.parse([self.header], self.config, cache=cache)
-        # Read from the cache the second time
-        parser.parse([self.header], self.config, cache=cache)
-
-    def test_directory_cache_with_compression(self):
-        """
-        Test the directory cache wit compression
-
-        """
-        # Test with compression ON
-        cache = parser.directory_cache_t(
-            directory=self.cache_dir, compression=True)
-        # Generate a cache on first read
-        parser.parse([self.header], self.config, cache=cache)
-        # Read from the cache the second time
-        parser.parse([self.header], self.config, cache=cache)
-
-    def test_directory_cache_twice(self):
-        """
-        Setup two caches in a row.
-
-        The second run will reload the same cache directory.
-        """
-        cache = parser.directory_cache_t(directory=self.cache_dir)
-        parser.parse([self.header], self.config, cache=cache)
-        cache = parser.directory_cache_t(directory=self.cache_dir)
-        parser.parse([self.header], self.config, cache=cache)
-
-    def test_directory_existing_dir(self):
-        """
-        Setup a cache when there is already a file at the cache's location.
-        """
-        open(self.cache_dir, "a").close()
-        self.assertRaises(
-            ValueError,
-            lambda: parser.directory_cache_t(directory=self.cache_dir))
-        os.remove(self.cache_dir)
-
-
-def create_suite():
-    suite = unittest.TestSuite()
-    suite.addTest(
-        unittest.TestLoader().loadTestsFromTestCase(testCaseClass=Test))
-    return suite
-
-
-def run_suite():
-    unittest.TextTestRunner(verbosity=2).run(create_suite())
-
-
-if __name__ == "__main__":
-    run_suite()
+TEST_FILES = [
+    "typedefs1.hpp"
+]
+
+CACHE_DIR = os.path.join(autoconfig.data_directory, "directory_cache_test")
+
+
+def set_up():
+    if os.path.isdir(CACHE_DIR):
+        shutil.rmtree(CACHE_DIR)
+    if os.path.isfile(CACHE_DIR):
+        os.remove(CACHE_DIR)
+
+
+def test_directory_cache_without_compression():
+    """
+    Test the directory cache without compression
+
+    """
+    config = autoconfig.cxx_parsers_cfg.config.clone()
+    set_up()
+    # Test with compression OFF
+    cache = parser.directory_cache_t(directory=CACHE_DIR)
+    # Generate a cache on first read
+    parser.parse(TEST_FILES, config, cache=cache)
+    # Read from the cache the second time
+    parser.parse(TEST_FILES, config, cache=cache)
+
+
+def test_directory_cache_with_compression():
+    """
+    Test the directory cache with compression
+
+    """
+    config = autoconfig.cxx_parsers_cfg.config.clone()
+    set_up()
+    # Test with compression ON
+    cache = parser.directory_cache_t(
+        directory=CACHE_DIR, compression=True)
+    # Generate a cache on first read
+    parser.parse(TEST_FILES, config, cache=cache)
+    # Read from the cache the second time
+    parser.parse(TEST_FILES, config, cache=cache)
+
+
+def test_directory_cache_twice():
+    """
+    Setup two caches in a row.
+
+    The second run will reload the same cache directory.
+    """
+    config = autoconfig.cxx_parsers_cfg.config.clone()
+    set_up()
+    cache = parser.directory_cache_t(directory=CACHE_DIR)
+    parser.parse(TEST_FILES, config, cache=cache)
+    cache = parser.directory_cache_t(directory=CACHE_DIR)
+    parser.parse(TEST_FILES, config, cache=cache)
+
+
+def test_directory_existing_dir():
+    """
+    Setup a cache when there is already a file at the cache's location.
+    """
+    set_up()
+    open(CACHE_DIR, "a").close()
+    with pytest.raises(ValueError):
+        parser.directory_cache_t(directory=CACHE_DIR)