 import unittest
 from pathlib import Path

-from codebasin import CodeBase, finder, platform, preprocessor
+from codebasin import CodeBase, finder, preprocessor
+from codebasin.preprocessor import Platform


 class TestMacroExpansion(unittest.TestCase):
@@ -62,7 +63,7 @@ def test_cat(self):
         test_str = "CATTEST=first ## 2"
         macro = preprocessor.macro_from_definition_string(test_str)
         tokens = preprocessor.Lexer("CATTEST").tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         expected_tokens = preprocessor.Lexer("first2").tokenize()
@@ -75,7 +76,7 @@ def test_stringify_quote(self):
         test_str = "STR(x)= #x"
         macro = preprocessor.macro_from_definition_string(test_str)
         tokens = preprocessor.Lexer('STR(foo("4 + 5"))').tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         expected_tokens = preprocessor.Lexer('"foo(\\"4 + 5\\")"').tokenize()
@@ -89,7 +90,7 @@ def test_stringify_ws(self):
         macro = preprocessor.macro_from_definition_string(test_str)
         to_expand_str = r'STR(L + 2-2 "\" \n")'
         tokens = preprocessor.Lexer(to_expand_str).tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         expected_str = r'TEST "L + 2-2 \"\\\" \\n\""'
@@ -103,7 +104,7 @@ def test_stringify_nested(self):
         mac_xstr = preprocessor.macro_from_definition_string("xstr(s)=str(s)")
         mac_str = preprocessor.macro_from_definition_string("str(s)=#s")
         mac_def = preprocessor.macro_from_definition_string("foo=4")
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {x.name: x for x in [mac_xstr, mac_str, mac_def]}

         tokens = preprocessor.Lexer("str(foo)").tokenize()
@@ -148,7 +149,7 @@ def test_variadic(self):
         tokens = preprocessor.Lexer(
             'eprintf("%d, %f, %e", a, b, c)',
         ).tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -172,7 +173,7 @@ def test_self_reference_macros_1(self):
         def_string = "FOO=(4 + FOO)"
         macro = preprocessor.macro_from_definition_string(def_string)
         tokens = preprocessor.Lexer("FOO").tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -201,7 +202,7 @@ def test_self_reference_macros_2(self):
         def_string = "FOO=FOO"
         macro = preprocessor.macro_from_definition_string(def_string)
         tokens = preprocessor.Lexer("FOO").tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         self.assertTrue(len(expanded_tokens) == len(expected_expansion))
@@ -226,7 +227,7 @@ def test_self_reference_macros_3(self):
         def_string = "foo(x)=bar x"
         macro = preprocessor.macro_from_definition_string(def_string)
         tokens = preprocessor.Lexer("foo(foo) (2)").tokenize()
-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {macro.name: macro}
         expanded_tokens = preprocessor.MacroExpander(p).expand(tokens)
         expected_tokens = preprocessor.Lexer("bar foo (2)").tokenize()
@@ -270,7 +271,7 @@ def test_indirect_self_reference_macros(self):
         x_tokens = preprocessor.Lexer("x").tokenize()
         y_tokens = preprocessor.Lexer("y").tokenize()

-        p = platform.Platform("Test", self.rootdir)
+        p = Platform("Test", self.rootdir)
         p._definitions = {x_macro.name: x_macro, y_macro.name: y_macro}

         x_expanded_tokens = preprocessor.MacroExpander(p).expand(x_tokens)
0 commit comments