# Shared logger for the compute tests; named after the suite it serves.
LOGGER = logging.getLogger("TestCompute")
1919
2020
21- def executeChunks (node , tmpPath , size ):
22- nodeCache = os .path .join (tmpPath , node .internalFolder )
23- os .makedirs (nodeCache )
21+ def executeChunks (node , size ):
22+ os .makedirs (node .internalFolder )
2423 logFiles = {}
2524 for chunkIndex in range (size ):
2625 iteration = chunkIndex if size > 1 else - 1
2726 logFileName = "log"
2827 if size > 1 :
2928 logFileName = f"{ chunkIndex } .log"
30- logFile = Path (nodeCache ) / logFileName
29+ logFile = Path (node . internalFolder ) / logFileName
3130 logFiles [chunkIndex ] = logFile
3231 logFile .touch ()
3332 node .prepareLogger (iteration )
@@ -97,7 +96,7 @@ def test_nodeWithChunks(self, tmp_path):
9796 graph ._cacheDir = tmp_path
9897 node = graph .addNewNode (TestNodeA .__name__ )
9998 # Compute
100- logFiles = executeChunks (node , tmp_path , 2 )
99+ logFiles = executeChunks (node , 2 )
101100 for chunkId , logFile in logFiles .items ():
102101 with open (logFile , "r" ) as f :
103102 content = f .read ()
@@ -109,7 +108,7 @@ def test_nodeWithoutChunks(self, tmp_path):
109108 graph ._cacheDir = tmp_path
110109 node = graph .addNewNode (TestNodeB .__name__ )
111110 # Compute
112- logFiles = executeChunks (node , tmp_path , 1 )
111+ logFiles = executeChunks (node , 1 )
113112 for _ , logFile in logFiles .items ():
114113 with open (logFile , "r" ) as f :
115114 content = f .read ()
0 commit comments