@@ -169,8 +169,21 @@ def main():
 
     args = parser.parse_args()
 
+    now = datetime.datetime.now().strftime("%Y%m%d")
+
+    # Canonicalization of web url and create path for output.
+    website = ''
+    out_path = ''
+
+    if len(args.url) > 0:
+        website = url_canon(args.url, args.verbose)
+        if args.folder is not None:
+            out_path = folder(args.folder, args.verbose)
+        else:
+            out_path = folder(extract_domain(website), args.verbose)
+
     # Parse arguments to variables else initiate variables.
-    input_file = args.input if args.input else ''
+    input_file = args.input if args.input else (out_path + '/' + now + '_links.txt')
     output_file = args.output if args.output else ''
     c_depth = args.cdepth if args.cdepth else 0
     c_pause = args.cpause if args.cpause else 1
@@ -185,30 +198,17 @@ def main():
         check_ip()
     print(('## URL: ' + args.url))
 
-    website = ''
-    out_path = ''
-
-    # Canonicalization of web url and create path for output.
-    if len(args.url) > 0:
-        website = url_canon(args.url, args.verbose)
-        if args.folder is not None:
-            out_path = folder(args.folder, args.verbose)
-        else:
-            out_path = folder(extract_domain(website), args.verbose)
-
     if args.crawl:
         crawler = Crawler(website, c_depth, c_pause, out_path, args.log,
                           args.verbose)
         lst = crawler.crawl()
 
-        now = datetime.datetime.now().strftime("%Y%m%d")
-        with open(out_path + '/' + now + '_links.txt', 'w+', encoding='UTF-8') as file:
+        with open(input_file, 'w+', encoding='UTF-8') as file:
             for item in lst:
                 file.write(f"{item}\n")
-        print(f"## File created on {os.getcwd()}/{out_path}/links.txt")
+        print(f"## File created on {os.getcwd()}/{input_file}")
 
     if args.extract:
-        input_file = out_path + "/links.txt"
         extractor(website, args.crawl, output_file, input_file, out_path,
                   selection_yara)
     else:
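In effect, the patch computes the dated links path once, up front, and reuses it as the default input_file, so the file the crawler writes is the same file the extractor later reads; previously the extractor hard-coded out_path + "/links.txt", which never matched the dated filename the crawler actually produced. A minimal sketch of the resulting default-path rule, where default_links_path is a hypothetical helper name used only for illustration, not part of the patch:

import datetime

def default_links_path(out_path, input_arg=''):
    # Mirrors the patched logic: an explicit --input argument wins;
    # otherwise the path defaults to <out_path>/<YYYYMMDD>_links.txt.
    now = datetime.datetime.now().strftime("%Y%m%d")
    return input_arg if input_arg else out_path + '/' + now + '_links.txt'

print(default_links_path('example.onion'))  # e.g. example.onion/20240101_links.txt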