@@ -10,9 +10,9 @@ class Crawler():
     domain_url = base_url + 'domains/{}/{}'
     problem_url = base_url + 'challenges/{}/problem'
 
-    new_readme_text = '## [{}]({})\n\n|Problem Name|Problem Link|Solution Link|\n|---|---|---|\n'
+    new_readme_text = '## [{}]({})\n\n|Problem Name|Problem Link|Language|Solution Link|\n---|---|---|---\n'
     readme_headers_len = len(new_readme_text.split('\n'))
-    problem_readme_text = '|{}|[Problem]({})|[Solution](./{})|\n'
+    problem_readme_text = '|{}|[Problem]({})|{}|[Solution](./{})|\n'
 
 
     base_folder_name = 'Hackerrank'
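
For reference, a minimal sketch of what the widened header template renders to once formatted; the track name and URL below are hypothetical placeholders, not values taken from this change:

# Hypothetical example: rendering the new README header with the extra Language column.
track_name = 'Algorithms'                                            # placeholder track name
track_url = 'https://www.hackerrank.com/domains/algorithms/warmup'   # placeholder track URL
header_template = '## [{}]({})\n\n|Problem Name|Problem Link|Language|Solution Link|\n---|---|---|---\n'  # mirrors new_readme_text
print(header_template.format(track_name, track_url))
# ## [Algorithms](https://www.hackerrank.com/domains/algorithms/warmup)
#
# |Problem Name|Problem Link|Language|Solution Link|
# ---|---|---|---
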
@@ -75,6 +75,7 @@ class Crawler():
 
     def __init__(self):
         self.session = requests.Session()
+        self.total_submissions = 0
 
     def login(self, username, password):
         resp = self.session.get(self.login_url, auth=(username, password))
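
The counter added above is only initialized here; the increment is not part of this diff. A minimal sketch of how it might be used, assuming one increment per submission written to disk:

# Assumption: inside the submission-processing code, after a solution file is saved:
#     self.total_submissions += 1
# and a summary could then be printed at the end of a run:
#     print('Wrote {} submissions'.format(self.total_submissions))
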
@@ -182,7 +183,7 @@ def get_submissions(self, submissions):
         if not os.path.exists(readme_file_path):
             self.create_readme(track_folder_name, track_url, readme_file_path)
         problem_url = self.problem_url.format(challenge_slug)
-        readme_text = self.problem_readme_text.format(challenge_name, problem_url, file_name + file_extension)
+        readme_text = self.problem_readme_text.format(challenge_name, problem_url, language, file_name + file_extension)
         self.update_readme(
             readme_file_path,
             readme_text,
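
Below is a minimal sketch of the per-problem row the updated template produces; the challenge name, URL, language, and file name are made-up placeholders, not values from this change:

# Hypothetical example: rendering one README table row with the new language column.
row_template = '|{}|[Problem]({})|{}|[Solution](./{})|\n'   # mirrors problem_readme_text
row = row_template.format(
    'Solve Me First',                                                  # placeholder challenge name
    'https://www.hackerrank.com/challenges/solve-me-first/problem',    # placeholder problem URL
    'Python',                                                          # placeholder language
    'solve-me-first.py',                                               # placeholder file name + extension
)
print(row)
# |Solve Me First|[Problem](https://www.hackerrank.com/challenges/solve-me-first/problem)|Python|[Solution](./solve-me-first.py)|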