@@ -5,7 +5,6 @@ def check_proxy(proxies):
     try:
         response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
         data = response.json()
-        # print(f'Queried the proxy geolocation; the returned result is {data}')
         if 'country_name' in data:
             country = data['country_name']
             result = f"Proxy configuration {proxies_https}, proxy location: {country}"
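The `proxies` argument threaded through these hunks is the standard requests proxy mapping. A minimal sketch of how such a mapping is built and used for the geolocation check above; the proxy address is a placeholder assumption, not a value from this repository's configuration:

import requests

# Hypothetical local proxy endpoint; the real value comes from the project's config.
proxies = {
    "http":  "http://127.0.0.1:7890",
    "https": "http://127.0.0.1:7890",
}

response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
data = response.json()
print(data.get("country_name"))  # country the proxy appears to exit from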
@@ -47,8 +46,8 @@ def backup_and_download(current_version, remote_version):
     os.makedirs(new_version_dir)
     shutil.copytree('./', backup_dir, ignore=lambda x, y: ['history'])
     proxies = get_conf('proxies')
-    r = requests.get(
-        'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    try:    r = requests.get('https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    except: r = requests.get('https://public.gpt-academic.top/publish/master.zip', proxies=proxies, stream=True)
     zip_file_path = backup_dir + '/master.zip'
     with open(zip_file_path, 'wb+') as f:
         f.write(r.content)
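The new try/except above is a primary-then-mirror download: the GitHub archive is tried first, and the public mirror only when that request fails. A minimal standalone sketch of the same pattern; the URLs are copied from the hunk, while the narrower `requests.RequestException` is an assumption of this sketch (the patch itself uses a bare `except`):

import requests

def download_master_zip(proxies=None):
    primary = 'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip'
    mirror = 'https://public.gpt-academic.top/publish/master.zip'
    try:
        return requests.get(primary, proxies=proxies, stream=True)
    except requests.RequestException:
        # Fall back to the mirror when GitHub is unreachable (e.g. behind a firewall).
        return requests.get(mirror, proxies=proxies, stream=True)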
@@ -111,11 +110,10 @@ def auto_update(raise_error=False):
     try:
         from toolbox import get_conf
         import requests
-        import time
         import json
         proxies = get_conf('proxies')
-        response = requests.get(
-            "https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        try:    response = requests.get("https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        except: response = requests.get("https://public.gpt-academic.top/publish/version", proxies=proxies, timeout=5)
         remote_json_data = json.loads(response.text)
         remote_version = remote_json_data['version']
         if remote_json_data["show_feature"]:
@@ -127,8 +125,7 @@ def auto_update(raise_error=False):
         current_version = json.loads(current_version)['version']
         if (remote_version - current_version) >= 0.01 - 1e-5:
             from colorful import print亮黄
-            print亮黄(
-                f'\nA new version is available. New version: {remote_version}, current version: {current_version}. {new_feature}')
+            print亮黄(f'\nA new version is available. New version: {remote_version}, current version: {current_version}. {new_feature}')
             print('(1) Github update URL:\nhttps://github.com/binary-husky/chatgpt_academic\n')
             user_instruction = input('(2) Update the code with one click? (Y + Enter = confirm; anything else / empty + Enter = do not update)')
             if user_instruction in ['Y', 'y']:
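For context, the version check in this hunk treats versions as plain numbers and only offers an update when the remote version is at least 0.01 ahead; the 1e-5 term guards against floating-point rounding. A small illustration with made-up version numbers:

def needs_update(remote_version, current_version):
    # Update only when the remote version is a full 0.01 step ahead.
    return (remote_version - current_version) >= 0.01 - 1e-5

print(needs_update(3.51, 3.50))  # True:  one step ahead
print(needs_update(3.50, 3.50))  # False: already current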
@@ -154,7 +151,7 @@ def auto_update(raise_error=False):
         print(msg)
 
 def warm_up_modules():
-    print('Warming up some modules...')
+    print('Warming up some modules ...')
     from toolbox import ProxyNetworkActivate
     from request_llms.bridge_all import model_info
     with ProxyNetworkActivate("Warmup_Modules"):