@@ -832,7 +832,7 @@ def _upload_big_file(self, file_path, dir_id, *, callback=None, uploaded_handler
         if not os.path.exists(record_file):  # initialize the record file
             info = {'name': file_name, 'size': file_size, 'uploaded': 0, 'parts': []}
             with open(record_file, 'wb') as f:
-                pickle.dump(info, f)
+                pickle.dump(info, f, protocol=4)
         else:
             with open(record_file, 'rb') as f:
                 info = pickle.load(f)
@@ -859,7 +859,7 @@ def _close_pwd(fid, is_file):  # disable the extraction code by default after a data block is uploaded
                 info['parts'].append(os.path.basename(data_path))  # record the uploaded part's file name
                 with open(record_file, 'wb') as f:
                     logger.debug(f"Update record file: {uploaded_size}/{file_size}")
-                    pickle.dump(info, f)
+                    pickle.dump(info, f, protocol=4)
             else:
                 logger.debug(f"Upload data file failed: data_path={data_path}")
                 return LanZouCloud.FAILED
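For context on why these dumps now pin protocol=4: the length of a pickle stream depends on the protocol version, and the interpreter default changes across Python releases (protocol 3 on Python 3.6/3.7, protocol 5 on Python 3.8+), so an unpinned dump can change size between environments, which matters once a stream has to land on an exact byte count like the 512-byte file tail handled further down. A minimal sketch with placeholder values, not the real record contents:

import pickle

info = {'name': 'demo.bin', 'size': 1024, 'uploaded': 0, 'parts': []}
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
    # the same dict serializes to a different number of bytes under each protocol
    print(f"protocol {proto}: {len(pickle.dumps(info, protocol=proto))} bytes")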
@@ -1041,10 +1041,19 @@ def down_file_by_url(self, share_url, pwd='', save_path='./Download', *, callbac
         os.rename(tmp_file_path, file_path)  # download finished, rename back to the normal file name
         if os.path.getsize(file_path) > 512:  # check the file if it is larger than 512 bytes
             file_info = None
+            is_protocol_3 = False
             with open(file_path, 'rb') as f:
                 f.seek(-512, os.SEEK_END)
                 last_512_bytes = f.read()
                 file_info = un_serialize(last_512_bytes)
+                # Python 3.6 serializes with pickle protocol 3 by default,
+                # which writes 5 extra bytes at the end of the file, so handle that case just to be safe
+                if not file_info:
+                    is_protocol_3 = True
+                    f.seek(-517, os.SEEK_END)
+                    last_517_bytes = f.read()
+                    file_info = un_serialize(last_517_bytes)
+
 
             # the record file of a big file can also be deserialized to get a name, but it has no padding field
             if file_info is not None and 'padding' in file_info:
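The fallback above relies on un_serialize returning a falsy value when the 512-byte slice does not line up with the start of the pickled tail (as happens with a 517-byte protocol-3 tail). A stand-in that captures that assumed contract, not the project's actual helper, could look like:

import pickle

def un_serialize(data: bytes):
    # return the deserialized dict, or None if the bytes are not a valid pickle
    try:
        obj = pickle.loads(data)
        return obj if isinstance(obj, dict) else None
    except Exception:
        return None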
@@ -1059,7 +1068,8 @@ def down_file_by_url(self, share_url, pwd='', save_path='./Download', *, callbac
                 os.rename(file_path, new_file_path)
                 # truncate the hidden info in the last 512 bytes to restore the file
                 with open(new_file_path, 'rb+') as f:
-                    f.seek(-512, os.SEEK_END)
+                    truncate_size = 517 if is_protocol_3 else 512
+                    f.seek(-truncate_size, os.SEEK_END)
                     f.truncate()
                 file_path = new_file_path  # keep the real path of the renamed file
 
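Only the reading side of the hidden tail appears in this diff. Purely as a hypothetical illustration of how a pickled dict with a 'padding' field can be forced to an exact 512 bytes, and of why the pickle protocol version matters for that size, a writer could be sketched like this (make_tail is not code from this repository):

import pickle

def make_tail(meta: dict, total: int = 512) -> bytes:
    # measure the fixed cost of everything except the padding payload, using a
    # probe >= 256 bytes so both dumps use the same length encoding for bytes
    probe = 300
    overhead = len(pickle.dumps({**meta, 'padding': b'\x00' * probe}, protocol=4)) - probe
    tail = pickle.dumps({**meta, 'padding': b'\x00' * (total - overhead)}, protocol=4)
    assert len(tail) == total  # holds while total - overhead stays >= 256
    return tail

print(len(make_tail({'name': 'demo.bin', 'size': 1024})))  # -> 512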
@@ -1222,7 +1232,7 @@ def _down_big_file(self, name, total_size, file_list, save_path, *, callback=Non
         if not os.path.exists(record_file):  # initialize the record file
             info = {'last_ending': 0, 'finished': []}  # record the end offset of the previous data block and the blocks already downloaded
             with open(record_file, 'wb') as rf:
-                pickle.dump(info, rf)
+                pickle.dump(info, rf, protocol=4)
         else:  # read the record file and resume the download
             with open(record_file, 'rb') as rf:
                 info = pickle.load(rf)
@@ -1266,7 +1276,7 @@ def _down_big_file(self, name, total_size, file_list, save_path, *, callback=Non
             finally:
                 info['last_ending'] = file_size_now
                 with open(record_file, 'wb') as rf:
-                    pickle.dump(info, rf)
+                    pickle.dump(info, rf, protocol=4)
                 logger.debug(f"Update download record info: {info}")
         # all data blocks downloaded, the record file can now be deleted
         logger.debug(f"Delete download record file: {record_file}")