-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy pathdbcomp.py
More file actions
497 lines (450 loc) · 17.3 KB
/
dbcomp.py
File metadata and controls
497 lines (450 loc) · 17.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import zlib
import binascii
import operator
import sqlite3
import struct
# File format of .sbstore files:
#
# We do not store the add prefixes, those are retrieved by
# decompressing the PrefixSet cache whenever we need to apply
# an update.
#
# byte slicing: Many of the 4-byte values stored here are strongly
# correlated in the upper bytes, and uncorrelated in the lower
# bytes. Because zlib/DEFLATE requires match lengths of at least
# 3 to achieve good compression, and we don't get those if only
# the upper 16-bits are correlated, it is worthwhile to slice 32-bit
# values into 4 1-byte slices and compress the slices individually.
# The slices corresponding to MSBs will compress very well, and the
# slice corresponding to LSB almost nothing. Because of this, we
# only apply DEFLATE to the 3 most significant bytes, and store the
# LSB uncompressed.
#
# byte sliced (numValues) data format:
# uint32 compressed-size
# compressed-size bytes zlib DEFLATE data
# 0...numValues byte MSB of 4-byte numValues data
# uint32 compressed-size
# compressed-size bytes zlib DEFLATE data
# 0...numValues byte 2nd byte of 4-byte numValues data
# uint32 compressed-size
# compressed-size bytes zlib DEFLATE data
# 0...numValues byte 3rd byte of 4-byte numValues data
# 0...numValues byte LSB of 4-byte numValues data
#
# Store data format:
# uint32 magic
# uint32 version
# uint32 numAddChunks
# uint32 numSubChunks
# uint32 numAddPrefixes
# uint32 numSubPrefixes
# uint32 numAddCompletes
# uint32 numSubCompletes
# 0...numAddChunks uint32 addChunk
# 0...numSubChunks uint32 subChunk
# byte sliced (numAddPrefixes) uint32 add chunk of AddPrefixes
# byte sliced (numSubPrefixes) uint32 add chunk of SubPrefixes
# byte sliced (numSubPrefixes) uint32 sub chunk of SubPrefixes
# byte sliced (numSubPrefixes) uint32 SubPrefixes
# 0...numAddCompletes 32-byte Completions
# 0...numSubCompletes 32-byte Completions
# 16-byte MD5 of all preceding data
class SBHash:
    """One safebrowsing entry: a 32-bit prefix (or, for completes, a tuple of
    32 raw bytes) plus the add chunk it came from, and for sub entries also
    the sub chunk.  Instances are hashable so they can be collected in sets
    and compared between the old and new database formats.
    """
    def __init__(self, prefix=None, addc=None, subc=None):
        self.prefix = prefix     # int prefix, or tuple of bytes for completes
        self.addchunk = addc
        self.subchunk = subc
    def __str__(self):
        if self.subchunk:
            result = "Prefix %X AddChunk: %d SubChunk: %d" \
                % (self.prefix, self.addchunk, self.subchunk)
        else:
            result = "Prefix %X AddChunk: %d" % (self.prefix, self.addchunk)
        return result
    def __key(self):
        # Identity is the full (prefix, addchunk, subchunk) triple.
        return self.prefix, self.addchunk, self.subchunk
    def __eq__(self, other):
        # Previously this raised AttributeError when compared against a
        # non-SBHash object; return NotImplemented so == degrades gracefully.
        if not isinstance(other, SBHash):
            return NotImplemented
        return self.__key() == other.__key()
    def __ne__(self, other):
        # Explicit __ne__ for Python 2 compatibility (the file still uses
        # `from __future__ import print_function`); Python 3 derives it.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
    def __hash__(self):
        return hash(self.__key())
class SBData:
    """Everything parsed for one safebrowsing list (table)."""

    def __init__(self):
        self.name = None              # list/table name
        self.addchunks = set()        # add chunk numbers declared by the store
        self.fake_add_chunks = set()  # declared add chunks with no prefix data
        self.subchunks = set()        # sub chunk numbers declared by the store
        self.addprefixes = []
        self.subprefixes = []
        self.addcompletes = []
        self.subcompletes = []

    def add_addchunk(self, chunk):
        """Record one declared add chunk number."""
        self.addchunks.add(chunk)

    def add_subchunk(self, chunk):
        """Record one declared sub chunk number."""
        self.subchunks.add(chunk)

    def fill_addprefixes(self, prefixes):
        """Add prefixes are stored in the PrefixSet instead of in the sbstore,
        so allow filling them in separately afterwards."""
        assert len(prefixes) == len(self.addprefixes), \
            "Prefixes: %d AddPrefixes: %d" \
            % (len(prefixes), len(self.addprefixes))
        for entry, value in zip(self.addprefixes, prefixes):
            entry.prefix = value

    def sort_all_data(self):
        """Sort every prefix/complete list into canonical order."""
        by_add = operator.attrgetter('prefix', 'addchunk')
        by_sub = operator.attrgetter('prefix', 'subchunk', 'addchunk')
        self.addprefixes.sort(key=by_add)
        self.subprefixes.sort(key=by_sub)
        self.addcompletes.sort(key=by_add)
        self.subcompletes.sort(key=by_sub)
def read_unzip(fp, comp_size):
    """Read comp_size bytes of zlib DEFLATE data from a stream and return
    the decompressed payload as a tuple of byte values (ints)."""
    decompressed = zlib.decompress(fp.read(comp_size))
    # bytearray yields ints on both Python 2 and 3
    return tuple(bytearray(decompressed))
def read_raw(fp, size):
    """Read size raw bytes from a stream and return them as a tuple of
    byte values (ints).  Raises struct.error on a short read."""
    fmt = struct.Struct("=%dB" % size)
    return fmt.unpack_from(fp.read(size), 0)
def readuint32(fp):
    """Read one native-endian unsigned 32-bit integer from the stream."""
    (value,) = struct.unpack("=I", fp.read(4))
    return value
def readuint16(fp):
    """Read one native-endian unsigned 16-bit integer from the stream."""
    (value,) = struct.unpack("=H", fp.read(2))
    return value
def read_bytesliced(fp, count):
    """Read `count` uint32 values stored byte-sliced (see the format comment
    at the top of the file): three zlib-compressed slices for the upper
    bytes, then one raw slice for the LSBs.  Returns a list of ints.
    Exits the process if the slice lengths are inconsistent."""
    # Three compressed slices (MSB, 2nd, 3rd byte), then the raw LSB slice.
    slices = []
    for _ in range(3):
        comp_size = readuint32(fp)
        slices.append(read_unzip(fp, comp_size))
    slices.append(read_raw(fp, count))
    lengths = [len(s) for s in slices]
    if any(length != count for length in lengths):
        print("Slices inconsistent %d %d %d %d %d"
              % (count, lengths[0], lengths[1], lengths[2], lengths[3]))
        # sys.exit instead of the site-module exit() helper
        sys.exit(1)
    result = []
    for msb, b2, b3, lsb in zip(*slices):
        result.append((msb << 24) | (b2 << 16) | (b3 << 8) | lsb)
    return result
def read_sbstore(sbstorefile):
    """Parse one .sbstore file (format documented at the top of this file)
    and return the populated SBData.  Note the add prefixes themselves are
    not in the sbstore; fill them in later from the .pset file.
    Exits the process on a truncated or overlong file."""
    data = SBData()
    # `with` guarantees the file is closed (it previously leaked)
    with open(sbstorefile, "rb") as fp:
        # parse header
        header = struct.Struct("=IIIIIIII")
        magic, version, num_add_chunk, num_sub_chunk, \
            num_add_prefix, num_sub_prefix, \
            num_add_complete, num_sub_complete = header.unpack_from(fp.read(header.size), 0)
        print(("Magic %X Version %u NumAddChunk: %d NumSubChunk: %d "
               + "NumAddPrefix: %d NumSubPrefix: %d NumAddComplete: %d "
               + "NumSubComplete: %d") % (magic, version, num_add_chunk,
                                          num_sub_chunk, num_add_prefix,
                                          num_sub_prefix, num_add_complete,
                                          num_sub_complete))
        # parse chunk data
        for _ in range(num_add_chunk):
            data.add_addchunk(readuint32(fp))
        for _ in range(num_sub_chunk):
            data.add_subchunk(readuint32(fp))
        # read bytesliced data
        addprefix_addchunk = read_bytesliced(fp, num_add_prefix)
        subprefix_addchunk = read_bytesliced(fp, num_sub_prefix)
        subprefix_subchunk = read_bytesliced(fp, num_sub_prefix)
        subprefixes = read_bytesliced(fp, num_sub_prefix)
        # Construct the prefix objects (add prefix values filled in later)
        for chunk in addprefix_addchunk:
            data.addprefixes.append(SBHash(0, chunk))
        for i in range(num_sub_prefix):
            data.subprefixes.append(SBHash(subprefixes[i],
                                           subprefix_addchunk[i],
                                           subprefix_subchunk[i]))
        for _ in range(num_add_complete):
            complete = read_raw(fp, 32)
            addchunk = readuint32(fp)
            data.addcompletes.append(SBHash(complete, addchunk))
        for _ in range(num_sub_complete):
            complete = read_raw(fp, 32)
            addchunk = readuint32(fp)
            subchunk = readuint32(fp)
            data.subcompletes.append(SBHash(complete, addchunk, subchunk))
        md5sum = fp.read(16)
        # b2a_hex returns bytes on Python 3; decode before concatenating,
        # otherwise this line raised TypeError (str + bytes)
        print("MD5: " + binascii.b2a_hex(md5sum).decode("ascii"))
        # EOF detection
        dummy = fp.read(1)
        if len(dummy) or (len(md5sum) != 16):
            if len(md5sum) != 16:
                print("Checksum truncated")
            print("File doesn't end where expected:", end=" ")
            # Don't count the dummy read, we finished before it
            ourpos = fp.tell() - len(dummy)
            # Seek to end
            fp.seek(0, 2)
            endpos = fp.tell()
            print("%d bytes remaining" % (endpos - ourpos))
            sys.exit(1)
    return data
def pset_to_prefixes(index_prefixes, index_starts, index_deltas):
    """Expand a delta-encoded PrefixSet into the flat list of prefixes.

    Each index entry i holds a full 32-bit prefix and the position in
    index_deltas where its 16-bit deltas begin; the deltas run until the
    next index entry's start (or the end of index_deltas for the last one).
    """
    prefixes = []
    num_indexes = len(index_prefixes)
    for idx in range(num_indexes):
        current = index_prefixes[idx]
        prefixes.append(current)
        begin = index_starts[idx]
        finish = index_starts[idx + 1] if idx + 1 < num_indexes \
            else len(index_deltas)
        for delta in index_deltas[begin:finish]:
            current += delta
            prefixes.append(current)
    return prefixes
def read_pset(filename):
    """Read a .pset PrefixSet file and return its add prefixes as a list
    of ints, decoded from the index/delta representation."""
    # `with` guarantees the file is closed (it previously leaked)
    with open(filename, "rb") as fp:
        version = readuint32(fp)
        indexsize = readuint32(fp)
        deltasize = readuint32(fp)
        print("Version: %X Indexes: %d Deltas: %d"
              % (version, indexsize, deltasize))
        index_prefixes = [readuint32(fp) for _ in range(indexsize)]
        index_starts = [readuint32(fp) for _ in range(indexsize)]
        index_deltas = [readuint16(fp) for _ in range(deltasize)]
    prefixes = pset_to_prefixes(index_prefixes, index_starts, index_deltas)
    # empty set has a special form: a single zero prefix
    if len(prefixes) and prefixes[0] == 0:
        prefixes = []
    return prefixes
def parse_new_databases(dir):
    """Scan `dir` for .sbstore files, parse each together with its matching
    .pset file, and return a dict mapping list name -> SBData."""
    # look for all sbstore files
    sb_lists = {}
    # renamed loop variable: `file` shadowed the builtin
    for entry in os.listdir(dir):
        if not entry.endswith(".sbstore"):
            continue
        sb_name = entry[:-len(".sbstore")]
        print("Reading " + sb_name)
        sb_data = read_sbstore(os.path.join(dir, entry))
        # add prefixes live in the .pset, not the .sbstore
        prefixes = read_pset(os.path.join(dir, sb_name + ".pset"))
        sb_data.name = sb_name
        sb_data.fill_addprefixes(prefixes)
        sb_data.sort_all_data()
        sb_lists[sb_name] = sb_data
    print("\n")
    print("Found safebrowsing lists in new DB:")
    for name in sb_lists.keys():
        print(name)
    return sb_lists
def parse_old_database(dir):
    """Read the legacy urlclassifier3.sqlite database in `dir` and return a
    dict mapping list name -> SBData with add/sub prefixes and chunk sets."""
    filename = os.path.join(dir, "urlclassifier3.sqlite")
    connection = sqlite3.connect(filename)
    cursor = connection.cursor()
    cursor.execute("SELECT name, id FROM moz_tables")
    sb_names = {}
    # renamed: `id` shadowed the builtin; iterate the cursor directly
    # instead of the fetchone/while-True loop
    for name, table_id in cursor:
        sb_names[name] = table_id
    cursor.close()
    print("\nFound safebrowsing lists in old DB:")
    for key in sb_names.keys():
        print(key)
    # Snapshot the keys: the loop below replaces the dict's values with
    # SBData objects while iterating.
    for table_name in list(sb_names.keys()):
        table_id = sb_names[table_name]
        data = SBData()
        data.name = table_name
        # Gather add prefixes
        addpref_query = ("SELECT domain, partial_data, chunk_id "
                         "FROM moz_classifier WHERE table_id = ?")
        cursor = connection.cursor()
        cursor.execute(addpref_query, (table_id,))
        for domain, prefix, addchunk in cursor:
            # With no partial data the domain hash doubles as the prefix
            if not prefix:
                prefix = struct.unpack("=I", domain)[0]
            else:
                prefix = struct.unpack("=I", prefix)[0]
            data.addprefixes.append(SBHash(prefix, addchunk))
            data.add_addchunk(addchunk)
        cursor.close()
        # Gather sub prefixes
        subpref_query = ("SELECT domain, partial_data, chunk_id, "
                         "add_chunk_id FROM moz_subs WHERE table_id = ?")
        cursor = connection.cursor()
        cursor.execute(subpref_query, (table_id,))
        for domain, prefix, subchunk, addchunk in cursor:
            if not prefix:
                prefix = struct.unpack("=I", domain)[0]
            else:
                prefix = struct.unpack("=I", prefix)[0]
            data.subprefixes.append(SBHash(prefix, addchunk, subchunk))
            data.add_subchunk(subchunk)
        cursor.close()
        # Note that chunk count reported here is the real chunks we have
        # data for. In reality we expect less chunks to exist in the prefix
        # data due to knocking them out.
        print("\nTable: %s\nAddChunks: %d SubChunks: %d AddPrefixes: %d " \
              "SubPrefixes: %d" % (table_name, len(data.addchunks),
                                   len(data.subchunks), len(data.addprefixes),
                                   len(data.subprefixes)))
        data.sort_all_data()
        sb_names[table_name] = data
    connection.close()
    return sb_names
def compare_chunks(old_table, new_table):
    """Print the add/sub chunk ranges seen in both DBs and verify that the
    chunk set reported by the new DB is a superset of the chunks actually
    referenced by its prefix data.  Extra reported add chunks are recorded
    in new_table.fake_add_chunks.  Always returns False: discrepancies are
    reported but not counted as a comparison failure."""
    # Addchunks min/max, subchunks min/max
    old_set = [
        {pref.addchunk for pref in old_table.addprefixes},
        {pref.subchunk for pref in old_table.subprefixes},
    ]
    new_set = [
        {pref.addchunk for pref in new_table.addprefixes},
        {pref.subchunk for pref in new_table.subprefixes},
    ]
    if len(old_set[0]):
        print("Old DB Add range: %d - %d" % (min(old_set[0]), max(old_set[0])))
    if len(new_set[0]):
        print("New DB Add range: %d - %d" % (min(new_set[0]), max(new_set[0])))
        print(" DB Add range: %d - %d" % (min(new_table.addchunks),
                                          max(new_table.addchunks)))
    if len(old_set[1]):
        print("Old DB Sub range: %d - %d" % (min(old_set[1]), max(old_set[1])))
    if len(new_set[1]):
        print("New DB Sub range: %d - %d" % (min(new_set[1]), max(new_set[1])))
        print(" DB Sub range: %d - %d" % (min(new_table.subchunks),
                                          max(new_table.subchunks)))
    # Intersect real sets and reported sets, for addchunks
    # Reported sets should be a superset of real sets
    print(end="\n")
    reported = new_table.addchunks
    if not reported >= new_set[0]:
        print("Reported sets not a superset of real sets")
        # Reported chunks with no backing prefix data are "fake"
        fake_set = reported - new_set[0]
        for chunk in sorted(fake_set):
            print("Fake set %d" % chunk)
        new_table.fake_add_chunks.update(fake_set)
        print(end="\n")
    # BUG FIX: this function used to fall off the end (returning None) on
    # the consistent path, which made `failure |= compare_chunks(...)` in
    # compare_all_the_things raise TypeError.
    return False
def compare_table(old_table, new_table):
    """Compare the add/sub prefixes of one list between the old and new DB.
    Prints every mismatching prefix (and for sub mismatches whether the
    matching add chunk exists, is fake, or is missing) plus an overall
    correctness percentage.  Returns True when any prefix mismatched."""
    verbose = True
    total_prefixes = 0
    failed_prefixes = 0
    # Compare AddPrefixes
    old_addprefixes = set(old_table.addprefixes)
    new_addprefixes = set(new_table.addprefixes)
    total_prefixes += len(old_addprefixes)
    # symmetric difference = prefixes present on exactly one side
    mismatches = sorted(old_addprefixes ^ new_addprefixes,
                        key=operator.attrgetter('addchunk', 'prefix'))
    failed_prefixes += len(mismatches)
    print("%d add mismatches" % len(mismatches))
    if verbose:
        for pref in mismatches:
            if pref in new_addprefixes:
                print("No match AddPrefix new " + str(pref))
            elif pref in old_addprefixes:
                print("No match AddPrefix old " + str(pref))
            else:
                print("wut?")
    # Compare SubPrefixes
    old_subprefixes = set(old_table.subprefixes)
    new_subprefixes = set(new_table.subprefixes)
    total_prefixes += len(old_subprefixes)
    mismatches = sorted(old_subprefixes ^ new_subprefixes,
                        key=operator.attrgetter('subchunk', 'prefix',
                                                'addchunk'))
    failed_prefixes += len(mismatches)
    print("%d sub mismatches" % len(mismatches))
    if verbose:
        for pref in mismatches:
            if pref in new_subprefixes:
                print("No match SubPrefix new " + str(pref), end="")
                addchunk = pref.addchunk
                if addchunk in new_table.addchunks:
                    if addchunk in new_table.fake_add_chunks:
                        print(", In FAKE Adds")
                    else:
                        print(", In Adds")
                else:
                    print(", Missing in Adds")
            elif pref in old_subprefixes:
                print("No match SubPrefix old " + str(pref))
            else:
                print("wut?")
    # BUG FIX: guard against empty tables, which used to raise
    # ZeroDivisionError here; an empty table trivially matches (100%).
    if total_prefixes:
        print("Correct: %f%%"
              % ((total_prefixes - failed_prefixes)*100.0/total_prefixes))
    else:
        print("Correct: %f%%" % 100.0)
    return failed_prefixes != 0
def compare_all_the_things(new_lists, old_lists):
    """Run the chunk and prefix comparisons for every table present in the
    old DB against its counterpart in the new DB; return True on any
    mismatch."""
    failure = False
    for table_name, old_data in old_lists.items():
        print("\nComparing table " + table_name)
        new_data = new_lists[table_name]
        failure |= compare_chunks(old_data, new_data)
        failure |= compare_table(old_data, new_data)
    return failure
def main(argv):
    """Entry point.  argv is [prog, old_profile_dir, new_profile_dir];
    exits 0 when the databases match, 1 otherwise."""
    # last argument is the new-format profile, second-to-last the old one
    new_profile_dir = argv.pop()
    old_profile_dir = argv.pop()
    new_lists = parse_new_databases(new_profile_dir)
    old_lists = parse_old_database(old_profile_dir)
    failure = compare_all_the_things(new_lists, old_lists)
    # sys.exit instead of the site-module exit() helper
    sys.exit(1 if failure else 0)
if __name__ == "__main__":
    # Usage: dbcomp.py <old_profile_dir> <new_profile_dir>
    if len(sys.argv) != 3:
        print("Need to specify 2 profile directories.")
        # sys.exit instead of the site-module exit() helper
        sys.exit(1)
    main(sys.argv)