#!/usr/bin/python
"""
CREATE TABLE package (package TEXT PRIMARY KEY, version TEXT, architecture TEXT);
CREATE TABLE content (package TEXT, filename TEXT, size INTEGER, function TEXT, hash TEXT, FOREIGN KEY (package) REFERENCES package(package));
CREATE TABLE dependency (package TEXT, required TEXT, FOREIGN KEY (package) REFERENCES package(package), FOREIGN KEY (required) REFERENCES package(package));
CREATE INDEX content_package_index ON content (package);
CREATE INDEX content_hash_index ON content (hash);
"""
import hashlib
import sqlite3
import struct
import sys
import tarfile
import zlib
from debian.debian_support import version_compare
from debian import deb822
import lzma
from dedup.hashing import HashBlacklist, DecompressedHash, SuppressingHash, hash_file
from dedup.compression import GzipDecompressor, DecompressedStream

class ArReader(object):
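    """Sequential reader for the Unix ar(1) archive format, the outer
    container of a .deb package."""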
    global_magic = b"!<arch>\n"
    file_magic = b"`\n"

    def __init__(self, fileobj):
        self.fileobj = fileobj
        self.remaining = None
        self.padding = 0

    def skip(self, length):
        # discard exactly length bytes from the underlying file object
        while length:
            data = self.fileobj.read(min(4096, length))
            if not data:
                raise ValueError("archive truncated")
            length -= len(data)

    def read_magic(self):
        data = self.fileobj.read(len(self.global_magic))
        if data != self.global_magic:
            raise ValueError("ar global header not found")
        self.remaining = 0

    def read_entry(self):
        """Skip to the next archive member and return its name."""
        self.skip_current_entry()
        if self.padding:
            if self.fileobj.read(1) != b"\n":
                raise ValueError("missing ar padding")
            self.padding = 0
        file_header = self.fileobj.read(60)
        if not file_header:
            raise EOFError("end of archive found")
        # member header fields: name, mtime, owner, group, mode, size, magic
        parts = struct.unpack("16s 12s 6s 6s 8s 10s 2s", file_header)
        parts = [p.rstrip(" ") for p in parts]
        if parts.pop() != self.file_magic:
            raise ValueError("ar file header not found")
        self.remaining = int(parts[5])
        # members are padded with a newline to an even number of bytes
        self.padding = self.remaining % 2
        return parts[0] # name

    def skip_current_entry(self):
        self.skip(self.remaining)
        self.remaining = 0

    def read(self, length=None):
        # read from the current member, but never beyond its end
        if length is None:
            length = self.remaining
        else:
            length = min(self.remaining, length)
        data = self.fileobj.read(length)
        self.remaining -= len(data)
        return data

class MultiHash(object):
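    """Feed the same data to several hashlib-style hash objects at once."""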
    def __init__(self, *hashes):
        self.hashes = hashes

    def update(self, data):
        for hasher in self.hashes:
            hasher.update(data)

boring_sha512_hashes = set((
# ""
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e",
# "\n"
"be688838ca8686e5c90689bf2ab585cef1137c999b48c70b92f67a5c34dc15697b5d11c982ed6d71be1e1e7f7b4e0733884aa97c3f7a339a8ed03577cf74be09"))
def sha512_nontrivial():
    return HashBlacklist(hashlib.sha512(), boring_sha512_hashes)

def gziphash():
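    # hash the decompressed contents of gzip-compressed files; decompression
    # errors are suppressed rather than aborting the whole package import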
    hashobj = DecompressedHash(GzipDecompressor(), hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError, zlib.error))
    hashobj.name = "gzip_sha512"
    return HashBlacklist(hashobj, boring_sha512_hashes)

def get_hashes(tar):
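    """Yield (filename, size, hash function name, hexdigest) tuples for the
    regular files in the given tar object, skipping hash functions that
    produce no value."""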
    for elem in tar:
        if not elem.isreg(): # excludes hard links as well
            continue
        hasher = MultiHash(sha512_nontrivial(), gziphash())
        hasher = hash_file(hasher, tar.extractfile(elem))
        for hashobj in hasher.hashes:
            hashvalue = hashobj.hexdigest()
            if hashvalue:
                yield (elem.name, elem.size, hashobj.name, hashvalue)

def process_package(db, filelike):
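    """Import a single .deb package from the file object filelike into the
    database db. Packages older than an already imported version of the same
    name are skipped."""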
    cur = db.cursor()
    af = ArReader(filelike)
    af.read_magic()
    state = "start"
    while True:
        try:
            name = af.read_entry()
        except EOFError:
            break
        if name == "control.tar.gz":
            # control.tar.gz carries the package metadata and must come first
            if state != "start":
                raise ValueError("unexpected control.tar.gz")
            state = "control"
            tf = tarfile.open(fileobj=af, mode="r|gz")
            for elem in tf:
                if elem.name != "./control":
                    continue
                if state != "control":
                    raise ValueError("duplicate control file")
                state = "control_file"
                control = tf.extractfile(elem).read()
                control = deb822.Packages(control)
                package = control["package"].encode("ascii")
                version = control["version"].encode("ascii")
                architecture = control["architecture"].encode("ascii")
                cur.execute("SELECT version FROM package WHERE package = ?;",
                            (package,))
                row = cur.fetchone()
                if row and version_compare(row[0], version) > 0:
                    return # already seen a newer package
                cur.execute("DELETE FROM package WHERE package = ?;",
                            (package,))
                cur.execute("DELETE FROM content WHERE package = ?;",
                            (package,))
                cur.execute("INSERT INTO package (package, version, architecture) VALUES (?, ?, ?);",
                            (package, version, architecture))
                depends = control.relations.get("depends", [])
                depends = set(dep[0]["name"].encode("ascii")
                              for dep in depends if len(dep) == 1)
                cur.execute("DELETE FROM dependency WHERE package = ?;",
                            (package,))
                cur.executemany("INSERT INTO dependency (package, required) VALUES (?, ?);",
                                ((package, dep) for dep in depends))
                break
            continue
        elif name == "data.tar.gz":
            tf = tarfile.open(fileobj=af, mode="r|gz")
        elif name == "data.tar.bz2":
            tf = tarfile.open(fileobj=af, mode="r|bz2")
        elif name == "data.tar.xz":
            # decompress xz by hand and read the result as a plain tar stream
            zf = DecompressedStream(af, lzma.LZMADecompressor())
            tf = tarfile.open(fileobj=zf, mode="r|")
        else:
            continue
        if state != "control_file":
            raise ValueError("missing control file")
        # one content row per file and hash function
        for name, size, function, hexhash in get_hashes(tf):
            cur.execute("INSERT INTO content (package, filename, size, function, hash) VALUES (?, ?, ?, ?, ?);",
                        (package, name.decode("utf8"), size, function, hexhash))
        db.commit()
        return
    raise ValueError("data.tar not found")

def main():
    db = sqlite3.connect("test.sqlite3")
    process_package(db, sys.stdin)

if __name__ == "__main__":
    main()