1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
|
#!/usr/bin/python
"""This tool reads a Debian package from stdin and emits a yaml stream on
stdout. It does not access a database. Therefore it can be run in parallel and
on multiple machines. The generated yaml contains multiple documents. The first
document contains package metadata. Then a document is emitted for each file.
And finally a document consisting of the string "commit" is emitted."""
import hashlib
import itertools
import optparse
import sys
import tarfile
import zlib
import lzma
import yaml
from dedup.arreader import ArReader
from dedup.debpkg import process_control, get_tar_hashes
from dedup.hashing import DecompressedHash, SuppressingHash, HashedStream, \
HashBlacklistContent
from dedup.compression import GzipDecompressor, DecompressedStream
from dedup.image import GIFHash, PNGHash
# Content whose hash is never worth recording: empty files and bare newlines.
boring_content = {"", "\n"}
def sha512_nontrivial():
    """Return a sha512 hash object that blacklists trivial content.

    Hashes of empty or newline-only content are suppressed via
    HashBlacklistContent.
    """
    digest = hashlib.sha512()
    return HashBlacklistContent(digest, boring_content)
def gziphash():
    """Return a hash object computing the sha512 of gzip-decompressed data.

    Decompression failures (ValueError, zlib.error) are suppressed so
    non-gzip input simply yields no hash, and trivial decompressed
    content is blacklisted.
    """
    inner = DecompressedHash(GzipDecompressor(), hashlib.sha512())
    inner = SuppressingHash(inner, (ValueError, zlib.error))
    inner.name = "gzip_sha512"
    return HashBlacklistContent(inner, boring_content)
def pnghash():
    """Return a hash object computing the sha512 of decoded PNG image data.

    A ValueError raised while decoding (e.g. for non-PNG input) is
    suppressed, yielding no hash instead.
    """
    wrapped = SuppressingHash(PNGHash(hashlib.sha512()), (ValueError,))
    wrapped.name = "png_sha512"
    return wrapped
def gifhash():
    """Return a hash object computing the sha512 of decoded GIF image data.

    A ValueError raised while decoding (e.g. for non-GIF input) is
    suppressed, yielding no hash instead.
    """
    wrapped = SuppressingHash(GIFHash(hashlib.sha512()), (ValueError,))
    wrapped.name = "gif_sha512"
    return wrapped
def decompress_tar(filelike, extension):
    """Open *filelike* as a streaming tarfile, decompressing as needed.

    *extension* is the compression suffix as bytes: b".lzma" and b".xz"
    are decompressed explicitly, b".gz" and b".bz2" are delegated to
    tarfile's own stream support, and b"" means no compression.

    Raises ValueError for any other extension.
    """
    if extension in (b".lzma", b".xz"):
        # tarfile has no streaming lzma support, so decompress ourselves.
        filelike = DecompressedStream(filelike, lzma.LZMADecompressor())
        tarmode = "r|"
    elif extension in (b"", b".gz", b".bz2"):
        tarmode = "r|" + extension[1:].decode("ascii")
    else:
        raise ValueError("unknown compression format with extension %r" %
                         extension)
    return tarfile.open(fileobj=filelike, mode=tarmode)
def process_package(filelike, hash_functions):
    """Parse a Debian package (an ar archive) read from *filelike*.

    Yields first the result of process_control() for the package's
    control file, then one dict(name=..., size=..., hashes=...) per
    data.tar member hashed with *hash_functions*. Raises ValueError
    when expected ar members are missing, duplicated or out of order.
    """
    af = ArReader(filelike)
    af.read_magic()
    # State machine tracking the expected ar member order:
    # start -> control -> control_file -> data.
    state = "start"
    while True:
        try:
            name = af.read_entry()
        except EOFError:
            # Ran out of ar members without seeing data.tar.
            raise ValueError("data.tar not found")
        if name.startswith(b"control.tar"):
            if state != "start":
                raise ValueError("unexpected control.tar")
            state = "control"
            # name[11:] is the compression suffix after "control.tar",
            # e.g. b".gz" or b"".
            tf = decompress_tar(af, name[11:])
            for elem in tf:
                if elem.name not in ("./control", "control"):
                    continue
                if state != "control":
                    raise ValueError("duplicate control file")
                state = "control_file"
                yield process_control(tf.extractfile(elem).read())
                break
            continue
        elif name.startswith(b"data.tar"):
            # The control file must have been yielded before data.tar.
            if state != "control_file":
                raise ValueError("missing control file")
            state = "data"
            # name[8:] is the compression suffix after "data.tar".
            tf = decompress_tar(af, name[8:])
            for name, size, hashes in get_tar_hashes(tf, hash_functions):
                try:
                    name = name.decode("utf8")
                except UnicodeDecodeError:
                    print("warning: skipping filename with encoding error")
                    continue # skip files with non-utf8 encoding for now
                yield dict(name=name, size=size, hashes=hashes)
            break
def hashed_stream_check(hstream, hashvalue):
    """Drain *hstream* and verify its hex digest equals *hashvalue*.

    Returns a generator that yields nothing; the stream is only read
    and checked when the generator is iterated, so the check can be
    chained after other consumers of the stream. Raises ValueError on
    digest mismatch.
    """
    if False:  # pylint: disable=using-constant-test
        # An unreachable yield turns this function into a generator,
        # deferring all work below until iteration.
        yield
    chunk = hstream.read(4096)
    while chunk:
        chunk = hstream.read(4096)
    if hstream.hexdigest() != hashvalue:
        raise ValueError("hash sum mismatch")
def main():
    """Read a Debian package from stdin and emit a yaml stream on stdout.

    With -H/--hash, additionally verify that the whole of stdin matches
    the given sha256 hex digest; a mismatch raises ValueError before
    the final "commit" document is emitted.
    """
    parser = optparse.OptionParser()
    parser.add_option("-H", "--hash", action="store",
                      help="verify that stdin has the given sha256 hash")
    options, args = parser.parse_args()
    hash_functions = [sha512_nontrivial, gziphash, pnghash, gifhash]
    try:
        stdin = sys.stdin.buffer
    except AttributeError:  # python2 has no .buffer on sys.stdin
        stdin = sys.stdin
    # Documents are emitted in order: package metadata and per-file
    # entries, then (optionally) the deferred hash check, and finally
    # the "commit" marker.
    iters = [("commit",)]
    if options.hash:
        stdin = HashedStream(stdin, hashlib.sha256())
        iters.insert(0, hashed_stream_check(stdin, options.hash))
    iters.insert(0, process_package(stdin, hash_functions))
    yaml.safe_dump_all(itertools.chain(*iters), sys.stdout)

if __name__ == "__main__":
    main()
|