#!/usr/bin/python
"""This tool reads a Debian package from stdin and emits a yaml stream on
stdout. It does not access a database. Therefore it can be run in parallel and
on multiple machines. The generated yaml contains multiple documents. The
first document contains package metadata. Then a document is emitted for each
file. And finally a document consisting of the string "commit" is emitted."""

import hashlib
import itertools
import lzma
import optparse
import sys
import tarfile
import zlib

import yaml

from dedup.arreader import ArReader
from dedup.compression import GzipDecompressor, DecompressedStream
from dedup.debpkg import process_control, get_tar_hashes
from dedup.hashing import DecompressedHash, SuppressingHash, HashedStream, \
    HashBlacklistContent
from dedup.image import GIFHash, PNGHash

# File contents considered uninteresting: hashing them would only pollute the
# database with near-universal matches, so their hashes are blacklisted.
boring_content = set(("", "\n"))


def sha512_nontrivial():
    """Return a sha512 hash object that suppresses trivial (boring) content."""
    return HashBlacklistContent(hashlib.sha512(), boring_content)


def gziphash():
    """Return a hash object computing the sha512 of gzip-decompressed input.

    Decompression errors (not-gzip or corrupt input) are suppressed and
    yield no digest instead of raising.
    """
    hashobj = DecompressedHash(GzipDecompressor(), hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError, zlib.error))
    hashobj.name = "gzip_sha512"
    return HashBlacklistContent(hashobj, boring_content)


def pnghash():
    """Return a hash object computing the sha512 of decoded PNG image data.

    Inputs that are not valid PNG images are suppressed rather than raising.
    """
    hashobj = PNGHash(hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError,))
    hashobj.name = "png_sha512"
    return hashobj


def gifhash():
    """Return a hash object computing the sha512 of decoded GIF image data.

    Inputs that are not valid GIF images are suppressed rather than raising.
    """
    hashobj = GIFHash(hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError,))
    hashobj.name = "gif_sha512"
    return hashobj


def decompress_tar(filelike, extension):
    """Open a possibly compressed tar stream for sequential reading.

    @param filelike: byte stream containing the (compressed) tar archive
    @param extension: bytes; compression suffix such as b".gz", b".xz",
        b".lzma", b".bz2" or b"" for an uncompressed tar
    @raises ValueError: when the extension denotes an unknown compression
        format
    """
    if extension in (b".lzma", b".xz"):
        # tarfile has no streaming lzma support for arbitrary file objects
        # here, so decompress explicitly and treat the result as a plain tar.
        filelike = DecompressedStream(filelike, lzma.LZMADecompressor())
        extension = b""
    if extension not in (b"", b".gz", b".bz2"):
        raise ValueError("unknown compression format with extension %r" %
                         extension)
    return tarfile.open(fileobj=filelike,
                        mode="r|" + extension[1:].decode("ascii"))


def process_package(filelike, hash_functions):
    """Parse a Debian .deb (ar archive) and yield yaml-serializable documents.

    Yields the parsed control data first, then one dict per data.tar member
    with keys name, size and hashes. Stops after the data.tar member.

    @param filelike: byte stream containing the .deb archive
    @param hash_functions: list of zero-argument callables producing hash
        objects, passed through to get_tar_hashes
    @raises ValueError: on malformed packages (missing or duplicated
        control.tar/data.tar, missing control file)
    """
    af = ArReader(filelike)
    af.read_magic()
    state = "start"
    while True:
        try:
            name = af.read_entry()
        except EOFError:
            raise ValueError("data.tar not found")
        if name.startswith(b"control.tar"):
            if state != "start":
                raise ValueError("unexpected control.tar")
            state = "control"
            tf = decompress_tar(af, name[11:])
            for elem in tf:
                if elem.name not in ("./control", "control"):
                    continue
                if state != "control":
                    raise ValueError("duplicate control file")
                state = "control_file"
                yield process_control(tf.extractfile(elem).read())
                break
            continue
        elif name.startswith(b"data.tar"):
            if state != "control_file":
                raise ValueError("missing control file")
            state = "data"
            tf = decompress_tar(af, name[8:])
            for name, size, hashes in get_tar_hashes(tf, hash_functions):
                try:
                    name = name.decode("utf8")
                except UnicodeDecodeError:
                    print("warning: skipping filename with encoding error")
                    continue  # skip files with non-utf8 encoding for now
                yield dict(name=name, size=size, hashes=hashes)
            break


def hashed_stream_check(hstream, hashvalue):
    """Return a generator that, when iterated, drains hstream and verifies
    its digest.

    The check is deliberately deferred until iteration so it can be chained
    after other document producers in the output stream.

    @param hstream: a HashedStream to consume
    @param hashvalue: expected hex digest
    @raises ValueError: when the digest does not match
    """
    if False:  # pylint: disable=using-constant-test
        yield  # defer checking until being iterated
    while hstream.read(4096):
        pass
    if hstream.hexdigest() != hashvalue:
        raise ValueError("hash sum mismatch")


def main():
    """Read a .deb from stdin and dump yaml documents to stdout.

    With --hash, additionally verify the sha256 digest of the entire input
    before the final "commit" document is emitted.
    """
    parser = optparse.OptionParser()
    parser.add_option("-H", "--hash", action="store",
                      help="verify that stdin has given sha256 hash")
    options, args = parser.parse_args()
    hash_functions = [sha512_nontrivial, gziphash, pnghash, gifhash]
    try:
        stdin = sys.stdin.buffer
    except AttributeError:  # python2
        stdin = sys.stdin
    # Documents are emitted in order: package + files, optional hash check
    # (which yields nothing but may raise), and finally "commit".
    iters = [("commit",)]
    if options.hash:
        stdin = HashedStream(stdin, hashlib.sha256())
        iters.insert(0, hashed_stream_check(stdin, options.hash))
    iters.insert(0, process_package(stdin, hash_functions))
    yaml.safe_dump_all(itertools.chain(*iters), sys.stdout)


if __name__ == "__main__":
    main()