#!/usr/bin/python
"""This tool reads a Debian package from stdin and emits a yaml stream on
stdout. It does not access a database. Therefore it can be run in parallel and
on multiple machines. The generated yaml contains multiple documents. The
first document contains package metadata. Then a document is emitted for each
file. And finally a document consisting of the string "commit" is emitted."""

import hashlib
import lzma
import optparse
import sys
import tarfile
import zlib

import yaml

from dedup.arreader import ArReader
from dedup.compression import GzipDecompressor, DecompressedStream
from dedup.debpkg import process_control, get_tar_hashes
from dedup.hashing import DecompressedHash, SuppressingHash, HashedStream, \
    HashBlacklistContent
from dedup.image import GIFHash, PNGHash

# File contents too common to be worth recording hashes for.
boring_content = set(("", "\n"))


def sha512_nontrivial():
    """Return a sha512 hash object that skips boring content."""
    return HashBlacklistContent(hashlib.sha512(), boring_content)


def gziphash():
    """Return a hash object named gzip_sha512.

    It hashes the gzip-decompressed input with sha512, suppresses
    decompression failures (ValueError, zlib.error) instead of raising,
    and skips boring decompressed content.
    """
    hashobj = DecompressedHash(GzipDecompressor(), hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError, zlib.error))
    hashobj.name = "gzip_sha512"
    return HashBlacklistContent(hashobj, boring_content)


def pnghash():
    """Return a hash object named png_sha512.

    It hashes PNG image content with sha512 and suppresses ValueError
    (raised on non-PNG input) instead of raising.
    """
    hashobj = PNGHash(hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError,))
    hashobj.name = "png_sha512"
    return hashobj


def gifhash():
    """Return a hash object named gif_sha512.

    It hashes GIF image content with sha512 and suppresses ValueError
    (raised on non-GIF input) instead of raising.
    """
    hashobj = GIFHash(hashlib.sha512())
    hashobj = SuppressingHash(hashobj, (ValueError,))
    hashobj.name = "gif_sha512"
    return hashobj


def decompress_tar(filelike, extension):
    """Open a possibly compressed tar archive for streaming access.

    @param filelike: stream containing the (compressed) tar data
    @param extension: file extension indicating the compression format;
        one of "", ".gz", ".bz2", ".lzma" or ".xz"
    @raises ValueError: for an unrecognized extension
    @returns: a tarfile.TarFile opened in non-seekable stream mode
    """
    if extension in (".lzma", ".xz"):
        # tarfile's stream mode has no lzma/xz support here, so wrap the
        # stream in an explicit decompressor and treat it as plain tar.
        filelike = DecompressedStream(filelike, lzma.LZMADecompressor())
        extension = ""
    if extension not in ("", ".gz", ".bz2"):
        raise ValueError("unknown compression format with extension %r" %
                         extension)
    # "r|" mode reads the archive as a stream, which is required because
    # the underlying ar member is not seekable.
    return tarfile.open(fileobj=filelike, mode="r|" + extension[1:])


def process_package(filelike, hash_functions):
    """Parse a Debian package (ar archive) read from a stream.

    Yields the parsed control data first, then one dict per data.tar
    member (keys: name, size, hashes), and finally the string "commit".

    @param filelike: stream containing the .deb (an ar archive)
    @param hash_functions: list of zero-argument callables producing hash
        objects, passed through to get_tar_hashes
    @raises ValueError: for malformed packages (missing data.tar, missing
        or duplicate control file, unexpected member order)
    """
    af = ArReader(filelike)
    af.read_magic()
    # States progress: start -> control -> control_file -> data.
    state = "start"
    while True:
        try:
            name = af.read_entry()
        except EOFError:
            raise ValueError("data.tar not found")
        if name.startswith("control.tar"):
            if state != "start":
                raise ValueError("unexpected control.tar")
            state = "control"
            # len("control.tar") == 11; the rest is the compression suffix.
            tf = decompress_tar(af, name[11:])
            for elem in tf:
                if elem.name != "./control":
                    continue
                if state != "control":
                    raise ValueError("duplicate control file")
                state = "control_file"
                yield process_control(tf.extractfile(elem).read())
                break
            continue
        elif name.startswith("data.tar"):
            if state != "control_file":
                raise ValueError("missing control file")
            state = "data"
            # len("data.tar") == 8; the rest is the compression suffix.
            tf = decompress_tar(af, name[8:])
            for name, size, hashes in get_tar_hashes(tf, hash_functions):
                # filenames are not actually iso-8859-1, but this decode
                # cannot cause UnicodeDecodeError
                name = name.decode("iso-8859-1")
                yield dict(name=name, size=size, hashes=hashes)
            yield "commit"
            break


def process_package_with_hash(filelike, hash_functions, sha256hash):
    """Like process_package, but verify the package's sha256 checksum.

    The input stream is hashed while being consumed; when the "commit"
    sentinel is reached, the remainder of the stream is drained and the
    digest compared against the expected value before "commit" is
    forwarded.

    @param sha256hash: expected hex digest of the whole input
    @raises ValueError: if the digest does not match (in addition to the
        errors raised by process_package)
    """
    hstream = HashedStream(filelike, hashlib.sha256())
    for elem in process_package(hstream, hash_functions):
        if elem == "commit":
            # Consume any trailing bytes (e.g. further ar members) so the
            # hash covers the entire input, not just what was parsed.
            while hstream.read(4096):
                pass
            if hstream.hexdigest() != sha256hash:
                raise ValueError("hash sum mismatch")
            yield elem
            break
        yield elem


def main():
    """Read a .deb from stdin and dump the extracted documents as YAML."""
    parser = optparse.OptionParser()
    parser.add_option("-H", "--hash", action="store",
                      help="verify that stdin has given sha256 hash")
    options, args = parser.parse_args()
    hash_functions = [sha512_nontrivial, gziphash, pnghash, gifhash]
    if options.hash:
        gen = process_package_with_hash(sys.stdin, hash_functions,
                                        options.hash)
    else:
        gen = process_package(sys.stdin, hash_functions)
    # safe_dump_all writes one YAML document per generated element,
    # streaming them as they are produced.
    yaml.safe_dump_all(gen, sys.stdout)


if __name__ == "__main__":
    main()