summaryrefslogtreecommitdiff
path: root/dedup
diff options
context:
space:
mode:
Diffstat (limited to 'dedup')
-rw-r--r--dedup/arreader.py79
-rw-r--r--dedup/compression.py46
-rw-r--r--dedup/debpkg.py61
-rw-r--r--dedup/filemagic.py49
-rw-r--r--dedup/hashing.py28
-rw-r--r--dedup/image.py8
-rw-r--r--dedup/templates/base.html2
-rw-r--r--dedup/utils.py36
8 files changed, 123 insertions, 186 deletions
diff --git a/dedup/arreader.py b/dedup/arreader.py
deleted file mode 100644
index e53efd9..0000000
--- a/dedup/arreader.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import struct
-
-class ArReader(object):
- """Streaming AR file reader. After constructing an object, you usually
- call read_magic once. Then you call read_entry in a loop and use the
- ArReader object as file-like only providing read() to read the respective
- file contents until you get EOFError from read_entry.
- """
- global_magic = b"!<arch>\n"
- file_magic = b"`\n"
-
- def __init__(self, fileobj):
- """
- @param fileobj: a file-like object providing nothing but read(length)
- """
- self.fileobj = fileobj
- self.remaining = None
- self.padding = 0
-
- def read_magic(self):
- """Consume the AR magic marker at the beginning of an AR file. You
- must not call any other method before calling this method.
- @raises ValueError: if the magic is not found
- """
- data = self.fileobj.read(len(self.global_magic))
- if data != self.global_magic:
- raise ValueError("ar global header not found")
- self.remaining = 0
-
- def read_entry(self):
- """Read the next file header, return the filename and record the
- length of the next file, so that the read method can be used to
- exhaustively read the current file.
- @rtype: bytes
- @returns: the name of the next file
- @raises ValueError: if the data format is wrong
- @raises EOFError: when the end of the stream is reached
- """
- self.skip_current_entry()
- if self.padding:
- if self.fileobj.read(1) != b'\n':
- raise ValueError("missing ar padding")
- self.padding = 0
- file_header = self.fileobj.read(60)
- if not file_header:
- raise EOFError("end of archive found")
- parts = struct.unpack("16s 12s 6s 6s 8s 10s 2s", file_header)
- parts = [p.rstrip(b"/ ") for p in parts]
- if parts.pop() != self.file_magic:
- raise ValueError("ar file header not found")
- self.remaining = int(parts[5])
- self.padding = self.remaining % 2
- return parts[0] # name
-
- def skip_current_entry(self):
- """Skip the remainder of the current file. This method must not be
- called before calling read_entry.
- @raises ValueError: if the archive appears truncated
- """
- while self.remaining:
- data = self.fileobj.read(min(4096, self.remaining))
- if not data:
- raise ValueError("archive truncated")
- self.remaining -= len(data)
-
- def read(self, length=None):
- """
- @type length: int or None
- @param length: number of bytes to read from the current file
- @rtype: bytes
- @returns: length or fewer bytes from the current file
- """
- if length is None:
- length = self.remaining
- else:
- length = min(self.remaining, length)
- data = self.fileobj.read(length)
- self.remaining -= len(data)
- return data
diff --git a/dedup/compression.py b/dedup/compression.py
index 8d1912b..da6e9a0 100644
--- a/dedup/compression.py
+++ b/dedup/compression.py
@@ -1,13 +1,10 @@
import bz2
import struct
-import sys
import zlib
import lzma
-crc32_type = "L" if sys.version_info.major >= 3 else "l"
-
-class GzipDecompressor(object):
+class GzipDecompressor:
"""An interface to gzip which is similar to bz2.BZ2Decompressor and
lzma.LZMADecompressor."""
def __init__(self):
@@ -66,7 +63,7 @@ class GzipDecompressor(object):
elif not self.sawheader:
return self.inbuffer
else:
- expect = struct.pack("<" + crc32_type + "L", self.crc, self.size)
+ expect = struct.pack("<LL", self.crc, self.size)
if self.inbuffer.startswith(expect) and \
self.inbuffer[len(expect):].replace(b"\0", b"") == b"":
return b""
@@ -90,7 +87,7 @@ class GzipDecompressor(object):
new.size = self.size
return new
-class DecompressedStream(object):
+class DecompressedStream:
"""Turn a readable file-like into a decompressed file-like. It supports
read(optional length), tell, seek(forward only) and close."""
blocksize = 65536
@@ -104,25 +101,23 @@ class DecompressedStream(object):
"""
self.fileobj = fileobj
self.decompressor = decompressor
- self.buff = b""
+ self.buff = bytearray()
self.pos = 0
- self.closed = False
def _fill_buff_until(self, predicate):
- assert not self.closed
- data = True
- while True:
- if predicate(self.buff) or not data:
- return
+ assert self.fileobj is not None
+ while not predicate(self.buff):
data = self.fileobj.read(self.blocksize)
if data:
self.buff += self.decompressor.decompress(data)
- elif hasattr(self.decompressor, "flush"):
- self.buff += self.decompressor.flush()
+ else:
+ if hasattr(self.decompressor, "flush"):
+ self.buff += self.decompressor.flush()
+ break
def _read_from_buff(self, length):
- ret = self.buff[:length]
- self.buff = self.buff[length:]
+ ret = bytes(self.buff[:length])
+ self.buff[:length] = b""
self.pos += length
return ret
@@ -146,12 +141,12 @@ class DecompressedStream(object):
return iter(self.readline, b'')
def tell(self):
- assert not self.closed
+ assert self.fileobj is not None
return self.pos
def seek(self, pos):
"""Forward seeks by absolute position only."""
- assert not self.closed
+ assert self.fileobj is not None
if pos < self.pos:
raise ValueError("negative seek not allowed on decompressed stream")
while True:
@@ -165,18 +160,17 @@ class DecompressedStream(object):
return
def close(self):
- if not self.closed:
+ if self.fileobj is not None:
self.fileobj.close()
self.fileobj = None
self.decompressor = None
- self.buff = b""
- self.closed = True
+ self.buff = bytearray()
decompressors = {
- u'.gz': GzipDecompressor,
- u'.bz2': bz2.BZ2Decompressor,
- u'.lzma': lzma.LZMADecompressor,
- u'.xz': lzma.LZMADecompressor,
+ '.gz': GzipDecompressor,
+ '.bz2': bz2.BZ2Decompressor,
+ '.lzma': lzma.LZMADecompressor,
+ '.xz': lzma.LZMADecompressor,
}
def decompress(filelike, extension):
diff --git a/dedup/debpkg.py b/dedup/debpkg.py
index 3a30b3e..de00e60 100644
--- a/dedup/debpkg.py
+++ b/dedup/debpkg.py
@@ -1,13 +1,12 @@
-import sys
import tarfile
+import arpy
from debian import deb822
-from dedup.arreader import ArReader
from dedup.compression import decompress
from dedup.hashing import hash_file
-class MultiHash(object):
+class MultiHash:
def __init__(self, *hashes):
self.hashes = hashes
@@ -30,7 +29,7 @@ def get_tar_hashes(tar, hash_functions):
if not elem.isreg(): # excludes hard links as well
continue
hasher = MultiHash(*[func() for func in hash_functions])
- hasher = hash_file(hasher, tar.extractfile(elem))
+ hash_file(hasher, tar.extractfile(elem))
hashes = {}
for hashobj in hasher.hashes:
hashvalue = hashobj.hexdigest()
@@ -38,32 +37,11 @@ def get_tar_hashes(tar, hash_functions):
hashes[hashobj.name] = hashvalue
yield (elem.name, elem.size, hashes)
-if sys.version_info.major >= 3:
- def opentar(filelike):
- return tarfile.open(fileobj=filelike, mode="r|", encoding="utf8",
- errors="surrogateescape")
+def opentar(filelike):
+ return tarfile.open(fileobj=filelike, mode="r|", encoding="utf8",
+ errors="surrogateescape")
- def decodetarname(name):
- """Decoded name of a tarinfo.
- @raises UnicodeDecodeError:
- """
- try:
- name.encode("utf8", "strict")
- except UnicodeEncodeError as e:
- if e.reason == "surrogates not allowed":
- name.encode("utf8", "surrogateescape").decode("utf8", "strict")
- return name
-else:
- def opentar(filelike):
- return tarfile.open(fileobj=filelike, mode="r|")
-
- def decodetarname(name):
- """Decoded name of a tarinfo.
- @raises UnicodeDecodeError:
- """
- return name.decode("utf8")
-
-class DebExtractor(object):
+class DebExtractor:
"Base class for extracting desired features from a Debian package."
def __init__(self):
@@ -74,45 +52,36 @@ class DebExtractor(object):
@param filelike: is a file-like object containing the contents of the
Debian package and can be read once without seeks.
"""
- af = ArReader(filelike)
- af.read_magic()
- while True:
- try:
- name = af.read_entry()
- except EOFError:
- break
- else:
- self.handle_ar_member(name, af)
+ af = arpy.Archive(fileobj=filelike)
+ for member in af:
+ self.handle_ar_member(member)
self.handle_ar_end()
- def handle_ar_member(self, name, filelike):
+ def handle_ar_member(self, arfiledata: arpy.ArchiveFileData) -> None:
"""Handle an ar archive member of the Debian package.
If you replace this method, you must also replace handle_ar_end and
none of the methods handle_debversion, handle_control_tar or
handle_data_tar are called.
- @type name: bytes
- @param name: is the name of the member
- @param filelike: is a file-like object containing the contents of the
- member and can be read once without seeks.
"""
+ name = arfiledata.header.name
if self.arstate == "start":
if name != b"debian-binary":
raise ValueError("debian-binary not found")
- version = filelike.read()
+ version = arfiledata.read()
self.handle_debversion(version)
if not version.startswith(b"2."):
raise ValueError("debian version not recognized")
self.arstate = "version"
elif self.arstate == "version":
if name.startswith(b"control.tar"):
- filelike = decompress(filelike, name[11:].decode("ascii"))
+ filelike = decompress(arfiledata, name[11:].decode("ascii"))
self.handle_control_tar(opentar(filelike))
self.arstate = "control"
elif not name.startswith(b"_"):
raise ValueError("unexpected ar member %r" % name)
elif self.arstate == "control":
if name.startswith(b"data.tar"):
- filelike = decompress(filelike, name[8:].decode("ascii"))
+ filelike = decompress(arfiledata, name[8:].decode("ascii"))
self.handle_data_tar(opentar(filelike))
self.arstate = "data"
elif not name.startswith(b"_"):
diff --git a/dedup/filemagic.py b/dedup/filemagic.py
new file mode 100644
index 0000000..b71c276
--- /dev/null
+++ b/dedup/filemagic.py
@@ -0,0 +1,49 @@
+"""A very strange "hash" that uses the magic module (python3-magic) to guess
+the file type."""
+
+import magic
+
+# It changed API a few times...
+try:
+ _magic_identify = magic.from_buffer
+except AttributeError:
+ _magic_identify = magic.none_magic.buffer
+
+class FileDigester:
+ """A hashlib-like class to guess a filetype using the magic module."""
+ FILE_BYTES_MAX = 1024 * 1024 # copied from file source
+
+ def __init__(self):
+ self.buff = b""
+ self.identification = None
+
+ def _compute_identification(self):
+ try:
+ return _magic_identify(self.buff)
+ except UnicodeDecodeError:
+ return "magic identification is not valid UTF-8"
+
+ def update(self, buff):
+ if self.identification:
+ return
+ self.buff += buff
+ if len(self.buff) >= self.FILE_BYTES_MAX:
+ self.identification = self._compute_identification()
+ self.buff = None
+
+ def identify(self):
+ """Return the guessed file magic identification."""
+ if self.identification:
+ return self.identification
+ return self._compute_identification()
+
+ def hexdigest(self):
+ """Compatibility with hashlib. An alias of identify. Doesn't return
+ hex."""
+ return self.identify()
+
+ def copy(self):
+ new = FileDigester()
+ new.buff = self.buff
+ new.identification = self.identification
+ return new
diff --git a/dedup/hashing.py b/dedup/hashing.py
index 2a83929..9cebcbb 100644
--- a/dedup/hashing.py
+++ b/dedup/hashing.py
@@ -1,10 +1,6 @@
import itertools
-try:
- from itertools import imap as map
-except ImportError:
- pass # in python3 map is already imap
-class HashBlacklist(object):
+class HashBlacklist:
"""Turn a hashlib-like object into a hash that returns None for some
blacklisted hashes instead of the real hash value.
@@ -35,7 +31,7 @@ class HashBlacklist(object):
def copy(self):
return HashBlacklist(self.hashobj.copy(), self.blacklist)
-class HashBlacklistContent(object):
+class HashBlacklistContent:
"""Turn a hashlib-like object into a hash that returns None for some
blacklisted content instead of the real hash value. Unlike HashBlacklist,
not the output of the hash is considered, but its input."""
@@ -80,13 +76,15 @@ class HashBlacklistContent(object):
return self.hashobj.hexdigest()
def copy(self):
- return HashBlacklistContent(self.hashobj.copy(), self.blacklist,
- self.maxlen)
+ new = HashBlacklistContent(self.hashobj.copy(), self.blacklist,
+ self.maxlen)
+ new.stored = self.stored
+ return new
-class DecompressedHash(object):
+class DecompressedHash:
"""Apply a decompression function before the hash. This class provides the
hashlib interface (update, hexdigest, copy) excluding digest and name."""
- def __init__(self, decompressor, hashobj):
+ def __init__(self, decompressor, hashobj, name="unnamed"):
"""
@param decompressor: a decompression object like bz2.BZ2Decompressor or
lzma.LZMADecompressor. It has to provide methods decompress and
@@ -94,9 +92,11 @@ class DecompressedHash(object):
method.
@param hashobj: a hashlib-like obj providing methods update, hexdigest
and copy
+ @param name: initialized the name property
"""
self.decompressor = decompressor
self.hashobj = hashobj
+ self.name = name
def update(self, data):
self.hashobj.update(self.decompressor.decompress(data))
@@ -115,9 +115,10 @@ class DecompressedHash(object):
return tmphash.hexdigest()
def copy(self):
- return DecompressedHash(self.decompressor.copy(), self.hashobj.copy())
+ return DecompressedHash(self.decompressor.copy(), self.hashobj.copy(),
+ self.name)
-class SuppressingHash(object):
+class SuppressingHash:
"""A hash that silences exceptions from the update and hexdigest methods of
a hashlib-like object. If an exception has occurred, hexdigest always
returns None."""
@@ -163,9 +164,8 @@ def hash_file(hashobj, filelike, blocksize=65536):
while data:
hashobj.update(data)
data = filelike.read(blocksize)
- return hashobj
-class HashedStream(object):
+class HashedStream:
"""A file-like object, that supports sequential reading and hashes the
contents on the fly."""
def __init__(self, filelike, hashobj):
diff --git a/dedup/image.py b/dedup/image.py
index 314eb44..91321f4 100644
--- a/dedup/image.py
+++ b/dedup/image.py
@@ -3,7 +3,7 @@ import struct
import PIL.Image
-class ImageHash(object):
+class ImageHash:
"""A hash on the contents of an image data type supported by PIL. This
disregards mode, depth and meta information. Note that due to limitations
in PIL and the image format (interlacing) the full contents are stored and
@@ -69,9 +69,14 @@ class ImageHash(object):
self.content.seek(pos)
return "%s%8.8x%8.8x" % (hashobj.hexdigest(), width, height)
+ @property
+ def name(self):
+ return self.name_prefix + self.hashobj.name
+
class PNGHash(ImageHash):
"""A hash on the contents of a PNG image."""
+ name_prefix = "png_"
def detect(self):
if self.content.tell() < 33: # header + IHDR
@@ -86,6 +91,7 @@ class PNGHash(ImageHash):
class GIFHash(ImageHash):
"""A hash on the contents of the first frame of a GIF image."""
+ name_prefix = "gif_"
def detect(self):
if self.content.tell() < 10: # magic + logical dimension
diff --git a/dedup/templates/base.html b/dedup/templates/base.html
index 9dfb788..bac516e 100644
--- a/dedup/templates/base.html
+++ b/dedup/templates/base.html
@@ -15,7 +15,7 @@
<ul>
<li>More information: <a href="https://wiki.debian.org/dedup.debian.net">see wiki</a></li>
<li>Maintainer: Helmut Grohne &lt;helmut@subdivi.de&gt;</li>
- <li>Source: git://murkel.subdivi.de/~helmut/debian-dedup.git</li>
+ <li>Source: git://git.subdivi.de/~helmut/debian-dedup.git</li>
<li>Bugs reports / Feedback / Patches: to the maintainer</li>
</ul>
</body>
diff --git a/dedup/utils.py b/dedup/utils.py
index dab6653..55cdef0 100644
--- a/dedup/utils.py
+++ b/dedup/utils.py
@@ -1,29 +1,19 @@
+import contextlib
import errno
-try:
- from urllib.error import URLError, HTTPError
-except ImportError:
- from urllib2 import URLError, HTTPError
-try:
- from urllib.request import urlopen
-except ImportError:
- from urllib2 import urlopen
+import urllib.error
+import urllib.request
-from debian.debian_support import version_compare
+import debian.deb822
from dedup.compression import decompress
def fetchiter(cursor):
rows = cursor.fetchmany()
while rows:
- for row in rows:
- yield row
+ yield from rows
rows = cursor.fetchmany()
-def sql_add_version_compare(db):
- db.create_collation("debian_version", version_compare)
- db.create_function("debian_version_compare", 2, version_compare)
-
-def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
+def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")):
"""Fetch the given url. Try appending each of the given compression
schemes and move on in case it doesn't exist. Decompress the resulting
stream on the fly.
@@ -31,11 +21,11 @@ def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
"""
for ext in extensions:
try:
- handle = urlopen(url + ext)
- except HTTPError as error:
+ handle = urllib.request.urlopen(url + ext)
+ except urllib.error.HTTPError as error:
if error.code != 404:
raise
- except URLError as error:
+ except urllib.error.URLError as error:
if not hasattr(error.reason, "errno"):
raise
if error.reason.errno != errno.ENOENT:
@@ -43,3 +33,11 @@ def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
else:
return decompress(handle, ext)
raise OSError(errno.ENOENT, "No such file or directory")
+
+def iterate_packages(mirror, architecture, distribution="sid", section="main"):
+ """Download the relevant binary package list and generate
+ debian.deb822.Packages objects per listed package."""
+ url = "%s/dists/%s/%s/binary-%s/Packages" % \
+ (mirror, distribution, section, architecture)
+ with contextlib.closing(open_compressed_mirror_url(url)) as pkglist:
+ yield from debian.deb822.Packages.iter_paragraphs(pkglist)