From 27b95909f061ae3ecb3ba1b8d46adfef98ca5e6f Mon Sep 17 00:00:00 2001
From: Helmut Grohne
Date: Sun, 16 Feb 2020 08:21:20 +0100
Subject: drop support for Python 2.x

---
 dedup/arreader.py    |  2 +-
 dedup/compression.py | 17 +++++++----------
 dedup/debpkg.py      | 32 +++++---------------------------
 dedup/filemagic.py   |  2 +-
 dedup/hashing.py     | 14 +++++---------
 dedup/image.py       |  2 +-
 dedup/utils.py       | 21 +++++++--------------
 7 files changed, 27 insertions(+), 63 deletions(-)

(limited to 'dedup')

diff --git a/dedup/arreader.py b/dedup/arreader.py
index e53efd9..8b14ff9 100644
--- a/dedup/arreader.py
+++ b/dedup/arreader.py
@@ -1,6 +1,6 @@
 import struct
 
-class ArReader(object):
+class ArReader:
     """Streaming AR file reader. After constructing an object, you usually
     call read_magic once. Then you call read_entry in a loop and use the
     ArReader object as file-like only providing read() to read the respective
diff --git a/dedup/compression.py b/dedup/compression.py
index 8d1912b..161eda2 100644
--- a/dedup/compression.py
+++ b/dedup/compression.py
@@ -1,13 +1,10 @@
 import bz2
 import struct
-import sys
 import zlib
 
 import lzma
 
-crc32_type = "L" if sys.version_info.major >= 3 else "l"
-
-class GzipDecompressor(object):
+class GzipDecompressor:
     """An interface to gzip which is similar to bz2.BZ2Decompressor and
     lzma.LZMADecompressor."""
     def __init__(self):
@@ -66,7 +63,7 @@ class GzipDecompressor(object):
         elif not self.sawheader:
             return self.inbuffer
         else:
-            expect = struct.pack("<" + crc32_type + "L", self.crc, self.size)
+            expect = struct.pack("<LL", self.crc, self.size)
diff --git a/dedup/debpkg.py b/dedup/debpkg.py
--- a/dedup/debpkg.py
+++ b/dedup/debpkg.py
-if sys.version_info.major >= 3:
-    def opentar(filelike):
-        return tarfile.open(fileobj=filelike, mode="r|", encoding="utf8",
-                            errors="surrogateescape")
+def opentar(filelike):
+    return tarfile.open(fileobj=filelike, mode="r|", encoding="utf8",
+                        errors="surrogateescape")
 
-    def decodetarname(name):
-        """Decoded name of a tarinfo.
-        @raises UnicodeDecodeError:
-        """
-        try:
-            name.encode("utf8", "strict")
-        except UnicodeEncodeError as e:
-            if e.reason == "surrogates not allowed":
-                name.encode("utf8", "surrogateescape").decode("utf8", "strict")
-        return name
-else:
-    def opentar(filelike):
-        return tarfile.open(fileobj=filelike, mode="r|")
-
-    def decodetarname(name):
-        """Decoded name of a tarinfo.
-        @raises UnicodeDecodeError:
-        """
-        return name.decode("utf8")
-
-class DebExtractor(object):
+class DebExtractor:
     "Base class for extracting desired features from a Debian package."
 
     def __init__(self):
diff --git a/dedup/filemagic.py b/dedup/filemagic.py
index c5a6357..b71c276 100644
--- a/dedup/filemagic.py
+++ b/dedup/filemagic.py
@@ -9,7 +9,7 @@ try:
 except AttributeError:
     _magic_identify = magic.none_magic.buffer
 
-class FileDigester(object):
+class FileDigester:
     """A hashlib-like class to guess a filetype using the magic module."""
 
     FILE_BYTES_MAX = 1024 * 1024 # copied from file source
diff --git a/dedup/hashing.py b/dedup/hashing.py
index c91fb64..21f14ea 100644
--- a/dedup/hashing.py
+++ b/dedup/hashing.py
@@ -1,10 +1,6 @@
 import itertools
-try:
-    from itertools import imap as map
-except ImportError:
-    pass # in python3 map is already imap
 
-class HashBlacklist(object):
+class HashBlacklist:
     """Turn a hashlib-like object into a hash that returns None for some
     blacklisted hashes instead of the real hash value.
@@ -35,7 +31,7 @@ class HashBlacklist(object):
     def copy(self):
         return HashBlacklist(self.hashobj.copy(), self.blacklist)
 
-class HashBlacklistContent(object):
+class HashBlacklistContent:
     """Turn a hashlib-like object into a hash that returns None for some
     blacklisted content instead of the real hash value. Unlike HashBlacklist,
     not the output of the hash is considered, but its input."""
@@ -85,7 +81,7 @@ class HashBlacklistContent(object):
         new.stored = self.stored
         return new
 
-class DecompressedHash(object):
+class DecompressedHash:
     """Apply a decompression function before the hash. This class provides
     the hashlib interface (update, hexdigest, copy) excluding digest and name."""
     def __init__(self, decompressor, hashobj):
@@ -119,7 +115,7 @@ class DecompressedHash(object):
     def copy(self):
         return DecompressedHash(self.decompressor.copy(), self.hashobj.copy())
 
-class SuppressingHash(object):
+class SuppressingHash:
     """A hash that silences exceptions from the update and hexdigest methods
     of a hashlib-like object. If an exception has occurred, hexdigest always
     returns None."""
@@ -167,7 +163,7 @@ def hash_file(hashobj, filelike, blocksize=65536):
         data = filelike.read(blocksize)
     return hashobj
 
-class HashedStream(object):
+class HashedStream:
     """A file-like object, that supports sequential reading and hashes the
     contents on the fly."""
     def __init__(self, filelike, hashobj):
diff --git a/dedup/image.py b/dedup/image.py
index 314eb44..2e64e6b 100644
--- a/dedup/image.py
+++ b/dedup/image.py
@@ -3,7 +3,7 @@ import struct
 
 import PIL.Image
 
-class ImageHash(object):
+class ImageHash:
     """A hash on the contents of an image data type supported by PIL. This
     disregards mode, depth and meta information. Note that due to limitations
     in PIL and the image format (interlacing) the full contents are stored and
diff --git a/dedup/utils.py b/dedup/utils.py
index dab6653..46f8e64 100644
--- a/dedup/utils.py
+++ b/dedup/utils.py
@@ -1,12 +1,6 @@
 import errno
-try:
-    from urllib.error import URLError, HTTPError
-except ImportError:
-    from urllib2 import URLError, HTTPError
-try:
-    from urllib.request import urlopen
-except ImportError:
-    from urllib2 import urlopen
+import urllib.error
+import urllib.request
 
 from debian.debian_support import version_compare
 from dedup.compression import decompress
@@ -15,15 +9,14 @@
 def fetchiter(cursor):
     rows = cursor.fetchmany()
     while rows:
-        for row in rows:
-            yield row
+        yield from rows
         rows = cursor.fetchmany()
 
 def sql_add_version_compare(db):
     db.create_collation("debian_version", version_compare)
     db.create_function("debian_version_compare", 2, version_compare)
 
-def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
+def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")):
     """Fetch the given url. Try appending each of the given compression
     schemes and move on in case it doesn't exist. Decompress the resulting
     stream on the fly.
@@ -31,11 +24,11 @@ def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
     """
     for ext in extensions:
         try:
-            handle = urlopen(url + ext)
-        except HTTPError as error:
+            handle = urllib.request.urlopen(url + ext)
+        except urllib.error.HTTPError as error:
             if error.code != 404:
                 raise
-        except URLError as error:
+        except urllib.error.URLError as error:
             if not hasattr(error.reason, "errno"):
                 raise
             if error.reason.errno != errno.ENOENT:
--
cgit v1.2.3
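
Note on the compression.py hunk: the removed crc32_type dance existed because Python 2's
zlib.crc32 could return a signed (possibly negative) value, while on Python 3 it is always
unsigned, so the gzip trailer can be packed with a fixed "<LL" format. A minimal sketch of
that trailer check, assuming Python 3; the helper function below is illustrative and not
part of the patch:

    import gzip
    import struct
    import zlib

    def gzip_trailer(data: bytes) -> bytes:
        # The gzip trailer is CRC32 followed by the input size modulo 2**32,
        # both little-endian unsigned 32-bit integers ("<LL").
        # On Python 3, zlib.crc32 always returns an unsigned int.
        return struct.pack("<LL", zlib.crc32(data), len(data) & 0xffffffff)

    # The last 8 bytes of any gzip stream are exactly this trailer.
    payload = b"example payload"
    assert gzip.compress(payload)[-8:] == gzip_trailer(payload)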
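
Note on the debpkg.py hunk: opentar keeps tarfile's encoding="utf8",
errors="surrogateescape" behaviour, under which undecodable bytes in member names survive
decoding as lone surrogates; the removed decodetarname validated names by strictly
re-encoding them. A small self-contained illustration of that behaviour, using a made-up
member name:

    # A member name containing a byte that is not valid UTF-8.
    raw = b"caf\xe9.txt"

    # tarfile opened with encoding="utf8", errors="surrogateescape" exposes
    # such a name decoded like this:
    name = raw.decode("utf8", "surrogateescape")
    assert name == "caf\udce9.txt"

    # Strict re-encoding flags the escaped byte, which is what the old
    # decodetarname checked for ...
    try:
        name.encode("utf8", "strict")
    except UnicodeEncodeError as error:
        assert error.reason == "surrogates not allowed"

    # ... while surrogateescape still round-trips the original bytes.
    assert name.encode("utf8", "surrogateescape") == raw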