From 27b95909f061ae3ecb3ba1b8d46adfef98ca5e6f Mon Sep 17 00:00:00 2001
From: Helmut Grohne
Date: Sun, 16 Feb 2020 08:21:20 +0100
Subject: drop support for Python 2.x

---
 dedup/utils.py | 21 +++++++--------------
 1 file changed, 7 insertions(+), 14 deletions(-)

(limited to 'dedup/utils.py')

diff --git a/dedup/utils.py b/dedup/utils.py
index dab6653..46f8e64 100644
--- a/dedup/utils.py
+++ b/dedup/utils.py
@@ -1,12 +1,6 @@
 import errno
-try:
-    from urllib.error import URLError, HTTPError
-except ImportError:
-    from urllib2 import URLError, HTTPError
-try:
-    from urllib.request import urlopen
-except ImportError:
-    from urllib2 import urlopen
+import urllib.error
+import urllib.request
 
 from debian.debian_support import version_compare
 
@@ -15,15 +9,14 @@ from dedup.compression import decompress
 def fetchiter(cursor):
     rows = cursor.fetchmany()
     while rows:
-        for row in rows:
-            yield row
+        yield from rows
         rows = cursor.fetchmany()
 
 def sql_add_version_compare(db):
     db.create_collation("debian_version", version_compare)
     db.create_function("debian_version_compare", 2, version_compare)
 
-def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
+def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")):
     """Fetch the given url. Try appending each of the given compression
     schemes and move on in case it doesn't exist. Decompress the resulting
     stream on the fly.
@@ -31,11 +24,11 @@ def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
     """
     for ext in extensions:
         try:
-            handle = urlopen(url + ext)
-        except HTTPError as error:
+            handle = urllib.request.urlopen(url + ext)
+        except urllib.error.HTTPError as error:
             if error.code != 404:
                 raise
-        except URLError as error:
+        except urllib.error.URLError as error:
             if not hasattr(error.reason, "errno"):
                 raise
             if error.reason.errno != errno.ENOENT:
--
cgit v1.2.3

From 529f985adaabfe0c63a1e7ad8d97ec36bb881e52 Mon Sep 17 00:00:00 2001
From: Helmut Grohne
Date: Wed, 29 Dec 2021 14:55:43 +0100
Subject: drop unused function sql_add_version_compare

---
 dedup/utils.py | 6 ------
 1 file changed, 6 deletions(-)

(limited to 'dedup/utils.py')

diff --git a/dedup/utils.py b/dedup/utils.py
index 46f8e64..d3a27a0 100644
--- a/dedup/utils.py
+++ b/dedup/utils.py
@@ -2,8 +2,6 @@ import errno
 import urllib.error
 import urllib.request
 
-from debian.debian_support import version_compare
-
 from dedup.compression import decompress
 
 def fetchiter(cursor):
@@ -12,10 +10,6 @@ def fetchiter(cursor):
         yield from rows
         rows = cursor.fetchmany()
 
-def sql_add_version_compare(db):
-    db.create_collation("debian_version", version_compare)
-    db.create_function("debian_version_compare", 2, version_compare)
-
 def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")):
     """Fetch the given url. Try appending each of the given compression
     schemes and move on in case it doesn't exist. Decompress the resulting
--
cgit v1.2.3
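The two patches above leave dedup/utils.py Python-3-only, with just fetchiter()
and open_compressed_mirror_url() remaining. As a quick illustration (not part of
the repository), the download helper might be exercised on its own like this;
the mirror URL is only an assumed example, and the deb822 parsing mirrors what
autoimport.py does at this point in the history:

    import contextlib

    from debian import deb822

    from dedup.utils import open_compressed_mirror_url

    # Example mirror path (an assumption, not taken from the patches above).
    # open_compressed_mirror_url() appends ".xz", ".gz" and "" in turn, skips
    # variants that do not exist, and returns a decompressed file-like object.
    url = "https://deb.debian.org/debian/dists/sid/main/binary-amd64/Packages"

    with contextlib.closing(open_compressed_mirror_url(url)) as pkglist:
        for pkg in deb822.Packages.iter_paragraphs(pkglist):
            print(pkg["Package"], pkg["Version"])
            break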
""" import argparse -import contextlib import errno import multiprocessing import pathlib @@ -14,26 +13,23 @@ import sys import tempfile import urllib.parse import concurrent.futures -from debian import deb822 from debian.debian_support import version_compare -from dedup.utils import open_compressed_mirror_url +from dedup.utils import iterate_packages from readyaml import readyaml def process_http(pkgs, url, addhash=True): - listurl = url + "/dists/sid/main/binary-amd64/Packages" - with contextlib.closing(open_compressed_mirror_url(listurl)) as pkglist: - for pkg in deb822.Packages.iter_paragraphs(pkglist): - name = pkg["Package"] - if name in pkgs and \ - version_compare(pkgs[name]["version"], pkg["Version"]) > 0: - continue - inst = dict(version=pkg["Version"], - filename="%s/%s" % (url, pkg["Filename"])) - if addhash: - inst["sha256hash"] = pkg["SHA256"] - pkgs[name] = inst + for pkg in iterate_packages(url, "amd64"): + name = pkg["Package"] + if name in pkgs and \ + version_compare(pkgs[name]["version"], pkg["Version"]) > 0: + continue + inst = dict(version=pkg["Version"], + filename="%s/%s" % (url, pkg["Filename"])) + if addhash: + inst["sha256hash"] = pkg["SHA256"] + pkgs[name] = inst def process_file(pkgs, filename): if filename.suffix != ".deb": diff --git a/dedup/utils.py b/dedup/utils.py index d3a27a0..55cdef0 100644 --- a/dedup/utils.py +++ b/dedup/utils.py @@ -1,7 +1,10 @@ +import contextlib import errno import urllib.error import urllib.request +import debian.deb822 + from dedup.compression import decompress def fetchiter(cursor): @@ -30,3 +33,11 @@ def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")): else: return decompress(handle, ext) raise OSError(errno.ENOENT, "No such file or directory") + +def iterate_packages(mirror, architecture, distribution="sid", section="main"): + """Download the relevant binary package list and generate + debian.deb822.Packages objects per listed package.""" + url = "%s/dists/%s/%s/binary-%s/Packages" % \ + (mirror, distribution, section, architecture) + with contextlib.closing(open_compressed_mirror_url(url)) as pkglist: + yield from debian.deb822.Packages.iter_paragraphs(pkglist) -- cgit v1.2.3