import errno

try:
    from urllib.error import URLError, HTTPError
except ImportError:
    from urllib2 import URLError, HTTPError
try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

from debian.debian_support import version_compare

from dedup.compression import decompress


def fetchiter(cursor):
    """Yield rows from a DB-API cursor, one at a time.

    Rows are pulled with fetchmany() in driver-sized batches so memory
    stays bounded even for very large result sets.
    @param cursor: an open DB-API cursor with a pending query
    """
    rows = cursor.fetchmany()
    while rows:
        for row in rows:
            yield row
        rows = cursor.fetchmany()


def sql_add_version_compare(db):
    """Register Debian version comparison helpers on a sqlite3 connection.

    Adds a "debian_version" collation and a two-argument SQL function
    "debian_version_compare", both backed by
    debian.debian_support.version_compare.
    @param db: a sqlite3 connection object
    """
    db.create_collation("debian_version", version_compare)
    db.create_function("debian_version_compare", 2, version_compare)


def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
    """Fetch the given url. Try appending each of the given compression
    schemes and move on in case it doesn't exist. Decompress the resulting
    stream on the fly.
    @returns: a file-like with the decompressed contents
    @raises OSError: with errno == ENOENT when none of the url variants
        exists; the failing url is carried as the filename attribute
    """
    for ext in extensions:
        try:
            handle = urlopen(url + ext)
        except HTTPError as error:
            # A 404 just means this compression variant is absent; try
            # the next extension. Anything else is a real failure.
            if error.code != 404:
                raise
        except URLError as error:
            # Some transports (e.g. file://) signal a missing file via
            # the wrapped OSError's errno rather than an HTTP status.
            if not hasattr(error.reason, "errno"):
                raise
            if error.reason.errno != errno.ENOENT:
                raise
        else:
            try:
                return decompress(handle, ext)
            except Exception:
                # Don't leak the open connection if wrapping the stream
                # in a decompressor fails.
                handle.close()
                raise
    raise OSError(errno.ENOENT, "No such file or directory", url)