import errno
import urllib.error
import urllib.request

from debian.debian_support import version_compare

from dedup.compression import decompress


def fetchiter(cursor):
    """Yield every row from *cursor*, fetching in fetchmany-sized batches.

    @param cursor: a DB-API cursor with pending results
    """
    while batch := cursor.fetchmany():
        yield from batch


def sql_add_version_compare(db):
    """Register Debian version comparison helpers on connection *db*.

    Adds the "debian_version" collation and the two-argument SQL function
    "debian_version_compare", both backed by debian_support.version_compare.
    """
    db.create_collation("debian_version", version_compare)
    db.create_function("debian_version_compare", 2, version_compare)


def open_compressed_mirror_url(url, extensions=(".xz", ".gz", "")):
    """Fetch the given url. Try appending each of the given compression
    schemes and move on in case it doesn't exist. Decompress the resulting
    stream on the fly.
    @returns: a file-like with the decompressed contents
    """
    for extension in extensions:
        try:
            response = urllib.request.urlopen(url + extension)
        except urllib.error.HTTPError as err:
            # 404 just means this compression variant is absent on the
            # mirror; fall through to the next candidate extension.
            if err.code != 404:
                raise
        except urllib.error.URLError as err:
            # file:// mirrors report a missing file as a URLError whose
            # reason carries errno == ENOENT; treat that like a 404.
            if getattr(err.reason, "errno", None) != errno.ENOENT:
                raise
        else:
            return decompress(response, extension)
    raise OSError(errno.ENOENT, "No such file or directory")