import errno
try:
    from urllib.error import URLError, HTTPError
except ImportError:
    from urllib2 import URLError, HTTPError
try:
    from urllib.request import urlopen
except ImportError:
    from urllib2 import urlopen

from debian.debian_support import version_compare

from dedup.compression import decompress

def fetchiter(cursor):
    """Iterate over all remaining rows of the cursor in batches.

    Rows are pulled via repeated fetchmany() calls (which yields an
    empty list once exhausted) and flattened into a single row stream.
    """
    for batch in iter(cursor.fetchmany, []):
        for row in batch:
            yield row

def sql_add_version_compare(db):
    """Register Debian version comparison helpers on a sqlite connection.

    Adds both a two-argument SQL function ``debian_version_compare`` and
    a collation ``debian_version``, each backed by version_compare.
    """
    db.create_function("debian_version_compare", 2, version_compare)
    db.create_collation("debian_version", version_compare)

def open_compressed_mirror_url(url, extensions=(u".xz", u".gz", u"")):
    """Fetch the given url. Try appending each of the given compression
    schemes and move on in case it doesn't exist. Decompress the resulting
    stream on the fly.
    @returns: a file-like with the decompressed contents
    """
    for suffix in extensions:
        try:
            handle = urlopen(url + suffix)
        except HTTPError as err:
            # A 404 just means this compression variant is absent; try next.
            if err.code != 404:
                raise
            continue
        except URLError as err:
            # Non-HTTP failure (e.g. file:// URL): only a missing-file
            # errno is tolerated, everything else propagates.
            if getattr(err.reason, "errno", None) != errno.ENOENT:
                raise
            continue
        return decompress(handle, suffix)
    raise OSError(errno.ENOENT, "No such file or directory")