# Shortcut for dpkg-style version comparison (apt_pkg is imported and
# presumably initialized on lines not visible in this view).
18 version_compare = apt_pkg.version_compare
21 from common import decompress_stream, yield_lines
# Build profiles assumed active for the cross-build satisfiability check.
24 PROFILES = frozenset(("cross", "nocheck"))
# One row of dpkg's cputable: Debian CPU name, GNU CPU name, a regex
# matching compatible CPU strings, word size in bits, and byte order.
CPUEntry = collections.namedtuple(
    'CPUEntry', 'debcpu gnucpu regex bits endianness')
29 TupleEntry = collections.namedtuple('TupleEntry',
# NOTE(review): the field-name argument (original line 30) is not visible
# in this view; the fields include at least "cpu" (used by getendianness).
# Parse a dpkg table file (cputable/tupletable/abitable): skip comment
# lines and split the rest into whitespace-separated fields.
# NOTE(review): the loop and yield between these lines are not visible in
# this view -- do not assume exact behavior beyond the fragments shown.
34 def read_table(filename):
35 with open(filename) as f:
37 if not line.startswith("#"):
40 def __init__(self, cputable="/usr/share/dpkg/cputable",
41 tupletable="/usr/share/dpkg/tupletable",
42 abitable="/usr/share/dpkg/abitable"):
# Load the three dpkg architecture tables from their standard locations.
# NOTE(review): the lines initializing the backing dicts (original
# lines 43-45) are not visible in this view.
46 self.read_cputable(cputable)
47 self.read_tupletable(tupletable)
48 self.read_abitable(abitable)
50 def read_cputable(self, cputable):
# Populate self.cputable keyed by the Debian CPU name.
52 for values in self.read_table(cputable):
53 values[3] = int(values[3]) # bits
54 entry = CPUEntry(*values)
55 self.cputable[entry.debcpu] = entry
57 def read_tupletable(self, tupletable):
58 self.tupletable.clear()
# Map each Debian architecture name to its expanded Debian tuple. Rows
# containing "<cpu>" are templates expanded once per CPU in cputable.
59 for debtuple, debarch in self.read_table(tupletable):
60 if '<cpu>' in debtuple:
61 for cpu in self.cputable:
62 entry = TupleEntry(*debtuple.replace("<cpu>", cpu)
64 self.tupletable[debarch.replace("<cpu>", cpu)] = entry
66 self.tupletable[debarch] = TupleEntry(*debtuple.split("-"))
# NOTE(review): several lines (63, 65) are not visible in this view; the
# template branch's split and the else introducing line 66 are implied.
68 def read_abitable(self, abitable):
# Record per-architecture pointer-size overrides (arch -> bits).
# NOTE(review): intermediate lines are missing; bits is presumably
# converted to int on a line not visible here -- TODO confirm.
70 for arch, bits in self.read_table(abitable):
72 self.abitable[arch] = bits
74 def match(self, arch, pattern):
# Return whether Debian architecture *arch* matches wildcard *pattern*
# (e.g. "any" components as in "linux-any").
75 parts = pattern.split("-")
76 if not "any" in parts:
# No wildcard present: only an exact string match counts.
77 return pattern == arch
# Left-pad the pattern with "any" up to the full 4-tuple width.
# NOTE(review): a line between 77 and 79 (original line 78, probably a
# length check guarding the insert) is not visible in this view.
79 parts.insert(0, "any")
80 entry = self.tupletable[arch]
81 return all(parts[i] in (entry[i], "any") for i in range(4))
def getendianness(self, arch):
    """Look up the byte order recorded in dpkg's cputable for *arch*."""
    tuple_entry = self.tupletable[arch]
    return self.cputable[tuple_entry.cpu].endianness
# Module-level singleton: reads dpkg's tables at import time. arch_match
# is a convenience alias used when filtering source Architecture fields.
86 architectures = Architectures()
87 arch_match = architectures.match
89 def call_dose_builddebcheck(arguments):
91 @type arguments: [str]
92 @param arguments: command line arguments to dose-builddebcheck
93 @returns: an iterable over loaded yaml documents. The first document
94 is the header, all other documents are per-package.
95 @raises subprocess.CalledProcessError: if dose errors out
97 cmd = ["dose-builddebcheck"]
100 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
# Split dose's YAML stream into individual documents so each can be
# loaded separately instead of parsing the whole output at once.
103 for line in proc.stdout:
104 if line.startswith(b' '):
106 elif line == b' -\n':
107 yield yaml.load(b"".join(lines), Loader=yaml.CBaseLoader)
# NOTE(review): the two yield paths use different loaders (CBaseLoader
# above vs CSafeLoader below) -- confirm this asymmetry is intended.
111 yield yaml.load(b"".join(lines), Loader=yaml.CSafeLoader)
# Exit status 1 is tolerated (presumably "unsatisfiable packages found").
112 if proc.wait() not in (0, 1):
113 raise subprocess.CalledProcessError(proc.returncode, cmd)
115 def parse_deb822(iterable):
116 """Parse an iterable of bytes into an iterable of str-dicts."""
# NOTE(review): many lines of this parser are not visible in this view;
# the comments below describe only the visible fragments.
120 for line in yield_lines(iterable):
121 line = line.decode("utf8")
# End of a field: store the accumulated value.
124 mapping[key] = value.strip()
# Continuation lines begin with space or tab and extend the prior field.
128 elif key and line.startswith((" ", "\t")):
132 mapping[key] = value.strip()
# A fresh "Key: value" line starts a new field.
134 key, value = line.split(":", 1)
136 raise ValueError("invalid input line %r" % line)
138 mapping[key] = value.strip()
def serialize_deb822(dct):
    """Serialize a str-dict into a single str (one deb822 paragraph).

    Each key/value pair becomes a "Key: value\n" line and the paragraph
    is terminated by a blank line. Note: despite the original docstring's
    claim, this works on str, not bytes -- the "%s: %s" formatting and
    "".join produce str, and callers write the result to text-mode files.
    """
    return "".join(map("%s: %s\n".__mod__, dct.items())) + "\n"
146 class HashSumMismatch(Exception):
# Raised by hash_check() when the digest of the consumed data does not
# match the expected value. (Class body is not visible in this view.)
149 def hash_check(iterable, hashobj, expected_digest):
150 """Wraps an iterable that yields bytes. It doesn't modify the sequence,
151 but on the final element it verifies that the concatenation of bytes
152 yields an expected digest value. Upon failure, the final next() results in
153 a HashSumMismatch rather than StopIteration.
155 for data in iterable:
# NOTE(review): the hashobj.update(data) / yield of each chunk happen on
# lines not visible in this view.
158 if hashobj.hexdigest() != expected_digest:
159 raise HashSumMismatch()
# Parse an RFC-2822-style date as found in a Release file's Date field.
# (The enclosing def parse_date(s): line is not visible in this view.)
162 return datetime.datetime.strptime(s, "%a, %d %b %Y %H:%M:%S %Z")
165 def __init__(self, files=("/etc/apt/trusted.gpg",),
166 partsdir="/etc/apt/trusted.gpg.d"):
# Collect the default apt keyring plus every file in trusted.gpg.d,
# keeping only those that are actually readable.
167 candidates = list(files)
168 candidates.extend(os.path.join(partsdir, e)
169 for e in os.listdir(partsdir))
170 self.keyrings = list(filter(lambda f: os.access(f, os.R_OK),
# NOTE(review): the closing argument of this filter call (original
# lines 171-172) is not visible in this view.
173 def verify(self, content):
# Verify an inline-signed blob with gpgv; "--output -" extracts the
# signed payload to stdout so it can be returned after verification.
174 cmdline = ["gpgv", "--quiet", "--weak-digest", "SHA1", "--output", "-"]
175 for keyring in self.keyrings:
176 cmdline.extend(("--keyring", keyring))
177 proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE,
178 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
179 stdout, _ = proc.communicate(content)
# NOTE(review): the returncode check (original line 180) is not visible.
# "verififcation" is a typo in this runtime error message; fixing it
# would change program output, so it is only flagged here.
181 raise ValueError("signature verififcation failed")
186 def __init__(self, uri, dist="sid"):
# Cached mirror state; releasetime is filled in by parse_release().
# NOTE(review): assignments on original lines 187-188 are not visible.
189 self.releasetime = None
194 def get_all_keyrings():
# Yield candidate apt keyring paths; a missing trusted.gpg.d directory
# is tolerated via the FileNotFoundError handler below.
195 yield "/etc/apt/trusted.gpg"
196 partsdir = "/etc/apt/trusted.gpg.d"
198 for e in os.listdir(partsdir):
199 yield os.path.join(partsdir, e)
200 except FileNotFoundError:
# Filter the candidate keyrings down to the readable ones. (The def line
# of this method, around original line 203, is not visible in this view.)
205 return filter(lambda f: os.access(f, os.R_OK),
206 DebianMirror.get_all_keyrings())
def get_uri(self, filename):
    """Build the full mirror URI for *filename* under dists/<dist>/."""
    location = "%s/dists/%s/%s" % (self.uri, self.dist, filename)
    return location
def fetch_release(self):
    """Download dists/<dist>/InRelease and return its gpgv-verified payload."""
    response = requests.get(self.get_uri("InRelease"))
    response.raise_for_status()
    verified = GPGV().verify(response.content)
    return verified
216 def parse_release(self, content):
# Exactly one deb822 paragraph is expected in a Release file.
217 info, = list(parse_deb822([content]))
218 self.releasetime = parse_date(info["Date"])
219 valid_until = parse_date(info["Valid-Until"])
220 now = datetime.datetime.utcnow()
# Sanity-check the release's validity window against the local clock.
221 if self.releasetime > now:
222 raise ValueError("release file generated in future")
223 if valid_until < now:
224 raise ValueError("release signature expired")
225 self.byhash = info.pop("Acquire-By-Hash", "no") == "yes"
# Build the filename -> hash map from the configured hash function's
# file list. NOTE(review): lines 226 and 228-231 are not visible here.
227 for line in info[self.hashfunc].splitlines():
232 raise ValueError("invalid %s line %r" % (self.hashfunc, line))
233 self.files[parts[2]] = parts[0]
def update_release(self):
    """Fetch the InRelease file and refresh cached release state from it."""
    content = self.fetch_release()
    self.parse_release(content)
238 def fetch_list(self, listname):
# Prefer the xz-compressed variant when the Release file lists it.
239 if listname + ".xz" in self.files:
241 wrapper = lambda i: decompress_stream(i, lzma.LZMADecompressor())
243 wrapper = lambda i: i
244 hashvalue = self.files[listname]
# With Acquire-By-Hash, fetch via the content-addressed by-hash path
# (avoids races with concurrent mirror updates).
# NOTE(review): the guard line (original 245) is not visible here.
246 listname = "%s/by-hash/%s/%s" % (os.path.dirname(listname),
247 self.hashfunc, hashvalue)
248 with contextlib.closing(requests.get(self.get_uri(listname),
249 stream=True)) as resp:
250 resp.raise_for_status()
251 it = resp.iter_content(65536)
# Verify the digest while streaming; raises HashSumMismatch at the end.
252 it = hash_check(it, hashlib.new(self.hashfunc), hashvalue)
253 yield from wrapper(it)
def fetch_sources(self, component="main"):
    """Stream the (decompressed, verified) Sources index for *component*."""
    listname = "%s/source/Sources" % component
    return self.fetch_list(listname)
def fetch_binaries(self, architecture, component="main"):
    """Stream the Packages index for *component*/*architecture*."""
    listname = "%s/binary-%s/Packages" % (component, architecture)
    return self.fetch_list(listname)
# Field whitelists used to shrink deb822 paragraphs before handing them
# to dose. (Most member lines are not visible in this view.)
262 binfields = frozenset((
275 srcdepfields = frozenset((
277 "Build-Conflicts-Arch",
279 "Build-Depends-Arch",
281 srcfields = srcdepfields.union((
# Packages known to break when installed as foreign-architecture
# dependencies (e.g. maintainer scripts that must execute host binaries).
287 bad_foreign_packages = frozenset((
288 "flex-old", # cannot execute /usr/bin/flex
289 "icmake", # cannot execute /usr/bin/icmake, build system
290 "jam", # cannot execute /usr/bin/jam, build system
291 "libtool-bin", # #836123
292 "python2.7-minimal", # fails postinst
293 "python3.6-minimal", # fails postinst
294 "python3.7-minimal", # fails postinst
295 "swi-prolog-nox", # fails postinst
296 "xrdp", # fails postinst
297 "libgvc6", # fails postinst
300 def strip_dict(dct, keepfields):
# In-place: remove every key of dct not listed in keepfields.
# NOTE(review): the deletion loop (original lines 303-304) is not
# visible in this view.
301 keys = set(dct.keys())
302 keys.difference_update(keepfields)
# NOTE(review): "alternatvies" is a typo for "alternatives"; renaming
# would break the caller in make_source_list, so it is only flagged here.
306 def strip_alternatvies(dct, fields):
# Keep only the first alternative of each dependency ("a | b" -> "a").
312 dct[f] = ",".join(dep.split("|", 1)[0]
313 for dep in value.split(","))
315 def latest_versions(pkgs):
# Deduplicate by package name, keeping the highest version according to
# dpkg version ordering (via apt_pkg.version_compare).
320 if version_compare(packages[name]["Version"], p["Version"]) > 0:
325 return (p for p in packages.values()
# "Negative-Entry" dummies (emitted by make_source_list) only suppress
# older versions and are dropped from the final result here.
326 if "Package" in p and not "Negative-Entry" in p)
328 def make_binary_list_build(mirror, arch):
# Binary packages of the build architecture. Only the one
# crossbuild-essential package matching *arch* is kept, and it is forced
# to additionally depend on host-arch libc/libstdc++ dev packages.
329 for p in parse_deb822(mirror.fetch_binaries(BUILD_ARCH)):
330 if p["Package"].startswith("crossbuild-essential-"):
331 if p["Package"] != "crossbuild-essential-" + arch:
333 p["Depends"] += ", libc-dev:%s, libstdc++-dev:%s" % (arch, arch)
334 strip_dict(p, binfields)
# NOTE(review): the yield of p (original line ~335) is not visible here.
337 def make_binary_list_host(mirror, arch):
# Host-architecture binary packages, excluding those that are unusable
# or unnecessary as foreign dependencies: arch:all, Multi-Arch: foreign,
# Essential packages, and the known-bad list above.
338 for p in parse_deb822(mirror.fetch_binaries(arch)):
339 if p["Architecture"] == "all":
341 if p.get("Multi-Arch") == "foreign":
343 if p.get("Essential") == "yes":
345 if p["Package"] in bad_foreign_packages:
347 strip_dict(p, binfields)
# NOTE(review): the continue/yield lines between the visible checks are
# not visible in this view.
def make_binary_list(mirror, arch):
    """Concatenate build-arch and host-arch binary package streams."""
    build_pkgs = make_binary_list_build(mirror, arch)
    host_pkgs = make_binary_list_host(mirror, arch)
    return itertools.chain(build_pkgs, host_pkgs)
354 def make_source_list(mirror, arch):
355 for p in parse_deb822(mirror.fetch_sources()):
356 if p.get("Extra-Source-Only") == "yes":
# Only sources whose Architecture field matches *arch* are candidates.
358 if any(arch_match(arch, pattern)
359 for pattern in p["Architecture"].split()):
360 strip_dict(p, srcfields)
361 strip_alternatvies(p, srcdepfields)
# Non-matching sources still emit a marker so latest_versions() does not
# fall back to an older, matching version of the same package.
# dummy entry preventing older matching versions
364 # dummy entry preventing older matching versions
365 yield {"Package": p["Package"], "Version": p["Version"],
366 "Negative-Entry": "yes"}
368 def check_bdsat(mirror, arch):
# Run dose-builddebcheck for cross compiling from BUILD_ARCH to *arch*
# and yield (package, version, reason) triples; reason is None when the
# build dependencies are satisfiable.
370 "--deb-native-arch=" + BUILD_ARCH,
371 "--deb-host-arch=" + arch,
372 "--deb-drop-b-d-indep",
373 "--deb-profiles=" + ",".join(PROFILES),
378 "--deb-emulate-sbuild",
# dose reads package lists from files, so materialize both the binary
# and source lists into temporary files first.
381 with tempfile.NamedTemporaryFile("w", encoding="utf8") as bintmp, \
382 tempfile.NamedTemporaryFile("w", encoding="utf8") as srctmp:
383 for p in make_binary_list(mirror, arch):
384 bintmp.write(serialize_deb822(p))
386 cmd.append(bintmp.name)
388 for p in latest_versions(make_source_list(mirror, arch)):
389 srctmp.write(serialize_deb822(p))
391 cmd.append(srctmp.name)
393 dose_result = call_dose_builddebcheck(cmd)
394 next(dose_result) # skip header
395 for d in dose_result:
396 if d["status"] == "ok":
397 yield (d["package"], d["version"], None)
# Summarize dose's explanation: the first missing dependency or the
# first conflict, reduced to the bare package name.
401 reason = "missing %s" % r["missing"]["pkg"]["unsat-dependency"].split()[0].split(":", 1)[0]
402 elif "conflict" in r:
403 r = r["conflict"]["pkg1"]["unsat-conflict"]
# A " (!= ...)" conflict indicates a version skew across architectures.
404 reason = "skew " if ' (!= ' in r else "conflict "
405 reason += r.split()[0].split(':', 1)[0]
408 yield (d["package"], d["version"], reason)
410 def update_depcheck(mirror, db, architecture):
# Recompute build-dependency satisfiability for one architecture and
# synchronize the depstate table to the new result in one transaction.
411 now = datetime.datetime.utcnow()
412 mirror.update_release()
414 for source, version, reason in check_bdsat(mirror, architecture):
415 state[source] = (version, reason)
416 with contextlib.closing(db.cursor()) as cur:
417 cur.execute("BEGIN;")
418 cur.execute("SELECT source, version, satisfiable, reason FROM depstate WHERE architecture = ?;",
420 for source, version, satisfiable, reason in list(cur.fetchall()):
# Rows already matching the freshly computed state are kept (and
# removed from `state` on a line not visible here) ...
421 if satisfiable == (reason is None) and \
422 state.get(source) == (version, reason):
# ... stale rows are deleted; everything left in `state` is re-inserted.
425 cur.execute("DELETE FROM depstate WHERE source = ? AND version = ? AND architecture = ?;",
426 (source, version, architecture))
427 cur.executemany("INSERT INTO depstate (source, architecture, version, satisfiable, reason) VALUES (?, ?, ?, ?, ?);",
428 ((source, architecture, version, reason is None,
430 for source, (version, reason) in state.items()))
# Record the release/update timestamps and clear the giveback flag.
431 cur.execute("UPDATE depcheck SET releasetime = ?, updatetime = ?, giveback = 0 WHERE architecture = ?",
432 (mirror.releasetime, now, architecture))
# Entry point: refresh the mirror release data and re-run the dependency
# check for every architecture flagged giveback or with an outdated
# release. (The enclosing def line is not visible in this view.)
436 argp = argparse.ArgumentParser()
437 argp.add_argument('-m', '--mirror',
438 default='http://deb.debian.org/debian',
439 help="debian mirror to use")
440 args = argp.parse_args()
441 mirror = DebianMirror(args.mirror)
442 mirror.update_release()
443 db = sqlite3.connect("db")
445 cur.execute("""SELECT architecture FROM depcheck
446 WHERE giveback = 1 OR releasetime < ?;""",
447 (mirror.releasetime,))
448 for architecture, in list(cur.fetchall()):
449 print("update %s" % architecture)
450 update_depcheck(mirror, db, architecture)
452 if __name__ == "__main__":