# NOTE(review): this chunk is a line-numbered paste — the leading integer on
# each line is not Python, and many original lines are absent; comments below
# hedge accordingly.
17 version_compare = apt_pkg.version_compare
20 from common import decompress_stream, yield_lines
# Mirror base URI (apt-cacher-ng style proxy) and build profiles for dose.
23 MIRROR = "http://proxy:3142/debian"
24 PROFILES = frozenset(("cross", "nocheck"))
# Row of dpkg's cputable: Debian CPU name, GNU CPU name, match regex,
# word size in bits, endianness.
26 CPUEntry = collections.namedtuple('CPUEntry',
27 'debcpu gnucpu regex bits endianness')
# Row of dpkg's tupletable; the field list (line 30ff) is not visible here.
29 TupleEntry = collections.namedtuple('TupleEntry',
# Parse a dpkg table file, skipping "#" comment lines.
# NOTE(review): the iteration and yield between these lines (36, 38) are
# missing from this paste — presumably splits each non-comment line; confirm
# against the full file.
34 def read_table(filename):
35 with open(filename) as f:
37 if not line.startswith("#"):
# Constructor of the architecture-table class (class header not visible in
# this paste): loads dpkg's cputable, tupletable and abitable from their
# standard locations unless overridden.
40 def __init__(self, cputable="/usr/share/dpkg/cputable",
41 tupletable="/usr/share/dpkg/tupletable",
42 abitable="/usr/share/dpkg/abitable"):
46 self.read_cputable(cputable)
47 self.read_tupletable(tupletable)
48 self.read_abitable(abitable)
# Load dpkg's cputable into self.cputable, keyed by Debian CPU name.
50 def read_cputable(self, cputable):
52 for values in self.read_table(cputable):
# The fourth column is the word size; convert from str exactly once here.
53 values[3] = int(values[3]) # bits
54 entry = CPUEntry(*values)
55 self.cputable[entry.debcpu] = entry
# Load dpkg's tupletable, expanding the "<cpu>" wildcard against every known
# CPU so lookups by concrete Debian architecture name succeed.
57 def read_tupletable(self, tupletable):
58 self.tupletable.clear()
59 for debtuple, debarch in self.read_table(tupletable):
60 if '<cpu>' in debtuple:
61 for cpu in self.cputable:
# NOTE(review): the continuation of this expression (line 63) is missing
# from this paste — presumably .split("-"); confirm upstream.
62 entry = TupleEntry(*debtuple.replace("<cpu>", cpu)
64 self.tupletable[debarch.replace("<cpu>", cpu)] = entry
# Non-wildcard rows map the architecture straight to its tuple.
66 self.tupletable[debarch] = TupleEntry(*debtuple.split("-"))
# Load dpkg's abitable (per-architecture word-size overrides).
68 def read_abitable(self, abitable):
70 for arch, bits in self.read_table(abitable):
# NOTE(review): line 71 (likely an int() conversion of bits) is missing here.
72 self.abitable[arch] = bits
# Return True when Debian architecture *arch* matches *pattern*, which may
# contain "any" wildcards (e.g. "any-amd64", "linux-any").
74 def match(self, arch, pattern):
75 parts = pattern.split("-")
# Fast path: a pattern without wildcards must match literally.
76 if not "any" in parts:
77 return pattern == arch
# Left-pad short patterns with "any" up to the full 4-tuple form;
# NOTE(review): the surrounding loop/condition (line 78) is missing here.
79 parts.insert(0, "any")
80 entry = self.tupletable[arch]
# Compare all four tuple components, treating "any" as matching anything.
81 return all(parts[i] in (entry[i], "any") for i in range(4))
# Endianness of *arch* resolved through tupletable -> cputable.
83 def getendianness(self, arch):
84 return self.cputable[self.tupletable[arch].cpu].endianness
# Module-level singleton plus a convenience alias used by make_source_list.
86 architectures = Architectures()
87 arch_match = architectures.match
# Run dose-builddebcheck and stream its YAML output document by document.
89 def call_dose_builddebcheck(arguments):
91 @type arguments: [str]
92 @param arguments: command line arguments to dose-builddebcheck
93 @returns: an iterable over loaded yaml documents. The first document
94 is the header, all other documents are per-package.
95 @raises subprocess.CalledProcessError: if dose errors out
97 cmd = ["dose-builddebcheck"]
100 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
# Split the combined YAML stream on the per-package " -" separators; the
# buffering statements (lines 105, 108-110) are missing from this paste.
103 for line in proc.stdout:
104 if line.startswith(b' '):
106 elif line == b' -\n':
# NOTE(review): one branch uses CBaseLoader and the other CSafeLoader —
# the missing surrounding lines should confirm whether that is deliberate.
107 yield yaml.load(b"".join(lines), Loader=yaml.CBaseLoader)
111 yield yaml.load(b"".join(lines), Loader=yaml.CSafeLoader)
# dose exits 1 for "some packages unsatisfiable"; only other codes are errors.
112 if proc.wait() not in (0, 1):
113 raise subprocess.CalledProcessError(proc.returncode, cmd)
# Incremental deb822 parser: groups "Key: value" lines and their indented
# continuation lines into one dict per stanza.
115 def parse_deb822(iterable):
116 """Parse an iterable of bytes into an iterable of str-dicts."""
# NOTE(review): numerous lines are missing between the statements below;
# only the general control flow is visible in this paste.
120 for line in yield_lines(iterable):
121 line = line.decode("utf8")
124 mapping[key] = value.strip()
# Continuation lines start with space or tab and extend the previous key.
128 elif key and line.startswith((" ", "\t")):
132 mapping[key] = value.strip()
134 key, value = line.split(":", 1)
136 raise ValueError("invalid input line %r" % line)
# Flush the final stanza at end of input.
138 mapping[key] = value.strip()
def serialize_deb822(dct):
    """Serialize a str-keyed dict into one deb822 stanza string.

    Fix(review): the original docstring claimed a "byte-dict" and "bytes
    object", but the implementation consumes and returns str.  Emits one
    "Key: value" line per item (insertion order) plus a blank separator line.
    """
    return "".join("%s: %s\n" % item for item in dct.items()) + "\n"
# Raised by hash_check when the stream's digest does not match expectations.
146 class HashSumMismatch(Exception):
# Pass a byte stream through unchanged while feeding it to *hashobj*, then
# verify the final digest.
149 def hash_check(iterable, hashobj, expected_digest):
150 """Wraps an iterable that yields bytes. It doesn't modify the sequence,
151 but on the final element it verifies that the concatenation of bytes
152 yields an expected digest value. Upon failure, the final next() results in
153 a HashSumMismatch rather than StopIteration.
# NOTE(review): the hashobj.update(...)/yield lines (156-157) are missing
# from this paste; the docstring above describes the intended behaviour.
155 for data in iterable:
158 if hashobj.hexdigest() != expected_digest:
159 raise HashSumMismatch()
# Tail of a date parser (its def line, ~161, is not visible here); parses
# the RFC822-style timestamps found in Release files, naive of timezone.
162 return datetime.datetime.strptime(s, "%a, %d %b %Y %H:%M:%S %Z")
# gpgv wrapper (class header not visible in this paste): collects readable
# apt keyrings and verifies inline-signed content against them.
165 def __init__(self, files=("/etc/apt/trusted.gpg",),
166 partsdir="/etc/apt/trusted.gpg.d"):
# Candidate keyrings: the given files plus everything in the parts dir.
167 candidates = list(files)
168 candidates.extend(os.path.join(partsdir, e)
169 for e in os.listdir(partsdir))
# Keep only keyrings the current user can read (line 171 tail missing).
170 self.keyrings = list(filter(lambda f: os.access(f, os.R_OK),
# Verify inline-signed *content* and return the payload gpgv writes to
# stdout ("--output -"); SHA1 signatures are rejected as weak.
173 def verify(self, content):
174 cmdline = ["gpgv", "--quiet", "--weak-digest", "SHA1", "--output", "-"]
175 for keyring in self.keyrings:
176 cmdline.extend(("--keyring", keyring))
177 proc = subprocess.Popen(cmdline, stdin=subprocess.PIPE,
178 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
179 stdout, _ = proc.communicate(content)
# NOTE(review): the returncode test (line 180) is missing from this paste,
# and "verififcation" is a typo in this user-visible message — fix at source.
181 raise ValueError("signature verififcation failed")
# DebianMirror constructor: records base URI and distribution; the remaining
# attribute initialisation (lines 187-193, e.g. hashfunc/files) is missing
# from this paste.
186 def __init__(self, uri, dist="sid"):
189 self.releasetime = None
# Yield the default apt keyring paths; a missing parts directory is
# tolerated (the try:, line 197, and its pass, line 201, are not visible).
194 def get_all_keyrings():
195 yield "/etc/apt/trusted.gpg"
196 partsdir = "/etc/apt/trusted.gpg.d"
198 for e in os.listdir(partsdir):
199 yield os.path.join(partsdir, e)
200 except FileNotFoundError:
# Filter down to keyrings readable by the current user; the enclosing def
# for this return (~line 204) is missing from this paste.
205 return filter(lambda f: os.access(f, os.R_OK),
206 DebianMirror.get_all_keyrings())
def get_uri(self, filename):
    """Return the absolute mirror URI of *filename* within this dist."""
    location = (self.uri, self.dist, filename)
    return "%s/dists/%s/%s" % location
def fetch_release(self):
    """Download the InRelease file and return its GPG-verified payload."""
    response = requests.get(self.get_uri("InRelease"))
    response.raise_for_status()
    verifier = GPGV()
    return verifier.verify(response.content)
# Parse the verified Release *content*: validate its timestamps against the
# local clock and record the per-file hash table for later integrity checks.
216 def parse_release(self, content):
# A Release file is a single deb822 stanza.
217 info, = list(parse_deb822([content]))
218 self.releasetime = parse_date(info["Date"])
219 valid_until = parse_date(info["Valid-Until"])
# Naive-UTC comparisons; parse_date drops timezone information.
220 now = datetime.datetime.utcnow()
221 if self.releasetime > now:
222 raise ValueError("release file generated in future")
223 if valid_until < now:
224 raise ValueError("release signature expired")
225 self.byhash = info.pop("Acquire-By-Hash", "no") == "yes"
# Each hash line is "<digest> <size> <path>"; the body of this loop
# (lines 228-231, the split/validation) is missing from this paste.
227 for line in info[self.hashfunc].splitlines():
232 raise ValueError("invalid %s line %r" % (self.hashfunc, line))
233 self.files[parts[2]] = parts[0]
def update_release(self):
    """Refresh this mirror's metadata from a freshly fetched release file."""
    content = self.fetch_release()
    self.parse_release(content)
# Stream the (possibly xz-compressed) index *listname*, verifying its
# checksum from the Release file and decompressing on the fly.
238 def fetch_list(self, listname):
# Prefer the .xz variant when the Release file lists one; the assignments
# on lines 240, 242 and 245 are missing from this paste.
239 if listname + ".xz" in self.files:
241 wrapper = lambda i: decompress_stream(i, lzma.LZMADecompressor())
243 wrapper = lambda i: i
244 hashvalue = self.files[listname]
# With Acquire-By-Hash the immutable by-hash path is fetched instead.
246 listname = "%s/by-hash/%s/%s" % (os.path.dirname(listname),
247 self.hashfunc, hashvalue)
248 with requests.get(self.get_uri(listname), stream=True) as resp:
249 resp.raise_for_status()
250 it = resp.iter_content(65536)
# Verify the digest while streaming; HashSumMismatch fires at stream end.
251 it = hash_check(it, hashlib.new(self.hashfunc), hashvalue)
252 yield from wrapper(it)
def fetch_sources(self, component="main"):
    """Stream the Sources index of *component* as uncompressed bytes."""
    listname = "%s/source/Sources" % component
    return self.fetch_list(listname)
def fetch_binaries(self, architecture, component="main"):
    """Stream the Packages index for *architecture* within *component*."""
    listname = "%s/binary-%s/Packages" % (component, architecture)
    return self.fetch_list(listname)
# Field whitelists used by strip_dict to shrink Packages/Sources stanzas
# before feeding them to dose; most member lines are missing from this paste.
261 binfields = frozenset((
274 srcdepfields = frozenset((
276 "Build-Conflicts-Arch",
278 "Build-Depends-Arch",
# Source stanzas keep the dependency fields plus identification fields.
280 srcfields = srcdepfields.union((
# Packages known to break when installed for a foreign architecture during
# cross builds; each entry names the observed failure mode.
286 bad_foreign_packages = frozenset((
287 "flex-old", # cannot execute /usr/bin/flex
288 "icmake", # cannot execute /usr/bin/icmake, build system
289 "jam", # cannot execute /usr/bin/jam, build system
290 "libtool-bin", # #836123
291 "python2.7-minimal", # fails postinst
292 "python3.6-minimal", # fails postinst
293 "python3.7-minimal", # fails postinst
294 "swi-prolog-nox", # fails postinst
295 "xrdp", # fails postinst
296 "libgvc6", # fails postinst
# Remove from *dct*, in place, every key not listed in *keepfields*; the
# deleting loop (lines 302-303) is missing from this paste.
299 def strip_dict(dct, keepfields):
300 keys = set(dct.keys())
301 keys.difference_update(keepfields)
# Reduce each listed dependency field to its first alternative ("a | b" -> "a").
# NOTE(review): "alternatvies" is a typo, but the call site in
# make_source_list uses the same spelling — rename both together or not at all.
305 def strip_alternatvies(dct, fields):
311 dct[f] = ",".join(dep.split("|", 1)[0]
312 for dep in value.split(","))
# Collapse a stream of source stanzas to the newest version of each package;
# most of the accumulation loop (lines 315-323) is missing from this paste.
314 def latest_versions(pkgs):
# Keep the stored entry when it is newer (dpkg version ordering).
319 if version_compare(packages[name]["Version"], p["Version"]) > 0:
# Negative-Entry markers (emitted by make_source_list) are filtered out.
324 return (p for p in packages.values()
325 if "Package" in p and not "Negative-Entry" in p)
# Yield build-architecture binary stanzas; only the crossbuild-essential
# package matching *arch* is kept, with cross toolchain library dependencies
# appended so dose can resolve them.
327 def make_binary_list_build(mirror, arch):
328 for p in parse_deb822(mirror.fetch_binaries(BUILD_ARCH)):
329 if p["Package"].startswith("crossbuild-essential-"):
330 if p["Package"] != "crossbuild-essential-" + arch:
# NOTE(review): the continue/yield statements (lines 331, 334-335) are
# missing from this paste.
332 p["Depends"] += ", libc-dev:%s, libstdc++-dev:%s" % (arch, arch)
333 strip_dict(p, binfields)
# Yield host-architecture packages usable during a cross build, skipping
# arch:all, Multi-Arch: foreign, Essential and known-broken packages.
# NOTE(review): the continue/yield statements between the tests (lines 339,
# 341, 343, 345, 347) are missing from this paste.
336 def make_binary_list_host(mirror, arch):
337 for p in parse_deb822(mirror.fetch_binaries(arch)):
338 if p["Architecture"] == "all":
340 if p.get("Multi-Arch") == "foreign":
342 if p.get("Essential") == "yes":
344 if p["Package"] in bad_foreign_packages:
346 strip_dict(p, binfields)
def make_binary_list(mirror, arch):
    """Concatenate the build- and host-architecture binary package streams."""
    build_part = make_binary_list_build(mirror, arch)
    host_part = make_binary_list_host(mirror, arch)
    return itertools.chain(build_part, host_part)
# Yield source stanzas relevant to cross building for *arch*.  Sources whose
# Architecture field cannot match emit a negative marker instead, so that
# latest_versions suppresses any older, matching version of the same source.
353 def make_source_list(mirror, arch):
354 for p in parse_deb822(mirror.fetch_sources()):
355 if p.get("Extra-Source-Only") == "yes":
# Keep only sources buildable on *arch* (wildcard-aware matching).
357 if any(arch_match(arch, pattern)
358 for pattern in p["Architecture"].split()):
359 strip_dict(p, srcfields)
360 strip_alternatvies(p, srcdepfields)
# NOTE(review): lines 361-362 (yield p / else) are missing from this paste.
363 # dummy entry preventing older matching versions
364 yield {"Package": p["Package"], "Version": p["Version"],
365 "Negative-Entry": "yes"}
# Run dose-builddebcheck for cross-compiling from BUILD_ARCH to *arch* and
# yield (source, version, satisfiable, reason) tuples per source package.
# NOTE(review): many lines of the command construction and result handling
# are missing from this paste.
367 def check_bdsat(mirror, arch):
369 "--deb-native-arch=" + BUILD_ARCH,
370 "--deb-host-arch=" + arch,
371 "--deb-drop-b-d-indep",
372 "--deb-profiles=" + ",".join(PROFILES),
377 "--deb-emulate-sbuild",
# Materialise the package and source lists into temp files for dose.
380 with tempfile.NamedTemporaryFile("w", encoding="utf8") as bintmp, \
381 tempfile.NamedTemporaryFile("w", encoding="utf8") as srctmp:
382 for p in make_binary_list(mirror, arch):
383 bintmp.write(serialize_deb822(p))
385 cmd.append(bintmp.name)
387 for p in latest_versions(make_source_list(mirror, arch)):
388 srctmp.write(serialize_deb822(p))
390 cmd.append(srctmp.name)
392 dose_result = call_dose_builddebcheck(cmd)
393 next(dose_result) # skip header
394 for d in dose_result:
395 if d["status"] == "ok":
396 yield (d["package"], d["version"], True, None)
# Summarise the first failure: strip version/arch noise from the first
# unsatisfied dependency or conflict.
400 reason = "missing %s" % r["missing"]["pkg"]["unsat-dependency"].split()[0].split(":", 1)[0]
401 elif "conflict" in r:
402 r = r["conflict"]["pkg1"]["unsat-conflict"]
# A "!=" conflict means a version skew between the two architectures.
403 reason = "skew " if ' (!= ' in r else "conflict "
404 reason += r.split()[0].split(':', 1)[0]
407 yield (d["package"], d["version"], False, reason)
# Recompute dependency satisfiability for *architecture* and synchronise the
# depstate table with the new results inside one transaction.
409 def update_depcheck(mirror, db, architecture):
410 now = datetime.datetime.utcnow()
411 mirror.update_release()
# state maps source -> (version, satisfiable, reason); its initialiser
# (line 412) and the COMMIT (line ~430) are missing from this paste.
413 for source, version, satisfiable, reason in check_bdsat(mirror, architecture):
414 state[source] = (version, satisfiable, reason)
415 with contextlib.closing(db.cursor()) as cur:
416 cur.execute("BEGIN;")
417 cur.execute("SELECT source, version, satisfiable, reason FROM depstate WHERE architecture = ?;",
# Delete rows whose stored state changed; unchanged rows are presumably
# dropped from `state` (line 421-422 missing) so only new rows get inserted.
419 for source, version, satisfiable, reason in list(cur.fetchall()):
420 if state.get(source) == (version, satisfiable, reason):
423 cur.execute("DELETE FROM depstate WHERE source = ? AND version = ? AND architecture = ?;",
424 (source, version, architecture))
425 cur.executemany("INSERT INTO depstate (source, architecture, version, satisfiable, reason) VALUES (?, ?, ?, ?, ?);",
426 ((source, architecture, version, satisfiable, reason)
427 for source, (version, satisfiable, reason) in state.items()))
428 cur.execute("UPDATE depcheck SET releasetime = ?, updatetime = ?, giveback = 0 WHERE architecture = ?",
429 (mirror.releasetime, now, architecture))
# Entry point body (the enclosing def, ~line 432, is not visible): refresh
# every architecture flagged for giveback, stale for 6+ hours, or behind the
# mirror's current release.
433 mirror = DebianMirror(MIRROR)
434 mirror.update_release()
# PARSE_DECLTYPES lets sqlite3 round-trip the stored datetime columns.
435 db = sqlite3.connect("db", detect_types=sqlite3.PARSE_DECLTYPES)
437 cur.execute("SELECT architecture, releasetime, updatetime, giveback FROM depcheck;")
438 lastupdate = datetime.datetime.utcnow() - datetime.timedelta(hours=6)
439 for architecture, releasetime, updatetime, giveback in list(cur.fetchall()):
440 if giveback or updatetime < lastupdate or releasetime < mirror.releasetime:
441 print("update %s" % architecture)
442 update_depcheck(mirror, db, architecture)
# Script guard; the main() call on the following line (445) is not visible.
444 if __name__ == "__main__":