tint2/packaging/version_status.py

#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import datetime
import xml.etree.ElementTree as ET
import ftplib
import gzip
import json
import re
from StringIO import StringIO
import urllib2

# Returns true if `value` is an integer represented as a string.
def is_int(value):
    # type: (str) -> bool
    try:
        int(value)
    except ValueError:
        return False
    return True

# Returns a new string with all instances of multiple whitespace
# replaced with a single space.
def collapse_multiple_spaces(line):
    # type: (str) -> str
    return " ".join(line.split())
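# For example, collapse_multiple_spaces("a   b\t c") returns "a b c".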

# Extracts the file name from a line of an FTP listing.
# The input must be a valid directory entry (starting with "-" or "d").
def ftp_file_name_from_listing(line):
    # type: (str) -> str
    line = collapse_multiple_spaces(line)
    return line.split(" ", 8)[-1]
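# Illustrative example (made-up listing line, not from a real server):
#   ftp_file_name_from_listing("-rw-r--r--   1 ftp  ftp  1024 Jan  1 00:00 tint2-1.0.tar.gz")
#   returns "tint2-1.0.tar.gz".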

# Extracts a list of the directories and a list of the files
# from an FTP listing.
def ftp_list_dir_process_listing(lines):
    # type: (List[str]) -> Tuple[List[str], List[str]]
    dirs = []
    files = []
    for line in lines:
        if line.startswith("d"):
            dirs.append(ftp_file_name_from_listing(line))
        elif line.startswith("-"):
            files.append(ftp_file_name_from_listing(line))
    return dirs, files

# Lists the remote FTP directory located at `path`.
# Returns a list of the directories and a list of the files.
def ftp_list_dir(ftp, path):
    # type: (ftplib.FTP, str) -> Tuple[List[str], List[str]]
    ftp.cwd(path)
    lines = []
    ftp.retrlines("LIST", lines.append)
    return ftp_list_dir_process_listing(lines)
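# Illustrative usage (server and path are placeholders):
#   ftp = ftplib.FTP("ftp.example.org")
#   ftp.login()
#   dirs, files = ftp_list_dir(ftp, "/pub")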

# Downloads a binary file to a string.
# Returns the string.
def ftp_download(ftp, path):
    # type: (ftplib.FTP, str) -> str
    blocks = []
    ftp.retrbinary("RETR {0}".format(path), blocks.append)
    return "".join(blocks)

# Extracts the list of links from an HTML string.
def http_links_from_listing(html):
    # type: (str) -> List[str]
    pattern = re.compile(r"""href=['"]+([^'"]+)['"]+""")
    return re.findall(pattern, html)

# Extracts the list of paths (relative links, except to ../*) from an HTML string.
def http_paths_from_listing(html):
    # type: (str) -> List[str]
    paths = []
    for link in http_links_from_listing(html):
        if link.startswith(".."):
            continue
        if link == "./" or link == "/":
            continue
        if "://" in link:
            continue
        paths.append(link)
    return paths
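# Illustrative example (made-up HTML):
#   http_paths_from_listing('<a href="../">..</a> <a href="25/">25</a> <a href="http://x.org/">x</a>')
#   returns ["25/"]; the parent link and the absolute URL are filtered out.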

# Downloads a file from a URL and returns it as a decoded string,
# using the charset from the response headers (UTF-8 if absent).
def http_download_txt(url):
    # type: (str) -> str
    r = urllib2.urlopen(url)
    encoding = r.headers.getparam("charset")
    if not encoding:
        encoding = "utf-8"
    return r.read().decode(encoding)

# Extracts the list of paths (relative links, except to ../*) from the HTML code
# located at `url`. Returns an empty list if the URL cannot be fetched.
def http_list_dir(url):
    # type: (str) -> List[str]
    try:
        html = http_download_txt(url)
    except:
        return []
    return http_paths_from_listing(html)

# Extracts the version and maintainer info for a package, from a Debian repository Packages file.
def deb_packages_extract_version(packages, name):
    # type: (str, str) -> Tuple[str, str]
    inside = False
    version = None
    maintainer = None
    for line in packages.split("\n"):
        if line == "Package: " + name:
            inside = True
        elif not line:
            if inside:
                break
        else:
            if inside:
                if line.startswith("Version:"):
                    version = line.split(":", 1)[-1].strip()
                elif line.startswith("Maintainer:"):
                    maintainer = line.split(":", 1)[-1].strip()
    return version, maintainer
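# Illustrative Packages stanza (abridged; names and version are made up):
#   Package: tint2
#   Version: 0.12.7-1
#   Maintainer: Jane Doe <jane@example.org>
#   (a blank line ends the stanza)
# -> ("0.12.7-1", "Jane Doe <jane@example.org>")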

# Extracts the version and maintainer info for a package, from an Arch PKGBUILD file.
def arch_pkgbuild_extract_version(pkgbuild):
    # type: (str) -> Tuple[str, str]
    version = None
    maintainer = None
    for line in pkgbuild.split("\n"):
        if line.startswith("# Maintainer:"):
            maintainer = line.split(":", 1)[-1].strip()
        elif line.startswith("pkgver="):
            version = line.split("=", 1)[-1].strip()
    return version, maintainer
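# Illustrative PKGBUILD lines that this looks for (names and version are made up):
#   # Maintainer: John Doe <john@example.org>
#   pkgver=0.12.7
# -> ("0.12.7", "John Doe <john@example.org>")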

# Debian
def get_debian_release_version(release):
    data = http_download_txt("http://metadata.ftp-master.debian.org/changelogs/main/t/tint2/{0}_changelog".format(release))
    # The version is taken from the parentheses on the first changelog line,
    # the maintainer from the first trailer line starting with " --".
    version = data.split("\n", 1)[0].split("(", 1)[-1].split(")", 1)[0].strip()
    maintainer = [line.split("--", 1)[-1] for line in data.split("\n") if line.startswith(" --")][0].split(" ")[0].strip()
    return release, version, maintainer
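# Illustrative first changelog line that the version is parsed from
# (the version number is made up):
#   tint2 (0.12.7-1) unstable; urgency=medium
# -> version "0.12.7-1"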

def get_debian_versions():
    print >> sys.stderr, "Debian ..."
    return "Debian", "debian", [get_debian_release_version(release)
                                for release in ["stable", "testing", "unstable", "experimental"]]

# Ubuntu
def get_ubuntu_versions():
    print >> sys.stderr, "Ubuntu ..."
    data = http_download_txt("https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=tint2&exact_match=true")
    data = json.loads(data)["entries"]
    data.reverse()
    versions = []
    for package in data:
        if package["status"] == "Published":
            version = package["source_package_version"]
            release = package["distro_series_link"].split("/")[-1]
            maintainer = package["package_maintainer_link"]
            versions.append((release, version, maintainer))
    return "Ubuntu", "ubuntu", versions

# BunsenLabs
def get_bunsenlabs_versions():
    print >> sys.stderr, "BunsenLabs ..."
    dirs = http_list_dir("https://eu.pkg.bunsenlabs.org/debian/dists/")
    versions = []
    for d in dirs:
        if d.endswith("/") and "/" not in d[:-1]:
            release = d.replace("/", "")
            packages = http_download_txt("https://eu.pkg.bunsenlabs.org/debian/dists/{0}/main/binary-amd64/Packages".format(release))
            version, maintainer = deb_packages_extract_version(packages, "tint2")
            if version:
                versions.append((release, version, maintainer))
    return "BunsenLabs", "bunsenlabs", versions

# Arch
def get_arch_versions():
    print >> sys.stderr, "Arch ..."
    pkgbuild = http_download_txt("https://git.archlinux.org/svntogit/community.git/plain/trunk/PKGBUILD?h=packages/tint2")
    version, maintainer = arch_pkgbuild_extract_version(pkgbuild)
    return "Arch Linux", "archlinux", [("Community", version, maintainer)]

# Fedora
def get_fedora_versions():
    print >> sys.stderr, "Fedora ..."
    dirs = http_list_dir("http://mirror.switch.ch/ftp/mirror/fedora/linux/development/")
    versions = []
    for d in dirs:
        if d.endswith("/") and "/" not in d[:-1]:
            release = d.replace("/", "")
            packages = http_list_dir("http://mirror.switch.ch/ftp/mirror/fedora/linux/development/{0}/Everything/source/tree/Packages/t/".format(release))
            for p in packages:
                if p.startswith("tint2-"):
                    version = p.split("-", 1)[-1].split(".fc")[0]
                    v = (release, version, "")
                    if v not in versions:
                        versions.append(v)
    return "Fedora", "fedora", versions

# Red Hat (EPEL)
def get_redhat_epel_versions():
    print >> sys.stderr, "RedHat ..."
    dirs = http_list_dir("http://mirror.switch.ch/ftp/mirror/epel/")
    versions = []
    for d in dirs:
        if d.endswith("/") and "/" not in d[:-1] and is_int(d[:-1]):
            release = d.replace("/", "")
            packages = http_list_dir("http://mirror.switch.ch/ftp/mirror/epel/{0}/SRPMS/t/".format(release))
            for p in packages:
                if p.startswith("tint2-"):
                    version = p.split("-", 1)[-1].split(".el")[0]
                    v = (release, version, "")
                    if v not in versions:
                        versions.append(v)
    return "RedHat (EPEL)", "rhel", versions

# SUSE
def get_suse_versions():
    print >> sys.stderr, "Suse ..."
    ftp = ftplib.FTP("mirror.switch.ch")
    ftp.login()
    releases, _ = ftp_list_dir(ftp, "/mirror/opensuse/opensuse/ports/update/leap/")
    versions = []
    for release in releases:
        root = "/mirror/opensuse/opensuse/ports/update/leap/{0}/oss/repodata/".format(release)
        _, files = ftp_list_dir(ftp, root)
        for fname in files:
            if fname.endswith("-primary.xml.gz"):
                data = ftp_download(ftp, "{0}/{1}".format(root, fname))
                xml = gzip.GzipFile(fileobj=StringIO(data)).read()
                # Parse into a separate variable so `root` (the directory path)
                # is not overwritten for subsequent files.
                tree = ET.fromstring(xml)
                for package in tree.findall("{http://linux.duke.edu/metadata/common}package"):
                    name = package.find("{http://linux.duke.edu/metadata/common}name").text
                    if name == "tint2":
                        version = package.find("{http://linux.duke.edu/metadata/common}version").get("ver")
                        versions.append((release, version, ""))
    ftp.quit()
    return "OpenSUSE", "opensuse", versions

# Gentoo
def get_gentoo_versions():
    print >> sys.stderr, "Gentoo ..."
    files = http_list_dir("https://gitweb.gentoo.org/repo/gentoo.git/tree/x11-misc/tint2")
    versions = []
    for f in files:
        if "tint2" in f and f.endswith(".ebuild"):
            version = f.split("tint2-")[-1].split(".ebuild")[0]
            v = ("", version, "")
            if v not in versions:
                versions.append(v)
    return "Gentoo", "gentoo", versions

# Void
def get_void_versions():
    print >> sys.stderr, "Void ..."
    template = http_download_txt("https://raw.githubusercontent.com/void-linux/void-packages/master/srcpkgs/tint2/template")
    versions = []
    version = None
    maintainer = None
    for line in template.split("\n"):
        if line.startswith("version="):
            version = line.split("=", 1)[-1].replace('"', "").strip()
        elif line.startswith("maintainer="):
            maintainer = line.split("=", 1)[-1].replace('"', "").strip()
    if version:
        versions.append(("", version, maintainer))
    return "Void Linux", "void", versions

# Alpine
def get_alpine_versions():
    print >> sys.stderr, "Alpine ..."
    apkbuild = http_download_txt("https://git.alpinelinux.org/cgit/aports/plain/community/tint2/APKBUILD")
    versions = []
    version = None
    maintainer = None
    for line in apkbuild.split("\n"):
        if line.startswith("pkgver="):
            version = line.split("=", 1)[-1].replace('"', "").strip()
        elif line.startswith("# Maintainer:"):
            maintainer = line.split(":", 1)[-1].replace('"', "").strip()
    if version:
        versions.append(("", version, maintainer))
    return "Alpine Linux", "alpine", versions

# Slackware
def get_slack_versions():
    print >> sys.stderr, "Slackware ..."
    dirs = http_list_dir("https://slackbuilds.org/slackbuilds/")
    versions = []
    for d in dirs:
        if d.endswith("/") and "/" not in d[:-1]:
            release = d.replace("/", "")
            try:
                info = http_download_txt("https://slackbuilds.org/slackbuilds/{0}/desktop/tint2/tint2.info".format(release))
            except:
                continue
            version = None
            maintainer = None
            for line in info.split("\n"):
                if line.startswith("VERSION="):
                    version = line.split("=", 1)[-1].replace('"', "").strip()
                elif line.startswith("MAINTAINER="):
                    maintainer = line.split("=", 1)[-1].replace('"', "").strip()
            if version:
                versions.append((release, version, maintainer))
    return "Slackware", "slackware", versions

# FreeBSD
def get_freebsd_versions():
    print >> sys.stderr, "FreeBSD ..."
    makefile = http_download_txt("https://svnweb.freebsd.org/ports/head/x11/tint/Makefile?view=co")
    versions = []
    version = None
    maintainer = None
    for line in makefile.split("\n"):
        if line.startswith("PORTVERSION="):
            version = line.split("=", 1)[-1].strip()
        elif line.startswith("MAINTAINER="):
            maintainer = line.split("=", 1)[-1].strip()
    if version:
        versions.append(("", version, maintainer))
    return "FreeBSD", "freebsd", versions

# OpenBSD
def get_openbsd_versions():
    print >> sys.stderr, "OpenBSD ..."
    makefile = http_download_txt("http://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/ports/x11/tint2/Makefile?content-type=text/plain")
    versions = []
    version = None
    for line in makefile.split("\n"):
        if line.startswith("V="):
            version = line.split("=", 1)[-1].strip()
    if version:
        versions.append(("", version, ""))
    return "OpenBSD", "openbsd", versions

# Upstream
def get_tint2_version():
    print >> sys.stderr, "Upstream ..."
    readme = http_download_txt("https://gitlab.com/o9000/tint2/raw/master/README.md")
    version = readme.split("\n", 1)[0].split(":", 1)[-1].strip()
    return version
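# The parsing above assumes the first line of the upstream README looks roughly like
# "latest stable release: 0.12.7" (illustrative); everything after the first ":" on
# that line is taken as the version string.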

def main():
    latest = get_tint2_version()
    distros = []
    distros.append(get_debian_versions())
    distros.append(get_bunsenlabs_versions())
    distros.append(get_ubuntu_versions())
    distros.append(get_fedora_versions())
    distros.append(get_redhat_epel_versions())
    #distros.append(get_suse_versions())
    distros.append(get_alpine_versions())
    distros.append(get_slack_versions())
    distros.append(get_arch_versions())
    distros.append(get_void_versions())
    distros.append(get_gentoo_versions())
    distros.append(get_freebsd_versions())
    distros.append(get_openbsd_versions())
    print "| Distribution | Release | Version | Status |"
    print "| ------------ | ------- | ------- | ------ |"
    for dist, dcode, releases in distros:
        icon = "![](numix-icons/distributor-logo-{0}.svg.png)".format(dcode)
        for r in releases:
            # Drop the packaging revision (e.g. "-1" in "0.12.7-1") before
            # comparing with the upstream version.
            if r[1].split("-", 1)[0] == latest:
                status = ":white_check_mark: Latest"
            else:
                status = ":warning: Out of date"
            print "| {0} {1} | {2} | {3} | {4} |".format(icon, dist, r[0], r[1], status)
    utc_datetime = datetime.datetime.utcnow()
    print ""
    print "Last updated:", utc_datetime.strftime("%Y-%m-%d %H:%M UTC")


if __name__ == "__main__":
    main()