#!/usr/bin/env python3

"""
Builds deb packages and uploads them to S3-compatible storage or Bintray.
Works locally, on GitHub Actions and on CircleCI.
Detects which package[s] need to be built.
Supports "release" and PR/testing archives.

scan              - scan the current repository for packages to be built
build             - locate and build packages
upload <filename> - upload one package to S3 or Bintray
ci                - detect CircleCI PRs, build and upload packages
delete_from_archive <filename> - delete filename from archive

Features:
 - Implements a CI/CD workflow based on package archives
 - Supports adding packages to an existing S3 archive without requiring a
   local mirror
 - Supports multiple packages in the same git repository
 - GPG signing
 - Phased updates (rolling deployments)
 - Automatic changelog updates
 - Easy to debug
"""
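
# Example invocation (assuming this script is saved as debops-ci;
# the bucket name is hypothetical):
#   ./debops-ci ci --bucket-name my-deb-archive --distro unstable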

# TODO: fix S3 credentials passing security
# TODO: Phased-Update-Percentage

# debdeps: git
from argparse import ArgumentParser
import os
from os import getenv
from pathlib import Path
from requests.auth import HTTPBasicAuth
from subprocess import run
from tempfile import mkdtemp, NamedTemporaryFile
from textwrap import dedent
from time import sleep
from typing import List
from hashlib import sha256
import requests
import sys

try:
    import gnupg
except ImportError:
    gnupg = None

# TODO remove these
BINTRAY_API = "https://bintray.com/api/v1"
DEFAULT_ORG = "ooni"
DEFAULT_PR_REPO = "internal-pull-requests"
DEFAULT_MASTER_REPO = "internal-master"
DEFAULT_REPO = "internal-pull-requests"

EXAMPLE_CONFIG = """
"""

assert sys.version_info >= (3, 7, 0), "Python 3.7.0 or later is required"

# Global configuration, set in main() from the parsed CLI arguments
conf = None


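# Subprocess helpers: run2() returns stdout and raises on failure; runi()
# runs a command in a working directory, optionally under sudo; runc()
# returns (returncode, stdout) without raising.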
def run2(cmd, **kw):
    if conf.show_commands:
        print(f"Running {cmd}\nKW: {kw}")
    p = run(cmd.split(), capture_output=True, **kw)
    if p.returncode != 0:
        stdout = p.stdout.decode().strip()
        print(f"--stdout--\n{stdout}\n----\n")
        stderr = p.stderr.decode().strip()
        print(f"--stderr--\n{stderr}\n----\n")
        raise Exception(f"'{cmd}' returned: {p.returncode}")
    return p.stdout.decode().strip()


def runi(cmd: str, cwd: Path, sudo=False) -> None:
    if sudo:
        cmd = f"sudo {cmd}"
    run(cmd.split(), cwd=cwd, check=True)


def runc(cmd):
    print("Running:", cmd)
    r = run(cmd.split(), capture_output=True)
    print("Retcode", r.returncode)
    return r.returncode, r.stdout.decode()


def detect_changed_packages() -> List[Path]:
    """Detects files named debian/changelog
    that have been changed in the current branch
    """
    DCH = "debian/changelog"
    # TODO: find a cleaner method:
    commit = run2("git merge-base remotes/origin/master HEAD")
    changes = run2(f"git diff --name-only {commit}")
    pkgs = set()
    for c in changes.splitlines():
        c = Path(c)
        if c.as_posix().endswith(DCH):
            pkgs.add(c.parent.parent)
            continue
        while c.name:
            if c.joinpath(DCH).is_file():
                pkgs.add(c)
            c = c.parent

    return sorted(pkgs)


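# Example: https://github.com/<org>/<repo>/compare/<sha1>...<sha2>
# (two full 40-char SHAs) is shortened to 8-character SHAs.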
def trim_compare(url: str) -> str:
    """Shorten GitHub URLs used to compare changes"""
    if url.startswith("https://github.com/") and "..." in url:
        base, commits = url.rsplit("/", 1)
        if len(commits) == 83:  # two 40-char SHAs joined by "..."
            beginning = commits[0:8]
            end = commits[43 : 43 + 8]
            return f"{base}/{beginning}...{end}"

    return url


def _set_pkg_version_from_circleci(p, ver) -> bool:
    """When running in CircleCI, access env vars to set the package
    version. Returns True when the package should be built."""
    comp = trim_compare(getenv("CIRCLE_COMPARE_URL", ""))  # show changes in VCS
    if not comp:
        # https://discuss.circleci.com/t/circle-compare-url-is-empty/24549/8
        comp = getenv("CIRCLE_PULL_REQUEST")

    if getenv("CIRCLE_PULL_REQUEST"):
        # This is a PR: build a ~pr<N>-<N> version. CIRCLE_PR_NUMBER is broken
        pr_num = getenv("CIRCLE_PULL_REQUEST", "").rsplit("/", 1)[-1]
        build_num = getenv("CIRCLE_BUILD_NUM")
        ver = f"{ver}~pr{pr_num}-{build_num}"
        print(f"CircleCI Pull Request detected - using version {ver}")
        run2(f"dch -b -v {ver} {comp}", cwd=p)
        run2(f"dch -r {ver} {comp}", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
        return True

    elif getenv("CIRCLE_BRANCH") == "master":
        # This is a build outside of a PR and in the mainline branch
        print(f"CircleCI mainline build detected - using version {ver}")
        run2(f"dch -b -v {ver} {comp}", cwd=p)
        run2(f"dch -r {ver} {comp}", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
        return True

    else:
        # This is a build for a new branch but without a PR: ignore it
        return False


def _set_pkg_version_from_github_actions(p, ver) -> bool:
    """When running in GitHub Actions, access env vars to set
    the package version"""
    # GITHUB_REF syntax: refs/heads/<branch-name> or refs/pull/<PR#>/merge
    gh_ref = getenv("GITHUB_REF")
    try:
        pr_num = int(gh_ref.split("/")[2])
    except ValueError:
        pr_num = None

    gh_run_number = int(getenv("GITHUB_RUN_NUMBER"))
    print(f"GitHub Actions PR #: {pr_num} Run #: {gh_run_number}")
    print("SHA " + getenv("GITHUB_SHA"))

    if pr_num is None:
        if gh_ref.endswith("/master"):
            print(f"GitHub release build detected - using version {ver}")
            run2(f"dch -b -v {ver} ''", cwd=p)
            run2("dch --release ''", cwd=p)
            ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
            assert ver == ver2, ver + " <--> " + ver2
            return True

        else:
            print("Not a PR or release build. Skipping.")  # run by "on: push"
            return False

    else:
        # This is a PR: build a ~pr<N>-<N> version.
        ver = f"{ver}~pr{pr_num}-{gh_run_number}"
        print(f"GitHub Pull Request detected - using version {ver}")
        run2(f"dch -b -v {ver} ''", cwd=p)
        run2("dch --release ''", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
        return True


def buildpkg(p) -> List[Path]:
    """Build one package, installing required dependencies"""
    print(f"Building package in {p}")
    ver = run2("dpkg-parsechangelog --show-field Version", cwd=p)
    assert ver, f"No version number found in {p}/debian/changelog"
    sudo = True
    should_build = False
    if getenv("CIRCLECI"):
        # Running in CircleCI
        sudo = False
        should_build = _set_pkg_version_from_circleci(p, ver)
    elif getenv("GITHUB_EVENT_PATH"):
        # Running in GitHub Actions
        sudo = False
        should_build = _set_pkg_version_from_github_actions(p, ver)

    if not should_build:
        return []

    runi("apt-get build-dep -qy --no-install-recommends .", p, sudo=sudo)
    runi("fakeroot debian/rules build", p)
    runi("fakeroot debian/rules binary", p)
    # debian/files is written during the build; each line starts with the
    # generated package filename, which dpkg places in the parent directory
    with p.joinpath("debian/files").open() as f:
        return [p.parent.joinpath(line.split()[0]) for line in f]


def detect_archive_backend():
    """Pick the archive backend based on the available credentials.
    Returns "bintray", "s3" or None."""
    if getenv("BINTRAY_USERNAME") and getenv("BINTRAY_API_KEY"):
        return "bintray"

    if getenv("AWS_ACCESS_KEY_ID") and getenv("AWS_SECRET_ACCESS_KEY"):
        return "s3"


def setup_gpg_key(keyfp, tmpdir):
    """Import key from env var or use existing keyring"""
    if gnupg is None:
        print("Please install python3-gnupg")
        sys.exit(1)

    if keyfp is None and "DEB_GPG_KEY" not in os.environ:
        print(
            "Error: place a GPG key in the DEB_GPG_KEY env var or"
            " fetch it from the local keyring using --gpg-key-fp"
        )
        sys.exit(1)

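    # DEB_GPG_KEY is expected to hold an ASCII-armored private key,
    # e.g. one exported with: gpg --armor --export-secret-keys <fingerprint>
    # (the exact key used is deployment-specific)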
|     if "DEB_GPG_KEY" in os.environ:
 | |
|         gpg = gnupg.GPG(gnupghome=tmpdir.as_posix())
 | |
|         import_result = gpg.import_keys(os.getenv("DEB_GPG_KEY"))
 | |
|         assert import_result.count == 1
 | |
|         fp = import_result.fingerprints[0]
 | |
|         if keyfp:
 | |
|             assert keyfp == fp
 | |
| 
 | |
|     else:
 | |
|         gpg = gnupg.GPG()
 | |
|         assert gpg.list_keys(keys=keyfp)
 | |
| 
 | |
|     return gpg, keyfp
 | |
| 
 | |
| 
 | |
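# CI entry point: pick the archive backend from the available credentials,
# install the build tools, build every package whose debian/changelog
# changed in this branch, then upload the results.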
def ci(args) -> None:
    # TODO: detect sudo presence

    backend_name = detect_archive_backend()
    if backend_name == "bintray":
        backend = Bintray()
    elif backend_name == "s3":
        backend = S3()
    else:
        print(
            "Either set BINTRAY_USERNAME / BINTRAY_API_KEY env vars or "
            "AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY"
        )
        sys.exit(1)
    del backend_name

    run2("apt-get update -q")
    run2("apt-get install -qy --no-install-recommends git")
    pkgdirs = detect_changed_packages()
    if not pkgdirs:
        print("Nothing to build")
        return

    print(f"Building {pkgdirs}")
    run2("apt-get install -qy --no-install-recommends devscripts")

    pkgs_lists = [buildpkg(pd) for pd in pkgdirs]
    print(f"Processing {pkgs_lists}")
    for pli in pkgs_lists:
        for p in pli:
            backend.upload(p, args)


def build() -> None:
    """Run manual build on workstation"""
    pkgdirs = detect_changed_packages()
    pkgs_lists = [buildpkg(pd) for pd in pkgdirs]
    print("Outputs:")
    for pli in pkgs_lists:
        for p in pli:
            print(p)


class DuplicatePkgError(Exception):
    pass


def check_duplicate_package(pkgblock, packages_text):
    """Raise DuplicatePkgError if a package with the same name and
    version is already listed in the Packages index"""
    li = pkgblock.splitlines()
    assert li[0].startswith("Package: "), li
    pname = li[0].split(" ", 1)[1]
    assert li[1].startswith("Version: "), li
    pver = li[1].split(" ", 1)[1]

    m = f"Package: {pname}\nVersion: {pver}"
    if m in packages_text:
        raise DuplicatePkgError()


class Bintray:
    """Bintray backend"""

    def __init__(self):
        self._btuser = getenv("BINTRAY_USERNAME")
        assert self._btuser, "Missing BINTRAY_USERNAME"

    def upload(self, fi, args) -> None:
        """Upload to Bintray"""
        repo = args.repo  # previously referenced without being assigned
        assert repo, "Please specify a repository"
        assert fi.is_file()
        pname, pver, arch = fi.name.split("_")
        auth = HTTPBasicAuth(self._btuser, getenv("BINTRAY_API_KEY"))
        dist = "unstable"
        url = (
            f"{BINTRAY_API}/content/{args.org}/{repo}/{pname}/{pver}/{fi.name};"
            f"deb_distribution={dist};deb_component=main;deb_architecture=amd64;publish=1"
        )
        with open(fi, "rb") as f:
            resp = requests.put(url, auth=auth, data=f)

        if not resp.ok:
            print(f"Error {resp.text} when calling {resp.request.url}")
            sys.exit(1)

    def delete_package(self, args, extra) -> None:
        """Delete package from Bintray"""
        auth = HTTPBasicAuth(self._btuser, getenv("BINTRAY_API_KEY"))
        filename = extra[0]
        assert filename.endswith(".deb")
        assert args.repo, "Please specify a repository"
        url = f"{BINTRAY_API}/content/{args.org}/{args.repo}/{filename}"
        resp = requests.delete(url, auth=auth)
        if not resp.ok:
            print(f"Error {resp.text} when calling {resp.request.url}")
            sys.exit(1)


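# The S3 archive follows the usual apt layout:
#   dists/<distro>/InRelease
#   dists/<distro>/main/binary-<arch>/Packages
#   dists/<distro>/main/binary-<arch>/<package>.deb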
class S3:
    """S3 backend"""

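    # InRelease is the clearsigned Release file: apt verifies its signature
    # and uses the SHA256 entry below to validate the Packages index.
    # FIXME: Date and Valid-Until are hardcoded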
    def generate_release_file(self, conf, sha, size):
        r = dedent(
            f"""
            Acquire-By-Hash: no
            Architectures: {conf.arch}
            Codename: {conf.distro}
            Components: main
            Date: Thu, 07 Nov 2019 14:23:37 UTC
            Origin: private
            Valid-Until: Thu, 14 Nov 2029 14:23:37 UTC
            SHA256:
             {sha} {size} main/binary-{conf.arch}/Packages
        """
        )
        return r

    def init_archive(self, conf):
        """Initialize the archive"""
        assert conf.bucket_name
        r, o = runc(f"s3cmd mb s3://{conf.bucket_name}")
        if r == 0:
            print("Bucket created")
            runc(f"s3cmd ws-create s3://{conf.bucket_name}")

        r, out = runc(f"s3cmd ws-info s3://{conf.bucket_name}")
        for li in out.splitlines():
            if li.startswith("Website endpoint"):
                s3url = li.split()[2]
                break

        # Initialize distro if needed. Check for InRelease
        baseuri = f"s3://{conf.bucket_name}/dists/{conf.distro}"
        r, o = runc(f"s3cmd info --no-progress {baseuri}/InRelease")
        if r == 0:
            return

        if r != 12:
            print(f"Unexpected return code {r} {o}")
            sys.exit(1)

        # InRelease file not found: create lock file
        print("Creating initial lock file")
        tf = NamedTemporaryFile()
        # put = "s3cmd --acl-public --guess-mime-type --no-progress put"
        put = "s3cmd --guess-mime-type --no-progress put"
        r2, o = runc(f"{put} --no-progress {tf.name} {baseuri}/.debrepos3.lock")
        assert r2 == 0, repr(o)

        # Create empty InRelease
        r2, o = runc(f"{put} {tf.name} {baseuri}/InRelease")

        # Create empty Packages
        r, o = runc(f"{put} {tf.name} {baseuri}/main/binary-{conf.arch}/Packages")

        # Create index
        html = dedent(
            f"""
            <html><body>
            <p>Create /etc/apt/sources.list.d/{conf.distro}.list containing:</p>
            <pre>deb {s3url} {conf.distro} main</pre>
            </body></html>
        """
        )
        with open(tf.name, "w") as f:
            f.write(html)

        r, o = runc(f"{put} {tf.name} {baseuri}/index.html")

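    # The .debrepos3.nolock -> .debrepos3.lock rename acts as a crude
    # distributed mutex: "s3cmd mv" fails while another runner holds the
    # lock, serializing concurrent appends to the Packages index.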
    def lock(self, conf, baseuri):
        """Rename semaphore file"""
        print(f"Locking {baseuri} ...")
        cmd = f"s3cmd mv --no-progress {baseuri}/.debrepos3.nolock {baseuri}/.debrepos3.lock"
        while True:
            r, o = runc(cmd)
            print(r)
            if r == 0:
                return

            print("The distro is locked. Waiting...")
            sleep(10)

    def unlock(self, baseuri):
        """Rename semaphore file"""
        r, o = runc(
            f"s3cmd mv --no-progress {baseuri}/.debrepos3.lock {baseuri}/.debrepos3.nolock"
        )
        print(r)

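    # dpkg-scanpackages emits the Packages stanza for the .deb; the
    # Filename field is rewritten to point into the archive layout.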
    def scanpackages(self, conf, debfn) -> str:
        r, o = runc(f"dpkg-scanpackages {debfn}")
        assert r == 0, repr(r)
        out = []
        for line in o.splitlines():
            if line.startswith("Filename: "):
                fn = line.split("/")[-1]
                line = f"Filename: dists/{conf.distro}/main/binary-{conf.arch}/{fn}"
            out.append(line)

        return "\n".join(out) + "\n"

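    # Append the new package stanza to the remote Packages index, then
    # regenerate, clearsign and upload InRelease together with the .deb.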
    def _inner_upload(self, debfn, tmpdir, baseuri, pkgblock: str, gpg, gpgkeyfp):
        # Fetch existing Packages file
        packages = tmpdir / "Packages"
        run2(f"s3cmd --no-progress get {baseuri}/main/binary-{conf.arch}/Packages {packages}")

        # Check for already uploaded package
        check_duplicate_package(pkgblock, packages.read_text())

        # Append, then read whole file back
        with packages.open("a") as f:
            f.write(pkgblock)

        data = packages.read_bytes()
        packagesf_size = len(data)
        packagesf_sha = sha256(data).hexdigest()
        del data

        # Create, sign, upload InRelease
        release = tmpdir / "Release"
        inrelease = tmpdir / "InRelease"
        rfdata = self.generate_release_file(conf, packagesf_sha, packagesf_size)
        release.write_text(rfdata)
        sig = gpg.sign(release.read_text(), keyid=gpgkeyfp)
        assert sig.status == "signature created"
        inrelease.write_bytes(sig.data)

        # Upload InRelease and Packages
        put = "s3cmd --acl-public --guess-mime-type --no-progress put"
        run2(f"{put} {inrelease} {baseuri}/InRelease")
        run2(f"{put} {packages} {baseuri}/main/binary-{conf.arch}/Packages")
        run2(f"{put} {debfn} {baseuri}/main/binary-{conf.arch}/")

    def upload(self, debfn, conf):
        assert conf.bucket_name
        tmpdir = Path(mkdtemp(prefix="debops-ci"))

        self.init_archive(conf)
        baseuri = f"s3://{conf.bucket_name}/dists/{conf.distro}"

        pkgblock = self.scanpackages(conf, debfn)

        gpg, gpgkeyfp = setup_gpg_key(conf.gpg_key_fp, tmpdir)

        # Lock distro on S3 to prevent race during appends to Packages
        self.lock(conf, baseuri)

        try:
            self._inner_upload(debfn, tmpdir, baseuri, pkgblock, gpg, gpgkeyfp)
        except DuplicatePkgError:
            print(f"Error: {debfn} is already in the archive. Not uploading.")
            sys.exit(1)  # the unlock in the finally block is still executed
        finally:
            self.unlock(baseuri)

    #  # TODO check
    #  dpkg-scanpackages $1 | gzip >> Packages.gz
    #  upload Packages.gz
    #  rm Packages.gz


def main():
    global conf
    ap = ArgumentParser(usage=__doc__)
    ap.add_argument(
        "action", choices=("upload", "scan", "ci", "build", "delete_from_archive")
    )
    ap.add_argument("-r", "--repo", default=None, help="S3/Bintray repository name")
    ap.add_argument("-o", "--org", default=DEFAULT_ORG, help="S3/Bintray org name")
    ap.add_argument("--bucket-name", help="S3 bucket name")
    ap.add_argument("--distro", default="unstable", help="Debian distribution name")
    ap.add_argument("--arch", default="amd64", help="Debian architecture name")
    ap.add_argument("--gpg-key-fp", help="GPG key fingerprint")
    ap.add_argument("--show-commands", action="store_true", help="Show shell commands")
    args, extra = ap.parse_known_args()
    conf = args

    if args.action == "ci":
        ci(args)
    elif args.action == "scan":
        for p in sorted(detect_changed_packages()):
            print(p.as_posix())
    elif args.action == "upload":
        # TODO select backend
        # bk = Bintray()
        bk = S3()
        for fn in extra:
            bk.upload(Path(fn), args)
    elif args.action == "delete_from_archive":
        # TODO select backend
        # Only the Bintray backend implements delete_package
        bk = Bintray()
        bk.delete_package(args, extra)
    elif args.action == "build":
        build()


if __name__ == "__main__":
    main()