fix(debian): make sure we can publish all archs (#350)

We are mostly ready to declare a stable release. We still need to deal with https://github.com/ooni/probe/issues/1484.

In this PR, we fix the aforementioned issue. These are the changes:

1. we remove the vendored `debops-ci` script and instead pull it directly from `ooni/sysadmin`;
2. we introduce a new script, `./CLI/linux/pubdebian`, that publishes the packages;
3. we modify `./mk` to allow publishing Debian packages built outside of CI.

The latter change has been quite useful for debugging what was wrong.
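For instance, with the new `debian/publish` target, publishing locally built packages looks roughly like this (a sketch; the credential values are placeholders):

    # build the amd64 package (the .deb lands in the toplevel directory)
    ./mk OONI_PSIPHON_TAGS="" ./debian/amd64

    # credentials consumed by ./CLI/linux/pubdebian (placeholders)
    export AWS_ACCESS_KEY_ID=...
    export AWS_SECRET_ACCESS_KEY=...
    export DEB_GPG_KEY="$(cat signing-key.asc)"

    # publish every *.deb found in the toplevel directory
    ./mk debian/publish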
Simone Basso authored on 2021-05-19 13:54:19 +02:00 (committed by GitHub)
parent 6125eb96a6
commit e9da23f123
6 changed files with 96 additions and 608 deletions

.github/workflows/debops-ci (deleted, 561 lines)

@@ -1,561 +0,0 @@
#!/usr/bin/env python3
"""
Builds deb packages and uploads them to S3-compatible storage or bintray.
Works locally and on GitHub Actions and CircleCI
Detects which package[s] need to be built.
Support "release" and PR/testing archives.
scan - scan the current repository for packages to be built
build - locate and build packages
upload <filename> - upload one package to S3 or Bintray
ci - detect CircleCI PRs, build and upload packages
delete_from_archive <filename> - delete filename from archive
Features:
- Implement CI/CD workflow using package archives
- Support adding packages to an existing S3 archive without requiring a
local mirror
- Support multiple packages in the same git repository
- GPG signing
- Phased updates (rolling deployments)
- Update changelogs automatically
- Easy to debug
"""
# TODO: fix S3 credentials passing security
# TODO: Phased-Update-Percentage
# debdeps: git
from argparse import ArgumentParser
import os
from os import getenv
from pathlib import Path
from requests.auth import HTTPBasicAuth
from subprocess import run
from tempfile import mkdtemp, NamedTemporaryFile
from textwrap import dedent
from time import sleep
from typing import List
from hashlib import sha256
import requests
import sys
try:
    import gnupg
except ImportError:
    gnupg = None
# TODO remove these
BINTRAY_API = "https://bintray.com/api/v1"
DEFAULT_ORG = "ooni"
DEFAULT_PR_REPO = "internal-pull-requests"
DEFAULT_MASTER_REPO = "internal-master"
DEFAULT_REPO = "internal-pull-requests"
EXAMPLE_CONFIG = """
"""
assert sys.version_info >= (3, 7, 0), "Python 3.7.0 or later is required"
conf = None
def run2(cmd, **kw):
    if conf.show_commands:
        print(f"Running {cmd}\nKW: {kw}")
    p = run(cmd.split(), capture_output=True, **kw)
    if p.returncode != 0:
        stdout = p.stdout.decode().strip()
        print(f"--stdout--\n{stdout}\n----\n")
        stderr = p.stderr.decode().strip()
        print(f"--stderr--\n{stderr}\n----\n")
        raise Exception(f"'{cmd}' returned: {p.returncode}")
    return p.stdout.decode().strip()

def runi(cmd: str, cwd: Path, sudo=False) -> None:
    if sudo:
        cmd = f"sudo {cmd}"
    run(cmd.split(), cwd=cwd, check=True)

def runc(cmd):
    print("Running:", cmd)
    r = run(cmd.split(), capture_output=True)
    print("Retcode", r.returncode)
    return r.returncode, r.stdout.decode()
def detect_changed_packages() -> List[Path]:
    """Detects files named debian/changelog
    that have been changed in the current branch
    """
    DCH = "debian/changelog"
    # TODO: find a cleaner method:
    commit = run2("git merge-base remotes/origin/master HEAD")
    changes = run2(f"git diff --name-only {commit}")
    pkgs = set()
    for c in changes.splitlines():
        c = Path(c)
        if c.as_posix().endswith(DCH):
            pkgs.add(c.parent.parent)
            continue
        while c.name:
            if c.joinpath(DCH).is_file():
                pkgs.add(c)
            c = c.parent
    return sorted(pkgs)

def trim_compare(url: str) -> str:
    """Shorten GitHub URLs used to compare changes"""
    if url.startswith("https://github.com/") and "..." in url:
        base, commits = url.rsplit("/", 1)
        if len(commits) == 83:
            # two 40-char commit hashes joined by "...": 40 + 3 + 40 == 83
            beginning = commits[0:8]
            end = commits[43 : 43 + 8]
            return f"{base}/{beginning}...{end}"
    return url
def _set_pkg_version_from_circleci(p, ver):
    comp = trim_compare(getenv("CIRCLE_COMPARE_URL", ""))  # show changes in VCS
    if not comp:
        # https://discuss.circleci.com/t/circle-compare-url-is-empty/24549/8
        comp = getenv("CIRCLE_PULL_REQUEST")
    if getenv("CIRCLE_PULL_REQUEST"):
        # This is a PR: build ~pr<N>-<N> version. CIRCLE_PR_NUMBER is broken
        pr_num = getenv("CIRCLE_PULL_REQUEST", "").rsplit("/", 1)[-1]
        build_num = getenv("CIRCLE_BUILD_NUM")
        ver = f"{ver}~pr{pr_num}-{build_num}"
        print(f"CircleCI Pull Request detected - using version {ver}")
        run2(f"dch -b -v {ver} {comp}", cwd=p)
        run2(f"dch -r {ver} {comp}", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
    elif getenv("CIRCLE_BRANCH") == "master":
        # This is a build outside of a PR and in the mainline branch
        print(f"CircleCI mainline build detected - using version {ver}")
        run2(f"dch -b -v {ver} {comp}", cwd=p)
        run2(f"dch -r {ver} {comp}", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
    else:
        # This is a build for a new branch but without a PR: ignore it
        return []
def _set_pkg_version_from_github_actions(p, ver):
    """When running in GitHub Actions, access env vars to set
    the package version"""
    # GITHUB_REF syntax: refs/heads/<branch-name> or refs/pull/<PR#>/merge
    gh_ref = getenv("GITHUB_REF")
    try:
        pr_num = int(gh_ref.split("/")[2])
    except ValueError:
        pr_num = None
    gh_run_number = int(getenv("GITHUB_RUN_NUMBER"))
    print(f"GitHub Actions PR #: {pr_num} Run #: {gh_run_number}")
    print("SHA " + getenv("GITHUB_SHA"))
    comp = ""
    if pr_num is None:
        if gh_ref.endswith("/master"):
            print(f"GitHub release build detected - using version {ver}")
            run2(f"dch -b -v {ver} ''", cwd=p)
            run2(f"dch --release ''", cwd=p)
            ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
            assert ver == ver2, ver + " <--> " + ver2
            return True
        else:
            print("Not a PR or release build. Skipping.")  # run by "on: push"
            return False
    else:
        # This is a PR: build ~pr<N>-<N> version.
        ver = f"{ver}~pr{pr_num}-{gh_run_number}"
        print(f"GitHub Pull Request detected - using version {ver}")
        run2(f"dch -b -v {ver} ''", cwd=p)
        run2(f"dch --release ''", cwd=p)
        ver2 = run2("dpkg-parsechangelog --show-field Version", cwd=p)
        assert ver == ver2, ver + " <--> " + ver2
        return True
def buildpkg(p) -> List[Path]:
    """Build one package, installing required dependencies"""
    print(f"Building package in {p}")
    ver = run2("dpkg-parsechangelog --show-field Version", cwd=p)
    assert ver, f"No version number found in {p}/debian/changelog"
    sudo = True
    should_build = False
    if getenv("CIRCLECI"):
        # Running in CircleCI
        sudo = False
        _set_pkg_version_from_circleci(p, ver)
    elif getenv("GITHUB_EVENT_PATH"):
        sudo = False
        should_build = _set_pkg_version_from_github_actions(p, ver)
    if not should_build:
        return []
    runi("apt-get build-dep -qy --no-install-recommends .", p, sudo=sudo)
    runi("fakeroot debian/rules build", p)
    runi("fakeroot debian/rules binary", p)
    with p.joinpath("debian/files").open() as f:
        return [p.parent.joinpath(line.split()[0]) for line in f]

def detect_archive_backend():
    if getenv("BINTRAY_USERNAME") and getenv("BINTRAY_API_KEY"):
        return "bintray"
    if getenv("AWS_ACCESS_KEY_ID") and getenv("AWS_SECRET_ACCESS_KEY"):
        return "s3"
def setup_gpg_key(keyfp, tmpdir):
    """Import key from env var or use existing keyring"""
    if gnupg is None:
        print("Please install python3-gnupg")
        sys.exit(1)
    if keyfp is None and "DEB_GPG_KEY" not in os.environ:
        print(
            "Error: place a GPG key in the DEB_GPG_KEY env var or"
            " fetch it from the local keyring using --gpg-key-fp"
        )
        sys.exit(1)
    if "DEB_GPG_KEY" in os.environ:
        gpg = gnupg.GPG(gnupghome=tmpdir.as_posix())
        import_result = gpg.import_keys(os.getenv("DEB_GPG_KEY"))
        assert import_result.count == 1
        fp = import_result.fingerprints[0]
        if keyfp:
            assert keyfp == fp
    else:
        gpg = gnupg.GPG()
        assert gpg.list_keys(keys=keyfp)
    return gpg, keyfp
def ci(args) -> None:
    # TODO: detect sudo presence
    backend_name = detect_archive_backend()
    if backend_name == "bintray":
        backend = Bintray()
    elif backend_name == "s3":
        backend = S3()
    else:
        print(
            "Either set BINTRAY_USERNAME / BINTRAY_API_KEY env vars or "
            "AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY"
        )
        sys.exit(1)
    del backend_name
    run2("apt-get update -q")
    run2("apt-get install -qy --no-install-recommends git")
    pkgdirs = detect_changed_packages()
    if not pkgdirs:
        print("Nothing to build")
        return
    print(f"Building {pkgdirs}")
    run2("apt-get install -qy --no-install-recommends devscripts")
    pkgs_lists = [buildpkg(pd) for pd in pkgdirs]
    print(f"Processing {pkgs_lists}")
    for pli in pkgs_lists:
        for p in pli:
            backend.upload(p, args)

def build() -> None:
    """Run manual build on workstation"""
    pkgdirs = detect_changed_packages()
    pkgs_lists = [buildpkg(pd) for pd in pkgdirs]
    print("Outputs:")
    for pli in pkgs_lists:
        for p in pli:
            print(p)
class DuplicatePkgError(Exception):
    pass

def check_duplicate_package(pkgblock, packages_text):
    li = pkgblock.splitlines()
    assert li[0].startswith("Package: "), li
    pname = li[0].split(" ", 1)[1]
    assert li[1].startswith("Version: "), li
    pver = li[1].split(" ", 1)[1]
    assert li[2].startswith("Architecture: "), li
    parch = li[2].split(" ", 1)[1]
    m = f"Package: {pname}\nVersion: {pver}\nArchitecture: {parch}"
    if m in packages_text:
        raise DuplicatePkgError()
class Bintray:
    """Bintray backend"""

    def __init__(self):
        self._btuser = getenv("BINTRAY_USERNAME")
        assert self._btuser, "Missing BINTRAY_USERNAME"

    def upload(self, fi, args) -> None:
        """Upload to Bintray"""
        # FIXME: specify repo
        assert repo, "Please specify a repository"
        assert fi.is_file()
        pname, pver, arch = fi.name.split("_")
        auth = HTTPBasicAuth(self._btuser, getenv("BINTRAY_API_KEY"))
        dist = "unstable"
        url = (
            f"{BINTRAY_API}/content/{args.org}/{repo}/{pname}/{pver}/{fi.name};"
            f"deb_distribution={dist};deb_component=main;deb_architecture=amd64;publish=1"
        )
        with open(fi, "rb") as f:
            resp = requests.put(url, auth=auth, data=f)
        if not resp.ok:
            print(f"Error {resp.text} when calling {resp.request.url}")
            sys.exit(1)

    def delete_package(self, args, extra) -> None:
        """Delete package from Bintray"""
        auth = HTTPBasicAuth(self._btuser, getenv("BINTRAY_API_KEY"))
        filename = extra[0]
        assert filename.endswith(".deb")
        assert args.repo, "Please specify a repository"
        url = f"{BINTRAY_API}/content/{args.org}/{args.repo}/{filename}"
        resp = requests.delete(url, auth=auth)
        if not resp.ok:
            print(f"Error {resp.text} when calling {resp.request.url}")
            sys.exit(1)
class S3:
    """S3 backend"""

    def generate_release_file(self, conf, sha, size):
        r = dedent(
            f"""
            Acquire-By-Hash: no
            Architectures: {conf.arch}
            Codename: {conf.distro}
            Components: main
            Date: Thu, 07 Nov 2019 14:23:37 UTC
            Origin: private
            Valid-Until: Thu, 14 Nov 2029 14:23:37 UTC
            SHA256:
             {sha} {size} main/binary-{conf.arch}/Packages
            """
        )
        return r

    def init_archive(self, conf):
        """Initialize the archive"""
        assert conf.bucket_name
        r, o = runc(f"s3cmd mb s3://{conf.bucket_name}")
        if r == 0:
            print("Bucket created")
            runc(f"s3cmd ws-create s3://{conf.bucket_name}")
        r, out = runc(f"s3cmd ws-info s3://{conf.bucket_name}")
        for li in out.splitlines():
            if li.startswith("Website endpoint"):
                s3url = li.split()[2]
                break
        # Initialize distro if needed. Check for InRelease
        baseuri = f"s3://{conf.bucket_name}/dists/{conf.distro}"
        r, o = runc(f"s3cmd info --no-progress {baseuri}/InRelease")
        if r == 0:
            return
        if r != 12:
            print(f"Unexpected return code {r} {o}")
            sys.exit(1)
        # InRelease file not found: create lock file
        print("Creating initial lock file")
        tf = NamedTemporaryFile()
        # put = "s3cmd --acl-public --guess-mime-type --no-progress put"
        put = "s3cmd --guess-mime-type --no-progress put"
        r2, o = runc(f"{put} --no-progress {tf.name} {baseuri}/.debrepos3.lock")
        assert r2 == 0, repr(o)
        # Create empty InRelease
        r2, o = runc(f"{put} {tf.name} {baseuri}/InRelease")
        # Create empty Packages
        r, o = runc(f"{put} {tf.name} {baseuri}/main/binary-{conf.arch}/Packages")
        # Create index
        html = dedent(
            f"""
            <html><body>
            <p>Create /etc/apt/sources.list.d/{conf.distro}.list containing:</p>
            <pre>deb {s3url} {conf.distro} main</pre>
            </body></html>
            """
        )
        with open(tf.name, "w") as f:
            f.write(html)
        r, o = runc(f"{put} {tf.name} {baseuri}/index.html")
    def lock(self, conf, baseuri):
        """Rename semaphore file"""
        # The archive stores exactly one of .debrepos3.lock / .debrepos3.nolock:
        # renaming nolock -> lock succeeds for only one contender, so the mv
        # acts as a coarse mutex around updates to Packages and InRelease.
        print(f"Locking {baseuri} ...")
        cmd = f"s3cmd mv --no-progress {baseuri}/.debrepos3.nolock {baseuri}/.debrepos3.lock"
        while True:
            r, o = runc(cmd)
            print(r)
            if r == 0:
                return
            print("The distro is locked. Waiting...")
            sleep(10)

    def unlock(self, baseuri):
        """Rename semaphore file"""
        r, o = runc(
            f"s3cmd mv --no-progress {baseuri}/.debrepos3.lock {baseuri}/.debrepos3.nolock"
        )
        print(r)

    def scanpackages(self, conf, debfn) -> str:
        r, o = runc(f"dpkg-scanpackages {debfn}")
        assert r == 0, repr(r)
        out = []
        for line in o.splitlines():
            if line.startswith("Filename: "):
                fn = line.split("/")[-1]
                line = f"Filename: dists/{conf.distro}/main/binary-{conf.arch}/{fn}"
            out.append(line)
        return "\n".join(out) + "\n"
    def _inner_upload(self, debfn, tmpdir, baseuri, pkgblock: str, gpg, gpgkeyfp):
        # Fetch existing Packages file
        packages = tmpdir / "Packages"
        uri = f"{baseuri}/main/binary-{conf.arch}/Packages {packages}"
        run2(f"s3cmd --no-progress get {uri}")
        # Check for already uploaded package
        check_duplicate_package(pkgblock, packages.read_text())
        # Append, then read whole file back
        with packages.open("a") as f:
            f.write(pkgblock)
        data = packages.read_bytes()
        packagesf_size = len(data)
        packagesf_sha = sha256(data).hexdigest()
        del data
        # Create, sign, upload InRelease
        release = tmpdir / "Release"
        inrelease = tmpdir / "InRelease"
        rfdata = self.generate_release_file(conf, packagesf_sha, packagesf_size)
        # print(rfdata)
        release.write_text(rfdata)
        # r, o = runc(f"gpg -a -s --clearsign -o {inrelease} {release}")
        # if r != 0:
        #     self.unlock(baseuri)
        #     print("Error during GPG signature")
        #     sys.exit(1)
        sig = gpg.sign(release.read_text(), keyid=gpgkeyfp)
        assert sig.status == "signature created"
        inrelease.write_bytes(sig.data)
        # Upload InRelease and Packages
        put = "s3cmd --acl-public --guess-mime-type --no-progress put"
        run2(f"{put} {inrelease} {baseuri}/InRelease")
        run2(f"{put} {packages} {baseuri}/main/binary-{conf.arch}/Packages")
        run2(f"{put} {debfn} {baseuri}/main/binary-{conf.arch}/")

    def upload(self, debfn, conf):
        assert conf.bucket_name
        tmpdir = Path(mkdtemp(prefix="debops-ci"))
        self.init_archive(conf)
        baseuri = f"s3://{conf.bucket_name}/dists/{conf.distro}"
        pkgblock = self.scanpackages(conf, debfn)
        gpg, gpgkeyfp = setup_gpg_key(conf.gpg_key_fp, tmpdir)
        # Lock distro on S3 to prevent race during appends to Packages
        self.lock(conf, baseuri)
        try:
            self._inner_upload(debfn, tmpdir, baseuri, pkgblock, gpg, gpgkeyfp)
        except DuplicatePkgError:
            print(f"Error: {debfn} is already in the archive. Not uploading.")
            sys.exit(1)  # the unlock in the finally block is still executed
        finally:
            self.unlock(baseuri)

    # # TODO check
    # dpkg-scanpackages $1 | gzip >> Packages.gz
    # upload Packages.gz
    # rm Packages.gz
def main():
    global conf
    ap = ArgumentParser(usage=__doc__)
    ap.add_argument(
        "action", choices=("upload", "scan", "ci", "build", "delete_from_archive")
    )
    ap.add_argument("-r", "--repo", default=None, help="S3/Bintray repository name")
    ap.add_argument("-o", "--org", default=DEFAULT_ORG, help="S3/Bintray org name")
    ap.add_argument("--bucket-name", help="S3 bucket name")
    ap.add_argument("--distro", default="unstable", help="Debian distribution name")
    ap.add_argument("--arch", default="amd64", help="Debian architecture name")
    ap.add_argument("--gpg-key-fp", help="GPG key fingerprint")
    ap.add_argument("--show-commands", action="store_true", help="Show shell commands")
    args, extra = ap.parse_known_args()
    conf = args
    if args.action == "ci":
        ci(args)
    elif args.action == "scan":
        for p in sorted(detect_changed_packages()):
            print(p.as_posix())
    elif args.action == "upload":
        # TODO select backend
        # bk = Bintray()
        bk = S3()
        for fn in extra:
            bk.upload(Path(fn), args)
    elif args.action == "delete_from_archive":
        # TODO select backend
        # bk = Bintray()
        bk = S3()
        bk.delete_package(args, extra)
    elif args.action == "build":
        build()

if __name__ == "__main__":
    main()
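For context, the S3 archive layout that this script maintains looks roughly like this (assuming the `ooni-deb` bucket used by the workflows and the default `unstable` distro; the package name is an example):

    s3://ooni-deb/dists/unstable/InRelease
    s3://ooni-deb/dists/unstable/index.html
    s3://ooni-deb/dists/unstable/main/binary-amd64/Packages
    s3://ooni-deb/dists/unstable/main/binary-amd64/ooniprobe-cli_3.10.0_amd64.deb
    s3://ooni-deb/dists/unstable/.debrepos3.lock  (or .debrepos3.nolock when unlocked)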

@@ -12,11 +12,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: ./mk OONI_PSIPHON_TAGS="" DEBIAN_TILDE_VERSION=$GITHUB_RUN_NUMBER ./debian/386
       - run: ./E2E/ooniprobe.sh ./CLI/linux/386/ooniprobe
-      - run: sudo apt-get install -y --no-install-recommends git python3 python3-requests python3-gnupg s3cmd
-      - run: |
-          for deb in *.deb; do
-            ./.github/workflows/debops-ci --arch i386 --show-commands upload --bucket-name ooni-deb $deb
-          done
+      - run: ./CLI/linux/pubdebian
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -28,11 +24,7 @@ jobs:
       - uses: actions/checkout@v2
       - run: ./mk OONI_PSIPHON_TAGS="" DEBIAN_TILDE_VERSION=$GITHUB_RUN_NUMBER ./debian/amd64
       - run: ./E2E/ooniprobe.sh ./CLI/linux/amd64/ooniprobe
-      - run: sudo apt-get install -y --no-install-recommends git python3 python3-requests python3-gnupg s3cmd
-      - run: |
-          for deb in *.deb; do
-            ./.github/workflows/debops-ci --arch amd64 --show-commands upload --bucket-name ooni-deb $deb
-          done
+      - run: ./CLI/linux/pubdebian
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -46,17 +38,12 @@ jobs:
       - run: sudo apt-get install -y qemu-user-static
       - run: ./mk OONI_PSIPHON_TAGS="" DEBIAN_TILDE_VERSION=$GITHUB_RUN_NUMBER ./debian/arm
       - run: ./E2E/ooniprobe.sh ./CLI/linux/arm/ooniprobe
-      - run: sudo apt-get install -y --no-install-recommends git python3 python3-requests python3-gnupg s3cmd
-      - run: |
-          for deb in *.deb; do
-            ./.github/workflows/debops-ci --arch armhf --show-commands upload --bucket-name ooni-deb $deb
-          done
+      - run: ./CLI/linux/pubdebian
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           DEB_GPG_KEY: ${{ secrets.DEB_GPG_KEY }}
 
   build_arm64:
     runs-on: "ubuntu-20.04"
     steps:
@@ -65,11 +52,7 @@ jobs:
       - run: sudo apt-get install -y qemu-user-static
       - run: ./mk OONI_PSIPHON_TAGS="" DEBIAN_TILDE_VERSION=$GITHUB_RUN_NUMBER ./debian/arm64
       - run: ./E2E/ooniprobe.sh ./CLI/linux/arm64/ooniprobe
-      - run: sudo apt-get install -y --no-install-recommends git python3 python3-requests python3-gnupg s3cmd
-      - run: |
-          for deb in *.deb; do
-            ./.github/workflows/debops-ci --arch arm64 --show-commands upload --bucket-name ooni-deb $deb
-          done
+      - run: ./CLI/linux/pubdebian
         env:
           AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
           AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
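For end users, the index.html generated by debops-ci suggests consuming the archive as a regular apt repository, along these lines (the website endpoint shown here is purely illustrative):

    # /etc/apt/sources.list.d/unstable.list (hypothetical endpoint)
    deb http://ooni-deb.s3-website.eu-central-1.amazonaws.com unstable main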

.gitignore (17 lines changed)

@@ -1,21 +1,22 @@
-.DS_Store
-/*.asc
-/*.deb
-/*.jsonl
-/*.tar.gz
-/*.zip
 /apitool
 /apitool.exe
+/*.asc
 /coverage.cov
+/*.deb
+/debops-ci
+.DS_Store
+/*.jsonl
 /miniooni
 /miniooni.exe
 /oohelper
-/oohelper.exe
 /oohelperd
 /oohelperd.exe
+/oohelper.exe
 /ooniprobe
-/ooniprobe.exe
 /ooniprobe_checksums.txt
 /ooniprobe_checksums.txt.asc
+/ooniprobe.exe
 /probe-cli.cov
+/*.tar.gz
 /testdata/gotmp
+/*.zip

CLI/linux/pubdebian (new executable file, 52 lines)

@@ -0,0 +1,52 @@
#!/bin/sh

# This script publishes Debian packages. When run by `mk`, it runs
# inside an `ubuntu:20.04` container. It's also fine to run this
# script from a live Debian-like system, as long as the following
# assumption is met:
#
# 1. the Debian packages we want to publish are in the toplevel dir.

# Ensure that we have all the required environment variables.
fail=0
if [ -z "$AWS_ACCESS_KEY_ID" ]; then
    echo "warning: missing AWS_ACCESS_KEY_ID environment variable" 1>&2
    fail=1
fi
if [ -z "$AWS_SECRET_ACCESS_KEY" ]; then
    echo "warning: missing AWS_SECRET_ACCESS_KEY environment variable" 1>&2
    fail=1
fi
if [ -z "$DEB_GPG_KEY" ]; then
    echo "warning: missing DEB_GPG_KEY environment variable" 1>&2
    fail=1
fi
if [ $fail -ne 0 ]; then
    exit 1
fi

set -ex
export DEBIAN_FRONTEND=noninteractive

maybe_with_sudo() {
    if command -v sudo 1>/dev/null; then
        sudo "$@"
    else
        "$@"
    fi
}

# Install the dependencies required by the uploader.
maybe_with_sudo apt-get update -q
maybe_with_sudo apt-get install --yes --no-install-recommends curl git make python3 python3-requests python3-gnupg s3cmd

# Pull the latest version of the debops-ci script from ooni/sysadmin.
curl -fsSLO https://raw.githubusercontent.com/ooni/sysadmin/master/tools/debops-ci
chmod +x debops-ci

# Loop over the available packages and upload them.
for debpkg in *.deb; do
    # For example: ooniprobe-cli_3.10.0_i386.deb yields arch=i386.
    arch=$(echo "$debpkg" | awk -F_ '{print $3}' | sed 's/\.deb$//g')
    ./debops-ci --show-commands upload --bucket-name ooni-deb --arch "$arch" "$debpkg"
done
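Besides `./mk debian/publish`, the script can also be run directly on a Debian-like host, roughly as follows (placeholder credentials):

    AWS_ACCESS_KEY_ID=... \
    AWS_SECRET_ACCESS_KEY=... \
    DEB_GPG_KEY="$(cat signing-key.asc)" \
    ./CLI/linux/pubdebian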

mk (49 lines changed)

@@ -279,7 +279,7 @@ GOLANG_DOCKER_IMAGE = golang:$(GOLANG_VERSION_NUMBER)-alpine
 .PHONY: ./CLI/linux/386/ooniprobe
 ./debian/386: search/for/docker ./CLI/linux/386/ooniprobe
 	docker pull --platform linux/386 debian:stable
-	docker run --platform linux/386 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/debian 386 "$(DEBIAN_TILDE_VERSION)"
+	docker run --platform linux/386 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/pkgdebian 386 "$(DEBIAN_TILDE_VERSION)"
 
 #help:
 #help: * `./mk ./debian/amd64`: debian/amd64
@@ -288,7 +288,7 @@ GOLANG_DOCKER_IMAGE = golang:$(GOLANG_VERSION_NUMBER)-alpine
 .PHONY: ./CLI/linux/amd64/ooniprobe
 ./debian/amd64: search/for/docker ./CLI/linux/amd64/ooniprobe
 	docker pull --platform linux/amd64 debian:stable
-	docker run --platform linux/amd64 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/debian amd64 "$(DEBIAN_TILDE_VERSION)"
+	docker run --platform linux/amd64 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/pkgdebian amd64 "$(DEBIAN_TILDE_VERSION)"
 
 # Note that we're building for armv7 here
 #help:
@@ -298,7 +298,7 @@ GOLANG_DOCKER_IMAGE = golang:$(GOLANG_VERSION_NUMBER)-alpine
 .PHONY: ./CLI/linux/arm/ooniprobe
 ./debian/arm: search/for/docker ./CLI/linux/arm/ooniprobe
 	docker pull --platform linux/arm/v7 debian:stable
-	docker run --platform linux/arm/v7 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/debian arm "$(DEBIAN_TILDE_VERSION)"
+	docker run --platform linux/arm/v7 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/pkgdebian arm "$(DEBIAN_TILDE_VERSION)"
 
 #help:
 #help: * `./mk ./debian/arm64`: debian/arm64
@@ -307,7 +307,7 @@ GOLANG_DOCKER_IMAGE = golang:$(GOLANG_VERSION_NUMBER)-alpine
 .PHONY: ./CLI/linux/arm64/ooniprobe
 ./debian/arm64: search/for/docker ./CLI/linux/arm64/ooniprobe
 	docker pull --platform linux/arm64 debian:stable
-	docker run --platform linux/arm64 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/debian arm64 "$(DEBIAN_TILDE_VERSION)"
+	docker run --platform linux/arm64 -v $(shell pwd):/ooni -w /ooni debian:stable ./CLI/linux/pkgdebian arm64 "$(DEBIAN_TILDE_VERSION)"
 
 #help:
 #help: The `./mk ./CLI/ooniprobe/linux` command builds the ooniprobe official command
@@ -498,6 +498,19 @@ __android_build_with_ooni_go: search/for/go
 OONIMKALL_V := $(shell date -u +%Y.%m.%d-%H%M%S)
 OONIMKALL_R := $(shell git describe --tags || echo '0.0.0-dev')
 
+#help: The `debian/publish` target publishes all the debian packages
+#help: present in the toplevel directory using debops-ci.
+# TODO(bassosimone): do not hardcode using linux/amd64 here?
+.PHONY: debian/publish
+debian/publish: search/for/docker
+	test -z "$(CI)" || { echo "fatal: refusing to run in a CI environment" 1>&2; exit 1; }
+	ls *.deb 2>/dev/null || { echo "fatal: no debian packages in the toplevel dir" 1>&2; exit 1; }
+	test -n "$(AWS_ACCESS_KEY_ID)" || { echo "fatal: AWS_ACCESS_KEY_ID not set" 1>&2; exit 1; }
+	test -n "$(AWS_SECRET_ACCESS_KEY)" || { echo "fatal: AWS_SECRET_ACCESS_KEY not set" 1>&2; exit 1; }
+	test -n "$(DEB_GPG_KEY)" || { echo "fatal: DEB_GPG_KEY not set" 1>&2; exit 1; }
+	docker pull --platform linux/amd64 ubuntu:20.04
+	docker run --platform linux/amd64 -e AWS_ACCESS_KEY_ID="$(AWS_ACCESS_KEY_ID)" -e AWS_SECRET_ACCESS_KEY="$(AWS_SECRET_ACCESS_KEY)" -e DEB_GPG_KEY="$(DEB_GPG_KEY)" -v $(shell pwd):/ooni -w /ooni ubuntu:20.04 ./CLI/linux/pubdebian
+
 #help:
 #help: The following commands check for the availability of dependencies:
 # TODO(bassosimone): make checks more robust?
@@ -537,20 +550,6 @@ search/for/gpg:
 	@printf "checking for gpg... "
 	@command -v gpg || { echo "not found"; exit 1; }
 
-#help:
-#help: * `./mk search/for/jar`: checks for jar
-.PHONY: search/for/jar
-search/for/jar:
-	@printf "checking for jar... "
-	@command -v jar || { echo "not found"; exit 1; }
-
-#help:
-#help: * `./mk search/for/java`: checks for java
-.PHONY: search/for/java
-search/for/java:
-	@printf "checking for java... "
-	@command -v java || { echo "not found"; exit 1; }
-
 #help:
 #help: * `./mk search/for/go`: checks for go
 .PHONY: search/for/go
@@ -565,6 +564,20 @@ search/for/go:
 # SHOULD NOT cache this value so we ARE NOT using `:=`)
 __GOVERSION_REAL = $(shell go version | awk '{print $$3}')
 
+#help:
+#help: * `./mk search/for/jar`: checks for jar
+.PHONY: search/for/jar
+search/for/jar:
+	@printf "checking for jar... "
+	@command -v jar || { echo "not found"; exit 1; }
+
+#help:
+#help: * `./mk search/for/java`: checks for java
+.PHONY: search/for/java
+search/for/java:
+	@printf "checking for java... "
+	@command -v java || { echo "not found"; exit 1; }
+
 #help:
 #help: * `./mk search/for/mingw-w64`: checks for mingw-w64
 .PHONY: search/for/mingw-w64