Diffstat (limited to 'scripts/obs')
-rw-r--r--  scripts/obs/README                                |  23
-rwxr-xr-x  scripts/obs/build_binpkg.py                       | 107
-rwxr-xr-x  scripts/obs/build_srcpkg.py                       |  20
-rwxr-xr-x  scripts/obs/check_builders.sh                     |  19
-rwxr-xr-x  scripts/obs/check_new_distros.py                  | 110
-rw-r--r--  scripts/obs/data/Dockerfile                       |  35
-rwxr-xr-x  scripts/obs/data/build.sh                         |  15
-rw-r--r--  scripts/obs/data/build_binpkg.Dockerfile          |  90
-rw-r--r--  scripts/obs/data/build_binpkg_manuals.Dockerfile  |  20
-rwxr-xr-x  scripts/obs/data/build_deb.sh                     |  25
-rwxr-xr-x  scripts/obs/data/build_rpm.sh                     |  44
-rw-r--r--  scripts/obs/data/build_srcpkg.Dockerfile          |  31
-rw-r--r--  scripts/obs/data/rpmmacros                        |   5
-rwxr-xr-x  scripts/obs/gerrit_binpkgs.sh                     |  40
-rw-r--r--  scripts/obs/lib/__init__.py                       |  68
-rw-r--r--  scripts/obs/lib/binpkg_deb.py                     |  29
-rw-r--r--  scripts/obs/lib/binpkg_rpm.py                     |  29
-rw-r--r--  scripts/obs/lib/config.py                         |  48
-rw-r--r--  scripts/obs/lib/debian.py                         |  30
-rw-r--r--  scripts/obs/lib/docker.py                         |  89
-rw-r--r--  scripts/obs/lib/git.py                            |  54
-rw-r--r--  scripts/obs/lib/metapkg.py                        |  30
-rw-r--r--  scripts/obs/lib/osc.py                            |  90
-rw-r--r--  scripts/obs/lib/srcpkg.py                         | 175
-rwxr-xr-x  scripts/obs/sync_obs_projects.py                  | 305
-rwxr-xr-x  scripts/obs/update_obs_project.py                 | 125
-rwxr-xr-x  scripts/obs/update_obs_wireshark.sh               |  47
27 files changed, 1499 insertions(+), 204 deletions(-)
diff --git a/scripts/obs/README b/scripts/obs/README
index 67d2a2b..e6ca502 100644
--- a/scripts/obs/README
+++ b/scripts/obs/README
@@ -1,3 +1,24 @@
+Overview
+========
+This directory contains scripts related to building Osmocom projects in OBS
+(Open Build Service) and to building binary packages. Below is an overview of
+the scripts; run them with -h to get a more detailed description.
+
+* gerrit_binpkgs.sh: build source + binary packages like CI for gerrit
+* build_srcpkg.py: build one source package for an Osmocom project
+* update_obs_project.py: generate source packages and upload them to OBS
+* build_binpkg.py: build rpm/deb packages for one Osmocom project
+* sync_obs_projects.py: sync projects from another instance (OS#6165)
+
+Docker
+------
+The above scripts have a -d parameter that allows running them inside docker.
+We run them this way in jenkins to avoid installing dependencies on the host.
+
+Note that when building binary packages, libosmocore socket tests and possibly
+others will fail unless docker has ipv6 configured:
+https://osmocom.org/issues/4700#note-3
+
Usage Example: Submitting source packages to Osmocom's OBS build server
=======================================================================
@@ -29,7 +50,7 @@ Verify that it worked:
libosmo-pfcp
osmo-hnbgw
-FYI, the config file will look like this:
+FYI, the config file (oscrc) will look like this:
[general]
apiurl = https://obs.osmocom.org
diff --git a/scripts/obs/build_binpkg.py b/scripts/obs/build_binpkg.py
new file mode 100755
index 0000000..ae2fe53
--- /dev/null
+++ b/scripts/obs/build_binpkg.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import argparse
+import fnmatch
+import lib
+import multiprocessing
+import os
+import sys
+import lib.binpkg_deb
+import lib.config
+import lib.docker
+import lib.git
+import lib.metapkg
+import lib.srcpkg
+
+
+def arg_type_docker_distro(arg):
+ for pattern in lib.config.docker_distro_other:
+ if fnmatch.fnmatch(arg, pattern):
+ return arg
+ raise ValueError
+
+
+def main():
+ distro_default = lib.config.docker_distro_default
+ jobs_default = multiprocessing.cpu_count() + 1
+
+ parser = argparse.ArgumentParser(
+ description="Build a deb or rpm package as it would be done on"
+ " obs.osmocom.org. Use after building a source package"
+ " with build_srcpkg.py."
+ f" Output dir: {lib.config.path_temp}/binpkgs")
+ parser.add_argument("-d", "--docker", type=arg_type_docker_distro,
+ const=distro_default, nargs="?", metavar="DISTRO",
+ help="build the package in docker for a specific"
+ f" distro (default: {distro_default}, other:"
+ f" almalinux:8, debian:10, ubuntu:22.04 etc.)")
+ parser.add_argument("-f", "--feed", dest="docker_feed", default="master",
+ choices=["master", "nightly", "latest"],
+ help="the OBS feed to configure inside docker, against"
+ " which the package will get built (use nightly"
+ " if master doesn't get built for DISTRO)")
+ parser.add_argument("-j", "--jobs", type=int, default=jobs_default,
+ help=f"parallel running jobs (default: {jobs_default})")
+ parser.add_argument("-r", "--run-shell-on-error", action="store_true",
+ help="run an interactive shell if the build fails")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="always print shell commands and their output,"
+ " instead of only printing them on error")
+ parser.add_argument("package",
+ help="package name, e.g. libosmocore")
+ args = parser.parse_args()
+
+ lib.set_args(args)
+
+ srcdir = f"{lib.config.path_temp}/srcpkgs/{args.package}"
+ if not os.path.exists(srcdir):
+ print(f"ERROR: {args.package}: no srcpkg found, run build_srcpkg.py"
+ " first!")
+ sys.exit(1)
+
+ bindir = f"{lib.config.path_temp}/binpkgs"
+ lib.run_cmd(["rm", "-rf", bindir])
+ os.makedirs(bindir)
+
+ distro = args.docker if args.docker else distro_default
+
+ env = {"JOBS": str(args.jobs),
+ "PACKAGE": args.package,
+ "BUILDUSER": os.environ["USER"],
+ "PACKAGEFORMAT": "deb"}
+
+ docker_args = []
+ if args.run_shell_on_error:
+ env["RUN_SHELL_ON_ERROR"] = "1"
+ docker_args += ["-i", "-t"]
+
+ # Add capability needed for building without network
+ docker_args += ["--cap-add=NET_ADMIN"]
+
+ script_path = "data/build.sh"
+
+ if not distro.startswith("debian:") and not distro.startswith("ubuntu:"):
+ env["PACKAGEFORMAT"] = "rpm"
+
+ if args.docker:
+ image_type = "build_binpkg"
+
+ # Optimization: use docker container with osmo-gsm-manuals-dev already
+ # installed if it is in build depends
+ if env["PACKAGEFORMAT"] == "deb" \
+ and lib.srcpkg.requires_osmo_gsm_manuals_dev(args.package):
+ image_type += "_manuals"
+
+ env["BUILDUSER"] = "user"
+ lib.docker.run_in_docker_and_exit(script_path,
+ image_type=image_type,
+ distro=distro,
+ pass_argv=False, env=env,
+ docker_args=docker_args)
+ else:
+ lib.run_cmd(["sudo", "-E", script_path], env=env,
+ cwd=lib.config.path_top)
+
+if __name__ == "__main__":
+ main()
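For reference, the -d/--docker validation above matches the distro argument
against the glob patterns from lib/config.py (docker_distro_other, added
further down in this diff). A minimal standalone sketch of that check, with
example inputs:

  import fnmatch

  # Patterns as defined in lib/config.py (docker_distro_other)
  patterns = ["almalinux:*", "debian:*", "ubuntu:*"]

  def is_supported_distro(arg):
      return any(fnmatch.fnmatch(arg, p) for p in patterns)

  print(is_supported_distro("debian:12"))     # True
  print(is_supported_distro("ubuntu:22.04"))  # True
  print(is_supported_distro("fedora:39"))     # False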
diff --git a/scripts/obs/build_srcpkg.py b/scripts/obs/build_srcpkg.py
index ea7ab34..a5feeea 100755
--- a/scripts/obs/build_srcpkg.py
+++ b/scripts/obs/build_srcpkg.py
@@ -2,6 +2,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
import argparse
+import sys
import lib
import lib.config
import lib.docker
@@ -18,30 +19,33 @@ def main():
" upload to https://obs.osmocom.org."
f" Output dir: {lib.config.path_temp}/srcpkgs")
lib.add_shared_arguments(parser)
+ parser.add_argument("-g", "--gerrit-id", type=int, default=0,
+ help="clone particular revision from gerrit using given ID")
parser.add_argument("package", nargs="?",
help="package name, e.g. libosmocore or open5gs")
args = parser.parse_args()
if not args.meta and not args.package:
print("ERROR: specify -m and/or a package. See -h for help.")
- exit(1)
+ sys.exit(1)
- lib.set_cmds_verbose(args.verbose)
+ lib.set_args(args)
if args.docker:
- lib.docker.run_in_docker_and_exit(__file__, args)
+ lib.docker.run_in_docker_and_exit("build_srcpkg.py")
+
+ if not args.ignore_req:
+ lib.check_required_programs()
- lib.check_required_programs()
if args.package:
- lib.check_package(args.package)
+ args.package = lib.set_proper_package_name(args.package)
lib.remove_temp()
if args.meta:
- lib.metapkg.build(args.feed, args.conflict_version)
+ lib.metapkg.build()
if args.package:
- lib.srcpkg.build(args.package, args.feed, args.git_branch, args.conflict_version,
- args.git_fetch)
+ lib.srcpkg.build(args.package, args.gerrit_id)
if __name__ == "__main__":
diff --git a/scripts/obs/check_builders.sh b/scripts/obs/check_builders.sh
new file mode 100755
index 0000000..65a0d76
--- /dev/null
+++ b/scripts/obs/check_builders.sh
@@ -0,0 +1,19 @@
+#!/bin/sh -ex
+min=10
+max=500
+wget -q https://obs.osmocom.org -O index.html
+
+set +x
+for i in $(seq $min $max); do
+ if grep -q " of $i build hosts" index.html; then
+ echo
+ echo "Check successful, $i builders are connected to OBS"
+ echo
+ exit 0
+ fi
+done
+
+echo
+echo "ERROR: expected at least $min builders to be connected to OBS!"
+echo
+exit 1
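The loop above greps the OBS landing page for the phrase " of N build hosts"
with N counting up from the minimum. For illustration only, the same check
expressed in Python with a regex; the page wording is assumed from the grep
pattern, and MIN_BUILDERS mirrors the script's min value:

  import re
  import sys
  import urllib.request

  MIN_BUILDERS = 10

  html = urllib.request.urlopen("https://obs.osmocom.org").read().decode("utf-8")
  m = re.search(r" of (\d+) build hosts", html)
  count = int(m.group(1)) if m else 0

  if count >= MIN_BUILDERS:
      print(f"Check successful, {count} builders are connected to OBS")
      sys.exit(0)

  print(f"ERROR: expected at least {MIN_BUILDERS} builders to be connected to OBS!")
  sys.exit(1)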
diff --git a/scripts/obs/check_new_distros.py b/scripts/obs/check_new_distros.py
new file mode 100755
index 0000000..962d547
--- /dev/null
+++ b/scripts/obs/check_new_distros.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright 2023 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import argparse
+import lib.docker
+import lib.osc
+import sys
+
+projects_opensuse = None
+projects_osmocom = None
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description="Check for new distribution"
+ " projects on the openSUSE OBS, that we want to configure in the"
+ " Osmocom OBS as soon as they are available")
+ parser.add_argument("-d", "--docker",
+ help="run in docker to avoid installing required pkgs",
+ action="store_true")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="always print shell commands and their output,"
+ " instead of only printing them on error")
+
+ advanced = parser.add_argument_group("advanced options")
+ advanced.add_argument("-A", "--apiurl", help="source OBS API URL"
+ " (default: https://api.opensuse.org)",
+ default="https://api.opensuse.org")
+ advanced.add_argument("-p", "--prefix", default="openSUSE.org-mirror",
+ help="destination OBS prefix"
+ " (default: openSUSE.org-mirror)")
+ advanced.add_argument("-t", "--to-apiurl", help="destination OBS API URL"
+ " (default: https://obs.osmocom.org)",
+ default="https://obs.osmocom.org")
+
+ args = parser.parse_args()
+ lib.set_args(args)
+
+ lib.osc.check_oscrc()
+
+ if args.docker:
+ lib.docker.run_in_docker_and_exit("check_new_distros.py", add_oscrc=True)
+
+
+def find_highest_distro_project(distro):
+ highest = None
+ for project in projects_opensuse:
+ if not project.startswith(f"{distro}:"):
+ continue
+
+ num = project[len(distro) + 1:]
+ if not lib.config.check_new_distros_version_regex.match(num):
+ if lib.args.verbose:
+ print(f"ignoring {distro}:{num} (doesn't match version regex)")
+ continue
+
+ if not highest or float(num) > float(highest):
+ highest = num
+
+ if highest:
+ return f"{distro}:{highest}"
+
+ return None
+
+
+def check_distro(distro):
+ highest = find_highest_distro_project(distro)
+ if not highest:
+ print(f"ERROR: {distro}: not found in {lib.args.apiurl}")
+ return False
+
+ # check if it is in the osmo obs
+ exp = f"{lib.args.prefix}:{highest}"
+ if exp in projects_osmocom:
+ print(f"{exp}: OK")
+ return True
+
+ print()
+ print(f"ERROR: {exp} not found")
+ print()
+ print("Follow this guide to add it to the Osmocom OBS:")
+ print("https://osmocom.org/projects/cellular-infrastructure/wiki/Add_a_new_distribution_to_OBS")
+ print()
+
+ return False
+
+
+def main():
+ global projects_opensuse
+ global projects_osmocom
+
+ parse_args()
+ ret = 0
+
+ # Get list of projects from Osmocom OBS
+ lib.osc.set_apiurl(lib.args.to_apiurl)
+ projects_osmocom = lib.osc.get_projects()
+
+ # Get list of projects from openSUSE OBS
+ lib.osc.set_apiurl(lib.args.apiurl)
+ projects_opensuse = lib.osc.get_projects()
+
+ # Check for missing distros in Osmocom OBS
+ for distro in lib.config.check_new_distros:
+ if not check_distro(distro):
+ ret = 1
+
+ sys.exit(ret)
+
+if __name__ == "__main__":
+ main()
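For illustration, how find_highest_distro_project() narrows a project list
down to the highest numeric release; the project names below are made-up
sample data, and the regex is the one added to lib/config.py in this diff:

  import re

  version_regex = re.compile(r'[0-9.]+$')  # check_new_distros_version_regex

  projects = ["Debian:11", "Debian:12", "Debian:Next", "Debian:12:Update"]

  def find_highest(distro):
      highest = None
      for project in projects:
          if not project.startswith(f"{distro}:"):
              continue
          num = project[len(distro) + 1:]
          if not version_regex.match(num):
              continue  # e.g. "Next" or "12:Update" are skipped
          if not highest or float(num) > float(highest):
              highest = num
      return f"{distro}:{highest}" if highest else None

  print(find_highest("Debian"))  # Debian:12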
diff --git a/scripts/obs/data/Dockerfile b/scripts/obs/data/Dockerfile
deleted file mode 100644
index d8bd9c5..0000000
--- a/scripts/obs/data/Dockerfile
+++ /dev/null
@@ -1,35 +0,0 @@
-FROM debian:bullseye
-ARG UID
-
-RUN apt-get update && \
- apt-get upgrade -y && \
- apt-get install -y --no-install-recommends \
- ca-certificates \
- gnupg2 \
- && \
- apt-get clean
-
-COPY Release.key /tmp/Release.key
-RUN apt-key add /tmp/Release.key && \
- rm /tmp/Release.key && \
- echo "deb https://downloads.osmocom.org/packages/osmocom:/latest/Debian_11/ ./" \
- > /etc/apt/sources.list.d/osmocom-latest.list
-
-RUN apt-get update && \
- apt-get upgrade -y && \
- apt-get install -y --no-install-recommends \
- debhelper \
- dh-python \
- dpkg-dev \
- fakeroot \
- git \
- meson \
- osc \
- python3-setuptools \
- rebar3 \
- sed \
- && \
- apt-get clean
-
-RUN useradd --uid=${UID} -m user
-USER user
diff --git a/scripts/obs/data/build.sh b/scripts/obs/data/build.sh
new file mode 100755
index 0000000..7dfbda7
--- /dev/null
+++ b/scripts/obs/data/build.sh
@@ -0,0 +1,15 @@
+#!/bin/sh -e
+
+if ! data/build_"$PACKAGEFORMAT".sh; then
+ echo
+ echo "ERROR: build failed!"
+ echo
+ if [ -n "$RUN_SHELL_ON_ERROR" ]; then
+ bash
+ fi
+ exit 1
+fi
+
+echo
+echo "Build successful!"
+echo
diff --git a/scripts/obs/data/build_binpkg.Dockerfile b/scripts/obs/data/build_binpkg.Dockerfile
new file mode 100644
index 0000000..55bfe0c
--- /dev/null
+++ b/scripts/obs/data/build_binpkg.Dockerfile
@@ -0,0 +1,90 @@
+ARG DISTRO_FROM
+FROM ${DISTRO_FROM}
+ARG DISTRO
+ARG FEED
+ARG UID
+
+COPY Release.key /tmp/Release.key
+
+RUN useradd --uid=${UID} -m user
+
+# Only install build-essential here, and what's needed to add the Osmocom
+# repository. Everything else must be defined as dependency in the package
+# build recipe. For rpm-based distributions, there is no build-essential or
+# similar package. Instead add relevant packages from prjconf, e.g.:
+# https://build.opensuse.org/projects/CentOS:CentOS-8/prjconf
+# For debian, make sure we don't have man pages as otherwise it takes some time
+# to regenerate the manuals database when installing build dependencies.
+# SYS#5818: using almalinux:8 instead of centos:8
+RUN case "$DISTRO" in \
+ debian*|ubuntu*) \
+ echo "path-exclude=/usr/share/man/*" \
+ > /etc/dpkg/dpkg.cfg.d/exclude-man-pages && \
+ rm -rf /usr/share/man/ && \
+ apt-get update && \
+ apt-get install -y --no-install-recommends \
+ build-essential \
+ ca-certificates \
+ fakeroot \
+ git \
+ gnupg2 \
+ iproute2 \
+ && \
+ apt-get clean \
+ ;; \
+ almalinux*) \
+ dnf -y install \
+ autoconf \
+ automake \
+ binutils \
+ dnf-utils \
+ gcc \
+ gcc-c++ \
+ glibc-devel \
+ iproute \
+ libtool \
+ make \
+ redhat-rpm-config \
+ rpm-build \
+ rpmdevtools \
+ wget && \
+ yum config-manager --set-enabled powertools && \
+ su user -c rpmdev-setuptree \
+ ;; \
+ esac
+
+# Add master repository, where packages immediately get updated after merging
+# patches to master.
+# sed: first letter uppercase (testing -> Testing)
+RUN set -x; \
+ VERSION="$(echo "$DISTRO" | cut -d : -f 2 | sed 's/./\u&/')"; \
+ case "$DISTRO" in \
+ debian:*) \
+ apt-key add /tmp/Release.key && \
+ rm /tmp/Release.key && \
+ echo "deb https://downloads.osmocom.org/packages/osmocom:/$FEED/Debian_$VERSION/ ./" \
+ > /etc/apt/sources.list.d/osmocom-$FEED.list \
+ ;; \
+ ubuntu:*) \
+ apt-key add /tmp/Release.key && \
+ rm /tmp/Release.key && \
+ echo "deb https://downloads.osmocom.org/packages/osmocom:/$FEED/xUbuntu_$VERSION/ ./" \
+ > /etc/apt/sources.list.d/osmocom-$FEED.list \
+ ;; \
+ almalinux:*) \
+ { echo "[network_osmocom_$FEED]"; \
+ echo "name=osmocom:$FEED"; \
+ echo "type=rpm-md"; \
+ echo "baseurl=https://downloads.osmocom.org/packages/osmocom:/$FEED/CentOS_$VERSION/"; \
+ echo "gpgcheck=1"; \
+ echo "gpgkey=https://downloads.osmocom.org/packages/osmocom:/$FEED/CentOS_$VERSION/repodata/repomd.xml.key"; \
+ echo "enabled=1"; \
+ } > /etc/yum.repos.d/network:osmocom:$FEED.repo \
+ ;; \
+ *) \
+ echo "can't install repo for $DISTRO" && \
+ exit 1 \
+ ;; \
+ esac
+
+WORKDIR /obs/
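The sed expression above upper-cases the first letter of the version part
("testing" -> "Testing") before it is inserted into the repository URL. A
rough Python sketch of the same distro-to-URL mapping, following the echo
lines above (the helper name obs_repo_url is made up for illustration):

  def obs_repo_url(distro, feed):
      name, version = distro.split(":")            # e.g. "debian", "12"
      version = version[:1].upper() + version[1:]  # like sed 's/./\u&/'
      base = "https://downloads.osmocom.org/packages/osmocom:"
      if name == "debian":
          return f"{base}/{feed}/Debian_{version}/"
      if name == "ubuntu":
          return f"{base}/{feed}/xUbuntu_{version}/"
      if name == "almalinux":
          return f"{base}/{feed}/CentOS_{version}/"
      raise ValueError(f"can't build repo URL for {distro}")

  print(obs_repo_url("debian:12", "master"))
  # https://downloads.osmocom.org/packages/osmocom:/master/Debian_12/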
diff --git a/scripts/obs/data/build_binpkg_manuals.Dockerfile b/scripts/obs/data/build_binpkg_manuals.Dockerfile
new file mode 100644
index 0000000..c2b1211
--- /dev/null
+++ b/scripts/obs/data/build_binpkg_manuals.Dockerfile
@@ -0,0 +1,20 @@
+# Optimization: installing osmo-gsm-manuals-dev and its many, many dependencies
+# takes quite a long time - sometimes longer than building the package itself
+# (related: OS#4132). Instead of doing this every time before starting a build,
+# here is a second docker container that already has it installed. This gets
+# used by build_binpkg.py in case the package to build depends on
+# osmo-gsm-manuals-dev and the build is done for Debian. Note that right now we
+# don't build the manuals for rpm-based distributions.
+ARG DISTRO_FROM
+FROM ${DISTRO_FROM}
+ARG DISTRO
+
+RUN case "$DISTRO" in \
+ debian*|ubuntu*) \
+ apt-get update && \
+ apt-get install -y --no-install-recommends \
+ osmo-gsm-manuals-dev \
+ && \
+ apt-get clean \
+ ;; \
+ esac
diff --git a/scripts/obs/data/build_deb.sh b/scripts/obs/data/build_deb.sh
new file mode 100755
index 0000000..931919d
--- /dev/null
+++ b/scripts/obs/data/build_deb.sh
@@ -0,0 +1,25 @@
+#!/bin/sh -ex
+
+apt_get="apt-get"
+if [ -n "$INSIDE_DOCKER" ]; then
+ export DEBIAN_FRONTEND=noninteractive
+ apt_get="apt-get -y"
+fi
+
+su "$BUILDUSER" -c "tar -C _temp/binpkgs -xvf _temp/srcpkgs/$PACKAGE/*.tar.*"
+cd _temp/binpkgs/*
+
+$apt_get update
+$apt_get build-dep .
+
+if [ -n "$INSIDE_DOCKER" ]; then
+ ip link set eth0 down
+fi
+
+su "$BUILDUSER" -c "dpkg-buildpackage -us -uc -j$JOBS"
+
+# Show contents
+cd ..
+for i in *.deb; do
+ dpkg -c "$i"
+done
diff --git a/scripts/obs/data/build_rpm.sh b/scripts/obs/data/build_rpm.sh
new file mode 100755
index 0000000..a73d164
--- /dev/null
+++ b/scripts/obs/data/build_rpm.sh
@@ -0,0 +1,44 @@
+#!/bin/sh -ex
+
+if ! [ -d /home/$BUILDUSER/rpmbuild/SOURCES ]; then
+ set +x
+ echo "ERROR: rpmdev-setuptree did not run"
+ echo "If this is an rpm based system and you want to build the package"
+ echo "here, run rpmdev-setuptree. Otherwise consider building the"
+ echo "package in docker (-d)."
+ exit 1
+fi
+
+yum_builddep="yum-builddep"
+if [ -n "$INSIDE_DOCKER" ]; then
+ yum_builddep="yum-builddep -y"
+fi
+
+spec="$(basename "$(find _temp/srcpkgs/"$PACKAGE" -name '*.spec')")"
+
+su "$BUILDUSER" -c "cp _temp/srcpkgs/$PACKAGE/$spec ~/rpmbuild/SPECS"
+su "$BUILDUSER" -c "cp _temp/srcpkgs/$PACKAGE/*.tar.* ~/rpmbuild/SOURCES"
+su "$BUILDUSER" -c "cp _temp/srcpkgs/$PACKAGE/rpmlintrc ~/rpmbuild/SOURCES"
+su "$BUILDUSER" -c "cp /obs/data/rpmmacros ~/.rpmmacros"
+
+# Force refresh of package index data (OS#6038)
+dnf makecache --refresh
+
+$yum_builddep "/home/$BUILDUSER/rpmbuild/SPECS/$spec"
+
+if [ -n "$INSIDE_DOCKER" ]; then
+ ip link set eth0 down
+fi
+
+su "$BUILDUSER" -c "rpmbuild -bb ~/rpmbuild/SPECS/$spec"
+
+# Make built rpms available outside of docker
+if [ -n "$INSIDE_DOCKER" ]; then
+ su "$BUILDUSER" -c "mv ~/rpmbuild/RPMS/*/*.rpm _temp/binpkgs/"
+fi
+
+# Show contents
+cd _temp/binpkgs
+for i in *.rpm; do
+ rpm -qlp "$i"
+done
diff --git a/scripts/obs/data/build_srcpkg.Dockerfile b/scripts/obs/data/build_srcpkg.Dockerfile
new file mode 100644
index 0000000..80488ea
--- /dev/null
+++ b/scripts/obs/data/build_srcpkg.Dockerfile
@@ -0,0 +1,31 @@
+# Change distro in lib/config.py:docker_distro_default
+ARG DISTRO_FROM
+FROM ${DISTRO_FROM}
+ARG UID
+
+RUN apt-get update && \
+ apt-get upgrade -y && \
+ apt-get install -y --no-install-recommends \
+ ca-certificates \
+ colordiff \
+ debhelper \
+ dh-python \
+ dpkg-dev \
+ fakeroot \
+ git \
+ git-review \
+ gnupg2 \
+ libxml2-utils \
+ lsb-release \
+ meson \
+ osc \
+ python3-packaging \
+ python3-setuptools \
+ quilt \
+ rebar3 \
+ sed \
+ && \
+ apt-get clean
+
+RUN useradd --uid=${UID} -m user
+USER user
diff --git a/scripts/obs/data/rpmmacros b/scripts/obs/data/rpmmacros
new file mode 100644
index 0000000..5ac26c7
--- /dev/null
+++ b/scripts/obs/data/rpmmacros
@@ -0,0 +1,5 @@
+# Default values for macros from OBS project config:
+# https://build.opensuse.org/projects/CentOS:CentOS-8/prjconf
+
+%ext_info .gz
+%ext_man .gz
diff --git a/scripts/obs/gerrit_binpkgs.sh b/scripts/obs/gerrit_binpkgs.sh
new file mode 100755
index 0000000..1e4ae26
--- /dev/null
+++ b/scripts/obs/gerrit_binpkgs.sh
@@ -0,0 +1,40 @@
+#!/bin/sh -e
+SCRIPTS_OBS_DIR="$(realpath "$(dirname "$0")")"
+
+DISTRO="$1"
+if [ -z "$DISTRO" ]; then
+ echo "usage: gerrit-binpkgs.sh DISTRO"
+ echo "examples:"
+ echo " gerrit-binpkgs.sh debian:12"
+ echo " gerrit-binpkgs.sh almalinux:8"
+ exit 1
+fi
+
+GIT_REPO_DIR="$(git rev-parse --show-toplevel 2>/dev/null || true)"
+if [ -z "$GIT_REPO_DIR" ]; then
+ echo "ERROR: run inside a git repository of an Osmocom project"
+ exit 1
+fi
+
+CACHE_DIR="$SCRIPTS_OBS_DIR/_cache"
+PROJECT_NAME="$(basename "$GIT_REPO_DIR")"
+
+# Copy the source dir into the cache dir. It will be mounted inside the docker
+# containers for building source and binary packages (so using a symlink does
+# not work). Use rsync so it is very fast.
+echo ":: Copying the source to the cache dir"
+mkdir -p "$CACHE_DIR"
+rsync -a --delete "$GIT_REPO_DIR" "$CACHE_DIR"
+
+echo ":: Building the source package"
+"$SCRIPTS_OBS_DIR"/build_srcpkg.py \
+ --docker \
+ --feed master \
+ --git-skip-fetch \
+ --git-skip-checkout \
+ "$PROJECT_NAME"
+
+echo ":: Building the binary packages"
+"$SCRIPTS_OBS_DIR"/build_binpkg.py \
+ --docker "$DISTRO" \
+ "$PROJECT_NAME"
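The script above chains the two Python entry points. The same flow driven from
Python via subprocess, as a sketch (the scripts path, the package name
libosmocore and the distro are example values):

  import subprocess

  scripts = "scripts/obs"    # example path to this directory
  package = "libosmocore"    # example package
  distro = "debian:12"       # example distro

  # Build the source package in docker, skipping fetch/checkout like
  # gerrit_binpkgs.sh does, then build the binary packages for one distro.
  subprocess.run([f"{scripts}/build_srcpkg.py", "--docker", "--feed", "master",
                  "--git-skip-fetch", "--git-skip-checkout", package],
                 check=True)
  subprocess.run([f"{scripts}/build_binpkg.py", "--docker", distro, package],
                 check=True)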
diff --git a/scripts/obs/lib/__init__.py b/scripts/obs/lib/__init__.py
index a07a072..5292dc5 100644
--- a/scripts/obs/lib/__init__.py
+++ b/scripts/obs/lib/__init__.py
@@ -7,20 +7,29 @@ import shutil
import subprocess
import sys
import tempfile
+import inspect
import lib.config
+# Argparse result
+args = None
+
+# Print output of commands as they run, not only on error
cmds_verbose = False
def add_shared_arguments(parser):
+ """ Arguments shared between build_srcpkg.py and update_obs_project.py. """
parser.add_argument("-f", "--feed",
help="package feed (default: nightly). The feed"
" determines the git revision to be built:"
- " 'nightly' builds 'origin/master',"
+ " 'nightly' and 'master' build 'origin/master',"
" 'latest' builds the last signed tag,"
" other feeds build their respective branch.",
metavar="FEED", default="nightly",
choices=lib.config.feeds)
+ parser.add_argument("-a", "--allow-unknown-package", action="store_true",
+ help="don't complain if the name of the package is not"
+ " stored in lib/config.py")
parser.add_argument("-b", "--git-branch", help="instead of using a branch"
" based on the feed, checkout this git branch",
metavar="BRANCH", default=None)
@@ -30,8 +39,13 @@ def add_shared_arguments(parser):
parser.add_argument("-s", "--git-skip-fetch",
help="do not fetch already cloned git repositories",
action="store_false", dest="git_fetch")
+ parser.add_argument("-S", "--git-skip-checkout",
+ help="do not checkout and reset to a branch/tag",
+ action="store_false", dest="git_checkout")
parser.add_argument("-m", "--meta", action="store_true",
help="build a meta package (e.g. osmocom-nightly)")
+ parser.add_argument("-i", "--ignore-req", action="store_true",
+ help="skip required programs check")
parser.add_argument("-c", "--conflict-version", nargs="?",
help="Of the generated source packages, all Osmocom"
" packages (not e.g. open5gs, see lib/config.py"
@@ -46,9 +60,19 @@ def add_shared_arguments(parser):
" 20YYqX packages to ensure they are not mixed"
" from different build dates (ABI compatibility"
" is only on latest).")
+ parser.add_argument("-p", "--conflict-pkgname", nargs="?",
+ help="name of the meta-package to depend on (default:"
+ " osmocom-$feed)")
+ parser.add_argument("-M", "--no-meta", action="store_true",
+ help="Don't depend on the meta package (helpful when"
+ " building one-off packages for development)")
parser.add_argument("-v", "--verbose", action="store_true",
help="always print shell commands and their output,"
" instead of only printing them on error")
+ parser.add_argument("-e", "--version-append",
+ help="add a string at the end of the version, e.g."
+ " '~osmocom' for the wireshark package to"
+ " indicate that it is the version from our repo")
def set_cmds_verbose(new_val):
@@ -56,6 +80,12 @@ def set_cmds_verbose(new_val):
cmds_verbose = new_val
+def set_args(new_val):
+ global args
+ args = new_val
+ set_cmds_verbose(args.verbose)
+
+
def check_required_programs():
ok = True
@@ -71,28 +101,41 @@ def check_required_programs():
if not ok:
print("Either install them or use the -d argument to run in docker")
- exit(1)
+ sys.exit(1)
-def check_package(package):
+def set_proper_package_name(package):
if package in lib.config.projects_osmocom:
- return
+ return package
if package in lib.config.projects_other:
- return
+ return package
+
+ # Add prefix to Osmocom package if missing
+ for package_cfg in lib.config.projects_osmocom:
+ if os.path.basename(package_cfg) == package:
+ return package_cfg
+
+ if lib.args.allow_unknown_package:
+ return package
print(f"ERROR: unknown package: {package}")
- print("See packages_osmocom and packages_other in obs/lib/config.py")
- exit(1)
+ print("See projects_osmocom and projects_other in obs/lib/config.py")
+ sys.exit(1)
def exit_error_cmd(completed, error_msg):
""" :param completed: return from run_cmd() below """
+ global cmds_verbose
+
print()
print(f"ERROR: {error_msg}")
print()
print(f"*** command ***\n{completed.args}\n")
print(f"*** returncode ***\n{completed.returncode}\n")
- print(f"*** output ***\n{completed.output}")
+
+ if not cmds_verbose:
+ print(f"*** output ***\n{completed.output}")
+
print("*** python trace ***")
raise RuntimeError("shell command related error, find details right above"
" this python trace")
@@ -108,13 +151,14 @@ def run_cmd(cmd, check=True, *args, **kwargs):
:param check: stop with error if exit code is not 0 """
global cmds_verbose
+ caller = inspect.stack()[2][3]
if cmds_verbose:
- print(f"+ {cmd}")
+ print(f"+ {caller}(): {cmd}")
with tempfile.TemporaryFile(encoding="utf8", mode="w+") as output_buf:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT, text=True, bufsize=1,
- *args, **kwargs)
+ p = subprocess.Popen(cmd, stdin=subprocess.DEVNULL,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ text=True, bufsize=1, *args, **kwargs)
while True:
out = p.stdout.read(1)
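run_cmd() now prefixes verbose command output with the name of a calling
function taken from the interpreter stack. A small standalone demonstration of
the inspect.stack() lookup; note that the library uses frame index 2 rather
than 1, presumably to report the caller one level further up (e.g. past thin
wrappers), which is an assumption here:

  import inspect

  def log_caller():
      # stack()[0] is this function, stack()[1] is whoever called it
      caller = inspect.stack()[1][3]
      print(f"+ {caller}(): ...")

  def build_source_package():
      log_caller()

  build_source_package()  # prints "+ build_source_package(): ..."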
diff --git a/scripts/obs/lib/binpkg_deb.py b/scripts/obs/lib/binpkg_deb.py
new file mode 100644
index 0000000..b26623c
--- /dev/null
+++ b/scripts/obs/lib/binpkg_deb.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import os
+import glob
+import lib.config
+
+
+def extract_source(srcdir, bindir):
+ tarball = glob.glob(f"{srcdir}/*.tar.*")[0]
+
+ print(f"extracting {os.path.basename(tarball)}")
+ lib.run_cmd(["tar", "-xf", tarball], cwd=bindir)
+
+ return glob.glob(f"{bindir}/*/")[0]
+
+
+def build(srcdir, jobs):
+ bindir = f"{lib.config.path_temp}/binpkg"
+ extractdir = extract_source(srcdir, bindir)
+
+ lib.set_cmds_verbose(True)
+
+ # install deps
+ lib.run_cmd(["apt-get", "-y", "build-dep", "."], cwd=extractdir)
+
+ print("running dpkg-buildpackage")
+ lib.run_cmd(["dpkg-buildpackage", "-us", "-uc", f"-j{jobs}"],
+ cwd=extractdir)
diff --git a/scripts/obs/lib/binpkg_rpm.py b/scripts/obs/lib/binpkg_rpm.py
new file mode 100644
index 0000000..b26623c
--- /dev/null
+++ b/scripts/obs/lib/binpkg_rpm.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import os
+import glob
+import lib.config
+
+
+def extract_source(srcdir, bindir):
+ tarball = glob.glob(f"{srcdir}/*.tar.*")[0]
+
+ print(f"extracting {os.path.basename(tarball)}")
+ lib.run_cmd(["tar", "-xf", tarball], cwd=bindir)
+
+ return glob.glob(f"{bindir}/*/")[0]
+
+
+def build(srcdir, jobs):
+ bindir = f"{lib.config.path_temp}/binpkg"
+ extractdir = extract_source(srcdir, bindir)
+
+ lib.set_cmds_verbose(True)
+
+ # install deps
+ lib.run_cmd(["apt-get", "-y", "build-dep", "."], cwd=extractdir)
+
+ print("running dpkg-buildpackage")
+ lib.run_cmd(["dpkg-buildpackage", "-us", "-uc", f"-j{jobs}"],
+ cwd=extractdir)
diff --git a/scripts/obs/lib/config.py b/scripts/obs/lib/config.py
index 1e569ef..b4e23a0 100644
--- a/scripts/obs/lib/config.py
+++ b/scripts/obs/lib/config.py
@@ -2,6 +2,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
import os
+import re
# Lists are ordered alphabetically.
@@ -17,6 +18,7 @@ required_programs = [
"fakeroot",
"find",
"git",
+ "git-review",
"meson",
"osc",
"rebar3",
@@ -24,24 +26,28 @@ required_programs = [
]
required_python_modules = [
+ "packaging",
"setuptools",
]
feeds = [
"2022q1",
"2022q2",
+ "2023q1",
"latest",
+ "master",
"nightly",
]
# Osmocom projects: generated source packages will depend on a meta package,
# such as osmocom-nightly, osmocom-latest or osmocom-2022q1. This meta package
# prevents that packages from different feeds are mixed by accident.
-# NOTE: Before adding new projects, make sure the rpm and deb build in OBS!
-# Test it in your own namespace (home:youruser), see README for
-# instructions and/or ask osmith for help.
+# NOTE: Before adding new projects, add them to jobs/gerrit-verifications.yml
+# and ensure the rpm and deb packages build successfully in jenkins.
projects_osmocom = [
"erlang/osmo_dia2gsup",
+ "erlang/osmo-epdg",
+ "gapk",
"libasn1c",
"libgtpnl",
"libosmo-abis",
@@ -78,6 +84,7 @@ projects_osmocom = [
"osmo-trx",
"osmo-uecups",
"osmo-upf",
+ "osmocom-bb",
"python/osmo-python-tests",
"rtl-sdr",
"simtrace2",
@@ -107,6 +114,39 @@ git_latest_tag_pattern_default = "^[0-9]*\\.[0-9]*\\.[0-9]*$"
git_latest_tag_pattern_other = {
"limesuite": "^v[0-9]*\\.[0-9]*\\.[0-9]*$",
"open5gs": "^v[0-9]*\\.[0-9]*\\.[0-9]*$",
+ "osmo-fl2k": "^v[0-9]*\\.[0-9]*\\.[0-9]*$",
+ "rtl-sdr": "^v[0-9]*\\.[0-9]*\\.[0-9]*$",
+ "wireshark": "^v[0-9]*\\.[0-9]*\\.[0-9a-z]*$",
}
-docker_image_name = "debian-bullseye-osmocom-obs"
+docker_distro_default = "debian:12"
+docker_distro_other = [
+ "almalinux:*", # instead of centos (SYS#5818)
+ "debian:*",
+ "ubuntu:*",
+]
+
+#
+# Options related to sync from build.opensuse.org (OS#6165)
+#
+
+sync_remove_paths = [
+ # This path has a kernel-obs-build package that other OBS instances use to
+ # build armv7l/hl packages, but we don't need it
+ "OBS:DefaultKernel",
+]
+
+sync_set_maintainers = [
+ "osmocom-jenkins",
+]
+
+# Distributions for which we want to make sure we add the latest release as
+# soon as it is available in openSUSE's OBS
+# https://osmocom.org/projects/cellular-infrastructure/wiki/Linux_Distributions
+check_new_distros = [
+ "Debian",
+ "Raspbian",
+ "Ubuntu",
+]
+
+check_new_distros_version_regex = re.compile(r'[0-9.]+$')
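check_new_distros_version_regex only accepts names made of digits and dots, so
codename or rolling-release projects are skipped by check_new_distros.py. For
illustration with sample names:

  import re

  check_new_distros_version_regex = re.compile(r'[0-9.]+$')

  for name in ["12", "23.10", "Next", "Tumbleweed", "12:Update"]:
      print(name, bool(check_new_distros_version_regex.match(name)))
  # 12 True, 23.10 True, Next False, Tumbleweed False, 12:Update False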
diff --git a/scripts/obs/lib/debian.py b/scripts/obs/lib/debian.py
index 3bba3ec..6703f8b 100644
--- a/scripts/obs/lib/debian.py
+++ b/scripts/obs/lib/debian.py
@@ -7,6 +7,13 @@ import shlex
import lib
import lib.git
+# Imports that may not be available during startup: ignore the ImportError
+# here and rely on lib.check_required_programs() checking this later on
+# (possibly after the script executed itself in docker if using --docker).
+try:
+ import packaging.version
+except ImportError:
+ pass
def control_add_depend(project, pkgname, version):
""" :param pkgname: of the meta-package to depend on (e.g. osmocom-nightly)
@@ -22,8 +29,9 @@ def control_add_depend(project, pkgname, version):
lib.run_cmd(cmd, cwd=repo_path)
-def changelog_add_entry(project, feed, version):
+def changelog_add_entry(project, version):
""" :param version: for the new changelog entry """
+ feed = lib.args.feed
repo_path = lib.git.get_repo_path(project)
changelog_path = f"{repo_path}/debian/changelog"
@@ -84,16 +92,32 @@ def get_last_version_from_changelog(project):
return ret
-def changelog_add_entry_if_needed(project, feed, version):
+def changelog_add_entry_if_needed(project, version):
""" Adjust the changelog if the version in the changelog is different from
the given version. """
version_changelog = get_last_version_from_changelog(project)
+
+ # Don't use a lower number (OS#6173)
+ try:
+ if packaging.version.parse(version_changelog.split("-")[0]) > \
+ packaging.version.parse(version.split("-")[0]):
+ print(f"{project}: WARNING: version from changelog"
+ f" ({version_changelog}) is higher than version based on git tag"
+ f" ({version}), using version from changelog (git tag not pushed"
+ " yet?)")
+ return
+ except packaging.version.InvalidVersion:
+ # packaging.version.parse can parse the version numbers used in Osmocom
+ # projects (where we need the above check), but not e.g. some versions
+ # from wireshark. Don't abort here if that is the case.
+ pass
+
if version_changelog == version:
return
print(f"{project}: adding debian/changelog entry ({version_changelog} =>"
f" {version})")
- changelog_add_entry(project, feed, version)
+ changelog_add_entry(project, version)
def build_source_package(project):
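A sketch of the OS#6173 guard above, using packaging.version the same way:
only the part before the first "-" is compared, and versions that packaging
cannot parse fall through. The helper name and version strings are made up for
illustration:

  import packaging.version

  def changelog_is_newer(version_changelog, version_git):
      # Compare only the upstream part before the first '-' (sketch)
      try:
          return packaging.version.parse(version_changelog.split("-")[0]) > \
                 packaging.version.parse(version_git.split("-")[0])
      except packaging.version.InvalidVersion:
          # e.g. some wireshark version strings are not parseable; don't abort
          return False

  print(changelog_is_newer("1.9.0", "1.8.1.104-2f0d"))  # True
  print(changelog_is_newer("1.8.1", "1.9.0.5-abcd"))    # False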
diff --git a/scripts/obs/lib/docker.py b/scripts/obs/lib/docker.py
index a5dbcd9..779099b 100644
--- a/scripts/obs/lib/docker.py
+++ b/scripts/obs/lib/docker.py
@@ -8,11 +8,41 @@ import lib
import lib.config
-def build_image():
- print(f"docker: building image {lib.config.docker_image_name}")
+def get_image_name(distro, image_type):
+ ret = f"{distro}-osmocom-obs-{image_type}"
+ ret = ret.replace(":","-").replace("_","-")
+ return ret
+
+
+def get_distro_from(distro, image_type):
+ # Manuals: depend on regular image (data/build_binpkg_manuals.Dockerfile)
+ if image_type.endswith("_manuals"):
+ return get_image_name(distro, image_type.replace("_manuals", ""))
+
+ return distro
+
+
+def build_image(distro, image_type):
+ image_name = get_image_name(distro, image_type)
+ distro_from = get_distro_from(distro, image_type)
+
+ print(f"docker: building image {image_name}")
+
+ # Set the feed of packages to be configured inside the docker container
+ # (master, nightly, latest). This can be set with build_binpkg.py --feed,
+ # to reproduce a build error that happens with a distro that is only in
+ # nightly but not in the master feed (all ubuntu versions as of writing).
+ build_arg_feed = []
+ if getattr(lib.args, "docker_feed", None):
+ build_arg_feed = ["--build-arg", f"FEED={lib.args.docker_feed}"]
+
lib.run_cmd(["docker", "build",
- "--build-arg", f"UID={os.getuid()}",
- "-t", lib.config.docker_image_name,
+ "--build-arg", f"DISTRO={distro}",
+ "--build-arg", f"DISTRO_FROM={distro_from}",
+ "--build-arg", f"UID={os.getuid()}"] +
+ build_arg_feed +
+ ["-t", image_name,
+ "-f", f"{lib.config.path_top}/data/{image_type}.Dockerfile",
f"{lib.config.path_top}/data"])
@@ -25,38 +55,65 @@ def get_oscrc():
return os.path.realpath(ret)
print("ERROR: couldn't find ~/.oscrc. Put it there or set OSCRC.")
- exit(1)
+ sys.exit(1)
-def run_in_docker_and_exit(script_path, args, add_oscrc=False):
+def run_in_docker_and_exit(script_path, add_oscrc=False,
+ image_type="build_srcpkg", distro=None,
+ pass_argv=True, env={}, docker_args=[]):
+ """
+ :param script_path: what to run inside docker, relative to scripts/obs/
+ :param add_oscrc: put user's oscrc in docker (contains obs credentials!)
+ :param image_type: which Dockerfile to use (data/{image_type}.Dockerfile)
+ :param distro: which Linux distribution to use, e.g. "debian:11"
+ :param pass_argv: pass arguments from sys.argv to the script
+ :param env: dict of environment variables
+ :param docker_args: extra arguments to pass to docker
+ """
if "INSIDE_DOCKER" in os.environ:
return
if not shutil.which("docker"):
print("ERROR: docker is not installed")
- exit(1)
+ sys.exit(1)
+
+ if not distro:
+ distro = lib.config.docker_distro_default
+ image_name = get_image_name(distro, image_type)
oscrc = None
if add_oscrc:
oscrc = get_oscrc()
- # Build the docker image. Unless it is up-to-date, this will take a few
- # minutes or so, therefore print the output.
+ # Unless the docker image is up-to-date, building will take a few
+ # minutes or so, therefore print the output. No need to restore
+ # set_cmds_verbose, as we use subprocess.run() below and exit afterwards.
lib.set_cmds_verbose(True)
- build_image()
- lib.set_cmds_verbose(args.verbose)
+
+ # Manuals: build regular image first (data/build_binpkg_manuals.Dockerfile)
+ if image_type.endswith("_manuals"):
+ build_image(distro, image_type.replace("_manuals",""))
+
+ build_image(distro, image_type)
cmd = ["docker", "run",
+ "--rm",
"-e", "INSIDE_DOCKER=1",
"-e", "PYTHONUNBUFFERED=1",
- "--rm", "-v", f"{lib.config.path_top}:/obs"]
+ "-v", f"{lib.config.path_top}:/obs"]
+
+ for env_key, env_val in env.items():
+ cmd += ["-e", f"{env_key}={env_val}"]
if oscrc:
cmd += ["-v", f"{oscrc}:/home/user/.oscrc"]
- script_path = f"/obs/{os.path.basename(script_path)}"
- cmd += [lib.config.docker_image_name, script_path] + sys.argv[1:]
+ cmd += docker_args
+ cmd += [image_name, f"/obs/{script_path}"]
+
+ if pass_argv:
+ cmd += sys.argv[1:]
- print(f"docker: running: {os.path.basename(script_path)} inside docker")
+ print(f"docker: running: {script_path} inside docker")
ret = subprocess.run(cmd)
- exit(ret.returncode)
+ sys.exit(ret.returncode)
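For reference, what the image-naming helpers above produce for a typical
distro/image_type combination (logic copied from this diff, example values):

  def get_image_name(distro, image_type):
      ret = f"{distro}-osmocom-obs-{image_type}"
      return ret.replace(":", "-").replace("_", "-")

  def get_distro_from(distro, image_type):
      if image_type.endswith("_manuals"):
          return get_image_name(distro, image_type.replace("_manuals", ""))
      return distro

  print(get_image_name("debian:12", "build_binpkg"))
  # debian-12-osmocom-obs-build-binpkg
  print(get_distro_from("debian:12", "build_binpkg_manuals"))
  # debian-12-osmocom-obs-build-binpkg (manuals image builds FROM the regular one)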
diff --git a/scripts/obs/lib/git.py b/scripts/obs/lib/git.py
index 29c183b..0194f77 100644
--- a/scripts/obs/lib/git.py
+++ b/scripts/obs/lib/git.py
@@ -22,7 +22,8 @@ def get_latest_tag_pattern(project):
return lib.config.git_latest_tag_pattern_default
-def clone(project, fetch=False):
+def clone(project):
+ fetch = lib.args.git_fetch
repo_path = get_repo_path(project)
url = get_repo_url(project)
@@ -52,20 +53,65 @@ def clean(project):
def checkout(project, branch):
repo_path = get_repo_path(project)
+ if not lib.args.git_checkout:
+ ref = lib.run_cmd(["git", "log", "--pretty=oneline", "--abbrev-commit",
+ "-1"], cwd=repo_path).output.rstrip()
+ print(f"{project}: skipping git checkout, current commit: {ref}")
+ return
print(f"{project}: 'git checkout -f {branch}'")
lib.run_cmd(["git", "checkout", "-f", branch], cwd=repo_path)
print(f"{project}: 'git reset --hard {branch}'")
lib.run_cmd(["git", "reset", "--hard", branch], cwd=repo_path)
+ print(f"{project}: 'git submodule update --init'")
+ lib.run_cmd(["git", "submodule", "update", "--init"], cwd=repo_path)
-def checkout_default_branch(project):
- branch = lib.config.git_branch_default
+def checkout_from_review(project, gerrit_id):
+ """ checkout a given gerrit ID """
+ repo_path = get_repo_path(project)
+ lib.run_cmd(["git", "review", "-s"], cwd=repo_path)
+ lib.run_cmd(["git", "review", "-d", str(gerrit_id)], cwd=repo_path)
+
+
+def get_default_branch(project):
if project in lib.config.git_branch_other:
- branch = lib.config.git_branch_other[project]
+ return lib.config.git_branch_other[project]
+ return lib.config.git_branch_default
+
+def checkout_default_branch(project):
+ branch = get_default_branch(project)
checkout(project, f"origin/{branch}")
+def get_head(project):
+ repo_path = get_repo_path(project)
+ ret = lib.run_cmd(["git", "rev-parse", "HEAD"], cwd=repo_path)
+ return ret.output.rstrip()
+
+
+def get_head_remote(project, branch, branch_missing_ok=True):
+ if not branch:
+ branch = get_default_branch(project)
+ repo_url = get_repo_url(project)
+
+ print(f"{project}: getting head from git remote for {branch}")
+ ls_remote = lib.run_cmd(["git", "ls-remote", repo_url, f"heads/{branch}"])
+
+ ret = ls_remote.output.split("\t")[0]
+
+ # If the branch is missing from the remote, git ls-remote exits with 0 and
+ # the output is empty
+ if not ret:
+ if branch_missing_ok:
+ print(f"{project}: branch not found: {branch}")
+ return None
+ lib.exit_error_cmd(ls_remote, "failed to find head commit for"
+ f" {project} in output")
+
+ return ret
+
+
def get_latest_tag(project):
pattern_str = get_latest_tag_pattern(project)
pattern = re.compile(pattern_str)
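get_head_remote() relies on the tab-separated output of
"git ls-remote <url> heads/<branch>". A sketch of the parsing with a sample
output line (the commit hash is made up); an empty output means the branch
does not exist on the remote:

  # Sample "git ls-remote <url> heads/master" output (hash is made up):
  ls_remote_output = "f3a9c1d2e4b5a6c7d8e9f0a1b2c3d4e5f6a7b8c9\trefs/heads/master\n"

  head = ls_remote_output.split("\t")[0]
  print(head or "branch not found")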
diff --git a/scripts/obs/lib/metapkg.py b/scripts/obs/lib/metapkg.py
index dd261de..38ed930 100644
--- a/scripts/obs/lib/metapkg.py
+++ b/scripts/obs/lib/metapkg.py
@@ -7,17 +7,17 @@ import lib.debian
import lib.rpm_spec
-def get_conflicts(feed):
+def get_conflicts():
ret = []
for f in lib.config.feeds:
- if f == feed:
+ if f == lib.args.feed:
continue
ret += [f"osmocom-{f}"]
return ret
-def prepare_source_dir(feed):
- path = f"{lib.config.path_cache}/osmocom-{feed}"
+def prepare_source_dir():
+ path = f"{lib.config.path_cache}/osmocom-{lib.args.feed}"
if os.path.exists(path):
lib.run_cmd(["rm", "-rf", path])
@@ -26,9 +26,10 @@ def prepare_source_dir(feed):
os.makedirs(f"{path}/contrib")
-def generate_debian_pkg(feed, version):
+def generate_debian_pkg(version):
+ feed = lib.args.feed
path = f"{lib.config.path_cache}/osmocom-{feed}"
- conflicts = get_conflicts(feed)
+ conflicts = get_conflicts()
with open(f"{path}/debian/control", "w") as f:
f.write(f"Source: osmocom-{feed}\n")
@@ -63,11 +64,12 @@ def generate_debian_pkg(feed, version):
f.write("10\n")
-def generate_rpm_spec(feed, version):
+def generate_rpm_spec(version):
+ feed = lib.args.feed
print(f"osmocom-{feed}: generating rpm spec file")
path = (f"{lib.config.path_cache}/osmocom-{feed}/contrib/osmocom-{feed}"
".spec.in")
- conflicts = get_conflicts(feed)
+ conflicts = get_conflicts()
with open(path, "w") as f:
f.write(f"Name: osmocom-{feed}\n")
@@ -83,13 +85,15 @@ def generate_rpm_spec(feed, version):
f.write("%files\n")
-def build(feed, conflict_version):
- pkgname = f"osmocom-{feed}"
+def build():
+ feed = lib.args.feed
+ pkgname = lib.args.conflict_pkgname or f"osmocom-{feed}"
+ conflict_version = lib.args.conflict_version
version = conflict_version if conflict_version else "1.0.0"
print(f"{pkgname}: generating meta package {version}")
- prepare_source_dir(feed)
- generate_debian_pkg(feed, version)
+ prepare_source_dir()
+ generate_debian_pkg(version)
os.makedirs(lib.get_output_path(pkgname))
lib.remove_cache_extra_files()
@@ -97,7 +101,7 @@ def build(feed, conflict_version):
lib.debian.build_source_package(pkgname)
lib.debian.move_files_to_output(pkgname)
- generate_rpm_spec(feed, version)
+ generate_rpm_spec(version)
lib.rpm_spec.copy_to_output(pkgname)
lib.remove_cache_extra_files()
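get_conflicts() simply lists the meta packages of every other feed. With the
feeds list from lib/config.py in this diff and the nightly feed selected, it
evaluates to the following (standalone sketch):

  feeds = ["2022q1", "2022q2", "2023q1", "latest", "master", "nightly"]
  feed = "nightly"

  conflicts = [f"osmocom-{f}" for f in feeds if f != feed]
  print(conflicts)
  # ['osmocom-2022q1', 'osmocom-2022q2', 'osmocom-2023q1', 'osmocom-latest',
  #  'osmocom-master']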
diff --git a/scripts/obs/lib/osc.py b/scripts/obs/lib/osc.py
index 2fcdbc6..ef2fc4b 100644
--- a/scripts/obs/lib/osc.py
+++ b/scripts/obs/lib/osc.py
@@ -6,22 +6,36 @@ import glob
import os
import shlex
import shutil
+import sys
import lib
import lib.config
apiurl = None
+proj = None
-def check_proj(obs_project):
- if ":" in obs_project:
- return
+def check_oscrc():
+ configdir = os.environ.get("XDG_CONFIG_HOME", "~/.config")
+ paths = ["~/.oscrc", f"{configdir}/osc/oscrc"]
+ for path in paths:
+ if os.path.exists(os.path.expanduser(path)):
+ return
- print(f"ERROR: this doesn't look like a valid OBS project: {obs_project}")
- exit(1)
+ print("ERROR: oscrc does not exist, please create one as explained in the"
+ " README.")
+ sys.exit(1)
-def set_apiurl(url):
+def set_apiurl(url, obs_proj=None):
global apiurl
+ global proj
+
+ if obs_proj is not None:
+ if ":" not in obs_proj:
+ print(f"ERROR: this doesn't look like a valid OBS project: {obs_proj}")
+ sys.exit(1)
+ proj = obs_proj
+
apiurl = url
@@ -44,13 +58,14 @@ def run_osc(cmd, *args, **kwargs):
return lib.run_cmd(cmd, *args, **kwargs)
-def get_remote_pkgs(proj):
+def get_remote_pkgs():
print(f"OBS: getting packages in {proj}")
ret = run_osc(["list", proj])
return ret.output.rstrip().split("\n")
-def get_package_version(proj, package):
+def get_package_version(package):
+ feed = lib.args.feed
print(f"{package}: getting OBS version")
ret = run_osc(["list", proj, os.path.basename(package)])
@@ -58,18 +73,24 @@ def get_package_version(proj, package):
if ret.output == '\n':
return "0"
- # Extract the version from the dsc filename
+ # Extract the version from the file list
for line in ret.output.split('\n'):
line = line.rstrip()
- if line.endswith(".dsc"):
- return line.split("_")[-1][:-4]
+ if feed == "master" and package != "osmocom-master":
+ # Use commit_*.txt
+ if line.startswith("commit_") and line.endswith(".txt"):
+ return line.split("_")[1].split(".")[0]
+ else:
+ # Use *.dsc
+ if line.endswith(".dsc"):
+ return line.split("_")[-1][:-4]
lib.exit_error_cmd(ret, "failed to find package version on OBS by"
- " extracting the version from the .dsc filename")
+ " extracting the version from the file list")
-def create_package(proj, package):
+def create_package(package):
print(f"{package}: creating new OBS package")
# cut off repository prefix like in "python/osmo-python-tests"
@@ -93,7 +114,7 @@ def remove_temp_osc():
lib.run_cmd(["rm", "-rf", f"{lib.config.path_temp}/osc"])
-def update_package(proj, package, version):
+def update_package(package, version):
print(f"{package}: updating OBS package")
# cut off repository prefix like in "python/osmo-python-tests"
@@ -118,3 +139,44 @@ def update_package(proj, package, version):
run_osc(["commit", "-m", f"upgrade to {version}"], cwd=path_temp_osc_pkg)
remove_temp_osc()
+
+
+def delete_package(package, commit_msg):
+ print(f"{package}: removing from OBS ({commit_msg})")
+ run_osc(["rdelete", "-m", commit_msg, proj, os.path.basename(package)])
+
+
+def get_prjconf(output_file):
+ print(f"{proj}: getting prjconf")
+ prjconf = lib.osc.run_osc(["meta", "prjconf", proj]).output
+ with open(output_file, "w") as h:
+ h.write(prjconf)
+
+
+def update_prjconf(prjconf_file, commit_msg):
+ print(f"{proj}: updating prjconf")
+ lib.osc.run_osc(["meta",
+ "prjconf",
+ "-F", prjconf_file,
+ "-m", commit_msg,
+ proj])
+
+
+def get_meta(output_file):
+ print(f"{proj}: getting meta")
+ meta = lib.osc.run_osc(["meta", "prj", proj]).output
+ with open(output_file, "w") as h:
+ h.write(meta)
+
+
+def update_meta(meta_file, commit_msg):
+ print(f"{proj}: updating meta")
+ lib.osc.run_osc(["meta",
+ "prj",
+ "-F", meta_file,
+ "-m", commit_msg,
+ proj])
+
+def get_projects():
+ print(f"OBS: getting list of projects ({apiurl})")
+ return lib.osc.run_osc(["ls"]).output.rstrip().split("\n")
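get_package_version() now derives the version from the OBS file list in two
ways, depending on the feed: for the master feed the git commit is taken from
commit_<hash>.txt, otherwise the version comes from the .dsc file name. A
sketch with hypothetical file names:

  def version_from_file_list(lines, feed, package):
      for line in lines:
          line = line.rstrip()
          if feed == "master" and package != "osmocom-master":
              # master feed: version is the git commit from commit_<hash>.txt
              if line.startswith("commit_") and line.endswith(".txt"):
                  return line.split("_")[1].split(".")[0]
          else:
              # other feeds: version is taken from the .dsc file name
              if line.endswith(".dsc"):
                  return line.split("_")[-1][:-4]
      return None

  print(version_from_file_list(["commit_a1b2c3d.txt", "libosmocore_1.9.0.dsc"],
                               "master", "libosmocore"))  # a1b2c3d
  print(version_from_file_list(["libosmocore_1.9.0.dsc"],
                               "latest", "libosmocore"))  # 1.9.0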
diff --git a/scripts/obs/lib/srcpkg.py b/scripts/obs/lib/srcpkg.py
index 035cf81..aef7732 100644
--- a/scripts/obs/lib/srcpkg.py
+++ b/scripts/obs/lib/srcpkg.py
@@ -1,60 +1,80 @@
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import glob
import os
+import pathlib
+import sys
import lib.config
import lib.debian
import lib.rpm_spec
-def checkout_for_feed(project, feed, branch=None):
+def checkout_for_feed(project):
""" checkout a commit, either latest tag or master or 20YY branch """
+ feed = lib.args.feed
+ branch = lib.args.git_branch
if branch:
lib.git.checkout(project, f"origin/{branch}")
elif feed == "latest":
lib.git.checkout_latest_tag(project)
- elif feed == "nightly":
+ elif feed in ["master", "nightly"]:
lib.git.checkout_default_branch(project)
else: # 2022q1 etc
lib.git.checkout(project, f"origin/{feed}")
-def get_git_version_gen_path(project):
- # Use git-version-gen in the project's repository if available
- repo_path = lib.git.get_repo_path(project)
- ret = f"{repo_path}/git-version-gen"
- if os.path.exists(ret):
- return ret
-
- # Use git-version-gen script from libosmocore.git as fallback
- print(f"{project}: has no git-version-gen, using the one from libosmocore")
- repo_path = lib.git.get_repo_path("libosmocore")
- ret = f"{repo_path}/git-version-gen"
- if not os.path.exists(ret):
- lib.git.clone("libosmocore")
- if os.path.exists(ret):
- return ret
-
- print(f"ERROR: {project}.git doesn't have a git-version-gen script and"
- " couldn't find libosmocore.git's copy of the script here either: "
- + ret)
- exit(1)
-
-
def get_git_version(project):
""" :returns: the string from git-version-gen, e.g. '1.7.0.10-76bdb' """
repo_path = lib.git.get_repo_path(project)
- script_path = get_git_version_gen_path(project)
- ret = lib.run_cmd([script_path, "."], cwd=repo_path)
- if not ret.output:
- lib.exit_error_cmd(ret, "empty output from git-version-gen")
+ # Run git-version-gen if it is in the repository
+ script_path = f"{repo_path}/git-version-gen"
+ if os.path.exists(script_path):
+ ret = lib.run_cmd([script_path, "."], cwd=repo_path).output
+ if not ret:
+ lib.exit_error_cmd(ret, "empty output from git-version-gen")
+ return ret
+
+    # Generate a version string similar to git-version-gen, but use git
+ # describe --tags, so it works with non-annotated tags as well (needed for
+ # e.g. limesuite's tags).
+ pattern = lib.git.get_latest_tag_pattern(project)
+ pattern = pattern.replace("^", "", 1)
+ pattern = pattern.replace("$", "", -1)
+ result = lib.run_cmd(["git", "describe",
+ "--abbrev=4",
+ "--tags",
+ f"--match={pattern}",
+ "HEAD"], cwd=repo_path, check=False)
+
+ if result.returncode == 128:
+ print(f"{project}: has no git tags, using 0.0.0 as version")
+ commit = lib.run_cmd(["git", "rev-parse", "HEAD"],
+ cwd=repo_path).output[0:4]
+ count = lib.run_cmd(["git", "rev-list", "--count", "HEAD"],
+ cwd=repo_path).output.rstrip()
+ return f"0.0.0.{count}-{commit}"
+
+ if result.returncode != 0:
+ lib.exit_error_cmd(result, "command failed unexpectedly")
+
+ ret = result.output.rstrip()
+
+ # Like git-version-gen:
+ # * Change the first '-' to '.'
+ # * Remove the 'g' in git describe's output string
+ # * Remove the leading 'v'
+ ret = ret.replace("-", ".", 1)
+ ret = ret.replace("-g", "-", 1)
+ if ret.startswith("v"):
+ ret = ret[1:]
- return ret.output
+ return ret
-def get_version_for_feed(project, feed, conflict_version):
- if feed == "latest":
+def get_version_for_feed(project):
+ if lib.args.feed == "latest":
# There's always a tag if we are here. If there was none, the build
# would have been skipped for latest.
ret = lib.git.get_latest_tag(project)
@@ -72,6 +92,7 @@ def get_version_for_feed(project, feed, conflict_version):
# Append the conflict_version to increase the version even if the commit
# did not change (OS#5135)
+ conflict_version = lib.args.conflict_version
if conflict_version:
ret = f"{ret}.{conflict_version}"
@@ -95,21 +116,33 @@ def get_epoch(project):
return ""
-def prepare_project_osmo_dia2gsup():
- """ Run erlang/osmo_dia2gsup's generate_build_dep.sh script to download
- sources for dependencies. """
- lib.run_cmd("contrib/generate_build_dep.sh",
- cwd=lib.git.get_repo_path("erlang/osmo_dia2gsup"))
-
-
def prepare_project_open5gs():
- """ Build fails without downloading freeDiameter sources. Also we can't
- just update all subprojects because it would fail with 'Subproject
- exists but has no meson.build file' for promethous-client-c. """
- lib.run_cmd(["meson", "subprojects", "download", "freeDiameter"],
+ """ Download the subproject sources here, so the package can be built in
+ OBS without Internet access. """
+ lib.run_cmd(["meson", "subprojects", "download"],
cwd=lib.git.get_repo_path("open5gs"))
+def prepare_project_limesuite():
+ """ Fix bug in 23.10: https://github.com/myriadrf/LimeSuite/pull/386 """
+ lib.run_cmd(["mv", "-v",
+ "liblimesuite22.09-1.install",
+ "liblimesuite23.10-1.install"],
+ cwd=f"{lib.git.get_repo_path('limesuite')}/debian",
+ check=False)
+
+
+def run_generate_build_dep(project):
+ """ Run contrib/generate_build_dep.sh if it exists in the given project, to
+        download sources for dependencies (see e.g. osmo_dia2gsup.git). """
+ repo_path = lib.git.get_repo_path(project)
+ script_path = "contrib/generate_build_dep.sh"
+
+ if os.path.exists(f"{repo_path}/{script_path}"):
+ print(f"{project}: running {script_path}")
+ lib.run_cmd(script_path, cwd=repo_path)
+
+
def write_tarball_version(project, version):
repo_path = lib.git.get_repo_path(project)
@@ -117,25 +150,48 @@ def write_tarball_version(project, version):
f.write(f"{version}\n")
-def build(project, feed, branch, conflict_version, fetch):
- lib.git.clone(project, fetch)
+def write_commit_txt(project):
+ """ Write the current git commit to commit_$commit.txt file, so it gets
+ uploaded to OBS along with the rest of the source package. This allows
+ figuring out if the source package is still up-to-date or not for the
+ master feed. """
+ output_path = lib.get_output_path(project)
+ commit = lib.git.get_head(project)
+
+ print(f"{project}: adding commit_{commit}.txt")
+ pathlib.Path(f"{output_path}/commit_{commit}.txt").touch()
+
+
+def build(project, gerrit_id=0):
+ conflict_version = lib.args.conflict_version
+ feed = lib.args.feed
+ version_append = lib.args.version_append
+
+ lib.git.clone(project)
lib.git.clean(project)
- checkout_for_feed(project, feed, branch)
- version = get_version_for_feed(project, feed, conflict_version)
+ if gerrit_id > 0:
+ lib.git.checkout_from_review(project, gerrit_id)
+ else:
+ checkout_for_feed(project)
+
+ version = get_version_for_feed(project)
+ if version_append:
+ version += version_append
epoch = get_epoch(project)
version_epoch = f"{epoch}:{version}" if epoch else version
+
has_rpm_spec = lib.rpm_spec.get_spec_in_path(project) is not None
print(f"{project}: building source package {version_epoch}")
write_tarball_version(project, version_epoch)
- if project in lib.config.projects_osmocom:
- metapkg = f"osmocom-{feed}"
+ if project in lib.config.projects_osmocom and not lib.args.no_meta:
+ metapkg = lib.args.conflict_pkgname or f"osmocom-{feed}"
lib.debian.control_add_depend(project, metapkg, conflict_version)
if has_rpm_spec:
lib.rpm_spec.add_depend(project, metapkg, conflict_version)
- lib.debian.changelog_add_entry_if_needed(project, feed, version_epoch)
+ lib.debian.changelog_add_entry_if_needed(project, version_epoch)
os.makedirs(lib.get_output_path(project))
lib.remove_cache_extra_files()
@@ -145,6 +201,9 @@ def build(project, feed, branch, conflict_version, fetch):
print(f"{project}: running {project_specific_func}")
globals()[project_specific_func]()
+ if project in lib.config.projects_osmocom:
+ run_generate_build_dep(project)
+
lib.debian.build_source_package(project)
lib.debian.move_files_to_output(project)
@@ -152,5 +211,23 @@ def build(project, feed, branch, conflict_version, fetch):
lib.rpm_spec.generate(project, version, epoch)
lib.rpm_spec.copy_to_output(project)
+ if feed == "master":
+ write_commit_txt(project)
+
lib.remove_cache_extra_files()
return version_epoch
+
+
+def requires_osmo_gsm_manuals_dev(project):
+ """ Check if an already built source package has osmo-gsm-manuals-dev in
+ Build-Depends of the .dsc file """
+ path_dsc = glob.glob(f"{lib.get_output_path(project)}/*.dsc")
+ assert len(path_dsc) == 1, f"failed to get dsc path for {project}"
+
+ with open(path_dsc[0], "r") as handle:
+ for line in handle.readlines():
+ if line.startswith("Build-Depends:") \
+ and "osmo-gsm-manuals-dev" in line:
+ return True
+
+ return False
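The fallback in get_git_version() post-processes "git describe --tags" output
the same way git-version-gen does. For example (the describe output below is a
made-up sample):

  describe = "v1.7.0-10-g76bd"  # sample "git describe --abbrev=4 --tags" output

  ret = describe.replace("-", ".", 1)  # v1.7.0.10-g76bd
  ret = ret.replace("-g", "-", 1)      # v1.7.0.10-76bd
  if ret.startswith("v"):
      ret = ret[1:]
  print(ret)                           # 1.7.0.10-76bd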
diff --git a/scripts/obs/sync_obs_projects.py b/scripts/obs/sync_obs_projects.py
new file mode 100755
index 0000000..965aca2
--- /dev/null
+++ b/scripts/obs/sync_obs_projects.py
@@ -0,0 +1,305 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright 2023 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
+import argparse
+import glob
+import hashlib
+import html
+import os
+import shlex
+import shutil
+import sys
+import xml.etree.ElementTree
+
+import lib
+import lib.docker
+import lib.osc
+
+temp_source_prjconf = f"{lib.config.path_temp}/sync_source_prjconf"
+temp_source_meta = f"{lib.config.path_temp}/sync_source_meta"
+temp_dest_old_meta = f"{lib.config.path_temp}/sync_dest_old_meta"
+temp_dest_old_prjconf = f"{lib.config.path_temp}/sync_dest_old_prjconf"
+temp_dest_new_meta = f"{lib.config.path_temp}/sync_dest_new_meta"
+temp_dest_new_prjconf = f"{lib.config.path_temp}/sync_dest_new_prjconf"
+
+
+def parse_args():
+ parser = argparse.ArgumentParser(description="Sync OBS projects (prjconf,"
+ " meta) from another instance (OS#6165)")
+ parser.add_argument("-d", "--docker",
+ help="run in docker to avoid installing required pkgs",
+ action="store_true")
+ parser.add_argument("-n", "--no-skip-up-to-date",
+ dest="skip_up_to_date", action="store_false",
+ help="always assume projects are outdated")
+ parser.add_argument("-v", "--verbose", action="store_true",
+ help="always print shell commands and their output,"
+ " instead of only printing them on error")
+ parser.add_argument("projects",
+ help="source OBS project, e.g. Debian:12",
+ nargs="+")
+
+ advanced = parser.add_argument_group("advanced options")
+ advanced.add_argument("-A", "--apiurl", help="source OBS API URL"
+ " (default: https://api.opensuse.org)",
+ default="https://api.opensuse.org")
+ advanced.add_argument("-p", "--prefix", default="openSUSE.org-mirror",
+ help="destination OBS prefix"
+ " (default: openSUSE.org-mirror)")
+ advanced.add_argument("-t", "--to-apiurl", help="destination OBS API URL"
+ " (default: https://obs.osmocom.org)",
+ default="https://obs.osmocom.org")
+ advanced.add_argument("-w", "--weburl", default="https://build.opensuse.org",
+ help="source OBS web URL (default:"
+ " https://build.opensuse.org)")
+
+ args = parser.parse_args()
+ lib.set_args(args)
+
+ lib.osc.check_oscrc()
+
+ if args.docker:
+ lib.docker.run_in_docker_and_exit("sync_obs_projects.py", add_oscrc=True)
+
+
+def check_required_programs():
+ required_programs = [
+ "colordiff",
+ "xmllint",
+ ]
+
+ ok = True
+ for program in required_programs:
+ if not shutil.which(program):
+ print(f"ERROR: missing program: {program}")
+ ok = False
+
+ if not ok:
+ print("Either install them or use the -d argument to run in docker")
+ sys.exit(1)
+
+
+def generate_prjconf_header(project):
+ """ This header gets prepended to the prjconf, before it gets written to
+ the destination OBS. This script uses it to determine whether the
+ project needs to be updated next time it runs. """
+ with open(temp_source_prjconf, "rb") as h:
+ source_prjconf = h.read()
+ with open(temp_source_meta, "rb") as h:
+ source_meta = h.read()
+
+ ret = "### This project gets synced from:\n"
+ ret += f"### {lib.args.weburl}/project/show/{project}\n"
+ ret += "### \n"
+ ret += "### Do not modify manually. See OS#6165.\n"
+ ret += "### \n"
+ ret += f"### Sync information:\n"
+ ret += f"### - source meta: {hashlib.md5(source_meta).hexdigest()}\n"
+ ret += f"### - source prjconf: {hashlib.md5(source_prjconf).hexdigest()}\n"
+ ret += "\n"
+
+ return ret
+
+
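
Put together, the header that generate_prjconf_header() writes in front of the
destination prjconf looks like this (MD5 sums are placeholders):

    ### This project gets synced from:
    ### https://build.opensuse.org/project/show/Debian:12
    ###
    ### Do not modify manually. See OS#6165.
    ###
    ### Sync information:
    ### - source meta: <md5 of the downloaded meta>
    ### - source prjconf: <md5 of the downloaded prjconf>

is_up_to_date() below checks whether the destination prjconf still starts with
exactly this block; any change to the source meta or prjconf changes the
checksums and marks the project as outdated.
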
+def is_up_to_date(header, projects, project):
+ project_new = f"{lib.args.prefix}:{project}"
+
+ if project_new not in projects:
+ print(f"{project_new}: is outdated (not in destination OBS)")
+ return False
+
+ lib.osc.get_prjconf(temp_dest_old_prjconf)
+ with open(temp_dest_old_prjconf, "r") as h:
+ dest_prjconf = h.read()
+
+ if dest_prjconf.startswith(header):
+ if not lib.args.skip_up_to_date:
+ print(f"{project_new}: is up-to-date, but -n is set")
+ return False
+ print(f"{project_new}: is up-to-date")
+ return True
+
+ print(f"{project_new}: is outdated")
+ return False
+
+
+def get_relevant_arches(project):
+ if project.startswith("Raspbian:"):
+ return ["armv7l"]
+
+ return ["aarch64",
+ "armv7l",
+ "i586",
+ "x86_64"]
+
+
+def rewrite_meta(project):
+ project_new = f"{lib.args.prefix}:{project}"
+ print(f"{project}: rewriting meta for {project_new}")
+ tree = xml.etree.ElementTree.parse(temp_source_meta)
+ root = tree.getroot()
+ arches = get_relevant_arches(project)
+
+ # Update <project name="...">
+ assert root.get("name") == project
+ root.set("name", project_new)
+
+ for description in root.findall("description"):
+ href = f"{lib.args.weburl}/project/show/{project}"
+ description.text = ("This project gets synced from:"
+ f" <a href='{html.escape(href)}'>{project}</a>\n"
+ "Do not modify manually. See OS#6165.\n")
+
+ for repository in root.findall(".repository"):
+ repo_name = repository.get("name")
+ print(f" adjusting repository: {repo_name}")
+ for path in repository.findall(".path"):
+ # Update <path project="...">
+ path_project_old = path.get("project")
+ path_project_new = f"{lib.args.prefix}:{path_project_old}"
+ path.set("project", path_project_new)
+
+ # Remove unneeded paths
+ for path_check in lib.config.sync_remove_paths:
+ if path_project_old == path_check:
+ print(f" removing path: {path_project_old}")
+ repository.remove(path)
+ break
+
+ # Remove arches we don't build for
+ for arch in repository.findall(".arch"):
+ if arch.text not in arches:
+ print(f" removing arch: {arch.text}")
+ repository.remove(arch)
+ for download in repository.findall(".download"):
+ if download.get("arch") not in arches:
+ repository.remove(download)
+
+ # Debian: meta configs on build.opensuse.org reference PGP keys with an
+ # experimental feature that is not yet in the stable version of OBS
+ # (e.g. <pubkey>debian-archive-12</pubkey>):
+ # https://github.com/openSUSE/open-build-service/pull/14528
+ # Also we don't have such a pubkeydir set up on our OBS server. Assume
+ # ftp.de.debian.org is a trusted mirror, switch to HTTPS and skip the
+ # PGP verification by removing the pubkey blocks.
+ if project.startswith("Debian:"):
+ for download in repository.findall(".download"):
+ url = download.get("url")
+ print(f" changing url to https: {url}")
+ assert url.startswith("http://ftp.de.debian.org/debian"), \
+ f"unexpected mirror URL: {url}"
+ download.set("url", url.replace("http://", "https://"))
+ for pubkey in download.findall("pubkey"):
+ download.remove(pubkey)
+
+ # Remove original maintainers
+ for person in root.findall(".person"):
+ root.remove(person)
+
+ # Add new maintainers
+ for userid in lib.config.sync_set_maintainers:
+ print(f" set maintainer: {userid}")
+ person = xml.etree.ElementTree.Element("person")
+ person.set("userid", userid)
+ person.set("role", "maintainer")
+ # Insert into same position: after title and description
+ root.insert(2, person)
+
+ tree.write(temp_dest_new_meta)
+
+
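
To illustrate the rewrite, a simplified (not verbatim) repository entry of a
source project such as:

    <project name="Debian:12">
      <repository name="standard">
        <path project="Debian:12:Update" repository="standard"/>
        <arch>x86_64</arch>
        <arch>s390x</arch>
      </repository>
    </project>

would be written to the destination OBS roughly as:

    <project name="openSUSE.org-mirror:Debian:12">
      <repository name="standard">
        <path project="openSUSE.org-mirror:Debian:12:Update" repository="standard"/>
        <arch>x86_64</arch>
      </repository>
    </project>

i.e. project names get the destination prefix, arches outside
get_relevant_arches() are dropped, the original maintainers are replaced, and
for Debian projects the <download> mirror URLs are switched to https with
their <pubkey> children removed.
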
+def rewrite_prjconf(project, header):
+ project_new = f"{lib.args.prefix}:{project}"
+ print(f"{project}: rewriting prjconf for {project_new}")
+
+ prjconf = ""
+ with open(temp_source_prjconf, "r") as f:
+ for line in f:
+ line = line.rstrip()
+
+ # Remove unneeded dependencies
+ if line == "VMInstall: kernel-obs-build":
+ print(f" commenting out: {line}")
+ line = f"# (commented out by sync) {line}"
+
+ prjconf += f"{line}\n"
+
+ with open(temp_dest_new_prjconf, "w") as f:
+ f.write(header)
+ f.write(prjconf)
+
+
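
The only content change inside the prjconf itself is the kernel-obs-build
dependency: a source line

    VMInstall: kernel-obs-build

ends up in the destination prjconf as

    # (commented out by sync) VMInstall: kernel-obs-build

with the header from generate_prjconf_header() written in front of the whole
file.
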
+def show_diff(projects, project):
+ project_new = f"{lib.args.prefix}:{project}"
+ if project_new not in projects:
+ return
+
+ # Show prjconf diff (old prjconf was retrieved in is_up_to_date())
+ diff = lib.run_cmd(["colordiff",
+ "-c3",
+ temp_dest_old_prjconf,
+ temp_dest_new_prjconf],
+ check=False)
+ if diff.returncode:
+ print(f"{project_new}: prjconf changes:")
+ print(diff.output, end="")
+ else:
+ print(f"{project_new}: prjconf is unchanged")
+
+ # Show meta diff
+ lib.osc.get_meta(temp_dest_old_meta)
+ for file in [temp_dest_old_meta, temp_dest_new_meta]:
+ lib.run_cmd(f"xmllint --format {shlex.quote(file)} > {shlex.quote(file)}.pretty",
+ shell=True)
+ diff = lib.run_cmd(["colordiff",
+ "-c3",
+ f"{temp_dest_old_meta}.pretty",
+ f"{temp_dest_new_meta}.pretty"],
+ check=False)
+ if diff.returncode:
+ print(f"{project_new}: meta changes:")
+ print(diff.output, end="")
+ else:
+ print(f"{project_new}: meta is unchanged")
+
+
+def main():
+ parse_args()
+ check_required_programs()
+
+ os.makedirs(lib.config.path_temp, exist_ok=True)
+
+ # Get destination OBS projects
+ lib.osc.set_apiurl(lib.args.to_apiurl, None)
+ dest_projects = lib.osc.get_projects()
+
+ for project in lib.args.projects:
+ # Talk to source OBS
+ lib.osc.set_apiurl(lib.args.apiurl, project)
+
+ # Get source prjconf, meta
+ lib.osc.get_prjconf(temp_source_prjconf)
+ lib.osc.get_meta(temp_source_meta)
+
+ # Talk to dest OBS
+ project_new = f"{lib.args.prefix}:{project}"
+ lib.osc.set_apiurl(lib.args.to_apiurl, project_new)
+
+ # Check if dest is up-to-date
+ header = generate_prjconf_header(project)
+ if is_up_to_date(header, dest_projects, project):
+ continue
+
+ # Rewrite configs and show diff
+ rewrite_prjconf(project, header)
+ rewrite_meta(project)
+ show_diff(dest_projects, project)
+
+ # Update dest prjconf & meta
+ commit_msg = f"sync with {lib.args.weburl}/project/show/{project}"
+ lib.osc.update_meta(temp_dest_new_meta, commit_msg)
+ lib.osc.update_prjconf(temp_dest_new_prjconf, commit_msg)
+
+
+if __name__ == "__main__":
+ main()
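
A typical invocation, using the project name from the --help text and running
inside docker, looks like this:

    $ ./sync_obs_projects.py -d Debian:12

This downloads prjconf and meta of Debian:12 from https://api.opensuse.org,
rewrites them as described above and uploads them as
openSUSE.org-mirror:Debian:12 to https://obs.osmocom.org, unless the sync
header shows that the project is already up-to-date.
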
diff --git a/scripts/obs/update_obs_project.py b/scripts/obs/update_obs_project.py
index 02db7ce..a456218 100755
--- a/scripts/obs/update_obs_project.py
+++ b/scripts/obs/update_obs_project.py
@@ -3,6 +3,7 @@
# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
import argparse
import os
+import sys
import traceback
import lib
import lib.config
@@ -14,25 +15,29 @@ import lib.srcpkg
srcpkgs_built = {} # dict of pkgname: version
srcpkgs_skipped = [] # list of pkgnames
+srcpkgs_deleted = [] # list of pkgnames
srcpkgs_failed_build = [] # list of pkgnames
srcpkgs_failed_upload = [] # list of pkgnames
srcpkgs_updated = [] # list of pkgnames
def parse_packages(packages_arg):
+ ret = []
if packages_arg:
for package in packages_arg:
- lib.check_package(package)
- return packages_arg
+ if package == "ALL_OSMOCOM_PACKAGES":
+ ret += lib.config.projects_osmocom
+ else:
+ ret += [lib.set_proper_package_name(package)]
+ return ret
# Default to all
- ret = []
ret += lib.config.projects_osmocom
ret += lib.config.projects_other
return ret
-def build_srcpkg(feed, branch, package, conflict_version, fetch, is_meta_pkg):
+def build_srcpkg(package, is_meta_pkg):
global srcpkgs_built
global srcpkgs_failed_build
@@ -40,9 +45,9 @@ def build_srcpkg(feed, branch, package, conflict_version, fetch, is_meta_pkg):
try:
if is_meta_pkg:
- version = lib.metapkg.build(feed, conflict_version)
+ version = lib.metapkg.build()
else:
- version = lib.srcpkg.build(package, feed, branch, conflict_version, fetch)
+ version = lib.srcpkg.build(package)
srcpkgs_built[package] = version
except Exception as ex:
traceback.print_exception(type(ex), ex, ex.__traceback__)
@@ -51,6 +56,14 @@ def build_srcpkg(feed, branch, package, conflict_version, fetch, is_meta_pkg):
srcpkgs_failed_build += [package]
+def delete_srcpkg(package):
+ global srcpkgs_deleted
+ branch = lib.args.git_branch
+
+ lib.osc.delete_package(package, f"branch {branch} does not exist anymore")
+ srcpkgs_deleted += [package]
+
+
def is_up_to_date(obs_version, git_latest_version):
if obs_version == git_latest_version:
return True
@@ -62,29 +75,40 @@ def is_up_to_date(obs_version, git_latest_version):
return False
-def build_srcpkg_if_needed(proj, feed, branch, pkgs_remote, package, conflict_version,
- fetch, is_meta_pkg, skip_up_to_date):
+def build_srcpkg_if_needed(pkgs_remote, package, is_meta_pkg):
global srcpkgs_skipped
+ feed = lib.args.feed
+ branch = lib.args.git_branch
+ delete = lib.args.delete
- if feed != "latest":
- print(f"{package}: building source package (feed is {feed})")
- else:
+ if feed in ["master", "latest"]:
+ """ Check if we can skip this package by comparing the OBS version with
+ the git remote. """
if is_meta_pkg:
+ conflict_version = lib.args.conflict_version
latest_version = conflict_version if conflict_version else "1.0.0"
else:
- latest_version = lib.git.get_latest_tag_remote(package)
+ if feed == "master":
+ latest_version = lib.git.get_head_remote(package, branch,
+ branch_missing_ok=delete)
+ else:
+ latest_version = lib.git.get_latest_tag_remote(package)
if latest_version is None:
- print(f"{package}: skipping (no git tag found)")
+ if delete and os.path.basename(package) in pkgs_remote:
+ delete_srcpkg(package)
+ return
+
+ print(f"{package}: skipping (no git tag/branch found)")
srcpkgs_skipped += [package]
return
if os.path.basename(package) not in pkgs_remote:
print(f"{package}: building source package (not in OBS)")
else:
- obs_version = lib.osc.get_package_version(proj, package)
+ obs_version = lib.osc.get_package_version(package)
if is_up_to_date(obs_version, latest_version):
- if skip_up_to_date:
+ if lib.args.skip_up_to_date:
print(f"{package}: skipping ({obs_version} is up-to-date)")
srcpkgs_skipped += [package]
return
@@ -95,32 +119,32 @@ def build_srcpkg_if_needed(proj, feed, branch, pkgs_remote, package, conflict_ve
else:
print(f"{package}: building source package (outdated:"
f" {latest_version} <=> {obs_version} in OBS)")
+ else:
+ print(f"{package}: building source package (feed is {feed})")
- build_srcpkg(feed, branch, package, conflict_version, fetch, is_meta_pkg)
+ build_srcpkg(package, is_meta_pkg)
-def upload_srcpkg(proj, feed, pkgs_remote, package, version):
+def upload_srcpkg(pkgs_remote, package, version):
if os.path.basename(package) not in pkgs_remote:
- lib.osc.create_package(proj, package)
- lib.osc.update_package(proj, package, version)
+ lib.osc.create_package(package)
+ lib.osc.update_package(package, version)
-def build_srcpkgs(proj, feed, branch, pkgs_remote, packages, conflict_version, fetch,
- meta, skip_up_to_date):
+def build_srcpkgs(pkgs_remote, packages):
print()
print("### Building source packages ###")
print()
- if meta:
- build_srcpkg_if_needed(proj, feed, branch, pkgs_remote, f"osmocom-{feed}",
- conflict_version, fetch, True, skip_up_to_date)
+ if lib.args.meta:
+ feed = lib.args.feed
+ build_srcpkg_if_needed(pkgs_remote, f"osmocom-{feed}", True)
for package in packages:
- build_srcpkg_if_needed(proj, feed, branch, pkgs_remote, package,
- conflict_version, fetch, False, skip_up_to_date)
+ build_srcpkg_if_needed(pkgs_remote, package, False)
-def upload_srcpkgs(proj, feed, pkgs_remote):
+def upload_srcpkgs(pkgs_remote):
global srcpkgs_built
global srcpkgs_failed_upload
global srcpkgs_updated
@@ -137,7 +161,7 @@ def upload_srcpkgs(proj, feed, pkgs_remote):
for package, version in srcpkgs_built.items():
try:
- upload_srcpkg(proj, feed, pkgs_remote, package, version)
+ upload_srcpkg(pkgs_remote, package, version)
srcpkgs_updated += [package]
except Exception as ex:
traceback.print_exception(type(ex), ex, ex.__traceback__)
@@ -159,9 +183,10 @@ def exit_with_summary():
print(f"Skipped: {len(srcpkgs_skipped)}")
print(f"Failed (srcpkg build): {len(srcpkgs_failed_build)}")
print(f"Failed (srcpkg upload): {len(srcpkgs_failed_upload)}")
+ print(f"Deleted: {len(srcpkgs_deleted)}")
if not srcpkgs_failed_build and not srcpkgs_failed_upload:
- exit(0)
+ sys.exit(0)
print()
print("List of failed packages:")
@@ -170,7 +195,15 @@ def exit_with_summary():
for package in srcpkgs_failed_upload:
print(f"* {package} (srcpkg upload)")
- exit(1)
+ sys.exit(1)
+
+
+def validate_args(args):
+ # Only with feed=master do we check the current commit of a branch on a
+ # remote git repository before trying to update/delete a package from OBS
+ if args.delete and args.feed != "master":
+ print("ERROR: --delete can only be used with --feed=master")
+ sys.exit(1)
def main():
@@ -183,32 +216,34 @@ def main():
dest="skip_up_to_date", action="store_false",
help="for latest feed, build and upload packages even"
" if the version did not change")
+ parser.add_argument("--delete", action="store_true",
+ help="remove packages from OBS if the git branch (-b)"
+ " does not exist anymore")
parser.add_argument("obs_project",
help="OBS project, e.g. home:osmith:nightly")
parser.add_argument("package", nargs="*",
- help="package name, e.g. libosmocore or open5gs,"
- " default is all packages")
+ help="package name, e.g. libosmocore or open5gs or"
+ " ALL_OSMOCOM_PACKAGES, default is all packages")
args = parser.parse_args()
- proj = args.obs_project
- feed = args.feed
- branch = args.git_branch
+ validate_args(args)
+ lib.set_args(args)
packages = parse_packages(args.package)
- lib.set_cmds_verbose(args.verbose)
-
if args.docker:
- lib.docker.run_in_docker_and_exit(__file__, args, True)
+ lib.docker.run_in_docker_and_exit("update_obs_project.py", True)
+
+ lib.osc.check_oscrc()
+ lib.osc.set_apiurl(args.apiurl, args.obs_project)
+
+ if not args.ignore_req:
+ lib.check_required_programs()
- lib.osc.check_proj(proj)
- lib.osc.set_apiurl(args.apiurl)
- lib.check_required_programs()
lib.remove_temp()
- pkgs_remote = lib.osc.get_remote_pkgs(proj)
+ pkgs_remote = lib.osc.get_remote_pkgs()
- build_srcpkgs(proj, feed, branch, pkgs_remote, packages, args.conflict_version,
- args.git_fetch, args.meta, args.skip_up_to_date)
- upload_srcpkgs(proj, feed, pkgs_remote)
+ build_srcpkgs(pkgs_remote, packages)
+ upload_srcpkgs(pkgs_remote)
exit_with_summary()
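
With the changes above, passing ALL_OSMOCOM_PACKAGES builds source packages
for all Osmocom projects (but not the non-Osmocom ones), e.g. with the example
project name from the --help text:

    $ ./update_obs_project.py home:osmith:nightly ALL_OSMOCOM_PACKAGES

The new --delete switch additionally removes a package from the OBS project if
its git branch (-b) does not exist anymore; validate_args() only accepts it
together with the master feed, because only that feed looks up the current
branch commit on the remote repository.
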
diff --git a/scripts/obs/update_obs_wireshark.sh b/scripts/obs/update_obs_wireshark.sh
new file mode 100755
index 0000000..e422292
--- /dev/null
+++ b/scripts/obs/update_obs_wireshark.sh
@@ -0,0 +1,47 @@
+#!/bin/sh -e
+DIR="$(realpath "$(dirname "$0")")"
+PROJ="$1"
+
+BRANCHES="
+ osmith/deb-packaging
+ osmocom/qcdiag
+ laforge/rspro
+"
+
+prepare_git_repo() {
+ cd "$DIR"
+ if ! [ -d _cache/wireshark ]; then
+ mkdir -p _cache
+ git -C _cache clone https://gitlab.com/wireshark/wireshark.git
+ git -C _cache/wireshark remote add osmocom https://gitea.osmocom.org/osmocom/wireshark
+ fi
+
+ cd _cache/wireshark
+ git fetch --all
+ git clean -fdx
+ git checkout -f -B osmocom/all-in-one origin/master
+
+ for b in $BRANCHES; do
+ git merge --no-edit "osmocom/$b"
+ done
+}
+
+update_obs_project() {
+ cd "$DIR"
+ ./update_obs_project.py \
+ --apiurl https://obs.osmocom.org \
+ --docker \
+ --allow-unknown-package \
+ --git-skip-checkout \
+ --git-skip-fetch \
+ --version-append "~osmocom" \
+ "$PROJ" \
+ wireshark
+}
+
+set -x
+prepare_git_repo
+
+if [ -n "$PROJ" ]; then
+ update_obs_project
+fi
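
Without an argument the script only prepares the merged osmocom/all-in-one
branch in _cache/wireshark; passing an OBS project additionally uploads the
resulting source package, e.g. (the project name is just an example):

    $ ./update_obs_wireshark.sh home:osmith:wireshark
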