From bf2bd689455ef1142b89e83236739aeebd90c37e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Crist=C3=B3bal=20Arroyo?= Date: Wed, 31 Jan 2024 14:49:03 -0500 Subject: [PATCH 01/18] Fix `pytest-rerunfailures` installation by using apt instead of pip (#1020) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Change installation of pytest-rerunfailures to use apt Signed-off-by: Cristóbal Arroyo * Initial commit * Remove os_code_name Signed-off-by: Cristóbal Arroyo --------- Signed-off-by: Cristóbal Arroyo --- ros_buildfarm/templates/devel/devel_task.Dockerfile.em | 6 +++++- .../snippet/install_pytest-rerunfailures.Dockerfile.em | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 ros_buildfarm/templates/snippet/install_pytest-rerunfailures.Dockerfile.em diff --git a/ros_buildfarm/templates/devel/devel_task.Dockerfile.em b/ros_buildfarm/templates/devel/devel_task.Dockerfile.em index 682df198c..95d1a79b3 100644 --- a/ros_buildfarm/templates/devel/devel_task.Dockerfile.em +++ b/ros_buildfarm/templates/devel/devel_task.Dockerfile.em @@ -66,7 +66,11 @@ RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y git python RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y python3-pip @# colcon-core.package_identification.python needs at least setuptools 30.3.0 @# pytest-rerunfailures enables usage of --retest-until-pass -RUN pip3 install -U setuptools==59.6.0 pytest-rerunfailures +@(TEMPLATE( + 'snippet/install_pytest-rerunfailures.Dockerfile.em', + os_name=os_name, +))@ +RUN pip3 install -U setuptools==59.6.0 @[end if]@ RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y ccache diff --git a/ros_buildfarm/templates/snippet/install_pytest-rerunfailures.Dockerfile.em b/ros_buildfarm/templates/snippet/install_pytest-rerunfailures.Dockerfile.em new file mode 100644 index 000000000..727ebf0da --- /dev/null +++ b/ros_buildfarm/templates/snippet/install_pytest-rerunfailures.Dockerfile.em @@ -0,0 +1,4 @@ +@[if os_name == 'debian' or os_name == 'ubuntu']@ +@# python3-pytest-rerunfailures is supported since Ubuntu jammy +RUN for i in 1 2 3; do apt-get update && apt-get install -q -y python3-pytest-rerunfailures && apt-get clean && break || if [ $i -lt 3 ]; then sleep 5; else false; fi; done +@[end if]@ From 2c1ddb4a3f3e4c13f5a339aa502923c2bf63cc44 Mon Sep 17 00:00:00 2001 From: Jose Luis Rivero Date: Thu, 4 Jan 2024 13:12:27 +0100 Subject: [PATCH 02/18] Setup bazel single compilation thread in release deb/rpm Signed-off-by: Jose Luis Rivero --- .../templates/release/deb/binarypkg_task.Dockerfile.em | 5 +++++ .../templates/release/rpm/binarypkg_task.Dockerfile.em | 5 +++++ .../snippet/setup_bazel_single_thread_builds.Dockerfile.em | 1 + 3 files changed, 11 insertions(+) create mode 100644 ros_buildfarm/templates/snippet/setup_bazel_single_thread_builds.Dockerfile.em diff --git a/ros_buildfarm/templates/release/deb/binarypkg_task.Dockerfile.em b/ros_buildfarm/templates/release/deb/binarypkg_task.Dockerfile.em index da24e7d8e..bbf1dd046 100644 --- a/ros_buildfarm/templates/release/deb/binarypkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/deb/binarypkg_task.Dockerfile.em @@ -23,6 +23,11 @@ ENV DEBIAN_FRONTEND noninteractive timezone=timezone, ))@ +@(TEMPLATE( + 'snippet/setup_bazel_single_thread_builds.Dockerfile.em', + bazelrc_dir='/etc', +))@ + RUN useradd -u @uid -l -m buildfarm @(TEMPLATE( diff --git a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em 
b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em index 9d451fba9..dea29f553 100644 --- a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em @@ -26,6 +26,11 @@ RUN crb enable RUN @(package_manager) install -y dnf{,-command\(download\)} mock{,-{core-configs,scm}} python@(python3_pkgversion){,-{catkin_pkg,empy,rosdistro,yaml}} +@(TEMPLATE( + 'snippet/setup_bazel_single_thread_builds.Dockerfile.em', + bazelrc_dir='/etc', +))@ + RUN useradd -u @(uid) -l -m buildfarm RUN usermod -a -G mock buildfarm diff --git a/ros_buildfarm/templates/snippet/setup_bazel_single_thread_builds.Dockerfile.em b/ros_buildfarm/templates/snippet/setup_bazel_single_thread_builds.Dockerfile.em new file mode 100644 index 000000000..1ef446be6 --- /dev/null +++ b/ros_buildfarm/templates/snippet/setup_bazel_single_thread_builds.Dockerfile.em @@ -0,0 +1 @@ +RUN echo 'build --jobs=1' >> @bazelrc_dir/bazel.bazelrc From 09a78f439aa21565a885bc863445f35b9eaa618e Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Fri, 8 Mar 2024 16:16:52 -0600 Subject: [PATCH 03/18] Handle virtual deb packages when querying apt cache (#1023) These virtual packages are supported by the rosdep tool, so should be acceptable for use here as well. --- ros_buildfarm/common.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/ros_buildfarm/common.py b/ros_buildfarm/common.py index 0deb7acfc..1347818ca 100644 --- a/ros_buildfarm/common.py +++ b/ros_buildfarm/common.py @@ -172,7 +172,11 @@ def get_distribution_repository_keys(urls, key_files): def get_binary_package_versions(apt_cache, debian_pkg_names): versions = {} for debian_pkg_name in debian_pkg_names: - pkg = apt_cache[debian_pkg_name] + pkg = apt_cache.get(debian_pkg_name) + if not pkg: + prov = apt_cache.get_providing_packages(debian_pkg_name) + assert len(prov) == 1 + pkg = apt_cache[prov[0]] versions[debian_pkg_name] = max(pkg.versions).version return versions From 7c12df582301a99f3d2e877f3c63e69581da5f8e Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Mon, 11 Mar 2024 16:06:00 -0500 Subject: [PATCH 04/18] Fix error message when a package isn't available (#1024) The original behavior was to raise a KeyError with the missing package name, but this was regressed to an assert with no actionable error message by a previous change. 
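To illustrate the restored behavior (the package name below is hypothetical), a lookup that finds neither the package nor anything providing it now fails with an actionable message along the lines of

    KeyError: "No packages available for 'ros-rolling-missing-pkg'"

instead of a bare AssertionError from the `assert len(prov) == 1` that follows.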
Fixes 09a78f4 --- ros_buildfarm/common.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/ros_buildfarm/common.py b/ros_buildfarm/common.py index 1347818ca..319d524af 100644 --- a/ros_buildfarm/common.py +++ b/ros_buildfarm/common.py @@ -175,6 +175,8 @@ def get_binary_package_versions(apt_cache, debian_pkg_names): pkg = apt_cache.get(debian_pkg_name) if not pkg: prov = apt_cache.get_providing_packages(debian_pkg_name) + if not prov: + raise KeyError("No packages available for '%s'" % (debian_pkg_name,)) assert len(prov) == 1 pkg = apt_cache[prov[0]] versions[debian_pkg_name] = max(pkg.versions).version return versions From e8b22f9bbbd49a48ed549ba74c6c170e3e428c45 Mon Sep 17 00:00:00 2001 From: Jose Luis Rivero Date: Tue, 12 Mar 2024 19:11:25 +0100 Subject: [PATCH 05/18] Fix check for WORKSPACE/binarydeb to use a directory instead of a file Signed-off-by: Jose Luis Rivero --- ros_buildfarm/templates/release/deb/binarypkg_job.xml.em | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em index 70e364ea3..5b55a558d 100644 --- a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em +++ b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em @@ -122,8 +122,8 @@ but disabled since the package is blacklisted (or not whitelisted) in the config 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - binarydeb task"', - '# ensure to have write permission before trying to delete the folder', - 'if [ -f $WORKSPACE/binarydeb ] ; then chmod -R u+w $WORKSPACE/binarydeb ; fi', + '# ensure to have write permission before trying to delete the directory', + 'if [ -d $WORKSPACE/binarydeb ] ; then chmod -R u+w $WORKSPACE/binarydeb ; fi', 'rm -fr $WORKSPACE/binarydeb', 'rm -fr $WORKSPACE/docker_build_binarydeb', 'mkdir -p $WORKSPACE/binarydeb', From 050666fe16f0fbe61f26da0780e08cd74d0bd087 Mon Sep 17 00:00:00 2001 From: "Marco A. Gutierrez" Date: Thu, 14 Mar 2024 09:21:46 +0800 Subject: [PATCH 06/18] Adding break-system-packages pip option for noble in devel_task (#1026) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Remove pip-installed setuptools. colcon requires setuptools 30.3.0[1] and we had previously needed to install it from pypi to get that version on older Ubuntu distros, but since Ubuntu Focal has python3-setuptools 45.2.0 and Debian Buster has 40.8.0, we've not needed this for most of the last few years. [1]: https://github.com/colcon/colcon-core/blob/master/setup.cfg#L41 --------- Signed-off-by: Marco A. Gutierrez Co-authored-by: Steven!
Ragnarök --- ros_buildfarm/templates/devel/devel_task.Dockerfile.em | 3 --- 1 file changed, 3 deletions(-) diff --git a/ros_buildfarm/templates/devel/devel_task.Dockerfile.em b/ros_buildfarm/templates/devel/devel_task.Dockerfile.em index 95d1a79b3..b25ca4722 100644 --- a/ros_buildfarm/templates/devel/devel_task.Dockerfile.em +++ b/ros_buildfarm/templates/devel/devel_task.Dockerfile.em @@ -63,14 +63,11 @@ RUN echo "@today_str" RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y git python3-yaml @[if build_tool == 'colcon']@ -RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y python3-pip -@# colcon-core.package_identification.python needs at least setuptools 30.3.0 @# pytest-rerunfailures enables usage of --retest-until-pass @(TEMPLATE( 'snippet/install_pytest-rerunfailures.Dockerfile.em', os_name=os_name, ))@ -RUN pip3 install -U setuptools==59.6.0 @[end if]@ RUN python3 -u /tmp/wrapper_scripts/apt.py update-install-clean -q -y ccache From f8d8219b7b7566dcccd6b95a7cb880962cdd816e Mon Sep 17 00:00:00 2001 From: Tully Foote Date: Fri, 15 Mar 2024 11:33:26 -0700 Subject: [PATCH 07/18] Working to bring CI back to green (#1015) * drop python 2.7 no longer supported on github actions. * move to noetic and focal from melodic and bionic * pin empy to less than 4 * move from foxy to humble * focal to jammy for humble only * Use Ubuntu Noble for Rolling builds Co-authored-by: Scott K Logan --- .github/workflows/ci.yaml | 105 ++++++++++++++------------------------ setup.py | 2 +- 2 files changed, 40 insertions(+), 67 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 93479b019..01e5de971 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -25,7 +25,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.6'] + python: ['3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -38,8 +38,8 @@ jobs: - name: Run job uses: ./.github/actions/audit with: - ros_distro: melodic - os_code_name: bionic + ros_distro: noetic + os_code_name: focal ros1_config: name: ROS 1 Config Validation @@ -52,39 +52,12 @@ jobs: - name: Validate configration run: validate_config_index.py https://raw.githubusercontent.com/ros-infrastructure/ros_buildfarm_config/production/index.yaml - ros1_devel: - name: ROS 1 Devel - runs-on: ubuntu-20.04 - strategy: - matrix: - python: ['2.7', '3.5', '3.6'] - build_tool: [null] - include: - - python: '3.6' - build_tool: colcon - steps: - - name: Check out project - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: ${{matrix.python}} - - name: Install dependencies - uses: ./.github/actions/setup - - name: Run job - uses: ./.github/actions/devel - with: - ros_distro: melodic - os_code_name: bionic - build_tool: ${{matrix.build_tool}} - repo: roscpp_core - ros1_doc: name: ROS 1 Doc runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -97,8 +70,8 @@ jobs: - name: Run job uses: ./.github/actions/doc with: - ros_distro: melodic - os_code_name: bionic + ros_distro: noetic + os_code_name: focal repo: roscpp_core ros1_prerelease: @@ -106,7 +79,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -119,8 +92,8 @@ jobs: - name: Run job uses: ./.github/actions/prerelease with: - ros_distro: melodic - os_code_name: 
bionic + ros_distro: noetic + os_code_name: focal overlay_pkg: roscpp underlay_repos: roscpp_core @@ -129,7 +102,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -148,8 +121,8 @@ jobs: - name: Run job uses: ./.github/actions/prerelease with: - ros_distro: melodic - os_code_name: bionic + ros_distro: noetic + os_code_name: focal source_dir: ${{github.workspace}}/dummy_package ros1_release: @@ -157,7 +130,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -170,8 +143,8 @@ jobs: - name: Run job uses: ./.github/actions/release with: - ros_distro: melodic - os_code_name: bionic + ros_distro: noetic + os_code_name: focal pkg_name: rostime ros1_release_reconfigure: @@ -179,7 +152,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -192,7 +165,7 @@ jobs: - name: Run job uses: ./.github/actions/release_reconfigure with: - ros_distro: melodic + ros_distro: noetic pkg_names: rostime ros1_status_pages: @@ -200,7 +173,7 @@ jobs: runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -213,14 +186,14 @@ jobs: - name: Run job uses: ./.github/actions/status_pages with: - ros_distro: melodic + ros_distro: noetic ros1_sync_criteria_check: name: ROS 1 Sync Criteria Check runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -229,15 +202,15 @@ jobs: - name: Run job uses: ./.github/actions/sync_criteria_check with: - ros_distro: melodic - os_code_name: bionic + ros_distro: noetic + os_code_name: focal ros1_trigger: name: ROS 1 Trigger runs-on: ubuntu-20.04 strategy: matrix: - python: ['2.7', '3.5', '3.6'] + python: ['3.5', '3.6'] steps: - name: Check out project uses: actions/checkout@v2 @@ -246,7 +219,7 @@ jobs: - name: Run job uses: ./.github/actions/trigger with: - ros_distro: melodic + ros_distro: noetic ros2_audit: name: ROS 2 Audit @@ -260,8 +233,8 @@ jobs: uses: ./.github/actions/audit with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy ros2_ci: name: ROS 2 CI @@ -276,24 +249,24 @@ jobs: uses: ./.github/actions/ci with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy package_selection_args: --packages-up-to ament_flake8 - name: Run job 2 id: underlay2 uses: ./.github/actions/ci with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy underlay_dirs: ${{steps.underlay1.outputs.install_dir}} package_selection_args: --packages-skip-up-to ament_flake8 --packages-up-to ament_pep257 - name: Run job 3 uses: ./.github/actions/ci with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy underlay_dirs: ${{steps.underlay1.outputs.install_dir}} 
${{steps.underlay2.outputs.install_dir}} package_selection_args: --packages-skip-up-to ament_flake8 ament_pep257 --packages-up-to ament_cmake_ros @@ -320,8 +293,8 @@ jobs: uses: ./.github/actions/devel with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy repo: rcutils ros2_doc: @@ -337,7 +310,7 @@ jobs: with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml ros_distro: rolling - os_code_name: jammy + os_code_name: noble repo: rcl output_directory: ws/docs_output @@ -353,8 +326,8 @@ jobs: uses: ./.github/actions/prerelease with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml - ros_distro: foxy - os_code_name: focal + ros_distro: humble + os_code_name: jammy overlay_pkg: rcutils underlay_repos: ament_cmake_ros @@ -371,7 +344,7 @@ jobs: with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml ros_distro: rolling - os_code_name: jammy + os_code_name: noble pkg_name: rcutils ros2_release_reconfigure: @@ -421,7 +394,7 @@ jobs: with: config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml ros_distro: rolling - os_code_name: jammy + os_code_name: noble ros2_sync_criteria_check_rpm: name: ROS 2 Sync Criteria Check (RPM) diff --git a/setup.py b/setup.py index 82a41f3a2..a357f93e3 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ 'include_package_data': True, 'zip_safe': False, 'install_requires': [ - 'empy', + 'empy<4', 'PyYAML'], 'extras_require': { 'test': [ From bde772137570645b1d027d3016df7f38c1fcbdcd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Steven!=20Ragnar=C3=B6k?= Date: Mon, 18 Mar 2024 11:24:17 -0700 Subject: [PATCH 08/18] Mount rosdoc2 source directory read-write. (#1031) Our deprecated out-of-tree build option has been removed which means that we now need to host our rosdoc2 sources in a read-write location to allow build artifacts to be generated in-tree. Mounting read-write shouldn't matter overmuch because we delete and re-clone rosdoc2 each time the job runs[1]. 
[1]: https://github.com/ros-infrastructure/ros_buildfarm/blob/f8d8219b7b7566dcccd6b95a7cb880962cdd816e/ros_buildfarm/templates/doc/rosdoc2_job.xml.em#L94 --- ros_buildfarm/scripts/doc/build_rosdoc2.py | 3 +-- ros_buildfarm/templates/doc/rosdoc2_job.xml.em | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/ros_buildfarm/scripts/doc/build_rosdoc2.py b/ros_buildfarm/scripts/doc/build_rosdoc2.py index a66af9bad..25bb7cee5 100644 --- a/ros_buildfarm/scripts/doc/build_rosdoc2.py +++ b/ros_buildfarm/scripts/doc/build_rosdoc2.py @@ -49,8 +49,7 @@ def main(argv=sys.argv[1:]): '-m', 'pip', 'install', - '--no-warn-script-location', - '--use-deprecated=out-of-tree-build', + '--break-system-packages', '.'], cwd=args.rosdoc2_dir) if pip_rc: diff --git a/ros_buildfarm/templates/doc/rosdoc2_job.xml.em b/ros_buildfarm/templates/doc/rosdoc2_job.xml.em index d6654b91d..87ccede9b 100644 --- a/ros_buildfarm/templates/doc/rosdoc2_job.xml.em +++ b/ros_buildfarm/templates/doc/rosdoc2_job.xml.em @@ -178,7 +178,7 @@ else: ' --rm ' + ' --cidfile=$WORKSPACE/docker_doc/docker.cid' + ' -v $WORKSPACE/ros_buildfarm:/tmp/ros_buildfarm:ro' + - ' -v $WORKSPACE/rosdoc2:/tmp/rosdoc2:ro' + + ' -v $WORKSPACE/rosdoc2:/tmp/rosdoc2' + ' -v $WORKSPACE/ws:/tmp/ws' + ' rosdoc2.%s_%s' % (rosdistro_name, doc_repo_spec.name.lower()), 'echo "# END SECTION"', From 3634e05948836b6e72a13db9e1dd2a4132a940a9 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Mon, 18 Mar 2024 16:28:32 -0500 Subject: [PATCH 09/18] Set a sane HOME for binarypkg jobs (#1013) The comment above this invocation indicates that apt needs HOME to be set to something, but the variable is set to an empty string. This is even more critical for running the buildfarm using rootless Podman, because `/` is no longer writable and seems to be the default if HOME is empty. --- ros_buildfarm/templates/release/deb/binarypkg_job.xml.em | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em index 70e364ea3..41e1ee362 100644 --- a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em +++ b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em @@ -168,7 +168,7 @@ but disabled since the package is blacklisted (or not whitelisted) in the config 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_binarydeb/docker.cid' + - ' -e=HOME=' + + ' -e=HOME=/home/buildfarm' + ' -e=TRAVIS=$TRAVIS' + ' --net=host' + ' -v $WORKSPACE/ros_buildfarm:/tmp/ros_buildfarm:ro' + From 6bb1db3604b62f8c4f92dcdc3cb767a22576b7d3 Mon Sep 17 00:00:00 2001 From: Tully Foote Date: Thu, 21 Mar 2024 14:39:29 -0700 Subject: [PATCH 10/18] Add display of cpu info (#1010) I had originally catted /proc/cpuinfo, but it was very verbose. lscpu is in util-linux, which is an essential package, so it should be available on all systems.
--- .../templates/snippet/builder_shell_docker-info.xml.em | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ros_buildfarm/templates/snippet/builder_shell_docker-info.xml.em b/ros_buildfarm/templates/snippet/builder_shell_docker-info.xml.em index 3ae918178..ece4428af 100644 --- a/ros_buildfarm/templates/snippet/builder_shell_docker-info.xml.em +++ b/ros_buildfarm/templates/snippet/builder_shell_docker-info.xml.em @@ -1,6 +1,9 @@ @(SNIPPET( 'builder_shell', script='\n'.join([ + 'echo "# BEGIN SECTION: cpu info"', + 'lscpu', + 'echo "# END SECTION"', 'echo "# BEGIN SECTION: docker version"', 'docker version', 'echo "# END SECTION"', From aa699e7bf3c8d2b6bda560a6069df5eb853c663d Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Wed, 27 Mar 2024 11:10:18 -0500 Subject: [PATCH 11/18] Deal with RPM *-SPECPARTS build subdirectory (#1033) In typical RPM builds, the `BUILD` directory contains only a single subdirectory, which is where the package sources are extracted to prior to building. Beginning with RPM 4.19, a -SPECPARTS subdirectory is created to facilitate dynamic spec file generation. We need to explicitly ignore it when we go looking for where the sources were extracted. --- ros_buildfarm/binaryrpm_job.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/ros_buildfarm/binaryrpm_job.py b/ros_buildfarm/binaryrpm_job.py index bd221b5b0..ea94e8645 100644 --- a/ros_buildfarm/binaryrpm_job.py +++ b/ros_buildfarm/binaryrpm_job.py @@ -80,7 +80,14 @@ def build_binaryrpm( mock_root_path = subprocess.check_output( ['mock', '--root', 'ros_buildfarm', '--print-root-path']).decode('utf-8').strip() mock_build_path = os.path.join(mock_root_path, 'builddir', 'build', 'BUILD') - package_root = os.path.join(mock_build_path, os.listdir(mock_build_path)[0]) + for subdir in os.listdir(mock_build_path): + if subdir.endswith('-SPECPARTS'): + continue + + package_root = os.path.join(mock_build_path, subdir) + break + else: + assert False, "Failed to determine package build root" # output package maintainers for job notification from catkin_pkg.package import parse_package From ed19495fa10107f32d5f2cd29e5e5f28bef4a622 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Wed, 27 Mar 2024 11:19:54 -0500 Subject: [PATCH 12/18] Drop support for CentOS/RHEL 7 (#1034) We never officially supported RHEL 7, though we did target it prior to announcing official support for RHEL 8. The use of `config_opts.package_manager` in this way will break with newer releases of Fedora (and eventually RHEL) where it has become a macro itself, which may resolve to `dnf5`. 
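As a concrete sketch of the breakage being avoided (the expansion shown is an illustration, not output from mock): the mock config template previously derived option keys from that macro, e.g.

    config_opts[f'{config_opts.package_manager}_builddep_opts']  # becomes the key 'dnf5_builddep_opts' where the macro resolves to dnf5

whereas the template now hard-codes the dnf-prefixed names ('dnf_builddep_opts', 'dnf.conf'), as shown in the mock_config.cfg.em hunk below.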
--- .../release/rpm/binarypkg_task.Dockerfile.em | 30 ++++--------------- .../templates/release/rpm/mock_config.cfg.em | 13 ++------ .../release/rpm/sourcepkg_task.Dockerfile.em | 30 ++++--------------- 3 files changed, 12 insertions(+), 61 deletions(-) diff --git a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em index dea29f553..29ce4c225 100644 --- a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em @@ -1,30 +1,17 @@ -@{ -package_manager = 'dnf' -python3_pkgversion = '3' - -if os_name == 'rhel' and os_code_name.isnumeric() and int(os_code_name) < 8: - package_manager = 'yum' - python3_pkgversion = '36' -}@ # generated from @template_name @[if os_name in ['rhel']]@ FROM almalinux:@(os_code_name) -# Enable EPEL on RHEL -RUN @(package_manager) install -y epel-release +# Enable CRB and EPEL on RHEL +RUN dnf install --refresh -y epel-release && crb enable @[else]@ FROM @(os_name):@(os_code_name) @[end if]@ -RUN @(package_manager) update -y - -@[if os_name in ['rhel']]@ -# Enable CRB on RHEL -RUN crb enable -@[end if]@ +RUN dnf update --refresh -y -RUN @(package_manager) install -y dnf{,-command\(download\)} mock{,-{core-configs,scm}} python@(python3_pkgversion){,-{catkin_pkg,empy,rosdistro,yaml}} +RUN dnf install --refresh -y --setopt=install_weak_deps=False dnf{,-command\(download\)} git mock{,-{core-configs,scm}} python3{,-{catkin_pkg,empy,rosdistro,yaml}} @(TEMPLATE( 'snippet/setup_bazel_single_thread_builds.Dockerfile.em', @@ -34,17 +21,10 @@ RUN @(package_manager) install -y dnf{,-command\(download\)} mock{,-{core-config RUN useradd -u @(uid) -l -m buildfarm RUN usermod -a -G mock buildfarm -# Clean up after updates and freshen cache -RUN @(package_manager) clean dbcache packages -RUN @(package_manager) makecache - -# "Expire" the cache to force the next operation to check again -RUN @(package_manager) clean expire-cache - # automatic invalidation once every day RUN echo "@(today_str)" -RUN @(package_manager) update -y +RUN dnf update --refresh -y # Workaround for broken mock configs for EPEL 8 RUN echo -e "include('templates/almalinux-8.tpl')\ninclude('templates/epel-8.tpl')\n\nconfig_opts['root'] = 'epel-8-x86_64'\nconfig_opts['target_arch'] = 'x86_64'\nconfig_opts['legal_host_arches'] = ('x86_64',)" > /etc/mock/epel-8-x86_64.cfg diff --git a/ros_buildfarm/templates/release/rpm/mock_config.cfg.em b/ros_buildfarm/templates/release/rpm/mock_config.cfg.em index 16e918c5f..4e577ad67 100644 --- a/ros_buildfarm/templates/release/rpm/mock_config.cfg.em +++ b/ros_buildfarm/templates/release/rpm/mock_config.cfg.em @@ -10,7 +10,7 @@ config_opts['use_bootstrap'] = False config_opts['chroot_setup_cmd'] += ' python3-rpm-macros' # Install weak dependencies to get group members -config_opts[f'{config_opts.package_manager}_builddep_opts'] = config_opts.get(f'{config_opts.package_manager}_builddep_opts', []) + ['--setopt=install_weak_deps=True'] +config_opts[f'dnf_builddep_opts'] = config_opts.get(f'dnf_builddep_opts', []) + ['--setopt=install_weak_deps=True'] @[if env_vars]@ # Set environment vars from the build config @@ -29,19 +29,10 @@ config_opts['macros']['%__cmake3_in_source_build'] = '1' # Required for running mock in Docker config_opts['use_nspawn'] = False -@[if os_name == 'rhel' and os_code_name == '7']@ -# Inject g++ 8 into RHEL 7 builds -config_opts['chroot_setup_cmd'] += ' devtoolset-8-gcc-c++ 
devtoolset-8-make-nonblocking' -config_opts['macros']['%_buildshell'] = '/usr/bin/scl enable devtoolset-8 -- /bin/sh' - -# Disable weak dependencies on RHEL 7 builds -config_opts['macros']['%_without_weak_deps'] = '1' -@[else]@ # Add g++, which is an assumed dependency in ROS config_opts['chroot_setup_cmd'] += ' gcc-c++ make' -@[end if]@ -config_opts[f'{config_opts.package_manager}.conf'] += """ +config_opts[f'dnf.conf'] += """ @[for i, url in enumerate(distribution_repository_urls)]@ [ros-buildfarm-@(i)] name=ROS Buildfarm Repository @(i) - $basearch diff --git a/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em b/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em index 2d75d1fb9..ebda8b4b1 100644 --- a/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em @@ -1,45 +1,25 @@ -@{ -package_manager = 'dnf' -python3_pkgversion = '3' - -if os_name == 'rhel' and os_code_name.isnumeric() and int(os_code_name) < 8: - package_manager = 'yum' - python3_pkgversion = '36' -}@ # generated from @template_name @[if os_name in ['rhel']]@ FROM almalinux:@(os_code_name) -# Enable EPEL on RHEL -RUN @(package_manager) install -y epel-release +# Enable CRB and EPEL on RHEL +RUN dnf install --refresh -y epel-release && crb enable @[else]@ FROM @(os_name):@(os_code_name) @[end if]@ -RUN @(package_manager) update -y - -@[if os_name in ['rhel']]@ -# Enable CRB on RHEL -RUN crb enable -@[end if]@ +RUN dnf update --refresh -y -RUN @(package_manager) install -y dnf{,-command\(download\)} mock{,-{core-configs,scm}} python@(python3_pkgversion){,-{catkin_pkg,empy,rosdistro,yaml}} +RUN dnf install --refresh -y --setopt=install_weak_deps=False dnf{,-command\(download\)} git mock{,-{core-configs,scm}} python3{,-{catkin_pkg,empy,rosdistro,yaml}} RUN useradd -u @(uid) -l -m buildfarm RUN usermod -a -G mock buildfarm -# Clean up after updates and freshen cache -RUN @(package_manager) clean dbcache packages -RUN @(package_manager) makecache - -# "Expire" the cache to force the next operation to check again -RUN @(package_manager) clean expire-cache - # automatic invalidation once every day RUN echo "@(today_str)" -RUN @(package_manager) update -y +RUN dnf update --refresh -y # Workaround for broken mock configs for EPEL 8 RUN echo -e "include('templates/almalinux-8.tpl')\ninclude('templates/epel-8.tpl')\n\nconfig_opts['root'] = 'epel-8-x86_64'\nconfig_opts['target_arch'] = 'x86_64'\nconfig_opts['legal_host_arches'] = ('x86_64',)" > /etc/mock/epel-8-x86_64.cfg From 0e8d73a58de357524a5094c064ddc07e6e4c60ca Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Wed, 27 Mar 2024 11:21:46 -0500 Subject: [PATCH 13/18] Revert "Work around for broken EPEL 8 mock configs (#938)" (#1035) This reverts commit ebe91855e55d5a1e60aa72442c93e20a38bbad0a. 
--- .../templates/release/rpm/binarypkg_task.Dockerfile.em | 3 --- .../templates/release/rpm/sourcepkg_task.Dockerfile.em | 3 --- 2 files changed, 6 deletions(-) diff --git a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em index 29ce4c225..a37b38d0b 100644 --- a/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/rpm/binarypkg_task.Dockerfile.em @@ -26,9 +26,6 @@ RUN echo "@(today_str)" RUN dnf update --refresh -y -# Workaround for broken mock configs for EPEL 8 -RUN echo -e "include('templates/almalinux-8.tpl')\ninclude('templates/epel-8.tpl')\n\nconfig_opts['root'] = 'epel-8-x86_64'\nconfig_opts['target_arch'] = 'x86_64'\nconfig_opts['legal_host_arches'] = ('x86_64',)" > /etc/mock/epel-8-x86_64.cfg - @[for i, key in enumerate(distribution_repository_keys)]@ RUN echo -e "@('\\n'.join(key.splitlines()))" > /etc/pki/mock/RPM-GPG-KEY-ros-buildfarm-@(i) @[end for]@ diff --git a/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em b/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em index ebda8b4b1..8d61ba32b 100644 --- a/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em +++ b/ros_buildfarm/templates/release/rpm/sourcepkg_task.Dockerfile.em @@ -21,9 +21,6 @@ RUN echo "@(today_str)" RUN dnf update --refresh -y -# Workaround for broken mock configs for EPEL 8 -RUN echo -e "include('templates/almalinux-8.tpl')\ninclude('templates/epel-8.tpl')\n\nconfig_opts['root'] = 'epel-8-x86_64'\nconfig_opts['target_arch'] = 'x86_64'\nconfig_opts['legal_host_arches'] = ('x86_64',)" > /etc/mock/epel-8-x86_64.cfg - @[for i, key in enumerate(distribution_repository_keys)]@ RUN echo -e "@('\\n'.join(key.splitlines()))" > /etc/pki/mock/RPM-GPG-KEY-ros-buildfarm-@(i) @[end for]@ From 9ad8f02a92d76eb1d89ff5cfc389745c62c2c1e2 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Fri, 29 Mar 2024 10:35:49 -0500 Subject: [PATCH 14/18] Read virtual packages from deb/RPM repositories (#1022) These virtual packages are supported by the rosdep tool, so should be acceptable for use here as well. 
--- ros_buildfarm/debian_repo.py | 13 ++++++ ros_buildfarm/rpm_repo.py | 12 +++++ .../dists/noble/main/binary-amd64/Packages.gz | Bin 0 -> 133 bytes .../9/x86_64/repodata/primary.xml.gz | Bin 0 -> 277 bytes .../9/x86_64/repodata/repomd.xml | 7 +++ test/test_repo.py | 43 ++++++++++++++++++ 6 files changed, 75 insertions(+) create mode 100644 test/mock_deb_index/dists/noble/main/binary-amd64/Packages.gz create mode 100644 test/mock_rpm_index/9/x86_64/repodata/primary.xml.gz create mode 100644 test/mock_rpm_index/9/x86_64/repodata/repomd.xml create mode 100644 test/test_repo.py diff --git a/ros_buildfarm/debian_repo.py b/ros_buildfarm/debian_repo.py index c10bf9797..c229c590e 100644 --- a/ros_buildfarm/debian_repo.py +++ b/ros_buildfarm/debian_repo.py @@ -54,4 +54,17 @@ def get_debian_repo_index(debian_repository_baseurl, target, cache_dir): package_versions[debian_pkg_name] = PlatformPackageDescriptor(version, source_name) + prefix = 'Provides: ' + provides = [line[len(prefix):] for line in lines if line.startswith(prefix)] + provides = [provide.strip() for line in provides for provide in line.split(',')] + + for provide in provides: + provide_version = None + if ' ' in provide: + provide, provide_spec = provide.split(' ', 1) + if provide_spec.startswith('(=') and provide_spec.endswith(')'): + provide_version = provide_spec[2:-1].strip() + + package_versions[provide] = PlatformPackageDescriptor(provide_version, source_name) + return package_versions diff --git a/ros_buildfarm/rpm_repo.py b/ros_buildfarm/rpm_repo.py index c6e2fe338..a6bec2b18 100644 --- a/ros_buildfarm/rpm_repo.py +++ b/ros_buildfarm/rpm_repo.py @@ -66,6 +66,18 @@ def get_rpm_repo_index(rpm_repository_baseurl, target, cache_dir): pkg_source_name = None package_versions[pkg_name] = PlatformPackageDescriptor( pkg_version + '-' + pkg_release, pkg_source_name) + pkg_provides = pkg_format.getElementsByTagName('rpm:provides') + for provide in pkg_provides: + for entry in provide.getElementsByTagName('rpm:entry'): + entry_name = entry.getAttribute('name') + if entry.getAttribute('flags') == 'EQ': + entry_version = entry.getAttribute('ver') + entry_release = entry.getAttribute('rel') + desc_version = entry_version + '-' + entry_release + else: + desc_version = None + package_versions[entry_name] = PlatformPackageDescriptor( + desc_version, pkg_source_name) return package_versions diff --git a/test/mock_deb_index/dists/noble/main/binary-amd64/Packages.gz b/test/mock_deb_index/dists/noble/main/binary-amd64/Packages.gz new file mode 100644 index 0000000000000000000000000000000000000000..d0826024aabb1788f07e7b62a14f7fb938c620c4 GIT binary patch literal 133 zcmV;00DAu)iwFo+Cjw>w15jaOYhh<)a{voSOwLYBPqk7g$WGTS&n(H%Ehx$_<8mxY z&d4lDO)e=d0?H-krkI&YOP8JQ`m n#b~DKCMsyy!gOisph+eHC2`5+F1H?R5%fdqkWx84aTI0dfR_ z){;izGk_fX_F43q#8q6Rf!(+7?N?gNy3<*+o1I;1VaPc&Ld>IEZgsazSYx+{gC&f& z);rkPO$_!=1#y4{Z6@6#R~im~`5r3RBO&#ys#4{Z)z;D?uI2 zpqwwC6suFvE5$1r#^wD1Cq5UNwZ)^ETf&SZmFsTUuu&c!5$YBtF)(I8o>l;WlE zs-rhWQ&T{SN8XRND8>%c2JIla9Pg}j+`_{cTm!qzx~C1CKj4XGpGf)n|0$j4q(11| b@V^I@;@F`S`w5GA99vc2-!(8jPyzq|{&|D! 
literal 0 HcmV?d00001 diff --git a/test/mock_rpm_index/9/x86_64/repodata/repomd.xml b/test/mock_rpm_index/9/x86_64/repodata/repomd.xml new file mode 100644 index 000000000..d303567ea --- /dev/null +++ b/test/mock_rpm_index/9/x86_64/repodata/repomd.xml @@ -0,0 +1,7 @@ + + + 0 + + + + diff --git a/test/test_repo.py b/test/test_repo.py new file mode 100644 index 000000000..07bba413e --- /dev/null +++ b/test/test_repo.py @@ -0,0 +1,43 @@ +# Copyright 2024 Open Source Robotics Foundation, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest +from ros_buildfarm.common import PlatformPackageDescriptor +from ros_buildfarm.common import Target +from ros_buildfarm.package_repo import get_package_repo_data + +mock_deb_index_path = os.path.join(os.path.dirname(__file__), 'mock_deb_index') +mock_rpm_index_path = os.path.join(os.path.dirname(__file__), 'mock_rpm_index') + + +@pytest.mark.parametrize('target,mock_index_path', [ + (Target('rhel', '9', 'x86_64'), mock_rpm_index_path), + (Target('ubuntu', 'noble', 'amd64'), mock_deb_index_path), +]) +def test_get_repo_data(tmpdir, target, mock_index_path): + mock_index_url = 'file://' + mock_index_path + data = get_package_repo_data(mock_index_url, (target,), str(tmpdir)) + + assert target in data + target_data = data[target] + + expected = PlatformPackageDescriptor('1.2.3-1', 'pkg-with-prov') + assert expected == target_data.get('pkg-with-prov') + assert expected == target_data.get('pkg-with-prov-a') + assert expected == target_data.get('pkg-with-prov-b') + + expected = PlatformPackageDescriptor(None, 'pkg-with-prov') + assert expected == target_data.get('pkg-with-prov-no-ver') From a087b97d02c6e1f7ecb7680bbd9499b6e60af8bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Steven!=20Ragnar=C3=B6k?= Date: Fri, 29 Mar 2024 11:42:30 -0700 Subject: [PATCH 15/18] Use environment to configure breaking system packages. (#1036) This resolves the regression in this script on platforms with older versions of pip. --- ros_buildfarm/scripts/doc/build_rosdoc2.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/ros_buildfarm/scripts/doc/build_rosdoc2.py b/ros_buildfarm/scripts/doc/build_rosdoc2.py index 25bb7cee5..831b93ed4 100644 --- a/ros_buildfarm/scripts/doc/build_rosdoc2.py +++ b/ros_buildfarm/scripts/doc/build_rosdoc2.py @@ -45,12 +45,17 @@ def main(argv=sys.argv[1:]): clean_workspace(args.workspace_root) with Scope('SUBSECTION', 'Installing rosdoc2'): + env = { + **os.environ, + 'PIP_BREAK_SYSTEM_PACKAGES': '1', + } + pip_rc = subprocess.call(['python3', '-m', 'pip', 'install', - '--break-system-packages', '.'], + env=env, cwd=args.rosdoc2_dir) if pip_rc: return pip_rc From b9f642f1aaef448d3a753b65b7ff7900757eb2ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Steven!=20Ragnar=C3=B6k?= Date: Fri, 29 Mar 2024 12:49:01 -0700 Subject: [PATCH 16/18] Add jammy rosdoc2 jobs as well. (#1037) I couldn't figure out if there was a better solution than just doubling the existing job. 
I marked the previous one as for Noble and the one I just added for Humble as "jammy" I guess that when we introduce new platforms we'd roll these forward or back based on what is currently supported. --- .github/workflows/ci.yaml | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 01e5de971..c8b6efc4b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -297,8 +297,8 @@ jobs: os_code_name: jammy repo: rcutils - ros2_doc: - name: ROS 2 Doc + ros2_doc_noble: + name: ROS 2 Doc (Noble) runs-on: ubuntu-20.04 steps: - name: Check out project @@ -314,6 +314,23 @@ jobs: repo: rcl output_directory: ws/docs_output + ros2_doc_jammy: + name: ROS 2 Doc (Jammy) + runs-on: ubuntu-20.04 + steps: + - name: Check out project + uses: actions/checkout@v2 + - name: Install dependencies + uses: ./.github/actions/setup + - name: Run job + uses: ./.github/actions/doc + with: + config_url: https://raw.githubusercontent.com/ros2/ros_buildfarm_config/ros2/index.yaml + ros_distro: humble + os_code_name: jammy + repo: rcl + output_directory: ws/docs_output + ros2_prerelease: name: ROS 2 Prerelease runs-on: ubuntu-20.04 From a2331357f00b711c0f6c0299330c5fbe0da2e536 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Wed, 3 Apr 2024 11:37:59 -0500 Subject: [PATCH 17/18] Eliminate Pulp from ros_buildfarm (#998) The createrepo-agent replacement for how we were previously using Pulp is proving to be at least as stable as our Pulp-based solution. It's time to remove all of the unused Pulp-related code from ros_buildfarm. --- doc/configuration_options.rst | 10 - ros_buildfarm/argument.py | 43 -- ros_buildfarm/config/release_build_file.py | 8 - ros_buildfarm/pulp.py | 373 ------------------ ros_buildfarm/release_job.py | 27 +- .../generate_release_maintenance_jobs.py | 2 - .../release/generate_release_script.py | 3 +- .../scripts/release/rpm/cull_repo.py | 58 --- .../scripts/release/rpm/import_package.py | 76 ---- .../scripts/release/rpm/mirror_repo.py | 77 ---- .../scripts/release/rpm/sync_repo.py | 118 ------ .../scripts/release/rpm/upload_package.py | 64 --- .../release/rpm/import_package_job.xml.em | 107 ----- .../templates/snippet/pulp_credentials.xml.em | 16 - 14 files changed, 12 insertions(+), 970 deletions(-) delete mode 100644 ros_buildfarm/pulp.py delete mode 100644 ros_buildfarm/scripts/release/rpm/cull_repo.py delete mode 100644 ros_buildfarm/scripts/release/rpm/import_package.py delete mode 100644 ros_buildfarm/scripts/release/rpm/mirror_repo.py delete mode 100644 ros_buildfarm/scripts/release/rpm/sync_repo.py delete mode 100644 ros_buildfarm/scripts/release/rpm/upload_package.py delete mode 100644 ros_buildfarm/templates/release/rpm/import_package_job.xml.em delete mode 100644 ros_buildfarm/templates/snippet/pulp_credentials.xml.em diff --git a/doc/configuration_options.rst b/doc/configuration_options.rst index aff876d97..acccc0489 100644 --- a/doc/configuration_options.rst +++ b/doc/configuration_options.rst @@ -173,13 +173,6 @@ Description of common options master which is commonly used to upload artifacts to another host. This credential id is set in the buildfarm_deployment. -* **Upload destination credential ID**: the ID of the credential entry managed - on the Jenkins master which contains the destination information used to - upload artifacts to another host. - This credential id is set in the buildfarm_deployment. - At present, this value is only used for RPM jobs. 
- - Specific options in release build files --------------------------------------- @@ -237,9 +230,6 @@ The following options are valid in version ``2`` (beside the generic options): * ``upload_credential_id``: the ID of the credential to upload the built packages to the repository host. -* ``upload_credential_id_pulp``: the ID of the credential to upload the built - RPM packages to the repository host instance of Pulp. - * ``upload_host``: the hostname of the repository host where built packages shoudl be uploaded to. Only affects RPM builds at present. diff --git a/ros_buildfarm/argument.py b/ros_buildfarm/argument.py index 20faefddf..ebfee8de3 100644 --- a/ros_buildfarm/argument.py +++ b/ros_buildfarm/argument.py @@ -395,49 +395,6 @@ def add_argument_build_tool_test_args(parser): help='Arbitrary arguments passed to the build tool during testing.') -def add_argument_pulp_base_url(parser): - from_env = os.environ.get('PULP_BASE_URL') - return parser.add_argument( - '--pulp-base-url', - default=from_env, required=not bool(from_env), - help='URL of the pulp API endpoint') - - -def add_argument_pulp_distribution_name(parser): - return parser.add_argument( - '--pulp-distribution-name', required=True, - help='Name of the pulp distribution to target with changes') - - -def add_argument_pulp_password(parser): - from_env = os.environ.get('PULP_PASSWORD') - return parser.add_argument( - '--pulp-password', - default=from_env, required=not bool(from_env), - help='Password used to access the pulp API endpoint') - - -def add_argument_pulp_resource_record(parser): - return parser.add_argument( - '--pulp-resource-record', default=None, metavar='FILE', - help='File in which to record the pulp HREFs of package resources') - - -def add_argument_pulp_task_timeout(parser): - return parser.add_argument( - '--pulp-task-timeout', - type=float, default=60.0, - help='Duration to wait (in seconds) for a pulp task to complete') - - -def add_argument_pulp_username(parser): - from_env = os.environ.get('PULP_USERNAME') - return parser.add_argument( - '--pulp-username', - default=from_env, required=not bool(from_env), - help='Username used to access the pulp API endpoint') - - def add_argument_repos_file_urls(parser): parser.add_argument( '--repos-file-urls', nargs='*', metavar='URL', diff --git a/ros_buildfarm/config/release_build_file.py b/ros_buildfarm/config/release_build_file.py index 2ed418163..76df34b6d 100644 --- a/ros_buildfarm/config/release_build_file.py +++ b/ros_buildfarm/config/release_build_file.py @@ -109,14 +109,6 @@ def __init__(self, name, data): # noqa: D107 assert 'upload_credential_id' in data self.upload_credential_id = data['upload_credential_id'] - self.upload_credential_id_pulp = None - if 'upload_credential_id_pulp' in data: - self.upload_credential_id_pulp = data['upload_credential_id_pulp'] - - self.upload_destination_credential_id = None - if 'upload_destination_credential_id' in data: - self.upload_destination_credential_id = data['upload_destination_credential_id'] - self.upload_host = None if 'upload_host' in data: self.upload_host = data['upload_host'] diff --git a/ros_buildfarm/pulp.py b/ros_buildfarm/pulp.py deleted file mode 100644 index ade00aa1c..000000000 --- a/ros_buildfarm/pulp.py +++ /dev/null @@ -1,373 +0,0 @@ -# Copyright 2020 Open Source Robotics Foundation, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import os -import re -import time - -from pulpcore.client import pulp_rpm -from pulpcore.client import pulpcore - -logger = logging.getLogger(__name__) - - -class PulpTaskError(RuntimeError): - - def __init__(self, task, state): # noqa: D107 - super().__init__("Pulp task '%s' did not complete (%s)" % (task.pulp_href, state)) - self.task = task - - -def format_pkg_ver(pkg): - return '%s%s-%s' % ( - (pkg.epoch + ':') if pkg.epoch != '0' else '', - pkg.version, - pkg.release) - - -def _enumerate_recursive_dependencies(packages, target_names): - new_names = set(target_names) - - while new_names: - target_names = new_names - new_names = set() - for pkg in packages: - if target_names.intersection(req[0] for req in pkg.requires): - yield (pkg.pulp_href, pkg) - new_names.add(pkg.name) - new_names.update(prov[0] for prov in pkg.provides) - - -class PulpPageIterator: - - def __init__(self, fetch_function, *args, **kwargs): # noqa: D107 - self._get_next = lambda offset: fetch_function(*args, **kwargs, offset=offset) - self._offset = 0 - self._next_page() - - def _next_page(self): - self._page = self._get_next(self._offset) - self._offset += len(self._page.results) - self._iter = iter(self._page.results) - if self._page.count: - logger.debug( - 'Fetched a page of %d results (%d%%)' % ( - len(self._page.results), 100.0 * self._offset / self._page.count)) - - def __iter__(self): - # Make sure we're at the beginning - if self._page.previous: - self._offset = 0 - self._next_page() - else: - self._iter = iter(self._page.results) - return self - - def __len__(self): - return self._page.count - - def __next__(self): - try: - return next(self._iter) - except StopIteration: - if not self._page.next: - raise - - self._next_page() - return next(self._iter) - - -class PulpRpmClient: - - def __init__( # noqa: D107 - self, base_url, username, password, - task_timeout=120.0, task_polling_interval=0.5): - self._task_timeout = task_timeout - self._task_polling_interval = task_polling_interval - - self._config = pulpcore.Configuration( - base_url, username=username, password=password) - - # https://pulp.plan.io/issues/5932 - self._config.safe_chars_for_path_param = '/' - - # Core APIs - self._core_client = pulpcore.ApiClient(self._config) - self._core_tasks_api = pulpcore.TasksApi(self._core_client) - self._core_orphans_api = pulpcore.OrphansApi(self._core_client) - - # RPM APIs - self._rpm_client = pulp_rpm.ApiClient(self._config) - self._rpm_distributions_api = pulp_rpm.DistributionsRpmApi(self._rpm_client) - self._rpm_packages_api = pulp_rpm.ContentPackagesApi(self._rpm_client) - self._rpm_publications_api = pulp_rpm.PublicationsRpmApi(self._rpm_client) - self._rpm_remotes_api = pulp_rpm.RemotesRpmApi(self._rpm_client) - self._rpm_repos_api = pulp_rpm.RepositoriesRpmApi(self._rpm_client) - self._rpm_repo_vers_api = pulp_rpm.RepositoriesRpmVersionsApi(self._rpm_client) - - def _wait_for_task(self, task_href): - task = self._core_tasks_api.read(task_href) - - timeout = self._task_timeout - while task.state != 'completed': - if task.state in ['failed', 'canceled']: - raise PulpTaskError(task, 
task.state) - logger.debug( - "Pulp task '%s' is '%s': checking again in %ds" % ( - task.pulp_href, task.state, self._task_polling_interval)) - time.sleep(self._task_polling_interval) - timeout -= self._task_polling_interval - if timeout <= 0: - task_cancel = pulpcore.PatchedTaskCancel('canceled') - task = self._core_tasks_api.tasks_cancel(task.pulp_href, task_cancel) - if task.state != 'completed': - raise PulpTaskError(task, 'timed out') - - task = self._core_tasks_api.read(task.pulp_href) - - return task - - def _publish_and_distribute(self, distribution, repo_version_href): - logger.debug('Publishing and distributing: ' + repo_version_href) - - # Publish the new version - publish_data = pulp_rpm.RpmRpmPublication(repository_version=repo_version_href) - publish_task_href = self._rpm_publications_api.create(publish_data).task - publish_task = self._wait_for_task(publish_task_href) - logger.debug('Created publication: ' + publish_task.created_resources[0]) - - # Distribute the publication at the original endpoint - distribution.publication = publish_task.created_resources[0] - distribute_task_href = self._rpm_distributions_api.partial_update( - distribution.pulp_href, distribution).task - self._wait_for_task(distribute_task_href) - logger.debug('Updated distribution: ' + distribution.pulp_href) - - def enumerate_distributions(self): - return PulpPageIterator( - self._rpm_distributions_api.list) - - def enumerate_pkgs_in_distribution_name(self, distribution_name): - distribution = self._rpm_distributions_api.list( - name=distribution_name).results[0] - publication = self._rpm_publications_api.read(distribution.publication) - return self.enumerate_pkgs_in_repo_ver(publication.repository_version) - - def enumerate_pkgs_in_repo_ver(self, repo_ver_href, names=None): - name__in = ','.join(names) if names is not None else None - return PulpPageIterator( - self._rpm_packages_api.list, repository_version=repo_ver_href, - fields='pulp_href,name,epoch,version,arch,release,provides,requires', - limit=250, name__in=name__in) - - def enumerate_remotes(self): - return PulpPageIterator(self._rpm_remotes_api.list) - - def enumerate_unused_repo_vers(self, distribution_name): - distribution = self._rpm_distributions_api.list( - name=distribution_name).results[0] - publication = self._rpm_publications_api.read(distribution.publication) - all_repo_vers = { - repo_ver.pulp_href: repo_ver for repo_ver in - PulpPageIterator( - self._rpm_repo_vers_api.list, rpm_rpm_repository_href=publication.repository, - limit=500)} - - logger.debug( - 'Fetched %d total repository versions for %s' % ( - len(all_repo_vers), distribution_name)) - - unused_repo_vers = [] - current_repo_ver = all_repo_vers.get( - all_repo_vers[publication.repository_version].base_version) - while current_repo_ver and current_repo_ver.number: - unused_repo_vers.append(current_repo_ver.pulp_href) - current_repo_ver = all_repo_vers.get(current_repo_ver.base_version) - - logger.debug( - 'Found %d ancestors of %s' % ( - len(unused_repo_vers), publication.repository_version)) - - return unused_repo_vers - - def import_and_invalidate( - self, distribution_name, packages_to_add, - invalidate_expression, invalidate_downstream, package_cache=None, dry_run=False): - logger.debug("Performing import and invalidation for '%s'" % (distribution_name,)) - distribution = self._rpm_distributions_api.list( - name=distribution_name).results[0] - logger.debug('Got distribution: ' + distribution.pulp_href) - old_publication = 
self._rpm_publications_api.read(distribution.publication) - logger.debug('Got old publication: ' + old_publication.pulp_href) - - if package_cache is None: - package_cache = {} - - # If we need to invalidate, fetch everything up front - if invalidate_expression or invalidate_downstream: - logger.debug('Getting package list for ' + old_publication.repository_version) - # Get the current packages - current_pkgs = { - pkg.pulp_href: pkg for pkg in - self.enumerate_pkgs_in_repo_ver(old_publication.repository_version)} - package_cache.update(current_pkgs) - - # Get the packages we're adding - logger.debug( - 'Getting information about %d packages being added' % (len(packages_to_add),)) - new_pkgs = { - pkg.pulp_href: pkg for pkg in - [package_cache.get(pkg_href) or self._rpm_packages_api.read(pkg_href) - for pkg_href in packages_to_add]} - - # If we didn't already fetch everything, enumerate only - # packages with the same name - if not invalidate_expression and not invalidate_downstream: - logger.debug('Getting information about packages being replaced') - # Get the current packages - names = set(p.name for p in new_pkgs.values()) - current_pkgs = { - pkg.pulp_href: pkg for pkg in - self.enumerate_pkgs_in_repo_ver( - old_publication.repository_version, names=names)} - package_cache.update(current_pkgs) - - # Invalidate packages - logger.debug('Determining packages to invalidate') - pkgs_to_remove = {} - new_pkg_names = set([pkg.name for pkg in new_pkgs.values()]) - # 1. Remove packages with the same name - pkgs_to_remove.update({ - pkg.pulp_href: pkg for pkg in current_pkgs.values() - if pkg.name in new_pkg_names}) - # 2. Remove downstream packages - if invalidate_downstream: - new_pkg_provides = new_pkg_names.union( - prov[0] for pkg in new_pkgs.values() for prov in pkg.provides) - pkgs_to_remove.update( - _enumerate_recursive_dependencies(current_pkgs.values(), new_pkg_provides)) - # 3. Remove packages matching the invalidation expression - if invalidate_expression: - compiled_expression = re.compile(invalidate_expression) - for pkg in current_pkgs.values(): - if compiled_expression.match(pkg.name): - pkgs_to_remove[pkg.pulp_href] = pkg - - # Prune the list of packages to add and remove - # 1. Packages being added *always* end up in the repo - for href_to_add in new_pkgs.keys(): - pkgs_to_remove.pop(href_to_add, None) - # 2. Packages being added don't need to get re-added - for href_in_current in current_pkgs.keys(): - new_pkgs.pop(href_in_current, None) - - if dry_run: - logger.debug('Finished (dry-run)') - return (new_pkgs.values(), pkgs_to_remove.values()) - - # Commit the changes - logger.debug('Committing changes') - mod_data = pulp_rpm.RepositoryAddRemoveContent( - add_content_units=list(new_pkgs.keys()), - remove_content_units=list(pkgs_to_remove.keys()), - base_version=old_publication.repository_version) - mod_task_href = self._rpm_repos_api.modify(old_publication.repository, mod_data).task - mod_task = self._wait_for_task(mod_task_href) - - # WORKAROUND FOR https://pulp.plan.io/issues/6811 - # - # This implementation doesn't care what the "latest" version of the repository - # looks like, but the implementation details of the add/remove operation in Pulp - # seem to. The expectation is that when `created_resources` is empty, the changes - # are unnecessary and already satisfied, but another case can lead to this - # behavior. See the referenced ticket for details. 
- # - # We can work around the behavior by specifically modifying the 'latest' version - # to look different from our intended state, and then ignoring that new version. - # This way we'll keep our revision history intact. - if not mod_task.created_resources and (new_pkgs or pkgs_to_remove): - # Create a new version with nothing in it. Hopefully that will be different - # from the 'latest'. - logger.warning('Working around pulp issue #6811') - workaround_mod_data = pulp_rpm.RepositoryAddRemoveContent( - add_content_units=[], - remove_content_units=['*'], - base_version=old_publication.repository_version) - workaround_mod_task_href = self._rpm_repos_api.modify( - old_publication.repository, - workaround_mod_data).task - workaround_mod_task = self._wait_for_task(workaround_mod_task_href) - assert workaround_mod_task.created_resources - - # Now that the 'latest' version has changed, re-run the original operation - mod_task_href = self._rpm_repos_api.modify( - old_publication.repository, - mod_data).task - mod_task = self._wait_for_task(mod_task_href) - # END WORKAROUND - - if mod_task.created_resources: - self._publish_and_distribute(distribution, mod_task.created_resources[0]) - else: - logger.warning('Modification operations resulted in no apparent changes') - - return (new_pkgs.values(), pkgs_to_remove.values()) - - def mirror_remote_to_distribution(self, remote_name, distribution_name, dry_run=False): - remote = self._rpm_remotes_api.list(name=remote_name).results[0] - distribution = self._rpm_distributions_api.list(name=distribution_name).results[0] - old_publication = self._rpm_publications_api.read(distribution.publication) - - sync_data = pulp_rpm.RpmRepositorySyncURL( - remote=remote.pulp_href, - mirror=True) - - if dry_run: - return - - sync_task_href = self._rpm_repos_api.sync(old_publication.repository, sync_data).task - sync_task = self._wait_for_task(sync_task_href) - - if sync_task.created_resources: - self._publish_and_distribute(distribution, sync_task.created_resources[0]) - - def remove_unused_content(self): - delete_task_href = self._core_orphans_api.delete().task - delete_task = self._wait_for_task(delete_task_href) - print('%s' % (delete_task,)) - - def remove_unused_repo_vers(self, distribution_name, dry_run=False): - unused_repo_vers = self.enumerate_unused_repo_vers(distribution_name) - - # Start removing the oldest ones first - unused_repo_vers.reverse() - - for repo_ver in unused_repo_vers: - if dry_run: - logger.debug('Removing %s (dry-run)' % (repo_ver,)) - continue - logger.debug('Removing %s' % (repo_ver,)) - delete_task_href = self._rpm_repo_vers_api.delete(repo_ver).task - self._wait_for_task(delete_task_href) - - def upload_pkg(self, file_path): - relative_path = os.path.basename(file_path) - upload_task_href = self._rpm_packages_api.create( - relative_path, file=file_path).task - logger.debug( - "Upload task for '%s': %s" % (os.path.basename(file_path), upload_task_href)) - upload_task = self._wait_for_task(upload_task_href) - - return self._rpm_packages_api.read(upload_task.created_resources[0]) diff --git a/ros_buildfarm/release_job.py b/ros_buildfarm/release_job.py index c34949e1a..69e54031d 100644 --- a/ros_buildfarm/release_job.py +++ b/ros_buildfarm/release_job.py @@ -139,11 +139,14 @@ def configure_release_jobs( all_view_configs = {} all_job_configs = OrderedDict() - job_name, job_config = configure_import_package_job( - config_url, rosdistro_name, release_build_name, - config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run) - if not 
jenkins: - all_job_configs[job_name] = job_config + for os_name, _ in platforms: + if package_format_mapping[os_name] not in ('rpm',): + job_name, job_config = configure_import_package_job( + config_url, rosdistro_name, release_build_name, + config=config, build_file=build_file, jenkins=jenkins, dry_run=dry_run) + if not jenkins: + all_job_configs[job_name] = job_config + break job_name, job_config = configure_sync_packages_to_main_job( config_url, rosdistro_name, release_build_name, @@ -390,7 +393,7 @@ def configure_release_job( config=None, build_file=None, index=None, dist_file=None, cached_pkgs=None, jenkins=None, views=None, - generate_import_package_job=True, + generate_import_package_job=None, generate_sync_packages_jobs=True, is_disabled=False, other_build_files_same_platform=None, groovy_script=None, @@ -408,6 +411,8 @@ def configure_release_job( if build_file is None: build_files = get_release_build_files(config, rosdistro_name) build_file = build_files[release_build_name] + if generate_import_package_job is None: + generate_import_package_job = package_format_mapping[os_name] not in ('rpm',) if index is None: index = get_index(config.rosdistro_index_url) @@ -664,8 +669,6 @@ def _get_sourcedeb_job_config( 'upload_host': build_file.upload_host, 'credential_id': build_file.upload_credential_id, - 'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, 'git_ssh_credential_id': config.git_ssh_credential_id, } @@ -753,8 +756,6 @@ def _get_binarydeb_job_config( 'upload_host': build_file.upload_host, 'credential_id': build_file.upload_credential_id, - 'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, 'shared_ccache': build_file.shared_ccache, } @@ -802,8 +803,6 @@ def _get_import_package_job_config(build_file, package_format): 'abi_incompatibility_assumed': build_file.abi_incompatibility_assumed, 'notify_emails': build_file.notify_emails, 'ros_buildfarm_repository': get_repository(), - 'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, } job_config = expand_template(template_name, job_data) return job_config @@ -868,8 +867,6 @@ def _get_sync_packages_to_testing_job_config( rosdistro_name, package_format), 'notify_emails': build_file.notify_emails, - 'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, } job_config = expand_template(template_name, job_data) return job_config @@ -920,8 +917,6 @@ def _get_sync_packages_to_main_job_config(rosdistro_name, build_file, package_fo 'sync_targets': build_file.targets, 'notify_emails': build_file.notify_emails, - 'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, } job_config = expand_template(template_name, job_data) return job_config diff --git a/ros_buildfarm/scripts/release/generate_release_maintenance_jobs.py b/ros_buildfarm/scripts/release/generate_release_maintenance_jobs.py index e992a836b..3b552b2fe 100644 --- a/ros_buildfarm/scripts/release/generate_release_maintenance_jobs.py +++ b/ros_buildfarm/scripts/release/generate_release_maintenance_jobs.py @@ -124,8 +124,6 @@ def get_import_upstream_job_config(args, config, build_file, package_format): data = { 'import_targets': build_file.targets, 'credential_id': build_file.upload_credential_id, - 
'credential_id_pulp': build_file.upload_credential_id_pulp, - 'dest_credential_id': build_file.upload_destination_credential_id, } return _get_job_config( args, config, config.notify_emails, template_name, diff --git a/ros_buildfarm/scripts/release/generate_release_script.py b/ros_buildfarm/scripts/release/generate_release_script.py index 4224ec03e..fb31278af 100644 --- a/ros_buildfarm/scripts/release/generate_release_script.py +++ b/ros_buildfarm/scripts/release/generate_release_script.py @@ -78,8 +78,7 @@ def beforeInclude(self, *args, **kwargs): ] script = '\n'.join(lines) elif 'Upload binary' in script or 'Upload source' in script: - # Skip scripts which are responsible for uploading resources - # to Pulp. + # Skip scripts which are responsible for uploading resources. return self.scripts.append(script) diff --git a/ros_buildfarm/scripts/release/rpm/cull_repo.py b/ros_buildfarm/scripts/release/rpm/cull_repo.py deleted file mode 100644 index feb2783c1..000000000 --- a/ros_buildfarm/scripts/release/rpm/cull_repo.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright 2022 Open Source Robotics Foundation, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import logging -import sys - -from ros_buildfarm.argument import add_argument_dry_run -from ros_buildfarm.argument import add_argument_pulp_base_url -from ros_buildfarm.argument import add_argument_pulp_distribution_name -from ros_buildfarm.argument import add_argument_pulp_password -from ros_buildfarm.argument import add_argument_pulp_task_timeout -from ros_buildfarm.argument import add_argument_pulp_username -from ros_buildfarm.common import Scope -from ros_buildfarm.pulp import PulpRpmClient - - -def main(argv=sys.argv[1:]): - logging.basicConfig( - level=logging.DEBUG, format='%(name)s %(levelname)s %(asctime)s: %(message)s') - - parser = argparse.ArgumentParser( - description='Cull unused repository snapshots and packages from Pulp') - add_argument_dry_run(parser) - add_argument_pulp_base_url(parser) - add_argument_pulp_distribution_name(parser) - add_argument_pulp_password(parser) - add_argument_pulp_task_timeout(parser) - add_argument_pulp_username(parser) - args = parser.parse_args(argv) - - pulp_client = PulpRpmClient( - args.pulp_base_url, args.pulp_username, args.pulp_password, - task_timeout=args.pulp_task_timeout) - - with Scope('SUBSECTION', 'removing unused snapshots'): - pulp_client.remove_unused_repo_vers(args.pulp_distribution_name, dry_run=args.dry_run) - - with Scope('SUBSECTION', 'removing unused content'): - if args.dry_run: - print('Skipped (dry-run)') - else: - pulp_client.remove_unused_content() - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ros_buildfarm/scripts/release/rpm/import_package.py b/ros_buildfarm/scripts/release/rpm/import_package.py deleted file mode 100644 index 4068f58f6..000000000 --- a/ros_buildfarm/scripts/release/rpm/import_package.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2020 Open Source Robotics Foundation, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import logging -import sys - -from ros_buildfarm.argument import add_argument_dry_run -from ros_buildfarm.argument import add_argument_invalidate -from ros_buildfarm.argument import add_argument_pulp_base_url -from ros_buildfarm.argument import add_argument_pulp_distribution_name -from ros_buildfarm.argument import add_argument_pulp_password -from ros_buildfarm.argument import add_argument_pulp_task_timeout -from ros_buildfarm.argument import add_argument_pulp_username -from ros_buildfarm.common import Scope -from ros_buildfarm.pulp import format_pkg_ver -from ros_buildfarm.pulp import PulpRpmClient - - -def main(argv=sys.argv[1:]): - logging.basicConfig( - level=logging.DEBUG, format='%(name)s %(levelname)s %(asctime)s: %(message)s') - - parser = argparse.ArgumentParser( - description='Import packages into a repository and publish it') - parser.add_argument( - 'package_resources', - nargs='*', metavar="PULP_HREF", - help='Identifiers for packages which should be imported') - add_argument_dry_run(parser) - add_argument_invalidate(parser) - parser.add_argument( - '--invalidate-expression', - default=None, - help='Any existing package names matching this expression will be removed') - add_argument_pulp_base_url(parser) - add_argument_pulp_distribution_name(parser) - add_argument_pulp_password(parser) - add_argument_pulp_task_timeout(parser) - add_argument_pulp_username(parser) - args = parser.parse_args(argv) - - pulp_client = PulpRpmClient( - args.pulp_base_url, args.pulp_username, args.pulp_password, - task_timeout=args.pulp_task_timeout) - - with Scope('SUBSECTION', 'performing repository transaction'): - pkgs_added, pkgs_removed = pulp_client.import_and_invalidate( - args.pulp_distribution_name, args.package_resources, - args.invalidate_expression, args.invalidate, dry_run=args.dry_run) - - with Scope('SUBSECTION', 'enumerating results'): - if not pkgs_added: - print('Not importing any new packages') - for pkg in pkgs_added: - print('Importing package: %s-%s.%s' % (pkg.name, format_pkg_ver(pkg), pkg.arch)) - - if not pkgs_removed: - print('Not removing any existing packages') - for pkg in pkgs_removed: - print('Removing package: %s-%s.%s' % (pkg.name, format_pkg_ver(pkg), pkg.arch)) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ros_buildfarm/scripts/release/rpm/mirror_repo.py b/ros_buildfarm/scripts/release/rpm/mirror_repo.py deleted file mode 100644 index aa513a05f..000000000 --- a/ros_buildfarm/scripts/release/rpm/mirror_repo.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright 2020 Open Source Robotics Foundation, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import re -import sys - -from ros_buildfarm.argument import add_argument_dry_run -from ros_buildfarm.argument import add_argument_pulp_base_url -from ros_buildfarm.argument import add_argument_pulp_password -from ros_buildfarm.argument import add_argument_pulp_task_timeout -from ros_buildfarm.argument import add_argument_pulp_username -from ros_buildfarm.common import Scope -from ros_buildfarm.pulp import PulpRpmClient - - -def main(argv=sys.argv[1:]): - parser = argparse.ArgumentParser( - description='Mirror a remote RPM repository to a pulp distribution') - add_argument_dry_run(parser) - add_argument_pulp_base_url(parser) - add_argument_pulp_password(parser) - add_argument_pulp_task_timeout(parser) - add_argument_pulp_username(parser) - parser.add_argument( - '--remote-source-expression', required=True, - help='Expression to match for pulp remote names') - parser.add_argument( - '--distribution-dest-expression', required=True, - help='Expression to transform matching source remote names ' - 'into destination distribution names') - args = parser.parse_args(argv) - - pulp_client = PulpRpmClient( - args.pulp_base_url, args.pulp_username, args.pulp_password, - task_timeout=args.pulp_task_timeout) - - dists_to_sync = [] - with Scope('SUBSECTION', 'enumerating remotes and distributions to sync'): - remote_expression = re.compile(args.remote_source_expression) - distributions = {dist.name for dist in pulp_client.enumerate_distributions()} - for remote in pulp_client.enumerate_remotes(): - (dist_dest_pattern, matched_source) = remote_expression.subn( - args.distribution_dest_expression, remote.name) - if matched_source: - dist_dest_matches = [ - dist for dist in distributions if re.match(dist_dest_pattern, dist)] - if not dist_dest_matches: - print( - "No distributions match destination pattern '%s'" % dist_dest_pattern, - file=sys.stderr) - return 1 - dists_to_sync.extend((remote.name, dist_dest) for dist_dest in dist_dest_matches) - - dists_to_sync = sorted(set(dists_to_sync)) - print('Syncing %d distributions:' % len(dists_to_sync)) - for remote_name_and_dist_dest in dists_to_sync: - print('- %s => %s' % remote_name_and_dist_dest) - - with Scope('SUBSECTION', 'synchronizing remotes and publishing mirrors'): - for remote_name, dist_name in dists_to_sync: - pulp_client.mirror_remote_to_distribution(remote_name, dist_name, dry_run=args.dry_run) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ros_buildfarm/scripts/release/rpm/sync_repo.py b/ros_buildfarm/scripts/release/rpm/sync_repo.py deleted file mode 100644 index 8dab92259..000000000 --- a/ros_buildfarm/scripts/release/rpm/sync_repo.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright 2020 Open Source Robotics Foundation, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import re -import sys - -from ros_buildfarm.argument import add_argument_dry_run -from ros_buildfarm.argument import add_argument_invalidate -from ros_buildfarm.argument import add_argument_pulp_base_url -from ros_buildfarm.argument import add_argument_pulp_password -from ros_buildfarm.argument import add_argument_pulp_task_timeout -from ros_buildfarm.argument import add_argument_pulp_username -from ros_buildfarm.common import Scope -from ros_buildfarm.pulp import format_pkg_ver -from ros_buildfarm.pulp import PulpRpmClient - - -def main(argv=sys.argv[1:]): - parser = argparse.ArgumentParser( - description='Sync packages between pulp distributions') - add_argument_dry_run(parser) - add_argument_invalidate(parser) - parser.add_argument( - '--invalidate-expression', - default=None, - help='Any existing package names matching this expression will be removed') - add_argument_pulp_base_url(parser) - add_argument_pulp_password(parser) - add_argument_pulp_task_timeout(parser) - add_argument_pulp_username(parser) - parser.add_argument( - '--package-name-expression', default='.*', - help='Expression to match against packages in the repositories') - parser.add_argument( - '--distribution-source-expression', required=True, - help='Expression to match for source distribution names') - parser.add_argument( - '--distribution-dest-expression', required=True, - help='Expression to transform matching source distribution names into destination names') - args = parser.parse_args(argv) - - pulp_client = PulpRpmClient( - args.pulp_base_url, args.pulp_username, args.pulp_password, - task_timeout=args.pulp_task_timeout) - - dists_to_sync = [] - with Scope('SUBSECTION', 'enumerating distributions to sync'): - dist_expression = re.compile(args.distribution_source_expression) - distributions = {dist.name for dist in pulp_client.enumerate_distributions()} - for dist_source in sorted(distributions): - (dist_dest_pattern, matched_source) = dist_expression.subn( - args.distribution_dest_expression, dist_source) - if matched_source: - dist_dest_matches = [ - dist for dist in distributions if re.match(dist_dest_pattern, dist)] - if not dist_dest_matches: - print( - "No distributions match destination pattern '%s'" % dist_dest_pattern, - file=sys.stderr) - return 1 - dists_to_sync.extend((dist_source, dist_dest) for dist_dest in dist_dest_matches) - - dists_to_sync = sorted(set(dists_to_sync)) - print('Syncing %d distributions:' % len(dists_to_sync)) - for dist_source_dest in dists_to_sync: - print('- %s => %s' % dist_source_dest) - - packages = {} - with Scope('SUBSECTION', 'enumerating packages to sync'): - package_expression = re.compile(args.package_name_expression) - for dist_source, _ in dists_to_sync: - packages[dist_source] = { - pkg.pulp_href: pkg - for pkg in pulp_client.enumerate_pkgs_in_distribution_name(dist_source) - if package_expression.match(pkg.name)} - - print('Matched %d packages from source distributions:' % ( - sum([len(pkgs) for pkgs in packages.values()]))) - for dist_source, _ in dists_to_sync: - print('- %s: %d matching packages' % (dist_source, 
len(packages[dist_source]))) - - with Scope('SUBSECTION', 'invalidation and committing changes'): - for dist_source, dist_dest in dists_to_sync: - packages_to_sync = packages[dist_source] - if not packages_to_sync: - print('Skipping sync from %s to %s' % (dist_source, dist_dest)) - continue - print('Syncing %d packages from %s to %s...%s' % ( - len(packages_to_sync), dist_source, dist_dest, - ' (dry run)' if args.dry_run else '')) - package_cache = dict(packages_to_sync) - new_pkgs, pkgs_removed = pulp_client.import_and_invalidate( - dist_dest, packages_to_sync, args.invalidate_expression, - args.invalidate, package_cache=package_cache, dry_run=args.dry_run) - print('- Added %d packages%s' % ( - len(new_pkgs), ' (dry run)' if args.dry_run else '')) - for pkg in sorted(new_pkgs, key=lambda pkg: pkg.name): - print(' - %s-%s' % (pkg.name, format_pkg_ver(pkg))) - print('- Removed %d packages%s' % ( - len(pkgs_removed), ' (dry run)' if args.dry_run else '')) - for pkg in sorted(pkgs_removed, key=lambda pkg: pkg.name): - print(' - %s-%s' % (pkg.name, format_pkg_ver(pkg))) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ros_buildfarm/scripts/release/rpm/upload_package.py b/ros_buildfarm/scripts/release/rpm/upload_package.py deleted file mode 100644 index 76d00fa8f..000000000 --- a/ros_buildfarm/scripts/release/rpm/upload_package.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2020 Open Source Robotics Foundation, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import sys - -from ros_buildfarm.argument import add_argument_pulp_base_url -from ros_buildfarm.argument import add_argument_pulp_password -from ros_buildfarm.argument import add_argument_pulp_resource_record -from ros_buildfarm.argument import add_argument_pulp_task_timeout -from ros_buildfarm.argument import add_argument_pulp_username -from ros_buildfarm.common import Scope -from ros_buildfarm.pulp import format_pkg_ver -from ros_buildfarm.pulp import PulpRpmClient - - -def main(argv=sys.argv[1:]): - parser = argparse.ArgumentParser( - description='Upload package to pulp') - parser.add_argument( - 'package_file', - nargs='+', metavar='FILE', - help='Package file paths to upload') - add_argument_pulp_base_url(parser) - add_argument_pulp_password(parser) - add_argument_pulp_task_timeout(parser) - add_argument_pulp_username(parser) - add_argument_pulp_resource_record(parser) - args = parser.parse_args(argv) - - pulp_client = PulpRpmClient( - args.pulp_base_url, args.pulp_username, args.pulp_password, - task_timeout=args.pulp_task_timeout) - - with Scope('SUBSECTION', 'upload package(s) to pulp'): - created_resources = [] - - for file_path in args.package_file: - print("Uploading '%s'." 
% file_path) - created_rpm = pulp_client.upload_pkg(file_path) - created_resources.append(created_rpm.pulp_href) - - print('Created RPM resource: %s' % created_rpm.pulp_href) - print("Package '%s' version: %s" % (created_rpm.name, format_pkg_ver(created_rpm))) - - if args.pulp_resource_record: - print("Saving upload record to '%s'." % args.pulp_resource_record) - with open(args.pulp_resource_record, 'w') as resource_record: - resource_record.write('PULP_RESOURCES=%s\n' % ' '.join(created_resources)) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/ros_buildfarm/templates/release/rpm/import_package_job.xml.em b/ros_buildfarm/templates/release/rpm/import_package_job.xml.em deleted file mode 100644 index 9c20421a0..000000000 --- a/ros_buildfarm/templates/release/rpm/import_package_job.xml.em +++ /dev/null @@ -1,107 +0,0 @@ - - - Generated at @ESCAPE(now_str) from template '@ESCAPE(template_name)' - false - -@(SNIPPET( - 'property_log-rotator', - days_to_keep=30, - num_to_keep=10000, -))@ -@(SNIPPET( - 'property_job-priority', - priority=-1, -))@ -@(SNIPPET( - 'property_rebuild-settings', -))@ -@(SNIPPET( - 'property_requeue-job', -))@ -@(SNIPPET( - 'property_parameters-definition', - parameters=[ - { - 'type': 'string', - 'name': 'DISTRIBUTION_NAME', - }, - { - 'type': 'string', - 'name': 'PULP_RESOURCES', - }, - { - 'type': 'boolean', - 'name': 'INVALIDATE_DOWNSTREAM', - }, - { - 'type': 'string', - 'name': 'INVALIDATE_EXPRESSION', - }, - { - 'type': 'boolean', - 'name': 'DRY_RUN', - 'description': 'Skip the commit operation but show what would change', - }, - ], -))@ -@(SNIPPET( - 'property_job-weight', -))@ - -@(SNIPPET( - 'scm_git', - url=ros_buildfarm_repository.url, - branch_name=ros_buildfarm_repository.version or 'master', - relative_target_dir='ros_buildfarm', - refspec=None, -))@ - 2 - building_repository - false - false - false - false - - false - -@(SNIPPET( - 'builder_shell', - script='\n'.join([ - 'echo "# BEGIN SECTION: import RPM package"', - 'if [ "$INVALIDATE_DOWNSTREAM" = "true" ]; then INVALIDATE_ARG=--invalidate; fi', - 'if [ "$DRY_RUN" = "true" ]; then DRY_RUN_ARG="--dry-run"; fi', - 'if [ "$INVALIDATE_EXPRESSION" != "" ]; then INVALIDATE_EXPRESSION_ARG="--invalidate-expression $INVALIDATE_EXPRESSION"; fi', - 'export PYTHONPATH=$WORKSPACE/ros_buildfarm:$PYTHONPATH', - 'python3 -u $WORKSPACE/ros_buildfarm/scripts/release/rpm/import_package.py' + - ' --pulp-distribution-name $DISTRIBUTION_NAME' + - ' $PULP_RESOURCES' + - ' $INVALIDATE_ARG' + - ' $INVALIDATE_EXPRESSION_ARG' + - ' $DRY_RUN_ARG', - 'echo "# END SECTION"', - ]), -))@ - - -@(SNIPPET( - 'publisher_description-setter', - regexp='Importing package: (\S+)', - # to prevent overwriting the description of failed builds - regexp_for_failed='ThisRegExpShouldNeverMatch', -))@ -@(SNIPPET( - 'publisher_extended-email', - recipients=notify_emails, -))@ - - -@(SNIPPET( - 'pulp_credentials', - credential_id=credential_id_pulp, - dest_credential_id=dest_credential_id, -))@ -@(SNIPPET( - 'build-wrapper_timestamper', -))@ - - diff --git a/ros_buildfarm/templates/snippet/pulp_credentials.xml.em b/ros_buildfarm/templates/snippet/pulp_credentials.xml.em deleted file mode 100644 index 560f4a762..000000000 --- a/ros_buildfarm/templates/snippet/pulp_credentials.xml.em +++ /dev/null @@ -1,16 +0,0 @@ -@(SNIPPET( - 'credentials_binding', - bindings=[ - { - 'id': credential_id, - 'type': 'user-pass', - 'user_var': 'PULP_USERNAME', - 'pass_var': 'PULP_PASSWORD', - }, - { - 'id': dest_credential_id, - 'type': 'string', - 
'var': 'PULP_BASE_URL', - }, - ], -))@ From d6df50a3429bc2725b95b6f16a92215a91b7ed59 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Thu, 4 Apr 2024 16:16:37 -0500 Subject: [PATCH 18/18] Set PODMAN_USERNS=keep-id when invoking 'docker run' (#1032) The way Dockerfiles are structured in ros_buildfarm, we're dependent on the UID staying the same when the container is run. Setting PODMAN_USERNS=keep-id when the `docker` command is backed by Podman will in effect preserve the behavior of Docker we're relying on here. When the `docker` command is backed by Docker, this shouldn't change the behavior at all. --- .github/actions/release_reconfigure/action.yaml | 2 ++ .github/actions/sync_criteria_check/action.yaml | 2 ++ .github/actions/trigger/action.yaml | 2 ++ doc/index.rst | 7 +++++++ ros_buildfarm/templates/ci/ci_job.xml.em | 16 ++++++++++++++++ .../templates/ci/ci_reconfigure-jobs_job.xml.em | 2 ++ ros_buildfarm/templates/devel/devel_job.xml.em | 6 ++++++ .../devel/devel_reconfigure-jobs_job.xml.em | 2 ++ .../doc/doc_independent_docker_job.xml.em | 2 ++ .../templates/doc/doc_independent_job.xml.em | 2 ++ .../templates/doc/doc_metadata_job.xml.em | 2 ++ .../doc/doc_reconfigure-jobs_job.xml.em | 2 ++ ros_buildfarm/templates/doc/rosdoc2_job.xml.em | 4 ++++ .../templates/misc/rosdistro_cache_job.xml.em | 2 ++ .../templates/release/deb/binarypkg_job.xml.em | 6 ++++++ .../templates/release/deb/sourcepkg_job.xml.em | 2 ++ .../release/release_reconfigure-jobs_job.xml.em | 2 ++ .../release/release_trigger-jobs_job.xml.em | 2 ++ .../rpm/sync_packages_to_testing_job.xml.em | 2 ++ .../status/blocked_releases_page_job.xml.em | 2 ++ .../blocked_source_entries_page_job.xml.em | 2 ++ .../status/release_compare_page_job.xml.em | 2 ++ .../status/release_status_page_job.xml.em | 2 ++ 23 files changed, 75 insertions(+) diff --git a/.github/actions/release_reconfigure/action.yaml b/.github/actions/release_reconfigure/action.yaml index d84760284..c1485aed7 100644 --- a/.github/actions/release_reconfigure/action.yaml +++ b/.github/actions/release_reconfigure/action.yaml @@ -21,6 +21,8 @@ runs: using: composite steps: - id: ros_buildfarm_job + env: + PODMAN_USERNS: keep-id shell: bash run: | echo ::group::Generate job diff --git a/.github/actions/sync_criteria_check/action.yaml b/.github/actions/sync_criteria_check/action.yaml index 2c443f8fc..f6486360b 100644 --- a/.github/actions/sync_criteria_check/action.yaml +++ b/.github/actions/sync_criteria_check/action.yaml @@ -33,6 +33,8 @@ runs: using: composite steps: - id: ros_buildfarm_job + env: + PODMAN_USERNS: keep-id shell: bash run: | echo ::group::Generate job diff --git a/.github/actions/trigger/action.yaml b/.github/actions/trigger/action.yaml index 5e6f37153..063d96fe4 100644 --- a/.github/actions/trigger/action.yaml +++ b/.github/actions/trigger/action.yaml @@ -18,6 +18,8 @@ runs: using: composite steps: - id: ros_buildfarm_job + env: + PODMAN_USERNS: keep-id shell: bash run: | echo ::group::Generate job diff --git a/doc/index.rst b/doc/index.rst index 1018ddb83..dccf40d3c 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -120,9 +120,16 @@ Another job type can be used locally which is not offered on the build farm. * `prerelease jobs `_ build and test ROS repositories as well as build and test released ROS packages depending on them + Optimization ------------ If you are going to be running one or more jobs on any machine we recommend `using squid-in-a-can `_ to cache downloads. It can greatly speed up download times and saves a lot of bandwidth. 
It's used by all our developers as well as on all the build machines. + + + Software required to execute jobs + --------------------------------- + Beyond the administrative requirements for generating jobs, the only noteworthy software for executing jobs, whether locally or via Jenkins, is a container engine compatible with the Docker client CLI. + Currently, the only engines tested with ``ros_buildfarm`` are Docker CE and rootless Podman. diff --git a/ros_buildfarm/templates/ci/ci_job.xml.em b/ros_buildfarm/templates/ci/ci_job.xml.em index ae06ba7b3..4bc993da9 100644 --- a/ros_buildfarm/templates/ci/ci_job.xml.em +++ b/ros_buildfarm/templates/ci/ci_job.xml.em @@ -230,6 +230,8 @@ parameters = [ 'mkdir -p $WORKSPACE/docker_create_workspace', 'mkdir -p $WORKSPACE/docker_build_and_install', 'mkdir -p $WORKSPACE/docker_build_and_test', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generating_dockers/docker.cid' + @@ -271,6 +273,8 @@ parameters = [ ] + [ 'mkdir -p %s' % (dir) for dir in underlay_source_paths ] + [ + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_create_workspace/docker.cid' + @@ -316,6 +320,8 @@ parameters = [ ] + ([ 'echo "# BEGIN SECTION: ccache stats (before)"', 'mkdir -p $HOME/.ccache', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_install/docker_ccache_before.cid' + @@ -331,6 +337,8 @@ parameters = [ 'export UNDERLAY%d_JOB_SPACE=$WORKSPACE/underlay%d/ros%d-linux' % (i + 1, i + 1, local_ros_version) for i, local_ros_version in zip(range(len(underlay_source_jobs)), [ros_version] * len(underlay_source_jobs)) ] + [ + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_install/docker.cid' + @@ -348,6 +356,8 @@ parameters = [ ] + ([ '', 'echo "# BEGIN SECTION: ccache stats (after)"', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_install/docker_ccache_after.cid' + @@ -394,6 +404,8 @@ parameters = [ ] + ([ 'echo "# BEGIN SECTION: ccache stats (before)"', 'mkdir -p $HOME/.ccache', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_test/docker_ccache_before.cid' + @@ -411,6 +423,8 @@ parameters = [ ] + [ 'rm -fr $WORKSPACE/ws/test_results', 'mkdir -p $WORKSPACE/ws/test_results', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_test/docker.cid' + @@ -428,6 +442,8 @@ parameters = [ ] + ([ '', 'echo "# BEGIN SECTION: ccache stats (after)"', + '# If using Podman, change the user namespace to preserve UID.
No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_and_test/docker_ccache_after.cid' + diff --git a/ros_buildfarm/templates/ci/ci_reconfigure-jobs_job.xml.em b/ros_buildfarm/templates/ci/ci_reconfigure-jobs_job.xml.em index 3db683890..c995daede 100644 --- a/ros_buildfarm/templates/ci/ci_reconfigure-jobs_job.xml.em +++ b/ros_buildfarm/templates/ci/ci_reconfigure-jobs_job.xml.em @@ -111,6 +111,8 @@ if (repository_names) { 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - reconfigure jobs for %s"' % (ci_build_name), + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', '# -e=GIT_BRANCH= is required since Jenkins leaves the wc in detached state', 'docker run' + ' --rm ' + diff --git a/ros_buildfarm/templates/devel/devel_job.xml.em b/ros_buildfarm/templates/devel/devel_job.xml.em index d0f322b63..0dcf3b463 100644 --- a/ros_buildfarm/templates/devel/devel_job.xml.em +++ b/ros_buildfarm/templates/devel/devel_job.xml.em @@ -167,6 +167,8 @@ if pull_request: 'rm -fr $WORKSPACE/docker_build_and_test', 'mkdir -p $WORKSPACE/docker_build_and_install', 'mkdir -p $WORKSPACE/docker_build_and_test', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generating_dockers/docker.cid' + @@ -205,6 +207,8 @@ if pull_request: 'if [ ! -d "$HOME/.ccache" ]; then mkdir $HOME/.ccache; fi', ] if shared_ccache else []) + [ ('if [ ! -c /dev/nvidia[0-9] ]; then echo "--require-gpu-support is enabled but can not detect nvidia support installed" && exit 1; fi' if require_gpu_support else ''), + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + (' --env=DISPLAY=:0.0 --env=QT_X11_NO_MITSHM=1 --volume=/tmp/.X11-unix:/tmp/.X11-unix:rw --gpus all' if require_gpu_support else '') + ' --rm ' + @@ -238,6 +242,8 @@ if pull_request: ] + ([ 'if [ ! -d "$HOME/.ccache" ]; then mkdir $HOME/.ccache; fi', ] if shared_ccache else []) + [ + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + (' --env=DISPLAY=:0.0 --env=QT_X11_NO_MITSHM=1 --volume=/tmp/.X11-unix:/tmp/.X11-unix:rw --gpus all' if require_gpu_support else '') + ' --rm ' + diff --git a/ros_buildfarm/templates/devel/devel_reconfigure-jobs_job.xml.em b/ros_buildfarm/templates/devel/devel_reconfigure-jobs_job.xml.em index 1504d5b46..05f793437 100644 --- a/ros_buildfarm/templates/devel/devel_reconfigure-jobs_job.xml.em +++ b/ros_buildfarm/templates/devel/devel_reconfigure-jobs_job.xml.em @@ -113,6 +113,8 @@ if (repository_names) { 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - reconfigure jobs"', + '# If using Podman, change the user namespace to preserve UID. 
No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', '# -e=GIT_BRANCH= is required since Jenkins leaves the wc in detached state', 'docker run' + ' --rm ' + diff --git a/ros_buildfarm/templates/doc/doc_independent_docker_job.xml.em b/ros_buildfarm/templates/doc/doc_independent_docker_job.xml.em index f034999e8..05863f561 100644 --- a/ros_buildfarm/templates/doc/doc_independent_docker_job.xml.em +++ b/ros_buildfarm/templates/doc/doc_independent_docker_job.xml.em @@ -120,6 +120,8 @@ else: 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Docker - %s"' % doc_repository_name, + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm' + ' --net=host' + diff --git a/ros_buildfarm/templates/doc/doc_independent_job.xml.em b/ros_buildfarm/templates/doc/doc_independent_job.xml.em index 8c6b7dbd5..bfb17d7c4 100644 --- a/ros_buildfarm/templates/doc/doc_independent_job.xml.em +++ b/ros_buildfarm/templates/doc/doc_independent_job.xml.em @@ -120,6 +120,8 @@ else: 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - doc independent"', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_doc_independent/docker.cid' + diff --git a/ros_buildfarm/templates/doc/doc_metadata_job.xml.em b/ros_buildfarm/templates/doc/doc_metadata_job.xml.em index 2ebec62bc..6485c469a 100644 --- a/ros_buildfarm/templates/doc/doc_metadata_job.xml.em +++ b/ros_buildfarm/templates/doc/doc_metadata_job.xml.em @@ -95,6 +95,8 @@ 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - doc metadata"', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_doc_metadata/docker.cid' + diff --git a/ros_buildfarm/templates/doc/doc_reconfigure-jobs_job.xml.em b/ros_buildfarm/templates/doc/doc_reconfigure-jobs_job.xml.em index 436177188..68b4e9c36 100644 --- a/ros_buildfarm/templates/doc/doc_reconfigure-jobs_job.xml.em +++ b/ros_buildfarm/templates/doc/doc_reconfigure-jobs_job.xml.em @@ -113,6 +113,8 @@ if (repository_names) { 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - reconfigure jobs"', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', '# -e=GIT_BRANCH= is required since Jenkins leaves the wc in detached state', 'docker run' + ' --rm ' + diff --git a/ros_buildfarm/templates/doc/rosdoc2_job.xml.em b/ros_buildfarm/templates/doc/rosdoc2_job.xml.em index 87ccede9b..4105c162f 100644 --- a/ros_buildfarm/templates/doc/rosdoc2_job.xml.em +++ b/ros_buildfarm/templates/doc/rosdoc2_job.xml.em @@ -141,6 +141,8 @@ else: 'echo "# BEGIN SECTION: Run Dockerfile - generating doc task"', 'rm -fr $WORKSPACE/docker_doc', 'mkdir -p $WORKSPACE/docker_doc', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generating_docker/docker.cid' + @@ -174,6 +176,8 @@ else: 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - doc"', + '# If using Podman, change the user namespace to preserve UID. 
No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_doc/docker.cid' + diff --git a/ros_buildfarm/templates/misc/rosdistro_cache_job.xml.em b/ros_buildfarm/templates/misc/rosdistro_cache_job.xml.em index bf1dfe514..e18148781 100644 --- a/ros_buildfarm/templates/misc/rosdistro_cache_job.xml.em +++ b/ros_buildfarm/templates/misc/rosdistro_cache_job.xml.em @@ -81,6 +81,8 @@ 'echo "# BEGIN SECTION: Run Dockerfile - rosdistro cache"', 'rm -fr $WORKSPACE/rosdistro_cache', 'mkdir -p $WORKSPACE/rosdistro_cache', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generate_rosdistro_cache/docker.cid' + diff --git a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em index d2134b13f..8d7c4a429 100644 --- a/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em +++ b/ros_buildfarm/templates/release/deb/binarypkg_job.xml.em @@ -131,6 +131,8 @@ but disabled since the package is blacklisted (or not whitelisted) in the config ] + ([ 'if [ ! -d "$HOME/.ccache" ]; then mkdir $HOME/.ccache; fi', ] if shared_ccache else []) + [ + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generating_docker/docker.cid' + @@ -165,6 +167,8 @@ but disabled since the package is blacklisted (or not whitelisted) in the config ] + ([ 'if [ ! -d "$HOME/.ccache" ]; then mkdir $HOME/.ccache; fi', ] if shared_ccache else []) + [ + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_build_binarydeb/docker.cid' + @@ -223,6 +227,8 @@ but disabled since the package is blacklisted (or not whitelisted) in the config @# 'echo "# END SECTION"', @# '', @# 'echo "# BEGIN SECTION: Run Dockerfile - install"', +@# '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', +@# 'export PODMAN_USERNS=keep-id', @# 'docker run' + @# ' --rm ' + @# ' --cidfile=$WORKSPACE/docker_install_binarydeb/docker.cid' + diff --git a/ros_buildfarm/templates/release/deb/sourcepkg_job.xml.em b/ros_buildfarm/templates/release/deb/sourcepkg_job.xml.em index 9d79e390b..3a21e49bd 100644 --- a/ros_buildfarm/templates/release/deb/sourcepkg_job.xml.em +++ b/ros_buildfarm/templates/release/deb/sourcepkg_job.xml.em @@ -110,6 +110,8 @@ but disabled since the package is blacklisted (or not whitelisted) in the config 'echo "# BEGIN SECTION: Run Dockerfile - generate sourcedeb"', 'rm -fr $WORKSPACE/sourcedeb', 'mkdir -p $WORKSPACE/sourcedeb/source', + '# If using Podman, change the user namespace to preserve UID. 
No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_sourcedeb/docker.cid' + diff --git a/ros_buildfarm/templates/release/release_reconfigure-jobs_job.xml.em b/ros_buildfarm/templates/release/release_reconfigure-jobs_job.xml.em index c1866ef6c..f24ecf49c 100644 --- a/ros_buildfarm/templates/release/release_reconfigure-jobs_job.xml.em +++ b/ros_buildfarm/templates/release/release_reconfigure-jobs_job.xml.em @@ -113,6 +113,8 @@ if (package_names) { 'echo "# END SECTION"', '', 'echo "# BEGIN SECTION: Run Dockerfile - reconfigure jobs"', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', '# -e=GIT_BRANCH= is required since Jenkins leaves the wc in detached state', 'docker run' + ' --rm ' + diff --git a/ros_buildfarm/templates/release/release_trigger-jobs_job.xml.em b/ros_buildfarm/templates/release/release_trigger-jobs_job.xml.em index 6d52a38c6..80d863b49 100644 --- a/ros_buildfarm/templates/release/release_trigger-jobs_job.xml.em +++ b/ros_buildfarm/templates/release/release_trigger-jobs_job.xml.em @@ -122,6 +122,8 @@ if missed_jobs: 'echo "# BEGIN SECTION: Run Dockerfile - trigger jobs"', 'rm -fr $WORKSPACE/package_repo_cache', 'mkdir -p $WORKSPACE/package_repo_cache', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_trigger_jobs/docker.cid' + diff --git a/ros_buildfarm/templates/release/rpm/sync_packages_to_testing_job.xml.em b/ros_buildfarm/templates/release/rpm/sync_packages_to_testing_job.xml.em index 0035c5f94..c723693c6 100644 --- a/ros_buildfarm/templates/release/rpm/sync_packages_to_testing_job.xml.em +++ b/ros_buildfarm/templates/release/rpm/sync_packages_to_testing_job.xml.em @@ -82,6 +82,8 @@ 'echo "# BEGIN SECTION: Run Dockerfile - check sync condition"', 'rm -fr $WORKSPACE/package_repo_cache', 'mkdir -p $WORKSPACE/package_repo_cache', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_check_sync_criteria/docker.cid' + diff --git a/ros_buildfarm/templates/status/blocked_releases_page_job.xml.em b/ros_buildfarm/templates/status/blocked_releases_page_job.xml.em index 624889b8b..078339376 100644 --- a/ros_buildfarm/templates/status/blocked_releases_page_job.xml.em +++ b/ros_buildfarm/templates/status/blocked_releases_page_job.xml.em @@ -82,6 +82,8 @@ 'echo "# BEGIN SECTION: Run Dockerfile - blocked_releases page"', 'rm -fr $WORKSPACE/blocked_releases_page', 'mkdir -p $WORKSPACE/blocked_releases_page', + '# If using Podman, change the user namespace to preserve UID. 
No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generate_blocked_releases_page/docker.cid' + diff --git a/ros_buildfarm/templates/status/blocked_source_entries_page_job.xml.em b/ros_buildfarm/templates/status/blocked_source_entries_page_job.xml.em index 1bd909ed0..77c789185 100644 --- a/ros_buildfarm/templates/status/blocked_source_entries_page_job.xml.em +++ b/ros_buildfarm/templates/status/blocked_source_entries_page_job.xml.em @@ -82,6 +82,8 @@ 'echo "# BEGIN SECTION: Run Dockerfile - blocked_source_entries page"', 'rm -fr $WORKSPACE/blocked_source_entries_page', 'mkdir -p $WORKSPACE/blocked_source_entries_page', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generate_blocked_source_entries_page/docker.cid' + diff --git a/ros_buildfarm/templates/status/release_compare_page_job.xml.em b/ros_buildfarm/templates/status/release_compare_page_job.xml.em index 801476884..d9dc42060 100644 --- a/ros_buildfarm/templates/status/release_compare_page_job.xml.em +++ b/ros_buildfarm/templates/status/release_compare_page_job.xml.em @@ -83,6 +83,8 @@ 'echo "# BEGIN SECTION: Run Dockerfile - compare page"', 'rm -fr $WORKSPACE/compare_page', 'mkdir -p $WORKSPACE/compare_page', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generate_compare_page/docker.cid' + diff --git a/ros_buildfarm/templates/status/release_status_page_job.xml.em b/ros_buildfarm/templates/status/release_status_page_job.xml.em index d3400db4f..b8bcea5b0 100644 --- a/ros_buildfarm/templates/status/release_status_page_job.xml.em +++ b/ros_buildfarm/templates/status/release_status_page_job.xml.em @@ -85,6 +85,8 @@ 'rm -fr $WORKSPACE/status_page', 'mkdir -p $WORKSPACE/package_repo_cache', 'mkdir -p $WORKSPACE/status_page', + '# If using Podman, change the user namespace to preserve UID. No effect if using Docker.', + 'export PODMAN_USERNS=keep-id', 'docker run' + ' --rm ' + ' --cidfile=$WORKSPACE/docker_generate_status_page/docker.cid' +
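The hunks above all add the same two lines to the generated job scripts: an explanatory comment and an export of PODMAN_USERNS=keep-id immediately before the 'docker run' invocation. As a rough standalone sketch of that pattern (the image name and mount path below are placeholders, not values taken from the generated jobs):

    # Podman only: keep the calling user's UID mapped to the same UID inside
    # the container, so files written to the mounted workspace keep the
    # expected ownership on the host. Docker ignores this variable entirely.
    export PODMAN_USERNS=keep-id
    docker run --rm --volume=$WORKSPACE/example:/tmp/example:rw example_image:latest

Because the variable is only meaningful to Podman, exporting it unconditionally keeps a single code path for both container engines.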