This is an automated email from the ASF dual-hosted git repository.
rkk pushed a commit to branch develop
in repository https://gitbox.apache.org/repos/asf/sdap-nexus.git
The following commit(s) were added to refs/heads/develop by this push:
new 1126439 [SDAP-497] - Release build script (#290)
1126439 is described below
commit 11264396719deaac7446311fdde6addbad8e999f
Author: Riley Kuttruff <[email protected]>
AuthorDate: Mon Sep 23 13:01:08 2024 -0700
[SDAP-497] - Release build script (#290)
* Build script
* Build script
* Rebased onto 1.2.0 release branch to avoid messing up changelog
* Retry failed builds/pushes once to avoid time loss from transient errors
* Moved changelog entry
* Start of src pull changes
* mv build script
* Better ASF pulling
* Build from GitHub
* Simple docs
* Some cleanup, renaming, deleting unused stuff etc
* Update build.py
* Added build from ASF archive
* Updates and fixes
* Updates for post-grad changes
* Add support for custom nexusproto builds from git
* Build tool updates
* Update changelog
* Implement build from local directory
---------
Co-authored-by: rileykk <[email protected]>
---
CHANGELOG.md | 1 +
tools/build/README.md | 31 +++
tools/build/build.py | 616 +++++++++++++++++++++++++++++++++++++++++++
tools/build/requirements.txt | 4 +
4 files changed, 652 insertions(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9a46719..616ddad 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ and this project adheres to [Semantic
Versioning](https://semver.org/spec/v2.0.0
### Added
- SDAP-469: Support for three dimensional data. 2D data defaults to a layer at
0m elevation.
- SDAP-492: Added some demo algorithms for working with and visualizing
tomography data. Currently designed for data from airborne SAR campaigns, but
can be readily generalized.
+- SDAP-497: Added tool to ease building of releases. Can build from ASF
distributions, git repos, and local directories
### Changed
### Deprecated
### Removed
diff --git a/tools/build/README.md b/tools/build/README.md
new file mode 100644
index 0000000..0a06930
--- /dev/null
+++ b/tools/build/README.md
@@ -0,0 +1,31 @@
+# SDAP Docker image build helper
+
+This script will help ease the process of building a full set of SDAP Docker
images, whether building from an official ASF release, a release candidate,
GitHub (any branch, official repo or forks), or
+the local filesystem. Builds from ASF will also have
their checksums and signatures checked, making this useful in the release
process. You can also choose to not use build cache
+and can also push the images once they've been built.
+
+## Images built
+
+- `sdap-nexus-webapp`
+- `sdap-collection-manager`
+- `sdap-granule-ingester`
+- `sdap-solr-cloud`
+- `sdap-solr-cloud-init`
+
+## Requirements
+
+- Docker must be installed and running
+- gpg
+- git
+- tar
+- Python with the dependencies in `requirements.txt` installed
+
+## Usage
+
+Basic usage is simple:
+
+```shell
+python build.py
+```
+
+You will be prompted for all the information needed. There are, however, a
couple extra options. Run `python build.py -h` to learn more on them.
\ No newline at end of file
diff --git a/tools/build/build.py b/tools/build/build.py
new file mode 100644
index 0000000..da2393f
--- /dev/null
+++ b/tools/build/build.py
@@ -0,0 +1,616 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import hashlib
+import os
+import shutil
+import subprocess
+import tempfile
+
+import requests
+from bs4 import BeautifulSoup
+from tenacity import retry, stop_after_attempt, wait_fixed
+
# Absolute paths of the external tools this script shells out to.
# shutil.which returns None when an executable is not on PATH.
DOCKER = shutil.which('docker')
TAR = shutil.which('tar')
GIT = shutil.which('git')
GPG = shutil.which('gpg')

# Fail fast at import time with a report of which required tools are missing,
# rather than failing partway through a build.
if any([req is None for req in [DOCKER, TAR, GPG, GIT]]):
    raise OSError(f'Requirement(s) not found in PATH:\n'
                  f' docker: {DOCKER if DOCKER is not None else "MISSING"}\n'
                  f' tar: {TAR if TAR is not None else "MISSING"}\n'
                  f' git: {GIT if GIT is not None else "MISSING"}\n'
                  f' gpg: {GPG if GPG is not None else "MISSING"}')


# Canonical ASF repositories, used when the user is not building from a fork.
ASF_NEXUS_REPO = 'https://github.com/apache/sdap-nexus.git'
ASF_INGESTER_REPO = 'https://github.com/apache/sdap-ingester.git'
ASF_NEXUSPROTO_REPO = 'https://github.com/apache/sdap-nexusproto.git'
ASF_NEXUSPROTO_BRANCH = 'develop'

# Image keys accepted by the --skip CLI option; also the keys of the `build`
# dict threaded through pull_source() and main().
SKIP_KEYS = ['webapp', 'solr', 'solr-init', 'gi', 'cm']
+
+
def build_cmd(
    tag,
    context,
    dockerfile='',
    cache=True,
    proto=None,
    proto_repo=ASF_NEXUSPROTO_REPO,
    proto_branch=ASF_NEXUSPROTO_BRANCH
):
    """Assemble the ``docker build`` argv for a single image.

    Args:
        tag: Full image tag (``registry/name:version``) for ``-t``.
        context: Build context directory.
        dockerfile: Dockerfile path relative to *context*; empty string means
            use the context's default Dockerfile.
        cache: When False, append ``--no-cache``.
        proto: ``'git'`` to build nexusproto from source via build args;
            any other value leaves the image's default (PyPI) behavior.
        proto_repo: Git URL for the nexusproto source build.
        proto_branch: Branch name for the nexusproto source build.

    Returns:
        The command as a list of argv tokens, suitable for run_subprocess().
    """
    cmd = [DOCKER, 'build', context]

    # An explicit Dockerfile is resolved relative to the build context.
    if dockerfile != '':
        cmd += ['-f', os.path.join(context, dockerfile)]

    cmd += ['-t', tag]

    # Building nexusproto from git is opted into through image build args.
    if proto == 'git':
        cmd += ['--build-arg', 'BUILD_NEXUSPROTO=true']
        cmd += ['--build-arg', f'APACHE_NEXUSPROTO={proto_repo}']
        cmd += ['--build-arg', f'APACHE_NEXUSPROTO_BRANCH={proto_branch}']

    if not cache:
        cmd += ['--no-cache']

    return cmd
+
+
@retry(stop=stop_after_attempt(2), wait=wait_fixed(2))
def run_subprocess(cmd, suppress_output=False, err_on_fail=True, **kwargs):
    """Run *cmd*, retrying once (via tenacity) if it raises.

    Args:
        cmd: Command as a list of argv tokens.
        suppress_output: Discard the child's output when True.
        err_on_fail: Raise OSError on a nonzero exit code.
        **kwargs: Passed through to subprocess.Popen; the special key
            ``dryrun`` (popped here) makes this print the equivalent shell
            command instead of executing anything.

    Raises:
        OSError: if *err_on_fail* is set and the child exits nonzero.
    """
    dryrun = kwargs.pop('dryrun', False)

    if dryrun:
        # Dry-run mode: echo what would have been executed.
        printable = ' '.join(cmd)

        if suppress_output:
            printable += ' > /dev/null'

        print(printable)
        return

    out = subprocess.DEVNULL if suppress_output else None

    # stderr is folded into stdout so suppression hides both streams.
    proc = subprocess.Popen(cmd, stdout=out, stderr=subprocess.STDOUT, **kwargs)
    proc.wait()

    if err_on_fail and proc.returncode != 0:
        raise OSError(f'Subprocess returned nonzero: {proc.returncode}')
+
+
def yes_no_prompt(prompt, default=True):
    """Ask a yes/no question, re-asking until the reply is '', 'y', or 'n'.

    The comparison is case-insensitive. An empty reply selects *default*;
    otherwise the function returns True iff the reply was 'y'.
    """
    while True:
        answer = input(prompt).lower()

        if answer == '':
            return default

        if answer in ('y', 'n'):
            return answer == 'y'
+
+
def choice_prompt(prompt: str, choices: list, default: str = None) -> str:
    """Present a numbered menu and return the chosen entry.

    A single-element *choices* list is returned immediately without
    prompting. When *default* is given, an empty reply selects it.

    Bug fix: the original returned unconditionally at the end of the
    validation loop, so a non-numeric reply crashed with ValueError and an
    out-of-range number crashed with IndexError instead of re-prompting.
    The selection is now validated before returning, and invalid input
    redisplays the menu as the `while` guard originally intended.

    Args:
        prompt: Heading printed above the menu.
        choices: Non-empty list of options.
        default: Optional member of *choices* selected on empty input.

    Returns:
        The selected element of *choices*.
    """
    assert len(choices) > 0

    if len(choices) == 1:
        return choices[0]

    # Valid replies are the stringified indices; '' is valid iff a default exists.
    valid_choices = [str(i) for i in range(len(choices))]

    if default is not None:
        assert default in choices
        valid_choices.append('')

    print(prompt)

    while True:
        for i, c in enumerate(choices):
            print('[{:2d}] {}-> {}'.format(i, ''.ljust(10, '-'), c))

        print()

        if default is None:
            choice = input('Selection: ')
        else:
            choice = input(f'Selection [{choices.index(default)}]: ')

        if choice not in valid_choices:
            # Invalid reply: redisplay the menu and ask again.
            continue

        if choice == '':
            return default

        return choices[int(choice)]
+
+
def basic_prompt(prompt, default=None):
    """Prompt for a free-form value and ask the user to confirm it.

    When *default* is given it is shown in the prompt and substituted for
    an empty reply. Loops until the user confirms their entry.
    """
    if default is None:
        prompt += ': '
    else:
        prompt = f'{prompt} [{default}] : '

    while True:
        answer = input(prompt)

        if answer == '' and default is not None:
            answer = default

        if yes_no_prompt(f'Confirm: "{answer}" [Y]/N '):
            return answer
+
+
def pull_source(dst_dir: tempfile.TemporaryDirectory, build: dict):
    """Stage the SDAP sources to build into *dst_dir*.

    Prompts for a source location (ASF dist area, GitHub, or local
    filesystem) and places the Nexus source at ``<dst_dir>/nexus`` and the
    Ingester source at ``<dst_dir>/ingester``, skipping whichever side the
    *build* flags (keys: webapp/solr/solr-init/cm/gi) do not require.

    ASF downloads are verified against their published SHA-512 checksums
    and GPG signatures before extraction.

    Raises:
        ValueError: if a downloaded artifact fails checksum or signature
            verification.
    """
    ASF = 'ASF (dist.apache.org)'
    GHB = 'GitHub'
    LFS = 'Local Filesystem'

    source_location = choice_prompt(
        'Where is the source you\'re building from stored?',
        [ASF, GHB, LFS],
        ASF
    )

    if source_location == ASF:
        DEV = 'Dev area (release candidates)'
        REL = 'Most recent release area'
        ARC = 'Archive (full release history)'

        url_map = {
            DEV: 'https://dist.apache.org/repos/dist/dev/',
            REL: 'https://dist.apache.org/repos/dist/release/',
            ARC: 'https://archive.apache.org/dist/'
        }

        release_area = choice_prompt(
            'Where is the release you\'re looking for?',
            [DEV, REL, ARC]
        )

        url = url_map[release_area] + 'sdap/'

        response = requests.get(url)
        response.raise_for_status()

        # Scrape the directory listing for available versions.
        soup = BeautifulSoup(response.text, 'html.parser')

        versions = [
            node.text.rstrip('/') for node in soup.find_all('a') if node.get('href').rstrip('/') not in ['KEYS', '..']
        ]

        # Extra filtering to remove some special values in archive HTML page
        versions = [
            v for v in versions if v not in ['Parent Directory', 'Name', 'Last modified', 'Size', 'Description']
        ]

        if len(versions) == 0:
            print('There is nothing in this area to build...')
            exit(0)

        version = choice_prompt(
            'Choose a release/release candidate to build',
            versions,
        )

        url = url + version + '/'

        response = requests.get(url)
        response.raise_for_status()

        soup = BeautifulSoup(response.text, 'html.parser')

        def remove_suffixes(s: str, suffixes):
            # Strip each suffix in turn so 'x.tar.gz.sha512' and
            # 'x.tar.gz.asc' collapse to the artifact name 'x.tar.gz'.
            for suffix in suffixes:
                s = s.removesuffix(suffix)

            return s

        # Deduplicate: each artifact appears three times in the listing
        # (payload, .sha512, .asc).
        build_artifacts = list(set([
            remove_suffixes(node.text, ['.sha512', '.asc'])
            for node in soup.find_all('a') if node.get('href').rstrip('/') not in ['KEYS', '..']
        ]))

        build_artifacts = [
            a for a in build_artifacts if a not in ['Parent Directory', 'Name', 'Last modified', 'Size', 'Description']
        ]

        nexus_tarball = None
        ingester_tarball = None

        for artifact in build_artifacts:
            # Skip whichever tarball the requested image set doesn't need.
            if '-nexus-' in artifact:
                if any([build['webapp'], build['solr'], build['solr-init']]):
                    nexus_tarball = os.path.join(dst_dir.name, artifact)
                else:
                    continue
            elif '-ingester-' in artifact:
                if any([build['cm'], build['gi']]):
                    ingester_tarball = os.path.join(dst_dir.name, artifact)
                else:
                    continue

            # Download the artifact plus its checksum and signature files.
            for ext in ['', '.sha512', '.asc']:
                filename = artifact + ext
                dst = os.path.join(dst_dir.name, filename)

                print(f'Downloading {url + artifact + ext}')

                response = requests.get(url + artifact + ext)
                response.raise_for_status()

                with open(dst, 'wb') as fp:
                    fp.write(response.content)
                    fp.flush()

            print(f'Verifying checksum for {artifact}')

            # Fix: use context managers so the artifact and checksum file
            # handles are closed deterministically.
            m = hashlib.sha512()

            with open(os.path.join(dst_dir.name, artifact), 'rb') as artifact_fp:
                m.update(artifact_fp.read())

            with open(os.path.join(dst_dir.name, artifact) + '.sha512', 'r') as sum_fp:
                expected_digest = sum_fp.read().split(' ')[0]

            if m.hexdigest() != expected_digest:
                raise ValueError('Bad checksum!')

            print(f'Verifying signature for {artifact}')

            try:
                run_subprocess(
                    [GPG, '--verify', os.path.join(dst_dir.name, artifact) + '.asc', os.path.join(dst_dir.name, artifact)],
                    True
                )
            except Exception as e:
                # Fix: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; chain the cause for context.
                raise ValueError('Bad signature!') from e

        print('Extracting release source files...')

        if any([build['webapp'], build['solr'], build['solr-init']]):
            run_subprocess(
                [TAR, 'xvf', nexus_tarball, '-C', dst_dir.name],
                suppress_output=True
            )
            # Tarballs extract under an Apache-SDAP/<name> directory; move
            # the contents to the fixed 'nexus' staging path.
            shutil.move(
                os.path.join(dst_dir.name, 'Apache-SDAP', nexus_tarball.split('/')[-1].removesuffix('.tar.gz')),
                os.path.join(dst_dir.name, 'nexus')
            )

        if any([build['cm'], build['gi']]):
            run_subprocess(
                [TAR, 'xvf', ingester_tarball, '-C', dst_dir.name],
                suppress_output=True
            )
            shutil.move(
                os.path.join(dst_dir.name, 'Apache-SDAP', ingester_tarball.split('/')[-1].removesuffix('.tar.gz')),
                os.path.join(dst_dir.name, 'ingester')
            )
    elif source_location == GHB:
        if any([build['webapp'], build['solr'], build['solr-init']]):
            if not yes_no_prompt('Will you be using a fork for the Nexus repository? Y/[N]: ', False):
                nexus_repo = ASF_NEXUS_REPO
            else:
                nexus_repo = basic_prompt('Enter Nexus fork URL')

            # TODO Maybe fetch list of branches?

            nexus_branch = basic_prompt('Enter Nexus branch to build')

            print(f'Cloning Nexus repo {nexus_repo} at {nexus_branch}')

            run_subprocess(
                [GIT, 'clone', '--branch', nexus_branch, nexus_repo],
                suppress_output=True,
                cwd=dst_dir.name
            )
            shutil.move(
                os.path.join(dst_dir.name, 'sdap-nexus'),
                os.path.join(dst_dir.name, 'nexus')
            )

        if any([build['cm'], build['gi']]):
            if not yes_no_prompt('Will you be using a fork for the Ingester repository? Y/[N]: ', False):
                ingester_repo = ASF_INGESTER_REPO
            else:
                ingester_repo = basic_prompt('Enter Ingester fork URL')

            # TODO Maybe fetch list of branches?

            ingester_branch = basic_prompt('Enter Ingester branch to build')

            # Fix: message previously said 'Cloning Nexus repo' (copy-paste).
            print(f'Cloning Ingester repo {ingester_repo} at {ingester_branch}')

            run_subprocess(
                [GIT, 'clone', '--branch', ingester_branch, ingester_repo],
                suppress_output=True,
                cwd=dst_dir.name
            )
            shutil.move(
                os.path.join(dst_dir.name, 'sdap-ingester'),
                os.path.join(dst_dir.name, 'ingester')
            )
    else:
        print('NOTE: Building from local FS should only be done for testing purposes. Please use other sources for '
              'official release images (ASF) or anything pushed publicly for production or distribution outside of an '
              'official release (GitHub).')

        if any([build['webapp'], build['solr'], build['solr-init']]):
            path = basic_prompt('Enter path to Nexus repository')

            if not os.path.isdir(path):
                print(f'{path} either does not exist or is not a directory')
                exit(1)

            print(f'Copying Nexus {os.path.abspath(path)} -> {os.path.join(dst_dir.name, "nexus")}')

            shutil.copytree(
                path,
                os.path.join(dst_dir.name, 'nexus')
            )
        if any([build['cm'], build['gi']]):
            path = basic_prompt('Enter path to Ingester repository')

            if not os.path.isdir(path):
                print(f'{path} either does not exist or is not a directory')
                exit(1)

            # Fix: message previously reported the destination as "nexus",
            # but the copy below goes to "ingester".
            print(f'Copying Ingester {os.path.abspath(path)} -> {os.path.join(dst_dir.name, "ingester")}')

            shutil.copytree(
                path,
                os.path.join(dst_dir.name, 'ingester')
            )
+
+
def main():
    """Drive the interactive image build.

    Parses CLI flags, prompts for any option not supplied on the command
    line, stages the source via pull_source(), builds the selected Docker
    images, and optionally pushes them.
    """
    parser = argparse.ArgumentParser(
        epilog="With the exception of the --skip-nexus and --skip-ingester options, the user will be "
               "prompted to set options at runtime."
    )

    parser.add_argument(
        '-t', '--tag',
        dest='tag',
        help='Tag for built docker images',
    )

    parser.add_argument(
        '--docker-registry',
        dest='registry',
        help='Docker registry to tag images with. Important if you want to push the images.'
    )

    # --cache/--no-cache are mutually exclusive; unset (None) triggers a prompt.
    cache = parser.add_mutually_exclusive_group(required=False)

    cache.add_argument(
        '--no-cache',
        dest='cache',
        action='store_false',
        help='Don\'t use build cache'
    )

    cache.add_argument(
        '--cache',
        dest='cache',
        action='store_true',
        help='Use build cache'
    )

    # Same tri-state pattern for --push/--no-push.
    push = parser.add_mutually_exclusive_group(required=False)

    push.add_argument(
        '--push',
        dest='push',
        action='store_true',
        help='Push images after building'
    )

    push.add_argument(
        '--no-push',
        dest='push',
        action='store_false',
        help='Don\'t push images after building'
    )

    parser.add_argument(
        '--dry-run',
        dest='dry',
        action='store_true',
        help="Don't execute build/push commands, but print them"
    )

    parser.add_argument(
        '--skip-nexus',
        dest='skip_nexus',
        action='store_true',
        help='Don\'t build Nexus webapp, Solr cloud & Solr cloud init images'
    )

    parser.add_argument(
        '--skip-ingester',
        dest='skip_ingester',
        action='store_true',
        help='Don\'t build Collection Manager & Granule Ingester images'
    )

    parser.add_argument(
        '--skip',
        dest='skip',
        nargs='*',
        choices=['webapp', 'solr', 'solr-init', 'gi', 'cm'],
        help='List of individual images to not build',
        default=[],
    )

    parser.add_argument(
        '--nexusproto',
        dest='proto_src',
        choices=['pip', 'git', None],
        default=None,
        help='Source for nexusproto build. \'pip\' to use the latest published version from PyPi; \'git\' to build '
             'from a git repo (see --nexusproto-repo and --nexusproto-branch). Omit to be prompted'
    )

    parser.add_argument(
        '--nexusproto-repo',
        dest='proto_repo',
        default=None,
        help='Repository URL for nexusproto build. Omit to be prompted if --nexusproto=git'
    )

    parser.add_argument(
        '--nexusproto-branch',
        dest='proto_branch',
        default=None,
        help='Repository branch name for nexusproto build. Omit to be prompted if --nexusproto=git'
    )

    # None (rather than True/False) marks cache/push as "not specified".
    parser.set_defaults(cache=None, push=None)

    args = parser.parse_args()

    tag, registry, cache, push = args.tag, args.registry, args.cache, args.push

    proto, proto_repo, proto_branch = args.proto_src, args.proto_repo, args.proto_branch

    # Start from "build everything", minus what --skip listed.
    build = {key: key not in args.skip for key in SKIP_KEYS}

    if args.skip_ingester:
        build['cm'] = False
        build['gi'] = False

    if args.skip_nexus:
        build['webapp'] = False
        build['solr'] = False
        build['solr-init'] = False

    # TODO: Prompting is a bit cumbersome. Maybe do all prompts then ask for confirmation for all entries

    # Prompt for anything not provided on the command line.
    if tag is None:
        tag = basic_prompt('Enter the tag to use for built images')

    if registry is None:
        registry = basic_prompt('Enter Docker image registry')

    if cache is None:
        cache = yes_no_prompt('Use Docker build cache? [Y]/N: ')

    if push is None:
        push = yes_no_prompt('Push built images? [Y]/N: ')

    if proto is None:
        proto = 'git' if yes_no_prompt('Custom build nexusproto? Y/[N]: ', default=False) else 'pip'

    if proto == 'git':
        if proto_repo is None:
            proto_repo = basic_prompt('Enter nexusproto repository URL', default=ASF_NEXUSPROTO_REPO)

        if proto_branch is None:
            proto_branch = basic_prompt('Enter nexusproto repository branch', default=ASF_NEXUSPROTO_BRANCH)

    # Sources are staged into a temp dir cleaned up on interpreter exit.
    extract_dir = tempfile.TemporaryDirectory()

    pull_source(extract_dir, build)

    os.environ['DOCKER_DEFAULT_PLATFORM'] = 'linux/amd64'

    built_images = []

    if any([build['cm'], build['gi']]):
        print('Building ingester images...')

        cm_tag = f'{registry}/sdap-collection-manager:{tag}'

        if build['cm']:
            run_subprocess(build_cmd(
                cm_tag,
                os.path.join(extract_dir.name, 'ingester'),
                dockerfile='collection_manager/docker/Dockerfile',
                cache=cache
            ), dryrun=args.dry)

            built_images.append(cm_tag)

        gi_tag = f'{registry}/sdap-granule-ingester:{tag}'

        if build['gi']:
            # Granule ingester embeds nexusproto, so it gets the proto options.
            run_subprocess(build_cmd(
                gi_tag,
                os.path.join(extract_dir.name, 'ingester'),
                dockerfile='granule_ingester/docker/Dockerfile',
                cache=cache,
                proto=proto,
                proto_repo=proto_repo,
                proto_branch=proto_branch
            ), dryrun=args.dry)

            built_images.append(gi_tag)

    if any([build['webapp'], build['solr'], build['solr-init']]):
        solr_tag = f'{registry}/sdap-solr-cloud:{tag}'

        if build['solr']:
            run_subprocess(build_cmd(
                solr_tag,
                os.path.join(extract_dir.name, 'nexus/docker/solr'),
                cache=cache
            ), dryrun=args.dry)

            built_images.append(solr_tag)

        solr_init_tag = f'{registry}/sdap-solr-cloud-init:{tag}'

        if build['solr-init']:
            run_subprocess(build_cmd(
                solr_init_tag,
                os.path.join(extract_dir.name, 'nexus/docker/solr'),
                dockerfile='cloud-init/Dockerfile',
                cache=cache
            ), dryrun=args.dry)

            built_images.append(solr_init_tag)

        webapp_tag = f'{registry}/sdap-nexus-webapp:{tag}'

        if build['webapp']:
            # Webapp also embeds nexusproto, so it gets the proto options.
            run_subprocess(build_cmd(
                webapp_tag,
                os.path.join(extract_dir.name, 'nexus'),
                dockerfile='docker/nexus-webapp/Dockerfile',
                cache=cache,
                proto=proto,
                proto_repo=proto_repo,
                proto_branch=proto_branch
            ), dryrun=args.dry)

            built_images.append(webapp_tag)

    if not args.dry:
        print('Image builds completed')

    if push:
        for image in built_images:
            run_subprocess(
                [DOCKER, 'push', image], dryrun=args.dry
            )

    print('done')
+
+
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C at any prompt aborts cleanly instead of dumping a traceback.
        print('\nBuild cancelled by user')
        exit(0)
diff --git a/tools/build/requirements.txt b/tools/build/requirements.txt
new file mode 100644
index 0000000..5301c2b
--- /dev/null
+++ b/tools/build/requirements.txt
@@ -0,0 +1,4 @@
+requests
+beautifulsoup4
+tenacity
+