This is an automated email from the ASF dual-hosted git repository.

ebenizzy pushed a commit to branch asf-release-scripts
in repository https://gitbox.apache.org/repos/asf/burr.git

commit cc0c5e5cd18b55764f5bee1c270284d3803b1896
Author: Elijah ben Izzy <[email protected]>
AuthorDate: Sun Nov 16 21:07:31 2025 -0800

    Adds scripts for releasing burr
---
 burr/cli/__main__.py           |  31 +++-
 burr/examples                  |   1 -
 pyproject.toml                 |   2 +-
 scripts/README.md              |  64 +++++++
 scripts/build_artifacts.py     | 279 +++++++++++++++++++++++++++++
 scripts/release_helper.py      | 390 +++++++++++++++++++++++++++++++++++++++++
 scripts/setup_keys.sh          |  95 ++++++++++
 scripts/simulate_release.sh    | 105 +++++++++++
 telemetry/ui/package-lock.json |   2 +
 telemetry/ui/package.json      |   2 +
 10 files changed, 966 insertions(+), 5 deletions(-)

diff --git a/burr/cli/__main__.py b/burr/cli/__main__.py
index cc89b06f..109eb835 100644
--- a/burr/cli/__main__.py
+++ b/burr/cli/__main__.py
@@ -27,7 +27,9 @@ import time
 import webbrowser
 from contextlib import contextmanager
 from importlib.resources import files
+from pathlib import Path
 from types import ModuleType
+from typing import Optional
 
 from burr import system, telemetry
 from burr.core.persistence import PersistedStateData
@@ -54,7 +56,7 @@ def _telemetry_if_enabled(event: str):
         telemetry.create_and_send_cli_event(event)
 
 
-def _command(command: str, capture_output: bool, addl_env: dict = None) -> str:
+def _command(command: str, capture_output: bool, addl_env: dict | None = None) 
-> str:
     """Runs a simple command"""
     if addl_env is None:
         addl_env = {}
@@ -78,7 +80,27 @@ def _command(command: str, capture_output: bool, addl_env: 
dict = None) -> str:
 
 
 def _get_git_root() -> str:
-    return _command("git rev-parse --show-toplevel", capture_output=True)
+    env_root = os.environ.get("BURR_PROJECT_ROOT")
+    if env_root:
+        return env_root
+    try:
+        return _command("git rev-parse --show-toplevel", capture_output=True)
+    except subprocess.CalledProcessError:
+        package_root = _locate_package_root()
+        if package_root is not None:
+            logger.warning("Not inside a git repository; using package root 
%s.", package_root)
+            return package_root
+        logger.warning("Not inside a git repository; defaulting to current 
directory.")
+        return os.getcwd()
+
+
+def _locate_package_root() -> Optional[str]:
+    path = Path(__file__).resolve()
+    for candidate in (path.parent,) + tuple(path.parents):
+        telemetry_dir = candidate / "telemetry" / "ui"
+        if telemetry_dir.exists():
+            return str(candidate)
+    return None
 
 
 def open_when_ready(check_url: str, open_url: str):
@@ -118,13 +140,16 @@ def _build_ui():
     # create a symlink so we can get packages inside it...
     cmd = "rm -rf burr/tracking/server/build"
     _command(cmd, capture_output=False)
-    cmd = "cp -R telemetry/ui/build burr/tracking/server/build"
+    cmd = "mkdir -p burr/tracking/server/build"
+    _command(cmd, capture_output=False)
+    cmd = "cp -a telemetry/ui/build/. burr/tracking/server/build/"
     _command(cmd, capture_output=False)
 
 
 @cli.command()
 def build_ui():
     git_root = _get_git_root()
+    logger.info("UI build: using project root %s", git_root)
     with cd(git_root):
         _build_ui()
 
diff --git a/burr/examples b/burr/examples
deleted file mode 120000
index a6573af9..00000000
--- a/burr/examples
+++ /dev/null
@@ -1 +0,0 @@
-../examples
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 7839fb3e..0ebf8704 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "burr"
-version = "0.40.2"
+version = "0.41.0"
 dependencies = [] # yes, there are none
 requires-python = ">=3.9"
 authors = [
diff --git a/scripts/README.md b/scripts/README.md
new file mode 100644
index 00000000..4e16e6af
--- /dev/null
+++ b/scripts/README.md
@@ -0,0 +1,64 @@
+# Burr Release Scripts
+
+This directory contains helper scripts to automate the Apache release workflow.
+
+## 1. Create the Source Release Candidate
+
+From the repo root:
+
+```bash
+python scripts/release_helper.py <version> <rc_num> <apache_id>
+```
+
+Use `--dry-run` if you just want to regenerate `dist/` locally without tagging 
or SVN:
+
+```bash
+python scripts/release_helper.py 0.41.0 0 myid --dry-run
+```
+
+This produces:
+
+- `dist/burr-<version>.tar.gz` — source-only tarball
+- `dist/apache-burr-<version>-incubating.tar.gz` — ASF-branded tarball plus 
signatures/hashes
+
+## 2. Test the Source Release (ASF voter simulation)
+
+1. Make a clean temp directory and unpack the ASF tarball.
+2. Create and activate a fresh virtual environment.
+3. Install burr from the source release (`pip install -e .`).
+4. Build UI artifacts and the wheel using `scripts/build_artifacts.py`.
+
+Example command sequence:
+
+```bash
+cd /tmp
+tar -xzf /path/to/dist/apache-burr-<version>-incubating.tar.gz
+cd burr-<version>
+python -m venv venv && source venv/bin/activate
+pip install -e .
+pip install flit
+# build the UI artifacts, then package everything into a wheel
+python scripts/build_artifacts.py all --clean
+ls dist/*.whl
+deactivate
+```
+
+Alternatively, run the automated helper:
+
+```bash
+bash scripts/simulate_release.sh
+```
+
+## 3. Build Wheel for PyPI Upload
+
+From the release source tree (after building artifacts):
+
+```bash
+python scripts/build_artifacts.py wheel --clean
+```
+
+The resulting `.whl` in `dist/` can be uploaded with:
+
+```bash
+twine upload dist/*.whl
+```
diff --git a/scripts/build_artifacts.py b/scripts/build_artifacts.py
new file mode 100644
index 00000000..4a445a31
--- /dev/null
+++ b/scripts/build_artifacts.py
@@ -0,0 +1,279 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+Build artifacts/wheels helper with subcommands:
+
+    python scripts/build_artifacts.py artifacts [--skip-install]
+    python scripts/build_artifacts.py wheel [--clean]
+    python scripts/build_artifacts.py all [--skip-install] [--clean]
+
+Subcommands:
+    artifacts  -> Build UI artifacts only
+    wheel      -> Build wheel (requires artifacts to exist)
+    all        -> Run both steps (artifacts then wheel)
+"""
+
+import argparse
+import os
+import shutil
+import subprocess
+import sys
+
+
+def _ensure_project_root() -> bool:
+    if not os.path.exists("pyproject.toml"):
+        print("Error: pyproject.toml not found.")
+        print("Please run this script from the root of the Burr source 
directory.")
+        return False
+    return True
+
+
+def _check_node_prereqs() -> bool:
+    print("Checking for required tools...")
+    required_tools = ["node", "npm"]
+    missing_tools = []
+
+    for tool in required_tools:
+        if shutil.which(tool) is None:
+            missing_tools.append(tool)
+            print(f"  ✗ '{tool}' not found")
+        else:
+            print(f"  ✓ '{tool}' found")
+
+    if missing_tools:
+        print(f"\nError: Missing required tools: {', '.join(missing_tools)}")
+        print("Please install Node.js and npm to build the UI.")
+        return False
+
+    print("All required tools found.\n")
+    return True
+
+
+def _require_flit() -> bool:
+    if shutil.which("flit") is None:
+        print("✗ flit CLI not found. Please install it with: pip install flit")
+        return False
+    print("✓ flit CLI found.\n")
+    return True
+
+
+def _install_burr(skip_install: bool) -> bool:
+    if skip_install:
+        print("Skipping burr installation as requested.\n")
+        return True
+
+    print("Installing burr from source...")
+    try:
+        subprocess.run(
+            [sys.executable, "-m", "pip", "install", "-e", "."],
+            check=True,
+            cwd=os.getcwd(),
+        )
+        print("✓ Burr installed successfully.\n")
+        return True
+    except subprocess.CalledProcessError as exc:
+        print(f"✗ Error installing burr: {exc}")
+        return False
+
+
+def _build_ui() -> bool:
+    print("Building UI assets...")
+    try:
+        env = os.environ.copy()
+        env["BURR_PROJECT_ROOT"] = os.getcwd()
+        subprocess.run(["burr-admin-build-ui"], check=True, env=env)
+        print("✓ UI build completed successfully.\n")
+        return True
+    except subprocess.CalledProcessError as exc:
+        print(f"✗ Error building UI: {exc}")
+        return False
+
+
+def _verify_artifacts() -> bool:
+    build_dir = "burr/tracking/server/build"
+    print(f"Verifying build output in {build_dir}...")
+
+    if not os.path.exists(build_dir):
+        print(f"Build directory missing, creating placeholder at 
{build_dir}...")
+        os.makedirs(build_dir, exist_ok=True)
+
+    if not os.listdir(build_dir):
+        print(f"✗ Build directory is empty: {build_dir}")
+        return False
+
+    print("✓ Build output verified.\n")
+    return True
+
+
+def _clean_dist():
+    if os.path.exists("dist"):
+        print("Cleaning dist/ directory...")
+        shutil.rmtree("dist")
+        print("✓ dist/ directory cleaned.\n")
+
+
+def _build_wheel() -> bool:
+    print("Building wheel distribution with 'flit build --format wheel'...")
+    try:
+        env = os.environ.copy()
+        env["FLIT_USE_VCS"] = "0"
+        subprocess.run(["flit", "build", "--format", "wheel"], check=True, 
env=env)
+        print("✓ Wheel build completed successfully.\n")
+        return True
+    except subprocess.CalledProcessError as exc:
+        print(f"✗ Error building wheel: {exc}")
+        return False
+
+
+def _verify_wheel() -> bool:
+    print("Verifying wheel output...")
+
+    if not os.path.exists("dist"):
+        print("✗ dist/ directory not found")
+        return False
+
+    wheel_files = [f for f in os.listdir("dist") if f.endswith(".whl")]
+    if not wheel_files:
+        print("✗ No wheel files found in dist/")
+        if os.listdir("dist"):
+            print("Contents of dist/ directory:")
+            for item in os.listdir("dist"):
+                print(f"  - {item}")
+        return False
+
+    print(f"✓ Found {len(wheel_files)} wheel file(s):")
+    for wheel_file in wheel_files:
+        wheel_path = os.path.join("dist", wheel_file)
+        size = os.path.getsize(wheel_path)
+        print(f"  - {wheel_file} ({size:,} bytes)")
+
+    print()
+    return True
+
+
+def create_artifacts(skip_install: bool) -> bool:
+    if not _ensure_project_root():
+        print("Failed to confirm project root.")
+        return False
+    if not _check_node_prereqs():
+        print("Node/npm prerequisite check failed.")
+        return False
+    if not _install_burr(skip_install):
+        print("Installing burr from source failed.")
+        return False
+    if not _build_ui():
+        print("UI build failed.")
+        return False
+    if not _verify_artifacts():
+        print("UI artifact verification failed.")
+        return False
+    return True
+
+
+def create_wheel(clean: bool) -> bool:
+    if not _ensure_project_root():
+        print("Failed to confirm project root.")
+        return False
+    if not _require_flit():
+        print("Missing flit CLI.")
+        return False
+    if not _verify_artifacts():
+        print("Please run the 'artifacts' subcommand first.")
+        return False
+    if clean:
+        _clean_dist()
+    if not _build_wheel():
+        return False
+    if not _verify_wheel():
+        return False
+    return True
+
+
+def build_all(skip_install: bool, clean: bool) -> bool:
+    if not create_artifacts(skip_install=skip_install):
+        return False
+    if not create_wheel(clean=clean):
+        return False
+    return True
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Build artifacts/wheels for Burr using subcommands."
+    )
+    subparsers = parser.add_subparsers(dest="command", required=True)
+
+    artifacts_parser = subparsers.add_parser("artifacts", help="Build UI 
artifacts only.")
+    artifacts_parser.add_argument(
+        "--skip-install",
+        action="store_true",
+        help="Skip reinstalling burr when building artifacts",
+    )
+
+    wheel_parser = subparsers.add_parser(
+        "wheel", help="Build wheel distribution (requires artifacts)."
+    )
+    wheel_parser.add_argument(
+        "--clean",
+        action="store_true",
+        help="Clean dist/ directory before building wheel",
+    )
+
+    all_parser = subparsers.add_parser("all", help="Build artifacts and wheel 
in sequence.")
+    all_parser.add_argument(
+        "--skip-install",
+        action="store_true",
+        help="Skip reinstalling burr when building artifacts",
+    )
+    all_parser.add_argument(
+        "--clean",
+        action="store_true",
+        help="Clean dist/ directory before building wheel",
+    )
+
+    args = parser.parse_args()
+
+    print("=" * 80)
+    print(f"Burr Build Helper - command: {args.command}")
+    print("=" * 80)
+    print()
+
+    success = False
+    if args.command == "artifacts":
+        success = create_artifacts(skip_install=args.skip_install)
+    elif args.command == "wheel":
+        success = create_wheel(clean=args.clean)
+    elif args.command == "all":
+        success = build_all(skip_install=args.skip_install, clean=args.clean)
+
+    if success:
+        print("=" * 80)
+        print("✅ Build Complete!")
+        print("=" * 80)
+        if args.command in {"wheel", "all"}:
+            print("\nWheel files are in the dist/ directory.")
+            print("You can now upload to PyPI with:")
+            print("  twine upload dist/*.whl")
+        print()
+    else:
+        print("\n❌ Build failed.")
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/release_helper.py b/scripts/release_helper.py
new file mode 100644
index 00000000..06cb1c89
--- /dev/null
+++ b/scripts/release_helper.py
@@ -0,0 +1,390 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import argparse
+import glob
+import hashlib
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+# --- Configuration ---
+# You need to fill these in for your project.
+# Your project's short name (e.g., 'myproject').
+PROJECT_SHORT_NAME = "burr"
+# The file where you want to update the version number.
+VERSION_FILE = "pyproject.toml"
+# A regular expression pattern to find the version string in the VERSION_FILE.
+VERSION_PATTERN = r'version\s*=\s*"(\d+\.\d+\.\d+)"'
+
+
+def _fail(message: str) -> None:
+    print(f"\n❌ {message}")
+    sys.exit(1)
+
+
+def get_version_from_file(file_path: str) -> str:
+    """Get the version from a file."""
+    with open(file_path, encoding="utf-8") as f:
+        content = f.read()
+    match = re.search(VERSION_PATTERN, content)
+    if match:
+        version = match.group(1)
+        return version
+    raise ValueError(f"Could not find version in {file_path}")
+
+
+def check_prerequisites():
+    """Checks for necessary command-line tools and Python modules."""
+    print("Checking for required tools...")
+    required_tools = ["git", "gpg", "svn", "flit"]
+    for tool in required_tools:
+        if shutil.which(tool) is None:
+            _fail(
+                f"Required tool '{tool}' not found. Please install it and 
ensure it's in your PATH."
+            )
+
+    print("All required tools found.")
+
+
+def update_version(version, _rc_num):
+    """Updates the version number in the specified file."""
+    print(f"Updating version in {VERSION_FILE} to {version}...")
+    try:
+        with open(VERSION_FILE, "r", encoding="utf-8") as f:
+            content = f.read()
+        # For pyproject.toml, we just update the version string directly
+        new_version_string = f'version = "{version}"'
+        new_content = re.sub(VERSION_PATTERN, new_version_string, content)
+        if new_content == content:
+            print("Error: Could not find or replace version string. Check your 
VERSION_PATTERN.")
+            return False
+
+        with open(VERSION_FILE, "w", encoding="utf-8") as f:
+            f.write(new_content)
+
+        print("Version updated successfully.")
+        return True
+
+    except FileNotFoundError:
+        _fail(f"{VERSION_FILE} not found.")
+    except (OSError, re.error) as e:
+        _fail(f"An error occurred while updating the version: {e}")
+
+
+def sign_artifacts(archive_name: str) -> list[str]:
+    """Creates signed files for the designated artifact."""
+    files = []
+    # Sign the tarball with GPG. The user must have a key configured.
+    try:
+        subprocess.run(
+            ["gpg", "--armor", "--output", f"{archive_name}.asc", 
"--detach-sig", archive_name],
+            check=True,
+        )
+        files.append(f"{archive_name}.asc")
+        print(f"Created GPG signature: {archive_name}.asc")
+    except subprocess.CalledProcessError as e:
+        _fail(f"Error signing tarball {archive_name}: {e}")
+
+    # Generate SHA512 checksum.
+    sha512_hash = hashlib.sha512()
+    with open(archive_name, "rb") as f:
+        while True:
+            data = f.read(65536)
+            if not data:
+                break
+            sha512_hash.update(data)
+
+    with open(f"{archive_name}.sha512", "w", encoding="utf-8") as f:
+        f.write(f"{sha512_hash.hexdigest()}\n")
+    print(f"Created SHA512 checksum: {archive_name}.sha512")
+    files.append(f"{archive_name}.sha512")
+    return files
+
+
+def create_release_artifacts(version) -> tuple[list[str], list[str]]:
+    """Creates the source tarball, GPG signature, and checksums using flit."""
+    print("\n[Step 1/3] Creating source release artifacts with 'flit build 
--format sdist'...")
+
+    # Clean the dist directory before building.
+    if os.path.exists("dist"):
+        shutil.rmtree("dist")
+    # Ensure no pre-built UI assets slip into the source package.
+    ui_build_dir = os.path.join("burr", "tracking", "server", "build")
+    if os.path.exists(ui_build_dir):
+        print("Removing previously built UI artifacts...")
+        shutil.rmtree(ui_build_dir)
+
+    # Warn if git working tree is dirty/untracked
+    try:
+        dirty = (
+            subprocess.check_output(["git", "status", "--porcelain"], 
stderr=subprocess.DEVNULL)
+            .decode()
+            .strip()
+        )
+        if dirty:
+            print(
+                "⚠️  Detected untracked or modified files. flit may refuse to 
build; "
+                "consider committing/stashing or verify FLIT_USE_VCS=0."
+            )
+            print("    Git status summary:")
+            for line in dirty.splitlines():
+                print(f"     {line}")
+    except subprocess.CalledProcessError:
+        pass
+
+    # Use flit to create the source distribution.
+    try:
+        env = os.environ.copy()
+        env["FLIT_USE_VCS"] = "0"
+        subprocess.run(["flit", "build", "--format", "sdist"], check=True, 
env=env)
+        print("✓ flit sdist created successfully.")
+    except subprocess.CalledProcessError as e:
+        _fail(f"Error creating source distribution: {e}")
+
+    # Find the created tarball in the dist directory.
+    expected_tar_ball = f"dist/burr-{version.lower()}.tar.gz"
+    tarball_path = glob.glob(expected_tar_ball)
+
+    if not tarball_path:
+        details = []
+        if os.path.exists("dist"):
+            details.append("Contents of 'dist':")
+            for item in os.listdir("dist"):
+                details.append(f"- {item}")
+        else:
+            details.append("'dist' directory not found.")
+        _fail(
+            "Could not find the generated source tarball in the 'dist' 
directory.\n"
+            + "\n".join(details)
+        )
+
+    # copy the tarball to be apache-burr-{version.lower()}-incubating.tar.gz
+    new_tar_ball = f"dist/apache-burr-{version.lower()}-incubating.tar.gz"
+    shutil.copy(tarball_path[0], new_tar_ball)
+    archive_name = new_tar_ball
+    print(f"✓ Found source tarball: {archive_name}")
+    main_signed_files = sign_artifacts(archive_name)
+    # create burr release artifacts
+    burr_signed_files = sign_artifacts(expected_tar_ball)
+    return [new_tar_ball] + main_signed_files, [expected_tar_ball] + 
burr_signed_files
+
+
+def svn_upload(version, rc_num, archive_files, burr_archive_files, apache_id):
+    """Uploads the artifacts to the ASF dev distribution repository."""
+    print("Uploading artifacts to ASF SVN...")
+    svn_path = 
f"https://dist.apache.org/repos/dist/dev/incubator/{PROJECT_SHORT_NAME}/apache-burr/{version}-incubating-RC{rc_num}";
+
+    try:
+        # Create a new directory for the release candidate.
+        subprocess.run(
+            [
+                "svn",
+                "mkdir",
+                "-m",
+                f"Creating directory for {version}-incubating-RC{rc_num}",
+                svn_path,
+            ],
+            check=True,
+        )
+
+        # Get the files to import (tarball, asc, sha512).
+        files_to_import = archive_files + burr_archive_files
+
+        # Use svn import for the new directory.
+        for file_path in files_to_import:
+            subprocess.run(
+                [
+                    "svn",
+                    "import",
+                    file_path,
+                    f"{svn_path}/{os.path.basename(file_path)}",
+                    "-m",
+                    f"Adding {os.path.basename(file_path)}",
+                    "--username",
+                    apache_id,
+                ],
+                check=True,
+            )
+
+        print(f"Artifacts successfully uploaded to: {svn_path}")
+        return svn_path
+
+    except subprocess.CalledProcessError as e:
+        print(f"Error during SVN upload: {e}")
+        print("Make sure you have svn access configured for your Apache ID.")
+        return None
+
+
+def generate_email_template(version, rc_num, svn_url):
+    """Generates the content for the [VOTE] email."""
+    print("Generating email template...")
+    version_with_incubating = f"{version}-incubating"
+    tag = f"v{version}"
+
+    email_content = f"""[VOTE] Release Apache {PROJECT_SHORT_NAME} 
{version_with_incubating} (release candidate {rc_num})
+
+Hi all,
+
+This is a call for a vote on releasing Apache {PROJECT_SHORT_NAME} 
{version_with_incubating},
+release candidate {rc_num}.
+
+This release includes the following changes (see CHANGELOG for details):
+- [List key changes here]
+
+The artifacts for this release candidate can be found at:
+{svn_url}
+
+The Git tag to be voted upon is:
+{tag}
+
+The release hash is:
+[Insert git commit hash here]
+
+
+Release artifacts are signed with the following key:
+[Insert your GPG key ID here]
+The KEYS file is available at:
+https://downloads.apache.org/incubator/{PROJECT_SHORT_NAME}/KEYS
+
+Please download, verify, and test the release candidate.
+
+For testing, please run some of the examples; scripts/qualify.sh has
+a sampling of them to run.
+
+The vote will run for a minimum of 72 hours.
+Please vote:
+
+[ ] +1 Release this package as Apache {PROJECT_SHORT_NAME} 
{version_with_incubating}
+[ ] +0 No opinion
+[ ] -1 Do not release this package because... (Please provide a reason)
+
+Checklist for reference:
+[ ] Download links are valid.
+[ ] Checksums and signatures.
+[ ] LICENSE/NOTICE files exist
+[ ] No unexpected binary files
+[ ] All source files have ASF headers
+[ ] Can compile from source
+
+On behalf of the Apache {PROJECT_SHORT_NAME} PPMC,
+[Your Name]
+"""
+    print("\n" + "=" * 80)
+    print("EMAIL TEMPLATE (COPY AND PASTE TO YOUR MAILING LIST)")
+    print("=" * 80)
+    print(email_content)
+    print("=" * 80)
+
+
+def main():
+    """
+    ### How to Use the Updated Script
+
+    1.  **Install flit**:
+        ```bash
+        pip install flit
+        ```
+    2.  **Configure the Script**: Open `scripts/release_helper.py` in a text 
editor and update the three variables at the top of the file with your 
project's details:
+        * `PROJECT_SHORT_NAME`
+        * `VERSION_FILE` and `VERSION_PATTERN`
+    3.  **Prerequisites**:
+        * You must have `git`, `gpg`, `svn`, and `flit` installed.
+        * Your GPG key and SVN access must be configured for your Apache ID.
+    4.  **Run the Script**:
+        Open your terminal, navigate to the root of your project directory, 
and run the script with the desired version, release candidate number, and 
Apache ID.
+
+
+    python scripts/release_helper.py 1.2.3 0 your_apache_id
+    """
+    parser = argparse.ArgumentParser(description="Automates parts of the 
Apache release process.")
+    parser.add_argument("version", help="The new release version (e.g., 
'1.0.0').")
+    parser.add_argument("rc_num", help="The release candidate number (e.g., 
'0' for RC0).")
+    parser.add_argument("apache_id", help="Your apache user ID.")
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Run in dry-run mode (skip git tag creation and SVN upload)",
+    )
+    args = parser.parse_args()
+
+    version = args.version
+    rc_num = args.rc_num
+    apache_id = args.apache_id
+    dry_run = args.dry_run
+
+    if dry_run:
+        print("\n*** DRY RUN MODE - No git tags or SVN uploads will be 
performed ***\n")
+
+    check_prerequisites()
+
+    current_version = get_version_from_file(VERSION_FILE)
+    print(current_version)
+    if current_version != version:
+        _fail(
+            "Version mismatch. Update pyproject.toml to the requested version 
before running the script."
+        )
+
+    tag_name = f"v{version}-incubating-RC{rc_num}"
+    if dry_run:
+        print(f"\n[DRY RUN] Would create git tag '{tag_name}'")
+    else:
+        print(f"\nChecking for git tag '{tag_name}'...")
+        try:
+            # Check if the tag already exists
+            existing_tag = subprocess.check_output(["git", "tag", "-l", 
tag_name]).decode().strip()
+            if existing_tag == tag_name:
+                print(f"Git tag '{tag_name}' already exists.")
+                response = (
+                    input("Do you want to continue without creating a new tag? 
(y/n): ")
+                    .lower()
+                    .strip()
+                )
+                if response != "y":
+                    print("Aborting.")
+                    sys.exit(1)
+            else:
+                # Tag does not exist, create it
+                print(f"Creating git tag '{tag_name}'...")
+                subprocess.run(["git", "tag", tag_name], check=True)
+                print(f"Git tag {tag_name} created.")
+        except subprocess.CalledProcessError as e:
+            _fail(f"Error checking or creating Git tag: {e}")
+
+    # Create artifacts
+    main_archive_files, burr_archive_files = create_release_artifacts(version)
+
+    # Upload artifacts
+    # NOTE: You MUST have your SVN client configured to use your Apache ID and 
have permissions.
+    if dry_run:
+        svn_url = 
f"https://dist.apache.org/repos/dist/dev/incubator/{PROJECT_SHORT_NAME}/apache-burr/{version}-incubating-RC{rc_num}";
+        print(f"\n[DRY RUN] Would upload artifacts to: {svn_url}")
+    else:
+        svn_url = svn_upload(version, rc_num, main_archive_files, 
burr_archive_files, apache_id)
+        if not svn_url:
+            _fail("SVN upload failed.")
+
+    # Generate email
+    generate_email_template(version, rc_num, svn_url)
+
+    print("\nProcess complete. Please copy the email template to your mailing 
list.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/setup_keys.sh b/scripts/setup_keys.sh
new file mode 100755
index 00000000..a4f12615
--- /dev/null
+++ b/scripts/setup_keys.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This script helps new Apache committers set up their GPG keys for releases.
+# It guides you through creating a new key, exports the public key, and
+# provides instructions on how to add it to your project's KEYS file.
+
+echo "========================================================"
+echo "      Apache GPG Key Setup Script"
+echo "========================================================"
+echo " "
+echo "Step 1: Generating a new GPG key."
+echo " "
+echo "Please be aware of Apache's best practices for GPG keys:"
+echo "- **Key Type:** Select **(1) RSA and RSA**."
+echo "- **Key Size:** Enter **4096**."
+echo "- **Email Address:** Use your official **@apache.org** email address."
+echo "- **Passphrase:** Use a strong, secure passphrase."
+echo " "
+read -p "Press [Enter] to start the GPG key generation..."
+
+# Generate a new GPG key
+# The --batch and --passphrase-fd 0 options are used for automation,
+# but the script will still require interactive input.
+gpg --full-gen-key
+
+if [ $? -ne 0 ]; then
+  echo "Error: GPG key generation failed. Please check your GPG installation."
+  exit 1
+fi
+
+echo " "
+echo "Step 2: Listing your GPG keys to find the new key ID."
+echo "Your new key is listed under 'pub' with a string of 8 or 16 characters 
after the '/'."
+
+# List all GPG keys
+gpg --list-keys
+
+echo " "
+read -p "Please copy and paste your new key ID here (e.g., A1B2C3D4 or 
1234ABCD5678EF01): " KEY_ID
+
+if [ -z "$KEY_ID" ]; then
+  echo "Error: Key ID cannot be empty. Exiting."
+  exit 1
+fi
+
+echo " "
+echo "Step 3: Exporting your public key to a file."
+
+# Export the public key in ASCII armored format
+gpg --armor --export "$KEY_ID" > "$KEY_ID.asc"
+
+if [ $? -ne 0 ]; then
+  echo "Error: Public key export failed. Please ensure the Key ID is correct."
+  rm -f "$KEY_ID.asc"
+  exit 1
+fi
+
+echo "Checking out dist repository to update KEYS file"
+svn checkout --depth immediates https://dist.apache.org/repos/dist dist
+cd dist/release
+svn checkout https://dist.apache.org/repos/dist/release/incubator/burr 
incubator/burr
+
+cd ../../
+gpg --list-keys "$KEY_ID" >> dist/release/incubator/burr/KEYS
+cat "$KEY_ID.asc" >> dist/release/incubator/burr/KEYS
+cd dist/release/incubator/burr
+
+echo " "
+echo "========================================================"
+echo "      Setup Complete!"
+echo "========================================================"
+echo "Your public key has been saved to: $KEY_ID.asc"
+echo " "
+echo "NEXT STEPS (VERY IMPORTANT):"
+echo "1. Please inspect the KEYS file to ensure the new key is added 
correctly. It should be in the current directory."
+echo "2. If all good run: svn update KEYS && svn commit -m \"Adds new key 
$KEY_ID for YOUR NAME\""
+echo "3. Inform the mailing list that you've updated the KEYS file."
+echo "   The updated KEYS file is essential for others to verify your release 
signatures."
+echo " "
diff --git a/scripts/simulate_release.sh b/scripts/simulate_release.sh
new file mode 100755
index 00000000..57ac7c66
--- /dev/null
+++ b/scripts/simulate_release.sh
@@ -0,0 +1,105 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# End-to-end simulation of the Burr release flow: regenerates the source
+# tarball via a dry run of release_helper.py, verifies and extracts it, then
+# builds the UI artifacts and the wheel from the extracted source tree.
+
+set -euo pipefail
+
+# --- Configuration / inputs ---------------------------------------------------
+
+# Resolve the repository root relative to this script so it can be invoked
+# from any working directory.
+ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
+DIST_DIR="${ROOT_DIR}/dist"
+RELEASE_VERSION="0.41.0"
+RC_NUM="0"
+TARGET_TARBALL="apache-burr-${RELEASE_VERSION}-incubating.tar.gz"
+EXTRACT_DIR="burr-${RELEASE_VERSION}"
+WORK_DIR="/tmp/burr-release-test"
+SDIST_PATH="${DIST_DIR}/${TARGET_TARBALL}"
+
+# --- Dry-run release helper to generate artifacts -----------------------------
+
+echo "==> Running dry run of release_helper.py to regenerate dist/ artifacts"
+(
+  cd "${ROOT_DIR}"
+  python scripts/release_helper.py "${RELEASE_VERSION}" "${RC_NUM}" "dry-run-user" --dry-run
+)
+
+# --- Clean previous run -------------------------------------------------------
+
+echo "==> Cleaning ${WORK_DIR}"
+rm -rf "${WORK_DIR}"
+mkdir -p "${WORK_DIR}"
+
+# --- Copy tarball into workspace ---------------------------------------------
+
+echo "==> Copying ${TARGET_TARBALL} from ${DIST_DIR}"
+if [[ ! -f "${SDIST_PATH}" ]]; then
+  echo "❌ Expected source tarball ${SDIST_PATH} not found. Did release_helper.py succeed?"
+  exit 1
+fi
+cp "${SDIST_PATH}" "${WORK_DIR}/"
+cd "${WORK_DIR}"
+
+# --- Verify sdist format ------------------------------------------------------
+
+SDIST_MIME=$(file --brief --mime-type "${TARGET_TARBALL}")
+if [[ "${SDIST_MIME}" != "application/gzip" ]]; then
+  echo "❌ Source tarball ${TARGET_TARBALL} is not a gzip archive (detected ${SDIST_MIME})."
+  exit 1
+fi
+echo "==> Verified ${TARGET_TARBALL} is a gzip archive"
+
+# --- Extract source release ---------------------------------------------------
+
+echo "==> Extracting ${TARGET_TARBALL}"
+tar -xzf "${TARGET_TARBALL}"
+cd "${EXTRACT_DIR}"
+
+# --- Dry-run reminder ---------------------------------------------------------
+
+echo "==> Reminder: release_helper.py supports --dry-run for tag/SVN steps."
+
+# --- Build UI artifacts -------------------------------------------------------
+
+echo "==> Running: python scripts/build_artifacts.py artifacts --skip-install"
+python scripts/build_artifacts.py artifacts --skip-install
+
+# --- Build wheel --------------------------------------------------------------
+
+echo "==> Running: python scripts/build_artifacts.py wheel --clean"
+python scripts/build_artifacts.py wheel --clean
+
+# --- Show resulting wheel -----------------------------------------------------
+
+# NOTE: check for the wheel BEFORE running `ls -lh dist/*.whl`. Under
+# `set -e`, an unmatched glob makes `ls` itself fail and abort the script,
+# which previously made the friendly "No wheel file produced." message
+# unreachable. A nullglob array also guarantees every entry ends in .whl,
+# so the separate extension check is no longer needed.
+shopt -s nullglob
+wheels=(dist/*.whl)
+shopt -u nullglob
+if [[ ${#wheels[@]} -eq 0 ]]; then
+  echo "❌ No wheel file produced."
+  exit 1
+fi
+echo "==> Wheel files:"
+ls -lh "${wheels[@]}"
+
+# Glob results are sorted, so the last entry matches the old `ls | tail -n 1`
+# behavior; a `--clean` build normally leaves exactly one wheel anyway.
+BUILT_WHEEL="${wheels[${#wheels[@]}-1]}"
+BUILT_WHEEL_ABS="$(cd "$(dirname "${BUILT_WHEEL}")" && pwd)/$(basename "${BUILT_WHEEL}")"
+
+echo
+echo "Simulation complete."
+echo "  - Apache source tarball (pre-built): ${SDIST_PATH}"
+echo "  - PyPI wheel (built from source):   ${BUILT_WHEEL_ABS}"
+echo "Artifacts located in: ${WORK_DIR}/${TARGET_TARBALL%.tar.gz}"
diff --git a/telemetry/ui/package-lock.json b/telemetry/ui/package-lock.json
index b71fbefc..21461ed4 100644
--- a/telemetry/ui/package-lock.json
+++ b/telemetry/ui/package-lock.json
@@ -25,6 +25,7 @@
         "@uiw/react-json-view": "^2.0.0-alpha.12",
         "clsx": "^2.1.0",
         "dagre": "^0.8.5",
+        "es-abstract": "^1.22.4",
         "fuse.js": "^7.0.0",
         "heroicons": "^2.1.1",
         "react": "^18.2.0",
@@ -38,6 +39,7 @@
         "react-syntax-highlighter": "^15.5.0",
         "reactflow": "^11.10.4",
         "remark-gfm": "^4.0.0",
+        "string.prototype.matchall": "^4.0.10",
         "tailwindcss-question-mark": "^0.4.0",
         "typescript": "^4.9.5",
         "web-vitals": "^2.1.4"
diff --git a/telemetry/ui/package.json b/telemetry/ui/package.json
index 1c492bf3..1a391fcc 100644
--- a/telemetry/ui/package.json
+++ b/telemetry/ui/package.json
@@ -20,6 +20,7 @@
     "@uiw/react-json-view": "^2.0.0-alpha.12",
     "clsx": "^2.1.0",
     "dagre": "^0.8.5",
+    "es-abstract": "^1.22.4",
     "fuse.js": "^7.0.0",
     "heroicons": "^2.1.1",
     "react": "^18.2.0",
@@ -33,6 +34,7 @@
     "react-syntax-highlighter": "^15.5.0",
     "reactflow": "^11.10.4",
     "remark-gfm": "^4.0.0",
+    "string.prototype.matchall": "^4.0.10",
     "tailwindcss-question-mark": "^0.4.0",
     "typescript": "^4.9.5",
     "web-vitals": "^2.1.4"


Reply via email to