This is an automated email from the ASF dual-hosted git repository.

sbp pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tooling-trusted-release.git


The following commit(s) were added to refs/heads/main by this push:
     new 304a7ac  Ensure that the appropriate checks are run again after any 
draft modification
304a7ac is described below

commit 304a7aca3e61df748b8fef52a0f6759fa5ddf95d
Author: Sean B. Palmer <[email protected]>
AuthorDate: Wed Apr 2 20:02:36 2025 +0100

    Ensure that the appropriate checks are run again after any draft 
modification
    
    This commit also:
    
    - Replaces remaining ad-hoc release name constructions with the models.release_name() helper.
    - Updates the terminology used on the file upload form page.
---
 atr/db/models.py                   |  1 +
 atr/routes/candidate.py            |  4 +--
 atr/routes/draft.py                | 46 +++++++++++++++----------
 atr/routes/preview.py              |  4 +--
 atr/routes/release.py              |  4 +--
 atr/tasks/__init__.py              | 69 ++++++++++++++++++++++++++++++++++----
 atr/tasks/checks/paths.py          | 14 ++++----
 atr/tasks/checks/signature.py      | 15 +++++++--
 atr/tasks/rsync.py                 | 59 ++------------------------------
 atr/templates/draft-add-files.html | 10 +++---
 10 files changed, 123 insertions(+), 103 deletions(-)

diff --git a/atr/db/models.py b/atr/db/models.py
index e42e00d..5612438 100644
--- a/atr/db/models.py
+++ b/atr/db/models.py
@@ -145,6 +145,7 @@ class Committee(sqlmodel.SQLModel, table=True):
 
 class Project(sqlmodel.SQLModel, table=True):
     id: int = sqlmodel.Field(default=None, primary_key=True)
+    # TODO: Make name the primary key, and remove id
     name: str = sqlmodel.Field(unique=True)
     # TODO: Ideally full_name would be unique for str only, but that's complex
     full_name: str | None = sqlmodel.Field(default=None)
diff --git a/atr/routes/candidate.py b/atr/routes/candidate.py
index 801ca60..c1e819e 100644
--- a/atr/routes/candidate.py
+++ b/atr/routes/candidate.py
@@ -75,7 +75,7 @@ async def viewer(session: routes.CommitterSession, 
project_name: str, version_na
 
     # Check that the release exists
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -108,7 +108,7 @@ async def viewer_path(
         )
 
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
diff --git a/atr/routes/draft.py b/atr/routes/draft.py
index 8f36e37..144ac2b 100644
--- a/atr/routes/draft.py
+++ b/atr/routes/draft.py
@@ -163,16 +163,16 @@ async def add(session: routes.CommitterSession) -> 
response.Response | str:
 
 @routes.committer("/draft/add/<project_name>/<version_name>", methods=["GET", 
"POST"])
 async def add_file(session: routes.CommitterSession, project_name: str, 
version_name: str) -> response.Response | str:
-    """Show a page to allow the user to add a single file to a candidate 
draft."""
+    """Show a page to allow the user to add files to a candidate draft."""
 
     class AddFilesForm(util.QuartFormTyped):
-        """Form for adding file(s) to a release candidate."""
+        """Form for adding files to a release candidate."""
 
         file_name = wtforms.StringField("File name (optional)")
         file_data = wtforms.MultipleFileField(
-            "File(s)", validators=[wtforms.validators.InputRequired("File(s) 
are required")]
+            "Files", validators=[wtforms.validators.InputRequired("At least 
one file is required")]
         )
-        submit = wtforms.SubmitField("Add file or files")
+        submit = wtforms.SubmitField("Add files")
 
         def validate_file_name(self, field: wtforms.Field) -> bool:
             if field.data and len(self.file_data.data) > 1:
@@ -187,13 +187,16 @@ async def add_file(session: routes.CommitterSession, 
project_name: str, version_
                 file_name = pathlib.Path(form.file_name.data)
             file_data = form.file_data.data
 
-            await _upload_files(project_name, version_name, file_name, 
file_data)
+            number_of_files = await _upload_files(project_name, version_name, 
file_name, file_data)
             return await session.redirect(
-                review, success="File or files added successfully", 
project_name=project_name, version_name=version_name
+                review,
+                success=f"{number_of_files} file{'' if number_of_files == 1 
else 's'} added successfully",
+                project_name=project_name,
+                version_name=version_name,
             )
         except Exception as e:
-            logging.exception("Error adding file(s):")
-            await quart.flash(f"Error adding file(s): {e!s}", "error")
+            logging.exception("Error adding file:")
+            await quart.flash(f"Error adding file: {e!s}", "error")
 
     return await quart.render_template(
         "draft-add-files.html",
@@ -263,7 +266,7 @@ async def delete_file(session: routes.CommitterSession, 
project_name: str, versi
 
     async with db.session() as data:
         # Check that the release exists
-        await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -277,6 +280,9 @@ async def delete_file(session: routes.CommitterSession, 
project_name: str, versi
         # Delete the file
         await aiofiles.os.remove(full_path)
 
+        # Ensure that checks are queued again
+        await tasks.draft_checks(project_name, version_name, caller_data=data)
+
     return await session.redirect(
         review, success="File deleted successfully", 
project_name=project_name, version_name=version_name
     )
@@ -293,7 +299,7 @@ async def hashgen(
 
     async with db.session() as data:
         # Check that the release exists
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -329,9 +335,8 @@ async def hashgen(
         async with aiofiles.open(full_hash_path, "w") as f:
             await f.write(f"{hash_value}  {file_path}\n")
 
-        # Add any relevant tasks to the database
-        for task in await tasks.sha_checks(release, str(hash_path)):
-            data.add(task)
+        # Ensure that checks are queued again
+        await tasks.draft_checks(project_name, version_name, caller_data=data)
         await data.commit()
 
     return await session.redirect(
@@ -508,7 +513,7 @@ async def review_path(session: routes.CommitterSession, 
project_name: str, versi
 
     async with db.session() as data:
         # Check that the release exists
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -620,7 +625,7 @@ async def tools(session: routes.CommitterSession, 
project_name: str, version_nam
 
     async with db.session() as data:
         # Check that the release exists
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -660,7 +665,7 @@ async def viewer(session: routes.CommitterSession, 
project_name: str, version_na
 
     # Check that the release exists
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -693,7 +698,7 @@ async def viewer_path(
         )
 
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -853,7 +858,7 @@ async def _upload_files(
     version_name: str,
     file_name: pathlib.Path | None,
     files: Sequence[datastructures.FileStorage],
-) -> None:
+) -> int:
     """Process and save the uploaded files."""
     # Create target directory
     target_dir = util.get_release_candidate_draft_dir() / project_name / 
version_name
@@ -876,6 +881,11 @@ async def _upload_files(
 
         await _save_file(file, target_path)
 
+    # Ensure that checks are queued again
+    await tasks.draft_checks(project_name, version_name)
+
+    return len(files)
+
 
 async def _save_file(file: datastructures.FileStorage, target_path: 
pathlib.Path) -> None:
     async with aiofiles.open(target_path, "wb") as f:
diff --git a/atr/routes/preview.py b/atr/routes/preview.py
index 336bcde..2839ddd 100644
--- a/atr/routes/preview.py
+++ b/atr/routes/preview.py
@@ -210,7 +210,7 @@ async def viewer(session: routes.CommitterSession, 
project_name: str, version_na
 
     # Check that the release exists
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -241,7 +241,7 @@ async def viewer_path(
         )
 
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
diff --git a/atr/routes/release.py b/atr/routes/release.py
index 83d81e6..e8c16df 100644
--- a/atr/routes/release.py
+++ b/atr/routes/release.py
@@ -102,7 +102,7 @@ async def viewer(session: routes.CommitterSession, 
project_name: str, version_na
 
     # Check that the release exists
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
@@ -129,7 +129,7 @@ async def viewer_path(
     # Releases are public, no specific access check needed here beyond being a 
committer
 
     async with db.session() as data:
-        release = await data.release(name=f"{project_name}-{version_name}", 
_project=True).demand(
+        release = await data.release(name=models.release_name(project_name, 
version_name), _project=True).demand(
             base.ASFQuartException("Release does not exist", errorcode=404)
         )
 
diff --git a/atr/tasks/__init__.py b/atr/tasks/__init__.py
index 2c200b6..1346e8a 100644
--- a/atr/tasks/__init__.py
+++ b/atr/tasks/__init__.py
@@ -15,10 +15,12 @@
 # specific language governing permissions and limitations
 # under the License.
 
-from collections.abc import Awaitable, Callable
+from collections.abc import Awaitable, Callable, Coroutine
+from typing import Any, Final
 
 import aiofiles.os
 
+import atr.db as db
 import atr.db.models as models
 import atr.tasks.checks.archive as archive
 import atr.tasks.checks.hashing as hashing
@@ -39,11 +41,6 @@ async def asc_checks(release: models.Release, 
signature_path: str) -> list[model
     full_signature_path = str(draft_dir / signature_path)
     modified = int(await aiofiles.os.path.getmtime(full_signature_path))
 
-    artifact_path = signature_path.removesuffix(".asc")
-    full_artifact_path = str(draft_dir / artifact_path)
-    if not (await aiofiles.os.path.exists(full_artifact_path)):
-        raise RuntimeError(f"Artifact {full_artifact_path} does not exist")
-
     if release.committee:
         tasks.append(
             models.Task(
@@ -52,7 +49,6 @@ async def asc_checks(release: models.Release, signature_path: 
str) -> list[model
                 task_args=signature.Check(
                     release_name=release.name,
                     committee_name=release.committee.name,
-                    abs_artifact_path=full_artifact_path,
                     abs_signature_path=full_signature_path,
                 ).model_dump(),
                 release_name=release.name,
@@ -64,6 +60,57 @@ async def asc_checks(release: models.Release, 
signature_path: str) -> list[model
     return tasks
 
 
+async def draft_checks(project_name: str, release_version: str, caller_data: 
db.Session | None = None) -> int:
+    """Core logic to analyse an rsync upload and queue checks."""
+    base_path = util.get_release_candidate_draft_dir() / project_name / 
release_version
+    paths_recursive = await util.paths_recursive(base_path)
+
+    if caller_data is None:
+        data = db.session()
+        await data.__aenter__()
+    else:
+        data = caller_data
+
+    release = await data.release(name=models.release_name(project_name, 
release_version), _committee=True).demand(
+        RuntimeError("Release not found")
+    )
+    for path in paths_recursive:
+        # This works because path is relative
+        full_path = base_path / path
+
+        # We only want to analyse files that are new or have changed
+        # But rsync can set timestamps to the past, so we can't rely on them
+        # Instead, we can run any tasks when the file has a different modified 
time
+        # TODO: This may cause problems if the file is backdated
+        modified = int(await aiofiles.os.path.getmtime(full_path))
+        cached_tasks = await db.recent_tasks(data, release.name, str(path), 
modified)
+
+        # Add new tasks for each path
+        for task_type, task_function in TASK_FUNCTIONS.items():
+            if path.name.endswith(task_type):
+                for task in await task_function(release, str(path)):
+                    if task.task_type not in cached_tasks:
+                        data.add(task)
+
+    # TODO: Should we hash the set of paths to detect changes?
+    path_check_task = models.Task(
+        status=models.TaskStatus.QUEUED,
+        task_type=models.TaskType.PATHS_CHECK,
+        task_args=paths.Check(
+            release_name=release.name,
+            base_release_dir=str(base_path),
+        ).model_dump(),
+    )
+    data.add(path_check_task)
+
+    if caller_data is None:
+        await data.commit()
+        await data.__aexit__(None, None, None)
+    # Otherwise the caller is responsible for committing
+
+    return len(paths_recursive)
+
+
 def resolve(task_type: models.TaskType) -> Callable[..., Awaitable[str | 
None]]:  # noqa: C901
     match task_type:
         case models.TaskType.ARCHIVE_INTEGRITY:
@@ -177,3 +224,11 @@ async def tar_gz_checks(release: models.Release, path: 
str) -> list[models.Task]
     ]
 
     return tasks
+
+
+TASK_FUNCTIONS: Final[dict[str, Callable[..., Coroutine[Any, Any, 
list[models.Task]]]]] = {
+    ".asc": asc_checks,
+    ".sha256": sha_checks,
+    ".sha512": sha_checks,
+    ".tar.gz": tar_gz_checks,
+}
diff --git a/atr/tasks/checks/paths.py b/atr/tasks/checks/paths.py
index 64451f5..b4465e5 100644
--- a/atr/tasks/checks/paths.py
+++ b/atr/tasks/checks/paths.py
@@ -81,12 +81,6 @@ async def _check_metadata_rules(
     if ext_metadata not in {".asc", ".sha256", ".sha512", ".md5", ".sha", 
".sha1"}:
         warnings.append("The use of this metadata file is discouraged")
 
-    # Check whether the corresponding artifact exists
-    artifact_path_base = str(relative_path).removesuffix(ext_metadata)
-    full_artifact_path = base_path / artifact_path_base
-    if not await aiofiles.os.path.exists(full_artifact_path):
-        errors.append(f"Metadata file exists but corresponding artifact 
'{artifact_path_base}' is missing")
-
 
 async def _check_path_process_single(
     base_path: pathlib.Path,
@@ -94,6 +88,7 @@ async def _check_path_process_single(
     check_errors: checks.Check,
     check_warnings: checks.Check,
     check_success: checks.Check,
+    relative_paths: set[str],
 ) -> None:
     """Process and check a single path within the release directory."""
     full_path = base_path / relative_path
@@ -123,6 +118,11 @@ async def _check_path_process_single(
     elif ext_metadata:
         _LOGGER.info("Checking metadata rules for %s", full_path)
         await _check_metadata_rules(base_path, relative_path, ext_metadata, 
errors, warnings)
+
+        # Check whether the corresponding artifact exists
+        artifact_path_base = str(relative_path).removesuffix(ext_metadata)
+        if artifact_path_base not in relative_paths:
+            errors.append(f"Metadata file exists but corresponding artifact 
'{artifact_path_base}' is missing")
     else:
         _LOGGER.info("Checking general rules for %s", full_path)
         allowed_top_level = {"LICENSE", "NOTICE", "README", "CHANGES"}
@@ -163,7 +163,6 @@ async def check(args: Check) -> None:
         checker=checks.function_key(check) + "_success", 
release_name=args.release_name, path=None, afresh=True
     )
     relative_paths = await util.paths_recursive(base_path)
-
     for relative_path in relative_paths:
         # Delegate processing of each path to the helper function
         await _check_path_process_single(
@@ -172,6 +171,7 @@ async def check(args: Check) -> None:
             check_errors,
             check_warnings,
             check_success,
+            set(str(p) for p in relative_paths),
         )
 
     return None
diff --git a/atr/tasks/checks/signature.py b/atr/tasks/checks/signature.py
index 7b9595b..83d0624 100644
--- a/atr/tasks/checks/signature.py
+++ b/atr/tasks/checks/signature.py
@@ -20,6 +20,7 @@ import logging
 import tempfile
 from typing import Any, Final
 
+import aiofiles.os
 import gnupg
 import pydantic
 import sqlmodel
@@ -27,6 +28,7 @@ import sqlmodel
 import atr.db as db
 import atr.db.models as models
 import atr.tasks.checks as checks
+import atr.util as util
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -36,7 +38,6 @@ class Check(pydantic.BaseModel):
 
     release_name: str = pydantic.Field(..., description="Release name")
     committee_name: str = pydantic.Field(..., description="Name of the 
committee whose keys should be used")
-    abs_artifact_path: str = pydantic.Field(..., description="Absolute path to 
the artifact file")
     abs_signature_path: str = pydantic.Field(..., description="Absolute path 
to the signature file (.asc)")
 
 
@@ -45,15 +46,23 @@ async def check(args: Check) -> str | None:
     """Check a signature file."""
     rel_path = checks.rel_path(args.abs_signature_path)
     check_instance = await checks.Check.create(checker=check, 
release_name=args.release_name, path=rel_path)
+
+    # Get the artifact path
+    artifact_path = args.abs_signature_path.removesuffix(".asc")
+    abs_artifact_path = str(util.get_release_candidate_draft_dir() / 
artifact_path)
+    if not (await aiofiles.os.path.exists(abs_artifact_path)):
+        await check_instance.failure(f"Artifact {abs_artifact_path} does not 
exist", {})
+        return None
+
     _LOGGER.info(
-        f"Checking signature {args.abs_signature_path} for 
{args.abs_artifact_path}"
+        f"Checking signature {args.abs_signature_path} for {abs_artifact_path}"
         f" using {args.committee_name} keys (rel: {rel_path})"
     )
 
     try:
         result_data = await _check_core_logic(
             committee_name=args.committee_name,
-            artifact_path=args.abs_artifact_path,
+            artifact_path=abs_artifact_path,
             signature_path=args.abs_signature_path,
         )
         if result_data.get("error"):
diff --git a/atr/tasks/rsync.py b/atr/tasks/rsync.py
index 5c40a51..046e6c5 100644
--- a/atr/tasks/rsync.py
+++ b/atr/tasks/rsync.py
@@ -16,20 +16,12 @@
 # under the License.
 
 import logging
-from typing import TYPE_CHECKING, Any, Final
+from typing import Final
 
-import aiofiles.os
 import pydantic
 
-import atr.db as db
-import atr.db.models as models
 import atr.tasks as tasks
 import atr.tasks.checks as checks
-import atr.tasks.checks.paths as paths
-import atr.util as util
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Coroutine
 
 # _CONFIG: Final = config.get()
 _LOGGER: Final = logging.getLogger(__name__)
@@ -47,60 +39,13 @@ async def analyse(args: Analyse) -> str | None:
     """Analyse an rsync upload by queuing specific checks for discovered 
files."""
     _LOGGER.info(f"Starting rsync analysis for {args.project_name} 
{args.release_version}")
     try:
-        result_data = await _analyse_core(
+        num_paths = await tasks.draft_checks(
             args.project_name,
             args.release_version,
         )
-        num_paths = len(result_data.get("paths", []))
         _LOGGER.info(f"Finished rsync analysis for {args.project_name} 
{args.release_version}, found {num_paths} paths")
     except Exception as e:
         _LOGGER.exception(f"Rsync analysis failed for {args.project_name} 
{args.release_version}: {e}")
         raise e
 
     return None
-
-
-async def _analyse_core(project_name: str, release_version: str) -> dict[str, 
Any]:
-    """Core logic to analyse an rsync upload and queue checks."""
-    base_path = util.get_release_candidate_draft_dir() / project_name / 
release_version
-    paths_recursive = await util.paths_recursive(base_path)
-    async with db.session() as data:
-        release = await data.release(name=models.release_name(project_name, 
release_version), _committee=True).demand(
-            RuntimeError("Release not found")
-        )
-        for path in paths_recursive:
-            # This works because path is relative
-            full_path = base_path / path
-
-            # We only want to analyse files that are new or have changed
-            # But rsync can set timestamps to the past, so we can't rely on 
them
-            # Instead, we can run any tasks when the file has a different 
modified time
-            # TODO: This may cause problems if the file is backdated
-            modified = int(await aiofiles.os.path.getmtime(full_path))
-            cached_tasks = await db.recent_tasks(data, release.name, 
str(path), modified)
-
-            # Add new tasks for each path
-            task_functions: dict[str, Callable[..., Coroutine[Any, Any, 
list[models.Task]]]] = {
-                ".asc": tasks.asc_checks,
-                ".sha256": tasks.sha_checks,
-                ".sha512": tasks.sha_checks,
-                ".tar.gz": tasks.tar_gz_checks,
-            }
-            for task_type, task_function in task_functions.items():
-                if path.name.endswith(task_type):
-                    for task in await task_function(release, str(path)):
-                        if task.task_type not in cached_tasks:
-                            data.add(task)
-
-        path_check_task = models.Task(
-            status=models.TaskStatus.QUEUED,
-            task_type=models.TaskType.PATHS_CHECK,
-            task_args=paths.Check(
-                release_name=release.name,
-                base_release_dir=str(base_path),
-            ).model_dump(),
-        )
-        data.add(path_check_task)
-
-        await data.commit()
-    return {"paths": [str(path) for path in paths_recursive]}
diff --git a/atr/templates/draft-add-files.html 
b/atr/templates/draft-add-files.html
index 39923a7..b024bec 100644
--- a/atr/templates/draft-add-files.html
+++ b/atr/templates/draft-add-files.html
@@ -1,18 +1,18 @@
 {% extends "layouts/base.html" %}
 
 {% block title %}
-  Add file(s) to {{ project_name }} {{ version_name }} ~ ATR
+  Add files to {{ project_name }} {{ version_name }} ~ ATR
 {% endblock title %}
 
 {% block description %}
-  Add file(s) to a release candidate.
+  Add files to a release candidate.
 {% endblock description %}
 
 {% block content %}
   <a href="{{ as_url(routes.draft.add) }}" class="back-link">← Back to add 
draft</a>
 
-  <h1>Add file(s) to {{ project_name }} {{ version_name }}</h1>
-  <p class="intro">Use this form to add a single file to this candidate 
draft.</p>
+  <h1>Add files to {{ project_name }} {{ version_name }}</h1>
+  <p class="intro">Use this form to add files to this candidate draft.</p>
 
   <form method="post"
         enctype="multipart/form-data"
@@ -24,7 +24,7 @@
              class="col-sm-3 col-form-label text-sm-end">{{ 
form.file_data.label.text }}:</label>
       <div class="col-sm-8">
         {{ form.file_data(class_="form-control" + (" is-invalid" if 
form.file_data.errors else "") ) }}
-        <span id="file_data-help" class="form-text text-muted">Select the 
file(s) to upload</span>
+        <span id="file_data-help" class="form-text text-muted">Select the 
files to upload</span>
         {% if form.file_data.errors %}
           {% for error in form.file_data.errors %}<div 
class="invalid-feedback">{{ error }}</div>{% endfor %}
         {% endif %}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to