This is an automated email from the ASF dual-hosted git repository.

skrawcz pushed a commit to branch update_references
in repository https://gitbox.apache.org/repos/asf/hamilton.git

commit eed2e88c3002b60caf09757c33dd5f6ca5bfea72
Author: Stefan Krawczyk <[email protected]>
AuthorDate: Fri Jun 20 22:41:41 2025 -0700

    Changes references to hamilton.apache.org
    
    This change updates links to the right documentation domain.
---
 contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py  | 2 +-
 .../hamilton/contrib/dagworks/translate_to_hamilton/__init__.py    | 2 +-
 .../hamilton/contrib/user/skrawcz/customize_embeddings/__init__.py | 2 +-
 docs/conf.py                                                       | 7 ++++++-
 .../LLM_Workflows/RAG_document_extract_chunk_embed/pipeline.py     | 2 +-
 examples/LLM_Workflows/scraping_and_chunking/doc_pipeline.py       | 2 +-
 .../LLM_Workflows/scraping_and_chunking/spark/spark_pipeline.py    | 2 +-
 examples/people_data_labs/run.py                                   | 2 +-
 hamilton/caching/adapter.py                                        | 2 +-
 hamilton/experimental/h_cache.py                                   | 2 +-
 hamilton/function_modifiers/adapters.py                            | 2 +-
 hamilton/function_modifiers/macros.py                              | 4 ++--
 hamilton/graph.py                                                  | 2 +-
 hamilton/io/materialization.py                                     | 2 +-
 hamilton/lifecycle/default.py                                      | 2 +-
 hamilton/plugins/h_diskcache.py                                    | 2 +-
 hamilton/plugins/h_openlineage.py                                  | 2 +-
 hamilton/telemetry.py                                              | 2 +-
 18 files changed, 24 insertions(+), 19 deletions(-)

diff --git a/contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py 
b/contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py
index 547e236b..57055a9e 100644
--- a/contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py
+++ b/contrib/hamilton/contrib/dagworks/sphinx_doc_chunking/__init__.py
@@ -24,7 +24,7 @@ with contrib.catch_import_errors(__name__, __file__, logger):
 from hamilton.htypes import Collect, Parallelizable
 
 
-def sitemap_text(sitemap_url: str = "https://hamilton.dagworks.io/en/latest/sitemap.xml") -> str:
+def sitemap_text(sitemap_url: str = "https://hamilton.apache.org/sitemap.xml") -> str:
     """Takes in a sitemap URL and returns the sitemap.xml file.
 
     :param sitemap_url: the URL of sitemap.xml file
diff --git 
a/contrib/hamilton/contrib/dagworks/translate_to_hamilton/__init__.py 
b/contrib/hamilton/contrib/dagworks/translate_to_hamilton/__init__.py
index 5da44400..57b8f945 100644
--- a/contrib/hamilton/contrib/dagworks/translate_to_hamilton/__init__.py
+++ b/contrib/hamilton/contrib/dagworks/translate_to_hamilton/__init__.py
@@ -10,7 +10,7 @@ with contrib.catch_import_errors(__name__, __file__, logger):
 
 def system_prompt() -> str:
     """Base system prompt for translating code to Hamilton."""
-    return '''You created the Hamilton micro-orchestration framework in Python while you were at Stitch Fix. Therefore you are the world renowned expert on it, and enjoy helping others get started with the framework. Here's the documentation for it - https://hamilton.dagworks.io/en/latest/.
+    return '''You created the Hamilton micro-orchestration framework in Python while you were at Stitch Fix. Therefore you are the world renowned expert on it, and enjoy helping others get started with the framework. Here's the documentation for it - https://hamilton.apache.org/.
 
 The framework you invented is a cute programming paradigm where users write 
declarative functions that express a dataflow. The user does not need to 
expressly connect components in the dataflow like with other frameworks, 
instead the name of the function declares an output one can request, with the 
function input arguments declaring what is required to compute the output. So 
function names become nouns. The framework then orchestrates calling the right 
function in the right order based o [...]
 
diff --git 
a/contrib/hamilton/contrib/user/skrawcz/customize_embeddings/__init__.py 
b/contrib/hamilton/contrib/user/skrawcz/customize_embeddings/__init__.py
index afb215ea..69da00ac 100644
--- a/contrib/hamilton/contrib/user/skrawcz/customize_embeddings/__init__.py
+++ b/contrib/hamilton/contrib/user/skrawcz/customize_embeddings/__init__.py
@@ -163,7 +163,7 @@ def processed_local_dataset__snli(
 @load_from.csv(
     path=source("local_dataset_path")
     # see data loader docuemntation and the PandasCSVReader for values you can 
pass in:
-    #  - https://hamilton.dagworks.io/en/latest/reference/io/available-data-adapters/#data-loaders
+    #  - https://hamilton.apache.org/reference/io/available-data-adapters/#data-loaders
     #  - 
https://github.com/apache/hamilton/blob/main/hamilton/plugins/pandas_extensions.py#L89-L255
 )
 def processed_local_dataset__local(
diff --git a/docs/conf.py b/docs/conf.py
index e8175844..347fe296 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -64,6 +64,11 @@ if re.match(r"^sf-hamilton-(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)$", current_t
 else:
     version = "latest"
 language = "en"
-html_baseurl = "https://hamilton.dagworks.io/"
+GIT_BRANCH_OUTPUT = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
+current_branch = GIT_BRANCH_OUTPUT.decode().strip()
+if current_branch == "main":
+    html_baseurl = "https://hamilton.apache.org/"
+else:
+    html_baseurl = "https://hamilton.staged.apache.org/"
 html_extra_path = ["robots.txt"]
 # ---
diff --git 
a/examples/LLM_Workflows/RAG_document_extract_chunk_embed/pipeline.py 
b/examples/LLM_Workflows/RAG_document_extract_chunk_embed/pipeline.py
index 7fba7964..8ee379fe 100644
--- a/examples/LLM_Workflows/RAG_document_extract_chunk_embed/pipeline.py
+++ b/examples/LLM_Workflows/RAG_document_extract_chunk_embed/pipeline.py
@@ -166,7 +166,7 @@ if __name__ == "__main__":
 
     # execute the pipeline for the given URL
     results = pipeline_driver.execute(
-        ["store"], inputs={"url": "https://hamilton.dagworks.io/en/latest/"}
+        ["store"], inputs={"url": "https://hamilton.apache.org/"}
     )
 
     # show the dataframe for this document
diff --git a/examples/LLM_Workflows/scraping_and_chunking/doc_pipeline.py 
b/examples/LLM_Workflows/scraping_and_chunking/doc_pipeline.py
index f1d3af5f..c1a383ec 100644
--- a/examples/LLM_Workflows/scraping_and_chunking/doc_pipeline.py
+++ b/examples/LLM_Workflows/scraping_and_chunking/doc_pipeline.py
@@ -18,7 +18,7 @@ from langchain_core import documents
 from hamilton.htypes import Collect, Parallelizable
 
 
-def sitemap_text(sitemap_url: str = "https://hamilton.dagworks.io/en/latest/sitemap.xml") -> str:
+def sitemap_text(sitemap_url: str = "https://hamilton.apache.org/sitemap.xml") -> str:
     """Takes in a sitemap URL and returns the sitemap.xml file.
 
     :param sitemap_url: the URL of sitemap.xml file
diff --git 
a/examples/LLM_Workflows/scraping_and_chunking/spark/spark_pipeline.py 
b/examples/LLM_Workflows/scraping_and_chunking/spark/spark_pipeline.py
index fdb37a1c..b060c42a 100644
--- a/examples/LLM_Workflows/scraping_and_chunking/spark/spark_pipeline.py
+++ b/examples/LLM_Workflows/scraping_and_chunking/spark/spark_pipeline.py
@@ -18,7 +18,7 @@ def spark_session(app_name: str) -> ps.SparkSession:
     return ps.SparkSession.builder.appName(app_name).getOrCreate()
 
 
-def sitemap_text(sitemap_url: str = "https://hamilton.dagworks.io/en/latest/sitemap.xml") -> str:
+def sitemap_text(sitemap_url: str = "https://hamilton.apache.org/sitemap.xml") -> str:
     """Takes in a sitemap URL and returns the sitemap.xml file.
 
     :param sitemap_url: the URL of sitemap.xml file
diff --git a/examples/people_data_labs/run.py b/examples/people_data_labs/run.py
index a9a09869..8edae24f 100644
--- a/examples/people_data_labs/run.py
+++ b/examples/people_data_labs/run.py
@@ -23,7 +23,7 @@ def main():
     print(results["augmented_company_info"].head())
 
     # add code to store results
-    # ref: https://hamilton.dagworks.io/en/latest/concepts/materialization/
+    # ref: https://hamilton.apache.org/concepts/materialization/
 
 
 if __name__ == "__main__":
diff --git a/hamilton/caching/adapter.py b/hamilton/caching/adapter.py
index 59fd1813..e0a6050c 100644
--- a/hamilton/caching/adapter.py
+++ b/hamilton/caching/adapter.py
@@ -794,7 +794,7 @@ class HamiltonCacheAdapter(
             logger.warning(
                 f"Node `{node_name}` has unhashable result of type 
`{type(result)}`. "
                 "Set `CachingBehavior.IGNORE` or register a versioning 
function to silence warning. "
-                "Learn more: https://hamilton.dagworks.io/en/latest/concepts/caching/#caching-behavior\n"
+                "Learn more: https://hamilton.apache.org/concepts/caching/#caching-behavior\n"
             )
             # if the data version is unhashable, we need to set a random 
suffix to the cache_key
             # to prevent the cache from thinking this value is constant, 
causing a cache hit.
diff --git a/hamilton/experimental/h_cache.py b/hamilton/experimental/h_cache.py
index b5a970db..e48d1225 100644
--- a/hamilton/experimental/h_cache.py
+++ b/hamilton/experimental/h_cache.py
@@ -18,7 +18,7 @@ logger.warning(
     "are deprecated and will be removed in Hamilton 2.0. "
     "Consider enabling the core caching feature via `Builder.with_cache()`. "
     "This might not be 1-to-1 replacement, so please reach out if there are 
missing features. "
-    "See https://hamilton.dagworks.io/en/latest/concepts/caching/ to learn more."
+    "See https://hamilton.apache.org/concepts/caching/ to learn more."
 )
 
 
diff --git a/hamilton/function_modifiers/adapters.py 
b/hamilton/function_modifiers/adapters.py
index 11133822..21733d58 100644
--- a/hamilton/function_modifiers/adapters.py
+++ b/hamilton/function_modifiers/adapters.py
@@ -486,7 +486,7 @@ class save_to__meta__(type):
                 "If you've gotten to this point, you either (1) spelled the "
                 "loader name wrong, (2) are trying to use a saver that does"
                 "not exist (yet). For a list of available savers, see "
-                "https://hamilton.dagworks.io/en/latest/reference/io/available-data-adapters/"
+                "https://hamilton.apache.org/reference/io/available-data-adapters/"
             ) from e
 
 
diff --git a/hamilton/function_modifiers/macros.py 
b/hamilton/function_modifiers/macros.py
index c608e581..33348740 100644
--- a/hamilton/function_modifiers/macros.py
+++ b/hamilton/function_modifiers/macros.py
@@ -243,7 +243,7 @@ def get_default_tags(fn: Callable) -> Dict[str, str]:
     "to utilize config for resolving decorators. Note this allows you to use 
any"
     "existing decorators.",
     current_version=(1, 19, 0),
-    migration_guide="https://hamilton.dagworks.io/en/latest/reference/decorators/",
+    migration_guide="https://hamilton.apache.org/reference/decorators/",
 )
 class dynamic_transform(base.NodeCreator):
     def __init__(
@@ -1074,7 +1074,7 @@ class pipe_input(base.NodeInjector):
     explanation="pipe has been replaced with pipe_input -- a clearer name 
since "
     "we also added pipe_output with complimentary functionality.",
     current_version=(1, 77, 0),
-    migration_guide="https://hamilton.dagworks.io/en/latest/reference/decorators/",
+    migration_guide="https://hamilton.apache.org/reference/decorators/",
 )
 class pipe(pipe_input):
     def __init__(
diff --git a/hamilton/graph.py b/hamilton/graph.py
index c72fd6c5..ace6c5a6 100644
--- a/hamilton/graph.py
+++ b/hamilton/graph.py
@@ -175,7 +175,7 @@ def create_function_graph(
                 raise ValueError(
                     f"Cannot define function {n.name} more than once."
                     f" Already defined by function {f}"
-                    f" In case you want to override the previous functions check out .allow_module_overrides() at: https://hamilton.dagworks.io/en/latest/reference/drivers/Driver/"
+                    f" In case you want to override the previous functions check out .allow_module_overrides() at: https://hamilton.apache.org/reference/drivers/Driver/"
                 )
             nodes[n.name] = n
     # add dependencies -- now that all nodes except input nodes, we just run 
through edges & validate graph.
diff --git a/hamilton/io/materialization.py b/hamilton/io/materialization.py
index 2035bb43..b7dc5d3a 100644
--- a/hamilton/io/materialization.py
+++ b/hamilton/io/materialization.py
@@ -413,7 +413,7 @@ def _set_materializer_attrs():
         wrapper.__signature__ = new_signature
         wrapper.__doc__ = f"""
         Materializes data to {key} format. Note that the parameters are a 
superset of possible parameters -- this might depend on
-        the actual type of the data passed in. For more information, see: https://hamilton.dagworks.io/en/latest/reference/io/available-data-adapters/#data-loaders.
+        the actual type of the data passed in. For more information, see: https://hamilton.apache.org/reference/io/available-data-adapters/#data-loaders.
         You can also pass `source` and `value` in as kwargs.
         """
         return wrapper
diff --git a/hamilton/lifecycle/default.py b/hamilton/lifecycle/default.py
index 66ca515f..64310f32 100644
--- a/hamilton/lifecycle/default.py
+++ b/hamilton/lifecycle/default.py
@@ -361,7 +361,7 @@ class CacheAdapter(NodeExecutionHook, NodeExecutionMethod, 
GraphExecutionHook):
             "The `CacheAdapter` is deprecated and will be removed in Hamilton 
2.0. "
             "Consider enabling the core caching feature via 
`Builder.with_cache()`. "
             "This might not be 1-to-1 replacement, so please reach out if 
there are missing features. "
-            "See https://hamilton.dagworks.io/en/latest/concepts/caching/ to learn more."
+            "See https://hamilton.apache.org/concepts/caching/ to learn more."
         )
 
     def run_before_graph_execution(self, *, graph: HamiltonGraph, **kwargs):
diff --git a/hamilton/plugins/h_diskcache.py b/hamilton/plugins/h_diskcache.py
index f2453b20..43f09518 100644
--- a/hamilton/plugins/h_diskcache.py
+++ b/hamilton/plugins/h_diskcache.py
@@ -91,7 +91,7 @@ class DiskCacheAdapter(
             "The `DiskCacheAdapter` is deprecated and will be removed in 
Hamilton 2.0. "
             "Consider enabling the core caching feature via 
`Builder.with_cache()`. "
             "This might not be 1-to-1 replacement, so please reach out if 
there are missing features. "
-            "See https://hamilton.dagworks.io/en/latest/concepts/caching/ to learn more."
+            "See https://hamilton.apache.org/concepts/caching/ to learn more."
         )
 
     def run_before_graph_execution(self, *, graph: graph_types.HamiltonGraph, 
**kwargs):
diff --git a/hamilton/plugins/h_openlineage.py 
b/hamilton/plugins/h_openlineage.py
index ea066994..fa0d6102 100644
--- a/hamilton/plugins/h_openlineage.py
+++ b/hamilton/plugins/h_openlineage.py
@@ -161,7 +161,7 @@ class OpenLineageAdapter(
         dr.execute(...)
 
     Note for data lineage to be emitted, you must use the "materializer" 
abstraction to provide
-    metadata. See https://hamilton.dagworks.io/en/latest/concepts/materialization/.
+    metadata. See https://hamilton.apache.org/concepts/materialization/.
     This can be done via the `@datasaver()` and `@dataloader()` decorators, or
     using the `@load_from` or `@save_to` decorators, as well as passing in 
data savers
     and data loaders via `.with_materializers()` on the Driver Builder, or via 
`.materialize()`
diff --git a/hamilton/telemetry.py b/hamilton/telemetry.py
index a1479098..2c068287 100644
--- a/hamilton/telemetry.py
+++ b/hamilton/telemetry.py
@@ -135,7 +135,7 @@ def is_telemetry_enabled() -> bool:
             logger.info(
                 "Note: Hamilton collects completely anonymous data about 
usage. "
                 "This will help us improve Hamilton over time. "
-                "See https://hamilton.dagworks.io/en/latest/get-started/license/#usage-analytics-data-privacy"
+                "See https://hamilton.apache.org/get-started/license/#usage-analytics-data-privacy"
                 " for details."
             )
         call_counter += 1

Reply via email to