This is an automated email from the ASF dual-hosted git repository.
github-bot pushed a commit to branch gh-pages
in repository https://gitbox.apache.org/repos/asf/airflow-site.git
The following commit(s) were added to refs/heads/gh-pages by this push:
new 94b0635 Deploying to gh-pages from @
19589659778a59c46e705f35ad9574b83659aa13 🚀
94b0635 is described below
commit 94b0635b202ea536f52e6c3e6c317d3467529df9
Author: potiuk <[email protected]>
AuthorDate: Thu Jul 15 13:00:24 2021 +0000
Deploying to gh-pages from @ 19589659778a59c46e705f35ad9574b83659aa13 🚀
---
blog/airflow-1.10.10/index.html | 4 +-
blog/airflow-1.10.12/index.html | 4 +-
blog/airflow-1.10.8-1.10.9/index.html | 4 +-
blog/airflow-survey-2020/index.html | 4 +-
blog/airflow-survey/index.html | 4 +-
blog/airflow-two-point-oh-is-here/index.html | 4 +-
blog/airflow_summit_2021/index.html | 4 +-
blog/announcing-new-website/index.html | 4 +-
blog/apache-airflow-for-newcomers/index.html | 4 +-
.../index.html | 4 +-
.../index.html | 4 +-
.../index.html | 4 +-
.../index.html | 4 +-
.../index.html | 4 +-
.../index.html | 4 +-
docs/docker-stack/_sources/entrypoint.rst.txt | 57 +++++++-------
docs/docker-stack/_static/check-solid.svg | 2 +-
docs/docker-stack/_static/clipboard.min.js | 8 +-
docs/docker-stack/_static/copy-button.svg | 6 +-
docs/docker-stack/_static/copybutton.css | 28 +++++--
docs/docker-stack/_static/copybutton.js | 5 +-
docs/docker-stack/build.html | 4 +-
docs/docker-stack/entrypoint.html | 72 +++++++++---------
docs/docker-stack/genindex.html | 36 ++++-----
docs/docker-stack/searchindex.js | 2 +-
index.html | 32 ++++----
search/index.html | 4 +-
sitemap.xml | 86 +++++++++++-----------
use-cases/adobe/index.html | 4 +-
use-cases/big-fish-games/index.html | 4 +-
use-cases/dish/index.html | 4 +-
use-cases/experity/index.html | 4 +-
use-cases/onefootball/index.html | 4 +-
use-cases/plarium-krasnodar/index.html | 4 +-
use-cases/sift/index.html | 4 +-
35 files changed, 221 insertions(+), 209 deletions(-)
diff --git a/blog/airflow-1.10.10/index.html b/blog/airflow-1.10.10/index.html
index 7103ce8..281ade1 100644
--- a/blog/airflow-1.10.10/index.html
+++ b/blog/airflow-1.10.10/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-04-09T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Apache Airflow 1.10.10">
<meta itemprop="description" content="We are happy to present Apache Airflow
1.10.10">
<meta itemprop="datePublished" content="2020-04-09T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="1143">
diff --git a/blog/airflow-1.10.12/index.html b/blog/airflow-1.10.12/index.html
index 0ea9b24..d4440aa 100644
--- a/blog/airflow-1.10.12/index.html
+++ b/blog/airflow-1.10.12/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-08-25T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Apache Airflow 1.10.12">
<meta itemprop="description" content="We are happy to present Apache Airflow
1.10.12">
<meta itemprop="datePublished" content="2020-08-25T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="911">
diff --git a/blog/airflow-1.10.8-1.10.9/index.html
b/blog/airflow-1.10.8-1.10.9/index.html
index da8ab89..9146783 100644
--- a/blog/airflow-1.10.8-1.10.9/index.html
+++ b/blog/airflow-1.10.8-1.10.9/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-02-23T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Apache Airflow 1.10.8 & 1.10.9">
<meta itemprop="description" content="We are happy to present the new 1.10.8
and 1.10.9 releases of Apache Airflow.">
<meta itemprop="datePublished" content="2020-02-23T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="437">
diff --git a/blog/airflow-survey-2020/index.html
b/blog/airflow-survey-2020/index.html
index cda03af..220b742 100644
--- a/blog/airflow-survey-2020/index.html
+++ b/blog/airflow-survey-2020/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2021-03-09T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Airflow Survey 2020">
<meta itemprop="description" content="We observe steady growth in number of
users as well as in an amount of active contributors. So listening and
understanding our community is of high importance.">
<meta itemprop="datePublished" content="2021-03-09T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="2106">
diff --git a/blog/airflow-survey/index.html b/blog/airflow-survey/index.html
index 8c4a027..2b43056 100644
--- a/blog/airflow-survey/index.html
+++ b/blog/airflow-survey/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-12-11T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Airflow Survey 2019">
<meta itemprop="description" content="Receiving and adjusting to our users’
feedback is a must. Let’s see who Airflow users are, how they play with it, and
what they miss.">
<meta itemprop="datePublished" content="2019-12-11T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="1775">
diff --git a/blog/airflow-two-point-oh-is-here/index.html
b/blog/airflow-two-point-oh-is-here/index.html
index 3cf691c..c26f16e 100644
--- a/blog/airflow-two-point-oh-is-here/index.html
+++ b/blog/airflow-two-point-oh-is-here/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-12-17T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Apache Airflow 2.0 is here!">
<meta itemprop="description" content="We're proud to announce that Apache
Airflow 2.0.0 has been released.">
<meta itemprop="datePublished" content="2020-12-17T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="1166">
diff --git a/blog/airflow_summit_2021/index.html
b/blog/airflow_summit_2021/index.html
index 5110252..f992c74 100644
--- a/blog/airflow_summit_2021/index.html
+++ b/blog/airflow_summit_2021/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2021-03-21T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Airflow Summit 2021">
<meta itemprop="description" content="We are thrilled about Airflow Summit
2021!">
<meta itemprop="datePublished" content="2021-03-21T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="77">
diff --git a/blog/announcing-new-website/index.html
b/blog/announcing-new-website/index.html
index 572ea17..76dd4af 100644
--- a/blog/announcing-new-website/index.html
+++ b/blog/announcing-new-website/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-12-11T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="New Airflow website">
<meta itemprop="description" content="We are thrilled about our new website!">
<meta itemprop="datePublished" content="2019-12-11T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="282">
diff --git a/blog/apache-airflow-for-newcomers/index.html
b/blog/apache-airflow-for-newcomers/index.html
index 408e5e5..072be03 100644
--- a/blog/apache-airflow-for-newcomers/index.html
+++ b/blog/apache-airflow-for-newcomers/index.html
@@ -37,14 +37,14 @@ Authoring Workflow in Apache Airflow. Airflow makes it easy
to author workflows
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-08-17T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Apache Airflow For Newcomers">
<meta itemprop="description" content="Apache Airflow is a platform to
programmatically author, schedule, and monitor workflows. A workflow is a
sequence of tasks that processes a set of data. You can think of workflow as
the path that describes how tasks go from being undone to done. Scheduling, on
the other hand, is the process of planning, controlling, and optimizing when a
particular task should be done.
Authoring Workflow in Apache Airflow. Airflow makes it easy to author
workflows using python scripts.">
<meta itemprop="datePublished" content="2020-08-17T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="1070">
diff --git
a/blog/apache-con-europe-2019-thoughts-and-insights-by-airflow-committers/index.html
b/blog/apache-con-europe-2019-thoughts-and-insights-by-airflow-committers/index.html
index 4eb81e2..3966e68 100644
---
a/blog/apache-con-europe-2019-thoughts-and-insights-by-airflow-committers/index.html
+++
b/blog/apache-con-europe-2019-thoughts-and-insights-by-airflow-committers/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-11-22T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="ApacheCon Europe 2019 — Thoughts and Insights
by Airflow Committers">
<meta itemprop="description" content="Here come some thoughts by Airflow
committers and contributors from the ApacheCon Europe 2019. Get to know the ASF
community!">
<meta itemprop="datePublished" content="2019-11-22T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="138">
diff --git a/blog/documenting-using-local-development-environments/index.html
b/blog/documenting-using-local-development-environments/index.html
index a2d6651..bc12dc4 100644
--- a/blog/documenting-using-local-development-environments/index.html
+++ b/blog/documenting-using-local-development-environments/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-11-22T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Documenting using local development
environment">
<meta itemprop="description" content="The story behind documenting local
development environment of Apache Airflow">
<meta itemprop="datePublished" content="2019-11-22T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="256">
diff --git
a/blog/experience-in-google-season-of-docs-2019-with-apache-airflow/index.html
b/blog/experience-in-google-season-of-docs-2019-with-apache-airflow/index.html
index 6034c85..0c0bb6e 100644
---
a/blog/experience-in-google-season-of-docs-2019-with-apache-airflow/index.html
+++
b/blog/experience-in-google-season-of-docs-2019-with-apache-airflow/index.html
@@ -37,14 +37,14 @@ About Me I have been writing tech articles on medium as
well as my blog for the
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-12-20T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Experience in Google Season of Docs 2019 with
Apache Airflow">
<meta itemprop="description" content="I came across Google Season of Docs
(GSoD) almost by accident, thanks to my extensive HackerNews and Twitter
addiction. I was familiar with the Google Summer of Code but not with this
program. It turns out it was the inaugural phase. I read the details, and the
process felt a lot like GSoC except that this was about documentation.
About Me I have been writing tech articles on medium as well as my blog for
the past 1.">
<meta itemprop="datePublished" content="2019-12-20T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="1521">
diff --git a/blog/experience-with-airflow-as-an-outreachy-intern/index.html
b/blog/experience-with-airflow-as-an-outreachy-intern/index.html
index 955f20b..44e0986 100644
--- a/blog/experience-with-airflow-as-an-outreachy-intern/index.html
+++ b/blog/experience-with-airflow-as-an-outreachy-intern/index.html
@@ -37,14 +37,14 @@ Contribution Period The first thing I had to do was choose
a project under an or
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-08-30T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Journey with Airflow as an Outreachy Intern">
<meta itemprop="description" content="Outreachy is a program which organises
three months paid internships with FOSS projects for people who are typically
underrepresented in those projects.
Contribution Period The first thing I had to do was choose a project under an
organisation. After going through all the projects I chose “Extending the REST
API of Apache Airflow”, because I had a good idea of what REST API(s) are, so I
thought it would be easier to get started with the contributions.">
<meta itemprop="datePublished" content="2020-08-30T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="532">
diff --git a/blog/implementing-stable-api-for-apache-airflow/index.html
b/blog/implementing-stable-api-for-apache-airflow/index.html
index ccb14b0..0db307c 100644
--- a/blog/implementing-stable-api-for-apache-airflow/index.html
+++ b/blog/implementing-stable-api-for-apache-airflow/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2020-07-19T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Implementing Stable API for Apache Airflow">
<meta itemprop="description" content="An Outreachy intern's progress
report on contributing to Apache Airflow REST API.">
<meta itemprop="datePublished" content="2020-07-19T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="703">
diff --git a/blog/its-a-breeze-to-develop-apache-airflow/index.html
b/blog/its-a-breeze-to-develop-apache-airflow/index.html
index 0ae21fc..75fd1d9 100644
--- a/blog/its-a-breeze-to-develop-apache-airflow/index.html
+++ b/blog/its-a-breeze-to-develop-apache-airflow/index.html
@@ -36,13 +36,13 @@
<meta property="og:image" content="/images/feature-image.png" />
<meta property="article:published_time" content="2019-11-22T00:00:00+00:00" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="It's a "Breeze" to develop Apache
Airflow">
<meta itemprop="description" content="A Principal Software Engineer's
journey to developer productivity. Learn how Jarek and his team speeded up and
simplified Airflow development for the community.">
<meta itemprop="datePublished" content="2019-11-22T00:00:00+00:00" />
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="124">
diff --git a/docs/docker-stack/_sources/entrypoint.rst.txt
b/docs/docker-stack/_sources/entrypoint.rst.txt
index c386a67..8ac7355 100644
--- a/docs/docker-stack/_sources/entrypoint.rst.txt
+++ b/docs/docker-stack/_sources/entrypoint.rst.txt
@@ -94,32 +94,14 @@ You can read more about it in the "Support arbitrary user
ids" chapter in the
Waits for Airflow DB connection
-------------------------------
-In case Postgres or MySQL DB is used, the entrypoint will wait until the
airflow DB connection becomes
-available. This happens always when you use the default entrypoint.
+The entrypoint waits for a connection to the database regardless of the
database engine. This allows us to increase
+the stability of the environment.
-The script detects backend type depending on the URL schema and assigns
default port numbers if not specified
-in the URL. Then it loops until the connection to the host/port specified can
be established
+Waiting for connection involves executing the ``airflow db check`` command, which
means that a ``select 1 as is_alive;`` statement
+is executed. Then it loops until the command succeeds.
It tries :envvar:`CONNECTION_CHECK_MAX_COUNT` times and sleeps
:envvar:`CONNECTION_CHECK_SLEEP_TIME` between checks
To disable check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
-Supported schemes:
-
-* ``postgres://`` - default port 5432
-* ``mysql://`` - default port 3306
-* ``sqlite://``
-
-In case of SQLite backend, there is no connection to establish and waiting is
skipped.
-
-For older than Airflow 1.10.14, waiting for connection involves checking if a
matching port is open.
-The host information is derived from the variables
:envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN` and
-:envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD`. If
:envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD` variable
-is passed to the container, it is evaluated as a command to execute and result
of this evaluation is used
-as :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN`. The
:envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD` variable
-takes precedence over the :envvar:`AIRFLOW__CORE__SQL_ALCHEMY_CONN` variable.
-
-For newer versions, the ``airflow db check`` command is used, which means that
a ``select 1 as is_alive;`` query
-is executed. This also means that you can keep your password in secret backend.
-
Waits for celery broker connection
----------------------------------
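The connection checks described above can be tuned or disabled through the same variables; a minimal sketch (example values, assuming the 2.1.2 image tag used elsewhere in these docs):

    # Allow up to 50 attempts, 5 seconds apart, before giving up
    docker run -it \
        --env CONNECTION_CHECK_MAX_COUNT=50 \
        --env CONNECTION_CHECK_SLEEP_TIME=5 \
        apache/airflow:2.1.2-python3.6 airflow webserver

    # Disable the connection check entirely
    docker run -it --env CONNECTION_CHECK_MAX_COUNT=0 \
        apache/airflow:2.1.2-python3.6 airflow webserver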
@@ -155,7 +137,7 @@ if you specify extra arguments. For example:
.. code-block:: bash
- docker run -it apache/airflow:2.1.0-python3.6 bash -c "ls -la"
+ docker run -it apache/airflow:2.1.2-python3.6 bash -c "ls -la"
total 16
drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 .
drwxr-xr-x 1 root root 4096 Jun 5 18:12 ..
@@ -167,7 +149,7 @@ you pass extra parameters. For example:
.. code-block:: bash
- > docker run -it apache/airflow:2.1.0-python3.6 python -c "print('test')"
+ > docker run -it apache/airflow:2.1.2-python3.6 python -c "print('test')"
test
If first argument equals to "airflow" - the rest of the arguments is treated
as an airflow command
@@ -175,14 +157,14 @@ to execute. Example:
.. code-block:: bash
- docker run -it apache/airflow:2.1.0-python3.6 airflow webserver
+ docker run -it apache/airflow:2.1.2-python3.6 airflow webserver
If there are any other arguments - they are simply passed to the "airflow"
command
.. code-block:: bash
- > docker run -it apache/airflow:2.1.0-python3.6 version
- 2.1.0
+ > docker run -it apache/airflow:2.1.2-python3.6 version
+ 2.1.2
Additional quick test options
-----------------------------
@@ -262,11 +244,28 @@ and Admin role. They also forward local port ``8080`` to
the webserver port and
Installing additional requirements
..................................
+.. warning:: Installing requirements this way is a very convenient method of
running Airflow, very useful for
+ testing and debugging. However, do not be tricked by its convenience. You
should never, ever use it in
+ a production environment. We have deliberately chosen to make it a
development/test dependency and we print
+ a warning whenever it is used. There is an inherent security-related
issue with using this method in
+ production. Installing the requirements this way can happen at literally
any time - when your containers
+ get restarted, or when your machines in a K8S cluster get restarted. In a K8S
cluster those events can happen
+ literally any time. This opens you up to a serious vulnerability where
your production environment
+ might be brought down by a single dependency being removed from PyPI - or
even a dependency of your
+ dependency. This means that you put your production service availability
in the hands of 3rd-party developers.
+ At any moment, including weekends and holidays, those 3rd-party
developers might bring your
+ production Airflow instance down without you even knowing it. This is a
serious vulnerability that
+ is similar to the infamous
+ `leftpad
<https://qz.com/646467/how-one-programmer-broke-the-internet-by-deleting-a-tiny-piece-of-code/>`_
+ problem. You can fully protect against this case by building your own,
immutable custom image, where the
+ dependencies are baked in. You have been warned.
+
Installing additional requirements can be done by specifying
``_PIP_ADDITIONAL_REQUIREMENTS`` variable.
The variable should contain a list of requirements that should be installed
additionally when entering
the containers. Note that this option slows down starting of Airflow as every
time any container starts
-it must install new packages. Therefore this option should only be used for
testing. When testing is
-finished, you should create your custom image with dependencies baked in.
+it must install new packages and it opens up a huge potential security
vulnerability when used in production
+(see below). Therefore this option should only be used for testing. When
testing is finished,
+you should create your custom image with dependencies baked in.
Example:
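A minimal sketch of such an invocation, reusing the package pins shown in the rendered page further below; the pins are illustrative only:

    docker run -it -p 8080:8080 \
        --env "_PIP_ADDITIONAL_REQUIREMENTS=lxml==4.6.3 charset-normalizer==1.4.1" \
        apache/airflow:2.1.2-python3.6 airflow webserver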
diff --git a/docs/docker-stack/_static/check-solid.svg
b/docs/docker-stack/_static/check-solid.svg
index 9cbca86..92fad4b 100644
--- a/docs/docker-stack/_static/check-solid.svg
+++ b/docs/docker-stack/_static/check-solid.svg
@@ -1,4 +1,4 @@
-<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler
icon-tabler-check" width="44" height="44" viewBox="0 0 24 24"
stroke-width="1.5" stroke="currentColor" fill="none" stroke-linecap="round"
stroke-linejoin="round">
+<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler
icon-tabler-check" width="44" height="44" viewBox="0 0 24 24" stroke-width="2"
stroke="#22863a" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
<path d="M5 12l5 5l10 -10" />
</svg>
diff --git a/docs/docker-stack/_static/clipboard.min.js
b/docs/docker-stack/_static/clipboard.min.js
index 02c549e..54b3c46 100644
--- a/docs/docker-stack/_static/clipboard.min.js
+++ b/docs/docker-stack/_static/clipboard.min.js
@@ -1,7 +1,7 @@
/*!
- * clipboard.js v2.0.4
- * https://zenorocha.github.io/clipboard.js
- *
+ * clipboard.js v2.0.8
+ * https://clipboardjs.com/
+ *
* Licensed MIT © Zeno Rocha
*/
-!function(t,e){"object"==typeof exports&&"object"==typeof
module?module.exports=e():"function"==typeof
define&&define.amd?define([],e):"object"==typeof
exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return
function(n){var o={};function r(t){if(o[t])return o[t].exports;var
e=o[t]={i:t,l:!1,exports:{}};return
n[t].call(e.exports,e,e.exports,r),e.l=!0,e.exports}return
r.m=n,r.c=o,r.d=function(t,e,n){r.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},r.r=function
[...]
\ No newline at end of file
+!function(t,e){"object"==typeof exports&&"object"==typeof
module?module.exports=e():"function"==typeof
define&&define.amd?define([],e):"object"==typeof
exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return
n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var
e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return
document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return
a("cut"),t};var l=function(t){var [...]
\ No newline at end of file
diff --git a/docs/docker-stack/_static/copy-button.svg
b/docs/docker-stack/_static/copy-button.svg
index 62e0e0d..b888a20 100644
--- a/docs/docker-stack/_static/copy-button.svg
+++ b/docs/docker-stack/_static/copy-button.svg
@@ -1,5 +1,5 @@
-<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24
24" stroke-width="1.5" stroke="#607D8B" fill="none" stroke-linecap="round"
stroke-linejoin="round">
+<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler
icon-tabler-clipboard" width="44" height="44" viewBox="0 0 24 24"
stroke-width="1.5" stroke="#2c3e50" fill="none" stroke-linecap="round"
stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"/>
- <rect x="8" y="8" width="12" height="12" rx="2" />
- <path d="M16 8v-2a2 2 0 0 0 -2 -2h-8a2 2 0 0 0 -2 2v8a2 2 0 0 0 2 2h2" />
+ <path d="M9 5h-2a2 2 0 0 0 -2 2v12a2 2 0 0 0 2 2h10a2 2 0 0 0 2 -2v-12a2 2 0
0 0 -2 -2h-2" />
+ <rect x="9" y="3" width="6" height="4" rx="2" />
</svg>
diff --git a/docs/docker-stack/_static/copybutton.css
b/docs/docker-stack/_static/copybutton.css
index 3a863dd..5d29149 100644
--- a/docs/docker-stack/_static/copybutton.css
+++ b/docs/docker-stack/_static/copybutton.css
@@ -1,20 +1,29 @@
/* Copy buttons */
button.copybtn {
position: absolute;
+ display: flex;
top: .3em;
right: .5em;
- width: 1.7rem;
- height: 1.7rem;
+ width: 1.7em;
+ height: 1.7em;
opacity: 0;
- transition: opacity 0.3s, border .3s;
+ transition: opacity 0.3s, border .3s, background-color .3s;
user-select: none;
padding: 0;
border: none;
outline: none;
+ border-radius: 0.4em;
+ border: #e1e1e1 1px solid;
+ background-color: rgb(245, 245, 245);
+}
+
+button.copybtn.success {
+ border-color: #22863a;
}
button.copybtn img {
width: 100%;
+ padding: .2em;
}
div.highlight {
@@ -22,11 +31,15 @@ div.highlight {
}
.highlight:hover button.copybtn {
- opacity: .7;
+ opacity: 1;
}
.highlight button.copybtn:hover {
- opacity: 1;
+ background-color: rgb(235, 235, 235);
+}
+
+.highlight button.copybtn:active {
+ background-color: rgb(187, 187, 187);
}
/**
@@ -46,11 +59,10 @@ div.highlight {
visibility: hidden;
position: absolute;
content: attr(data-tooltip);
- padding: 2px;
- top: 0;
+ padding: .2em;
+ font-size: .8em;
left: -.2em;
background: grey;
- font-size: 1rem;
color: white;
white-space: nowrap;
z-index: 2;
diff --git a/docs/docker-stack/_static/copybutton.js
b/docs/docker-stack/_static/copybutton.js
index c4a9f92..482bda0 100644
--- a/docs/docker-stack/_static/copybutton.js
+++ b/docs/docker-stack/_static/copybutton.js
@@ -81,7 +81,9 @@ const clearSelection = () => {
// Changes tooltip text for two seconds, then changes it back
const temporarilyChangeTooltip = (el, oldText, newText) => {
el.setAttribute('data-tooltip', newText)
+ el.classList.add('success')
setTimeout(() => el.setAttribute('data-tooltip', oldText), 2000)
+ setTimeout(() => el.classList.remove('success'), 2000)
}
// Changes the copy button icon for two seconds, then changes it back
@@ -104,10 +106,9 @@ const addCopyButtonToCodeCells = () => {
codeCells.forEach((codeCell, index) => {
const id = codeCellId(index)
codeCell.setAttribute('id', id)
- const pre_bg = getComputedStyle(codeCell).backgroundColor;
const clipboardButton = id =>
- `<button class="copybtn o-tooltip--left" style="background-color:
${pre_bg}" data-tooltip="${messages[locale]['copy']}"
data-clipboard-target="#${id}">
+ `<button class="copybtn o-tooltip--left"
data-tooltip="${messages[locale]['copy']}" data-clipboard-target="#${id}">
<img src="${path_static}copy-button.svg"
alt="${messages[locale]['copy_to_clipboard']}">
</button>`
codeCell.insertAdjacentHTML('afterend', clipboardButton(id))
diff --git a/docs/docker-stack/build.html b/docs/docker-stack/build.html
index bf89c80..1dff47c 100644
--- a/docs/docker-stack/build.html
+++ b/docs/docker-stack/build.html
@@ -1283,7 +1283,7 @@ to provide this library from you repository if you want
to build Airflow image i
<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>rm docker-context-files/*.whl
docker-context-files/*.tar.gz docker-context-files/*.txt <span
class="o">||</span> <span class="nb">true</span>
curl -Lo <span
class="s2">"docker-context-files/constraints-3.7.txt"</span> <span
class="se">\</span>
-
https://raw.githubusercontent.com/apache/airflow/constraints-2.0.2/constraints-3.7.txt
+
https://raw.githubusercontent.com/apache/airflow/constraints-2.1.2/constraints-3.7.txt
<span class="c1"># For Airflow pre 2.1 you need to use PIP 20.2.4 to
install/download Airflow packages.</span>
pip install <span class="nv">pip</span><span class="o">==</span><span
class="m">20</span>.2.4
@@ -1323,7 +1323,7 @@ to the below:</p>
<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker build . <span class="se">\</span>
--build-arg <span class="nv">PYTHON_BASE_IMAGE</span><span
class="o">=</span><span class="s2">"python:3.7-slim-buster"</span>
<span class="se">\</span>
--build-arg <span class="nv">AIRFLOW_INSTALLATION_METHOD</span><span
class="o">=</span><span class="s2">"apache-airflow"</span> <span
class="se">\</span>
- --build-arg <span class="nv">AIRFLOW_VERSION</span><span
class="o">=</span><span class="s2">"2.0.2"</span> <span
class="se">\</span>
+ --build-arg <span class="nv">AIRFLOW_VERSION</span><span
class="o">=</span><span class="s2">"2.1.2"</span> <span
class="se">\</span>
--build-arg <span class="nv">INSTALL_MYSQL_CLIENT</span><span
class="o">=</span><span class="s2">"false"</span> <span
class="se">\</span>
--build-arg <span class="nv">AIRFLOW_PRE_CACHED_PIP_PACKAGES</span><span
class="o">=</span><span class="s2">"false"</span> <span
class="se">\</span>
--build-arg <span class="nv">INSTALL_FROM_DOCKER_CONTEXT_FILES</span><span
class="o">=</span><span class="s2">"true"</span> <span
class="se">\</span>
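The constraints URL bumped above can also be used directly with pip when installing Airflow outside of an image build; a minimal sketch, assuming Python 3.7:

    curl -Lo constraints-3.7.txt \
        https://raw.githubusercontent.com/apache/airflow/constraints-2.1.2/constraints-3.7.txt
    pip install "apache-airflow==2.1.2" --constraint constraints-3.7.txt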
diff --git a/docs/docker-stack/entrypoint.html
b/docs/docker-stack/entrypoint.html
index abe7398..1350a79 100644
--- a/docs/docker-stack/entrypoint.html
+++ b/docs/docker-stack/entrypoint.html
@@ -643,27 +643,12 @@ that need group access will also be writable for the
group. This can be done for
</div>
<div class="section" id="waits-for-airflow-db-connection">
<h2>Waits for Airflow DB connection<a class="headerlink"
href="#waits-for-airflow-db-connection" title="Permalink to this
headline">¶</a></h2>
-<p>In case Postgres or MySQL DB is used, the entrypoint will wait until the
airflow DB connection becomes
-available. This happens always when you use the default entrypoint.</p>
-<p>The script detects backend type depending on the URL schema and assigns
default port numbers if not specified
-in the URL. Then it loops until the connection to the host/port specified can
be established
+<p>The entrypoint waits for a connection to the database regardless of
the database engine. This allows us to increase
+the stability of the environment.</p>
+<p>Waiting for connection involves executing the <code class="docutils literal
notranslate"><span class="pre">airflow</span> <span class="pre">db</span> <span
class="pre">check</span></code> command, which means that a <code
class="docutils literal notranslate"><span class="pre">select</span> <span
class="pre">1</span> <span class="pre">as</span> <span
class="pre">is_alive;</span></code> statement
+is executed. Then it loops until the command succeeds.
It tries <span class="target" id="index-0"></span><code class="xref std
std-envvar docutils literal notranslate"><span
class="pre">CONNECTION_CHECK_MAX_COUNT</span></code> times and sleeps <span
class="target" id="index-1"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">CONNECTION_CHECK_SLEEP_TIME</span></code> between checks
To disable check, set <code class="docutils literal notranslate"><span
class="pre">CONNECTION_CHECK_MAX_COUNT=0</span></code>.</p>
-<p>Supported schemes:</p>
-<ul class="simple">
-<li><p><code class="docutils literal notranslate"><span
class="pre">postgres://</span></code> - default port 5432</p></li>
-<li><p><code class="docutils literal notranslate"><span
class="pre">mysql://</span></code> - default port 3306</p></li>
-<li><p><code class="docutils literal notranslate"><span
class="pre">sqlite://</span></code></p></li>
-</ul>
-<p>In case of SQLite backend, there is no connection to establish and waiting
is skipped.</p>
-<p>For older than Airflow 1.10.14, waiting for connection involves checking if
a matching port is open.
-The host information is derived from the variables <span class="target"
id="index-2"></span><code class="xref std std-envvar docutils literal
notranslate"><span class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN</span></code> and
-<span class="target" id="index-3"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD</span></code>. If <span
class="target" id="index-4"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD</span></code> variable
-is passed to the container, it is evaluated as a command to execute and result
of this evaluation is used
-as <span class="target" id="index-5"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN</span></code>. The <span
class="target" id="index-6"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD</span></code> variable
-takes precedence over the <span class="target" id="index-7"></span><code
class="xref std std-envvar docutils literal notranslate"><span
class="pre">AIRFLOW__CORE__SQL_ALCHEMY_CONN</span></code> variable.</p>
-<p>For newer versions, the <code class="docutils literal notranslate"><span
class="pre">airflow</span> <span class="pre">db</span> <span
class="pre">check</span></code> command is used, which means that a <code
class="docutils literal notranslate"><span class="pre">select</span> <span
class="pre">1</span> <span class="pre">as</span> <span
class="pre">is_alive;</span></code> query
-is executed. This also means that you can keep your password in secret
backend.</p>
</div>
<div class="section" id="waits-for-celery-broker-connection">
<h2>Waits for celery broker connection<a class="headerlink"
href="#waits-for-celery-broker-connection" title="Permalink to this
headline">¶</a></h2>
@@ -671,7 +656,7 @@ is executed. This also means that you can keep your
password in secret backend.<
commands are used the entrypoint will wait until the celery broker DB
connection is available.</p>
<p>The script detects backend type depending on the URL schema and assigns
default port numbers if not specified
in the URL. Then it loops until connection to the host/port specified can be
established
-It tries <span class="target" id="index-8"></span><code class="xref std
std-envvar docutils literal notranslate"><span
class="pre">CONNECTION_CHECK_MAX_COUNT</span></code> times and sleeps <span
class="target" id="index-9"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">CONNECTION_CHECK_SLEEP_TIME</span></code> between checks.
+It tries <span class="target" id="index-2"></span><code class="xref std
std-envvar docutils literal notranslate"><span
class="pre">CONNECTION_CHECK_MAX_COUNT</span></code> times and sleeps <span
class="target" id="index-3"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">CONNECTION_CHECK_SLEEP_TIME</span></code> between checks.
To disable check, set <code class="docutils literal notranslate"><span
class="pre">CONNECTION_CHECK_MAX_COUNT=0</span></code>.</p>
<p>Supported schemes:</p>
<ul class="simple">
@@ -681,17 +666,17 @@ To disable check, set <code class="docutils literal
notranslate"><span class="pr
<li><p><code class="docutils literal notranslate"><span
class="pre">mysql://</span></code> - default port 3306</p></li>
</ul>
<p>Waiting for connection involves checking if a matching port is open.
-The host information is derived from the variables <span class="target"
id="index-10"></span><code class="xref std std-envvar docutils literal
notranslate"><span class="pre">AIRFLOW__CELERY__BROKER_URL</span></code> and
-<span class="target" id="index-11"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code>. If <span
class="target" id="index-12"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code> variable
+The host information is derived from the variables <span class="target"
id="index-4"></span><code class="xref std std-envvar docutils literal
notranslate"><span class="pre">AIRFLOW__CELERY__BROKER_URL</span></code> and
+<span class="target" id="index-5"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code>. If <span
class="target" id="index-6"></span><code class="xref std std-envvar docutils
literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code> variable
is passed to the container, it is evaluated as a command to execute and result
of this evaluation is used
-as <span class="target" id="index-13"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL</span></code>. The <span class="target"
id="index-14"></span><code class="xref std std-envvar docutils literal
notranslate"><span class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code>
variable
-takes precedence over the <span class="target" id="index-15"></span><code
class="xref std std-envvar docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL</span></code> variable.</p>
+as <span class="target" id="index-7"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL</span></code>. The <span class="target"
id="index-8"></span><code class="xref std std-envvar docutils literal
notranslate"><span class="pre">AIRFLOW__CELERY__BROKER_URL_CMD</span></code>
variable
+takes precedence over the <span class="target" id="index-9"></span><code
class="xref std std-envvar docutils literal notranslate"><span
class="pre">AIRFLOW__CELERY__BROKER_URL</span></code> variable.</p>
</div>
<div class="section" id="executing-commands">
<span id="entrypoint-commands"></span><h2>Executing commands<a
class="headerlink" href="#executing-commands" title="Permalink to this
headline">¶</a></h2>
<p>If first argument equals to “bash” - you are dropped to a bash shell or you
can executes bash command
if you specify extra arguments. For example:</p>
-<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker run -it
apache/airflow:2.1.0-python3.6 bash -c <span class="s2">"ls
-la"</span>
+<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker run -it
apache/airflow:2.1.2-python3.6 bash -c <span class="s2">"ls
-la"</span>
total <span class="m">16</span>
drwxr-xr-x <span class="m">4</span> airflow root <span class="m">4096</span>
Jun <span class="m">5</span> <span class="m">18</span>:12 .
drwxr-xr-x <span class="m">1</span> root root <span class="m">4096</span>
Jun <span class="m">5</span> <span class="m">18</span>:12 ..
@@ -701,18 +686,18 @@ drwxr-xr-x <span class="m">2</span> airflow root <span
class="m">4096</span> Jun
</div>
<p>If first argument is equal to <code class="docutils literal
notranslate"><span class="pre">python</span></code> - you are dropped in python
shell or python commands are executed if
you pass extra parameters. For example:</p>
-<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>> docker run -it
apache/airflow:2.1.0-python3.6 python -c <span
class="s2">"print('test')"</span>
+<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>> docker run -it
apache/airflow:2.1.2-python3.6 python -c <span
class="s2">"print('test')"</span>
<span class="nb">test</span>
</pre></div>
</div>
<p>If first argument equals to “airflow” - the rest of the arguments is
treated as an airflow command
to execute. Example:</p>
-<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker run -it
apache/airflow:2.1.0-python3.6 airflow webserver
+<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker run -it
apache/airflow:2.1.2-python3.6 airflow webserver
</pre></div>
</div>
<p>If there are any other arguments - they are simply passed to the “airflow”
command</p>
-<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>> docker run -it
apache/airflow:2.1.0-python3.6 version
-<span class="m">2</span>.1.0
+<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>> docker run -it
apache/airflow:2.1.2-python3.6 version
+<span class="m">2</span>.1.2
</pre></div>
</div>
</div>
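A minimal sketch of the *_CMD pattern described in this section, assuming the broker URL is kept in a mounted secret file (the path is illustrative):

    docker run -it \
        --env AIRFLOW__CELERY__BROKER_URL_CMD="cat /run/secrets/broker_url" \
        apache/airflow:2.1.2-python3.6 airflow celery worker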
@@ -726,7 +711,7 @@ either as maintenance operations on the database or should
be embedded in the cu
(when you want to add new packages).</p>
<div class="section" id="upgrading-airflow-db">
<h3>Upgrading Airflow DB<a class="headerlink" href="#upgrading-airflow-db"
title="Permalink to this headline">¶</a></h3>
-<p>If you set <span class="target" id="index-16"></span><code class="xref std
std-envvar docutils literal notranslate"><span
class="pre">_AIRFLOW_DB_UPGRADE</span></code> variable to a non-empty value,
the entrypoint will run
+<p>If you set <span class="target" id="index-10"></span><code class="xref std
std-envvar docutils literal notranslate"><span
class="pre">_AIRFLOW_DB_UPGRADE</span></code> variable to a non-empty value,
the entrypoint will run
the <code class="docutils literal notranslate"><span
class="pre">airflow</span> <span class="pre">db</span> <span
class="pre">upgrade</span></code> command right after verifying the connection.
You can also use this
when you are running airflow with internal SQLite database (default) to
upgrade the db and create
admin users at entrypoint, so that you can start the webserver immediately.
Note - using SQLite is
@@ -736,10 +721,10 @@ comes to concurrency.</p>
<div class="section" id="creating-admin-user">
<h3>Creating admin user<a class="headerlink" href="#creating-admin-user"
title="Permalink to this headline">¶</a></h3>
<p>The entrypoint can also create webserver user automatically when you enter
it. you need to set
-<span class="target" id="index-17"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">_AIRFLOW_WWW_USER_CREATE</span></code> to a non-empty value in
order to do that. This is not intended for
+<span class="target" id="index-11"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">_AIRFLOW_WWW_USER_CREATE</span></code> to a non-empty value in
order to do that. This is not intended for
production, it is only useful if you would like to run a quick test with the
production image.
You need to pass at least password to create such user via <code
class="docutils literal notranslate"><span
class="pre">_AIRFLOW_WWW_USER_PASSWORD</span></code> or
-<span class="target" id="index-18"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">_AIRFLOW_WWW_USER_PASSWORD_CMD</span></code> similarly like for
other <code class="docutils literal notranslate"><span
class="pre">*_CMD</span></code> variables, the content of
+<span class="target" id="index-12"></span><code class="xref std std-envvar
docutils literal notranslate"><span
class="pre">_AIRFLOW_WWW_USER_PASSWORD_CMD</span></code> similarly like for
other <code class="docutils literal notranslate"><span
class="pre">*_CMD</span></code> variables, the content of
the <code class="docutils literal notranslate"><span
class="pre">*_CMD</span></code> will be evaluated as shell command and it’s
output will be set as password.</p>
<p>User creation will fail if none of the <code class="docutils literal
notranslate"><span class="pre">PASSWORD</span></code> variables are set - there
is no default for
password for security reasons.</p>
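A minimal sketch combining the variables above for a quick local test; the credentials are placeholders:

    docker run -it -p 8080:8080 \
        --env _AIRFLOW_DB_UPGRADE=true \
        --env _AIRFLOW_WWW_USER_CREATE=true \
        --env _AIRFLOW_WWW_USER_PASSWORD=admin \
        apache/airflow:2.1.2-python3.6 airflow webserver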
@@ -805,11 +790,30 @@ and Admin role. They also forward local port <code
class="docutils literal notra
</div>
<div class="section" id="installing-additional-requirements">
<h3>Installing additional requirements<a class="headerlink"
href="#installing-additional-requirements" title="Permalink to this
headline">¶</a></h3>
+<div class="admonition warning">
+<p class="admonition-title">Warning</p>
+<p>Installing requirements this way is a very convenient method of running
Airflow, very useful for
+testing and debugging. However, do not be tricked by its convenience. You
should never, ever use it in
+a production environment. We have deliberately chosen to make it a
development/test dependency and we print
+a warning whenever it is used. There is an inherent security-related issue
with using this method in
+production. Installing the requirements this way can happen at literally any
time - when your containers
+get restarted, or when your machines in a K8S cluster get restarted. In a K8S
cluster those events can happen
+literally any time. This opens you up to a serious vulnerability where your
production environment
+might be brought down by a single dependency being removed from PyPI - or even
a dependency of your
+dependency. This means that you put your production service availability in
the hands of 3rd-party developers.
+At any moment, including weekends and holidays, those 3rd-party
developers might bring your
+production Airflow instance down without you even knowing it. This is a
serious vulnerability that
+is similar to the infamous
+<a class="reference external"
href="https://qz.com/646467/how-one-programmer-broke-the-internet-by-deleting-a-tiny-piece-of-code/">leftpad</a>
+problem. You can fully protect against this case by building your own,
immutable custom image, where the
+dependencies are baked in. You have been warned.</p>
+</div>
<p>Installing additional requirements can be done by specifying <code
class="docutils literal notranslate"><span
class="pre">_PIP_ADDITIONAL_REQUIREMENTS</span></code> variable.
The variable should contain a list of requirements that should be installed
additionally when entering
the containers. Note that this option slows down starting of Airflow as every
time any container starts
-it must install new packages. Therefore this option should only be used for
testing. When testing is
-finished, you should create your custom image with dependencies baked in.</p>
+it must install new packages and it opens up a huge potential security
vulnerability when used in production
+(see below). Therefore this option should only be used for testing. When
testing is finished,
+you should create your custom image with dependencies baked in.</p>
<p>Example:</p>
<div class="highlight-bash notranslate"><div
class="highlight"><pre><span></span>docker run -it -p <span
class="m">8080</span>:8080 <span class="se">\</span>
--env <span class="s2">"_PIP_ADDITIONAL_REQUIREMENTS=lxml==4.6.3
charset-normalizer==1.4.1"</span> <span class="se">\</span>
diff --git a/docs/docker-stack/genindex.html b/docs/docker-stack/genindex.html
index ff5cb63..c3fecc8 100644
--- a/docs/docker-stack/genindex.html
+++ b/docs/docker-stack/genindex.html
@@ -567,13 +567,13 @@
<h2 id="_">_</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a href="entrypoint.html#index-16">_AIRFLOW_DB_UPGRADE</a>
+ <li><a href="entrypoint.html#index-10">_AIRFLOW_DB_UPGRADE</a>
</li>
</ul></td>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a href="entrypoint.html#index-17">_AIRFLOW_WWW_USER_CREATE</a>
+ <li><a href="entrypoint.html#index-11">_AIRFLOW_WWW_USER_CREATE</a>
</li>
- <li><a href="entrypoint.html#index-18">_AIRFLOW_WWW_USER_PASSWORD_CMD</a>
+ <li><a href="entrypoint.html#index-12">_AIRFLOW_WWW_USER_PASSWORD_CMD</a>
</li>
</ul></td>
</tr></table>
@@ -581,15 +581,13 @@
<h2 id="A">A</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a href="entrypoint.html#index-10">AIRFLOW__CELERY__BROKER_URL</a>,
<a href="entrypoint.html#index-13">[1]</a>, <a
href="entrypoint.html#index-15">[2]</a>
+ <li><a href="entrypoint.html#index-4">AIRFLOW__CELERY__BROKER_URL</a>,
<a href="entrypoint.html#index-7">[1]</a>, <a
href="entrypoint.html#index-9">[2]</a>
</li>
- <li><a
href="entrypoint.html#index-11">AIRFLOW__CELERY__BROKER_URL_CMD</a>, <a
href="entrypoint.html#index-12">[1]</a>, <a
href="entrypoint.html#index-14">[2]</a>
+ <li><a
href="entrypoint.html#index-5">AIRFLOW__CELERY__BROKER_URL_CMD</a>, <a
href="entrypoint.html#index-6">[1]</a>, <a
href="entrypoint.html#index-8">[2]</a>
</li>
</ul></td>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a
href="entrypoint.html#index-2">AIRFLOW__CORE__SQL_ALCHEMY_CONN</a>, <a
href="entrypoint.html#index-5">[1]</a>, <a
href="entrypoint.html#index-7">[2]</a>, <a href="index.html#index-1">[3]</a>
-</li>
- <li><a
href="entrypoint.html#index-3">AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD</a>, <a
href="entrypoint.html#index-4">[1]</a>, <a
href="entrypoint.html#index-6">[2]</a>
+ <li><a href="index.html#index-1">AIRFLOW__CORE__SQL_ALCHEMY_CONN</a>
</li>
<li><a href="index.html#index-0">AIRFLOW_HOME</a>
</li>
@@ -599,11 +597,11 @@
<h2 id="C">C</h2>
<table style="width: 100%" class="indextable genindextable"><tr>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a href="entrypoint.html#index-0">CONNECTION_CHECK_MAX_COUNT</a>, <a
href="entrypoint.html#index-8">[1]</a>
+ <li><a href="entrypoint.html#index-0">CONNECTION_CHECK_MAX_COUNT</a>, <a
href="entrypoint.html#index-2">[1]</a>
</li>
</ul></td>
<td style="width: 33%; vertical-align: top;"><ul>
- <li><a href="entrypoint.html#index-1">CONNECTION_CHECK_SLEEP_TIME</a>,
<a href="entrypoint.html#index-9">[1]</a>
+ <li><a href="entrypoint.html#index-1">CONNECTION_CHECK_SLEEP_TIME</a>,
<a href="entrypoint.html#index-3">[1]</a>
</li>
</ul></td>
</tr></table>
@@ -615,25 +613,23 @@
environment variable
<ul>
- <li><a href="entrypoint.html#index-16">_AIRFLOW_DB_UPGRADE</a>
-</li>
- <li><a href="entrypoint.html#index-17">_AIRFLOW_WWW_USER_CREATE</a>
+ <li><a href="entrypoint.html#index-10">_AIRFLOW_DB_UPGRADE</a>
</li>
- <li><a
href="entrypoint.html#index-18">_AIRFLOW_WWW_USER_PASSWORD_CMD</a>
+ <li><a href="entrypoint.html#index-11">_AIRFLOW_WWW_USER_CREATE</a>
</li>
- <li><a
href="entrypoint.html#index-10">AIRFLOW__CELERY__BROKER_URL</a>, <a
href="entrypoint.html#index-13">[1]</a>, <a
href="entrypoint.html#index-15">[2]</a>
+ <li><a
href="entrypoint.html#index-12">_AIRFLOW_WWW_USER_PASSWORD_CMD</a>
</li>
- <li><a
href="entrypoint.html#index-11">AIRFLOW__CELERY__BROKER_URL_CMD</a>, <a
href="entrypoint.html#index-12">[1]</a>, <a
href="entrypoint.html#index-14">[2]</a>
+ <li><a href="entrypoint.html#index-4">AIRFLOW__CELERY__BROKER_URL</a>,
<a href="entrypoint.html#index-7">[1]</a>, <a
href="entrypoint.html#index-9">[2]</a>
</li>
- <li><a
href="entrypoint.html#index-2">AIRFLOW__CORE__SQL_ALCHEMY_CONN</a>, <a
href="entrypoint.html#index-5">[1]</a>, <a
href="entrypoint.html#index-7">[2]</a>, <a href="index.html#index-1">[3]</a>
+ <li><a
href="entrypoint.html#index-5">AIRFLOW__CELERY__BROKER_URL_CMD</a>, <a
href="entrypoint.html#index-6">[1]</a>, <a
href="entrypoint.html#index-8">[2]</a>
</li>
- <li><a
href="entrypoint.html#index-3">AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD</a>, <a
href="entrypoint.html#index-4">[1]</a>, <a
href="entrypoint.html#index-6">[2]</a>
+ <li><a href="index.html#index-1">AIRFLOW__CORE__SQL_ALCHEMY_CONN</a>
</li>
<li><a href="index.html#index-0">AIRFLOW_HOME</a>
</li>
- <li><a href="entrypoint.html#index-0">CONNECTION_CHECK_MAX_COUNT</a>,
<a href="entrypoint.html#index-8">[1]</a>
+ <li><a href="entrypoint.html#index-0">CONNECTION_CHECK_MAX_COUNT</a>,
<a href="entrypoint.html#index-2">[1]</a>
</li>
- <li><a href="entrypoint.html#index-1">CONNECTION_CHECK_SLEEP_TIME</a>,
<a href="entrypoint.html#index-9">[1]</a>
+ <li><a href="entrypoint.html#index-1">CONNECTION_CHECK_SLEEP_TIME</a>,
<a href="entrypoint.html#index-3">[1]</a>
</li>
</ul></li>
</ul></td>
diff --git a/docs/docker-stack/searchindex.js b/docs/docker-stack/searchindex.js
index 4ef34de..f80a14e 100644
--- a/docs/docker-stack/searchindex.js
+++ b/docs/docker-stack/searchindex.js
@@ -1 +1 @@
-Search.setIndex({docnames:["build","build-arg-ref","entrypoint","index","recipes"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":3,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":2,"sphinx.domains.rst":2,"sphinx.domains.std":1,"sphinx.ext.intersphinx":1,"sphinx.ext.viewcode":1,sphinx:56},filenames:["build.rst","build-arg-ref.rst","entrypoint.rst","index.rst","recipes.rs
[...]
\ No newline at end of file
+Search.setIndex({docnames:["build","build-arg-ref","entrypoint","index","recipes"],envversion:{"sphinx.domains.c":2,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":3,"sphinx.domains.index":1,"sphinx.domains.javascript":2,"sphinx.domains.math":2,"sphinx.domains.python":2,"sphinx.domains.rst":2,"sphinx.domains.std":1,"sphinx.ext.intersphinx":1,"sphinx.ext.viewcode":1,sphinx:56},filenames:["build.rst","build-arg-ref.rst","entrypoint.rst","index.rst","recipes.rs
[...]
\ No newline at end of file
diff --git a/index.html b/index.html
index f6ab866..70b78ff 100644
--- a/index.html
+++ b/index.html
@@ -1227,12 +1227,12 @@ if (!doNotTrack) {
<div id="integrations-container" class="list-items">
- <a class="list-item"
href="/docs/apache-airflow-providers-databricks/stable/operators.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/cloud/natural_language.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Databricks</span>
+ <span class="box-event__integration--name">Google Cloud Natural
Language</span>
</div>
</div>
@@ -1240,12 +1240,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-amazon/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/cloud/bigquery.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Amazon ECS</span>
+ <span class="box-event__integration--name">Google BigQuery</span>
</div>
</div>
@@ -1253,12 +1253,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-microsoft-azure/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-amazon/stable/index.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Microsoft Azure Blob
Storage</span>
+ <span class="box-event__integration--name">Amazon Simple Storage
Service (S3)</span>
</div>
</div>
@@ -1266,12 +1266,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/cloud/gcs.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Google Discovery API</span>
+ <span class="box-event__integration--name">Google Cloud Storage
(GCS)</span>
</div>
</div>
@@ -1279,12 +1279,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-jenkins/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/cloud/dataprep.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Jenkins</span>
+ <span class="box-event__integration--name">Google Dataprep</span>
</div>
</div>
@@ -1292,12 +1292,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-amazon/stable/index.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Google Cloud Key Management
Service (KMS)</span>
+ <span class="box-event__integration--name">Amazon Web Services</span>
</div>
</div>
@@ -1305,12 +1305,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/cloud/dataproc.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-redis/stable/index.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Google Dataproc</span>
+ <span class="box-event__integration--name">Redis</span>
</div>
</div>
@@ -1318,12 +1318,12 @@ if (!doNotTrack) {
- <a class="list-item"
href="/docs/apache-airflow-providers-discord/stable/index.html">
+ <a class="list-item"
href="/docs/apache-airflow-providers-google/stable/operators/firebase/firestore.html">
<div class="card">
<div class="box-event box-event__integration">
- <span class="box-event__integration--name">Discord</span>
+ <span class="box-event__integration--name">Google Cloud
Firestore</span>
</div>
</div>
diff --git a/search/index.html b/search/index.html
index 7329324..64db952 100644
--- a/search/index.html
+++ b/search/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/search/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Search Results">
<meta itemprop="description" content="">
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="0">
diff --git a/sitemap.xml b/sitemap.xml
index a2a6f12..f2ae102 100644
--- a/sitemap.xml
+++ b/sitemap.xml
@@ -4,147 +4,147 @@
<url>
<loc>/docs/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/airflow-summit/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow_summit_2021/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/community/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/tags/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-survey-2020/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/survey/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/users/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-two-point-oh-is-here/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/release/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/experience-with-airflow-as-an-outreachy-intern/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-1.10.12/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/apache-airflow-for-newcomers/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/announcements/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/implementing-stable-api-for-apache-airflow/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/rest-api/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-1.10.10/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-1.10.8-1.10.9/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/documentation/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/experience-in-google-season-of-docs-2019-with-apache-airflow/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/airflow-survey/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/announcing-new-website/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/apache-con-europe-2019-thoughts-and-insights-by-airflow-committers/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/tags/development/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/documenting-using-local-development-environments/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/its-a-breeze-to-develop-apache-airflow/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/adobe/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/big-fish-games/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/blog/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
@@ -153,72 +153,72 @@
<url>
<loc>/community/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/dish/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/ecosystem/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/experity/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/install/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/meetups/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/onefootball/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/plarium-krasnodar/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/privacy-notice/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/roadmap/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/search/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/sift/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
<url>
<loc>/use-cases/</loc>
- <lastmod>2021-07-14T22:15:45+01:00</lastmod>
+ <lastmod>2021-07-15T14:54:08+02:00</lastmod>
</url>
</urlset>
\ No newline at end of file
diff --git a/use-cases/adobe/index.html b/use-cases/adobe/index.html
index 75ef754..d6df76a 100644
--- a/use-cases/adobe/index.html
+++ b/use-cases/adobe/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/use-cases/adobe/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Adobe">
<meta itemprop="description" content="What was the problem? Modern big data
platforms need sophisticated data pipelines connecting to many backend services
enabling complex workflows. These workflows need to be deployed, monitored, and
run either on regular schedules or triggered by external events. Adobe
Experience Platform component services architected and built an orchestration
service to enable their users to author, schedule, and monitor complex
hierarchical (including sequential a [...]
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="251">
diff --git a/use-cases/big-fish-games/index.html b/use-cases/big-fish-games/index.html
index c9f115d..1573806 100644
--- a/use-cases/big-fish-games/index.html
+++ b/use-cases/big-fish-games/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/use-cases/big-fish-games/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Big Fish Games">
<meta itemprop="description" content="What was the problem? The main challenge
is the lack of standardized ETL workflow orchestration tools. PowerShell and
Python-based ETL frameworks built in-house are currently used for scheduling
and running analytical workloads. However, there is no web UI through which we
can monitor these workflows and it requires additional effort to maintain this
framework. These scheduled jobs based on external dependencies are not well
suited to modern Big Data [...]
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="336">
diff --git a/use-cases/dish/index.html b/use-cases/dish/index.html
index e84b280..2f9566b 100644
--- a/use-cases/dish/index.html
+++ b/use-cases/dish/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/use-cases/dish/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Dish">
<meta itemprop="description" content="What was the problem? We faced
increasing complexity managing lengthy crontabs with scheduling being an issue,
this required carefully planning timing due to resource constraints, usage
patterns, and especially custom code needed for retry logic. In the last case,
having to verify success of previous jobs and/or steps prior to running the
next. Furthermore, time to results is important, but we were increasingly
relying on buffers for processing, wher [...]
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="249">
diff --git a/use-cases/experity/index.html b/use-cases/experity/index.html
index e4d69e7..bd74368 100644
--- a/use-cases/experity/index.html
+++ b/use-cases/experity/index.html
@@ -36,13 +36,13 @@ How did Apache Airflow help to solve this problem?
Ultimately we decided flexibl
<meta property="og:url" content="/use-cases/experity/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Experity">
<meta itemprop="description" content="What was the problem? We had to deploy
our complex, flagship app to multiple nodes in multiple ways. This required
tasks to communicate across Windows nodes and coordinate timing perfectly. We
did not want to buy an expensive enterprise scheduling tool and needed ultimate
flexibility.
How did Apache Airflow help to solve this problem? Ultimately we decided
flexible, multi-node, DAG capable tooling was key and airflow was one of the
few tools that fit that bill.">
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="191">
diff --git a/use-cases/onefootball/index.html b/use-cases/onefootball/index.html
index 623f10d..d5fd6ab 100644
--- a/use-cases/onefootball/index.html
+++ b/use-cases/onefootball/index.html
@@ -36,13 +36,13 @@ On top of that, new data tools appear each month: third
party data sources, clou
<meta property="og:url" content="/use-cases/onefootball/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Onefootball">
<meta itemprop="description" content="What was the problem? With millions of
daily active users, managing the complexity of data engineering at Onefootball
is a constant challenge. Lengthy crontabs, multiplication of custom API
clients, erosion of confidence in the analytics served, increasing heroism
(“only one person can solve this issue”). Those are the challenges
that most teams face unless they consciously invest in their tools and
processes.
On top of that, new data tools appear each month: third party data sources,
cloud providers solutions, different storage technologies… Managing all
those integrations is costly and brittle, especially for small data engineering
teams that are trying to do more with less.">
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="294">
diff --git a/use-cases/plarium-krasnodar/index.html b/use-cases/plarium-krasnodar/index.html
index 8cc2174..bd615ab 100644
--- a/use-cases/plarium-krasnodar/index.html
+++ b/use-cases/plarium-krasnodar/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/use-cases/plarium-krasnodar/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Plarium Krasnodar">
<meta itemprop="description" content="What was the problem? Our Research &
Development department carries out various experiments, and in all of them, we
need to create workflow orchestrations for solving tasks in game dev.
Previously, we didn’t have any suitable tools with a sufficient number of
built-in functions, and we had to orchestrate processes manually and entirely
from scratch every time. This led to difficulties with dependencies and
monitoring when building complex w [...]
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="257">
diff --git a/use-cases/sift/index.html b/use-cases/sift/index.html
index 1c01bda..9375847 100644
--- a/use-cases/sift/index.html
+++ b/use-cases/sift/index.html
@@ -35,12 +35,12 @@
<meta property="og:url" content="/use-cases/sift/" />
<meta property="og:image" content="/images/feature-image.png" />
-<meta property="article:modified_time" content="2021-07-14T22:15:45+01:00"
/><meta property="og:site_name" content="Apache Airflow" />
+<meta property="article:modified_time" content="2021-07-15T14:54:08+02:00"
/><meta property="og:site_name" content="Apache Airflow" />
<meta itemprop="name" content="Sift">
<meta itemprop="description" content="What was the problem? At Sift, we’re
constantly training machine learning models that feed into the core of Sift’s
Digital Trust & Safety platform. The platform gives our customers a way to
discern suspicious online behavior from trustworthy behavior, allowing our
customers to protect their online transactions, maintain the integrity of their
content platforms, and keep their users’ accounts secure. To make this
possible, we’ve built model traini [...]
-<meta itemprop="dateModified" content="2021-07-14T22:15:45+01:00" />
+<meta itemprop="dateModified" content="2021-07-15T14:54:08+02:00" />
<meta itemprop="wordCount" content="641">