Repository: incubator-airflow
Updated Branches:
  refs/heads/master 2920d0475 -> 15b8a36b9


[AIRFLOW-2083] Docs: Use "its" instead of "it's" where appropriate

Closes #3020 from wrp/spelling


Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/15b8a36b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/15b8a36b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/15b8a36b

Branch: refs/heads/master
Commit: 15b8a36b9011166b06f176f684b71703a4aebddd
Parents: 2920d04
Author: William Pursell <willi...@wepay.com>
Authored: Fri Feb 9 10:08:06 2018 +0100
Committer: Fokko Driesprong <fokkodriespr...@godatadriven.com>
Committed: Fri Feb 9 10:08:06 2018 +0100

----------------------------------------------------------------------
 airflow/bin/cli.py                       |  2 +-
 airflow/contrib/hooks/redshift_hook.py   |  2 +-
 airflow/jobs.py                          |  6 +++---
 airflow/ti_deps/deps/trigger_rule_dep.py |  2 +-
 airflow/utils/dates.py                   |  2 +-
 airflow/www/views.py                     | 10 ++++++----
 docs/plugins.rst                         |  2 +-
 tests/jobs.py                            |  2 +-
 8 files changed, 15 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/bin/cli.py
----------------------------------------------------------------------
diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py
index 6bfcdcc..424fcda 100755
--- a/airflow/bin/cli.py
+++ b/airflow/bin/cli.py
@@ -1572,7 +1572,7 @@ class CLIFactory(object):
             'func': test,
             'help': (
                 "Test a task instance. This will run a task without checking 
for "
-                "dependencies or recording it's state in the database."),
+                "dependencies or recording its state in the database."),
             'args': (
                 'dag_id', 'task_id', 'execution_date', 'subdir', 'dry_run',
                 'task_params'),

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/contrib/hooks/redshift_hook.py
----------------------------------------------------------------------
diff --git a/airflow/contrib/hooks/redshift_hook.py 
b/airflow/contrib/hooks/redshift_hook.py
index 70a4854..baa11e7 100644
--- a/airflow/contrib/hooks/redshift_hook.py
+++ b/airflow/contrib/hooks/redshift_hook.py
@@ -79,7 +79,7 @@ class RedshiftHook(AwsHook):
 
     def restore_from_cluster_snapshot(self, cluster_identifier, 
snapshot_identifier):
         """
-        Restores a cluster from it's snapshot
+        Restores a cluster from its snapshot
 
         :param cluster_identifier: unique identifier of a cluster
         :type cluster_identifier: str

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/jobs.py
----------------------------------------------------------------------
diff --git a/airflow/jobs.py b/airflow/jobs.py
index 172792d..35a3fb6 100644
--- a/airflow/jobs.py
+++ b/airflow/jobs.py
@@ -68,7 +68,7 @@ class BaseJob(Base, LoggingMixin):
     """
     Abstract class to be derived for jobs. Jobs are processing items with state
     and duration that aren't task instances. For instance a BackfillJob is
-    a collection of task instance runs, but should have it's own state, start
+    a collection of task instance runs, but should have its own state, start
     and end time.
     """
 
@@ -1796,8 +1796,8 @@ class SchedulerJob(BaseJob):
             dep_context = DepContext(deps=QUEUE_DEPS, ignore_task_deps=True)
 
             # Only schedule tasks that have their dependencies met, e.g. to 
avoid
-            # a task that recently got it's state changed to RUNNING from 
somewhere
-            # other than the scheduler from getting it's state overwritten.
+            # a task that recently got its state changed to RUNNING from 
somewhere
+            # other than the scheduler from getting its state overwritten.
             # TODO(aoen): It's not great that we have to check all the task 
instance
             # dependencies twice; once to get the task scheduled, and again to 
actually
             # run the task. We should try to come up with a way to only check 
them once.

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/ti_deps/deps/trigger_rule_dep.py
----------------------------------------------------------------------
diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py 
b/airflow/ti_deps/deps/trigger_rule_dep.py
index 5a80314..30a5a13 100644
--- a/airflow/ti_deps/deps/trigger_rule_dep.py
+++ b/airflow/ti_deps/deps/trigger_rule_dep.py
@@ -127,7 +127,7 @@ class TriggerRuleDep(BaseTIDep):
             "total": upstream, "successes": successes, "skipped": skipped,
             "failed": failed, "upstream_failed": upstream_failed, "done": done
         }
-        # TODO(aoen): Ideally each individual trigger rules would be it's own 
class, but
+        # TODO(aoen): Ideally each individual trigger rule would be its own 
class, but
         # this isn't very feasible at the moment since the database queries 
need to be
         # bundled together for efficiency.
         # handling instant state assignment based on trigger rules

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/utils/dates.py
----------------------------------------------------------------------
diff --git a/airflow/utils/dates.py b/airflow/utils/dates.py
index 2ca2b2c..aceb426 100644
--- a/airflow/utils/dates.py
+++ b/airflow/utils/dates.py
@@ -146,7 +146,7 @@ def round_time(dt, delta, 
start_date=timezone.make_aware(datetime.min)):
 
     # We are looking for a datetime in the form start_date + i * delta
     # which is as close as possible to dt. Since delta could be a relative
-    # delta we don't know it's exact length in seconds so we cannot rely on
+    # delta we don't know its exact length in seconds so we cannot rely on
     # division to find i. Instead we employ a binary search algorithm, first
     # finding an upper and lower limit and then dissecting the interval until
     # we have found the closest match.

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/airflow/www/views.py
----------------------------------------------------------------------
diff --git a/airflow/www/views.py b/airflow/www/views.py
index 89641e9..a41f5fe 100644
--- a/airflow/www/views.py
+++ b/airflow/www/views.py
@@ -811,10 +811,12 @@ class Airflow(BaseView):
             - The scheduler is down or under heavy load<br/>
             {}
             <br/>
-            If this task instance does not start soon please contact your 
Airflow administrator for assistance."""
-                .format(
-                "- This task instance already ran and had it's state changed 
manually (e.g. cleared in the UI)<br/>"
-                if ti.state == State.NONE else "")))]
+            If this task instance does not start soon please contact your 
Airflow """
+                   """administrator for assistance."""
+                   .format(
+                       "- This task instance already ran and had its state 
changed "
+                       "manually (e.g. cleared in the UI)<br/>"
+                       if ti.state == State.NONE else "")))]
 
         # Use the scheduler's context to figure out which dependencies are not 
met
         dep_context = DepContext(SCHEDULER_DEPS)

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/docs/plugins.rst
----------------------------------------------------------------------
diff --git a/docs/plugins.rst b/docs/plugins.rst
index 8d2078f..9fb9c0e 100644
--- a/docs/plugins.rst
+++ b/docs/plugins.rst
@@ -41,7 +41,7 @@ Airflow has many components that can be reused when building 
an application:
 * A metadata database to store your models
 * Access to your databases, and knowledge of how to connect to them
 * An array of workers that your application can push workload to
-* Airflow is deployed, you can just piggy back on it's deployment logistics
+* Airflow is deployed, you can just piggy back on its deployment logistics
 * Basic charting capabilities, underlying libraries and abstractions
 
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/15b8a36b/tests/jobs.py
----------------------------------------------------------------------
diff --git a/tests/jobs.py b/tests/jobs.py
index b2ca15e..5771bf1 100644
--- a/tests/jobs.py
+++ b/tests/jobs.py
@@ -2548,7 +2548,7 @@ class SchedulerJobTest(unittest.TestCase):
 
     def test_dag_get_active_runs(self):
         """
-        Test to check that a DAG returns it's active runs
+        Test to check that a DAG returns its active runs
         """
 
         now = timezone.utcnow()

Reply via email to