Repository: incubator-airflow Updated Branches: refs/heads/master d8891d906 -> 7b620391a
[AIRFLOW-1317] Fix minor issues in API reference Project: http://git-wip-us.apache.org/repos/asf/incubator-airflow/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-airflow/commit/1ae7e5b9 Tree: http://git-wip-us.apache.org/repos/asf/incubator-airflow/tree/1ae7e5b9 Diff: http://git-wip-us.apache.org/repos/asf/incubator-airflow/diff/1ae7e5b9 Branch: refs/heads/master Commit: 1ae7e5b9af8ec2fdb40bcd4b9587afbd7861488e Parents: d8891d9 Author: Kengo Seki <[email protected]> Authored: Sun Jun 18 04:26:57 2017 -0400 Committer: Kengo Seki <[email protected]> Committed: Tue Jun 20 20:16:35 2017 -0400 ---------------------------------------------------------------------- airflow/macros/__init__.py | 1 - airflow/models.py | 10 ++++++---- airflow/operators/sensors.py | 4 +++- docs/code.rst | 8 ++++---- 4 files changed, 13 insertions(+), 10 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1ae7e5b9/airflow/macros/__init__.py ---------------------------------------------------------------------- diff --git a/airflow/macros/__init__.py b/airflow/macros/__init__.py index 59b9a25..6f80c04 100644 --- a/airflow/macros/__init__.py +++ b/airflow/macros/__init__.py @@ -13,7 +13,6 @@ # limitations under the License. 
from __future__ import absolute_import -from random import random from datetime import datetime, timedelta import dateutil import time http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1ae7e5b9/airflow/models.py ---------------------------------------------------------------------- diff --git a/airflow/models.py b/airflow/models.py index 30e18a4..c628958 100755 --- a/airflow/models.py +++ b/airflow/models.py @@ -910,7 +910,7 @@ class TaskInstance(Base): :param local: Whether to run the task locally :type local: bool :param pickle_id: If the DAG was serialized to the DB, the ID - associated with the pickled DAG + associated with the pickled DAG :type pickle_id: unicode :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) @@ -1239,6 +1239,7 @@ class TaskInstance(Base): def get_dagrun(self, session): """ Returns the DagRun for this TaskInstance + :param session: :return: DagRun """ @@ -2684,7 +2685,7 @@ class DAG(BaseDag, LoggingMixin): :param orientation: Specify DAG orientation in graph view (LR, TB, RL, BT) :type orientation: string :param catchup: Perform scheduler catchup (or only run latest)? Defaults to True - "type catchup: bool" + :type catchup: bool """ def __init__( @@ -2989,6 +2990,7 @@ class DAG(BaseDag, LoggingMixin): """ Returns the dag run for a given execution date if it exists, otherwise none. + :param execution_date: The execution date of the DagRun to find. :param session: :return: The DagRun if found, otherwise None. 
@@ -3093,6 +3095,7 @@ class DAG(BaseDag, LoggingMixin): Heavily inspired by: http://blog.jupo.org/2012/04/06/topological-sorting-acyclic-directed-graphs/ + :return: list of tasks in topological order """ @@ -3479,7 +3482,6 @@ class DAG(BaseDag, LoggingMixin): :param dag: the DAG object to save to the DB :type dag: DAG - :own :param sync_time: The time that the DAG should be marked as sync'ed :type sync_time: datetime :return: None @@ -3528,7 +3530,7 @@ class DAG(BaseDag, LoggingMixin): the expiration date. These DAGs were likely deleted. :param expiration_date: set inactive DAGs that were touched before this - time + time :type expiration_date: datetime :return: None """ http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1ae7e5b9/airflow/operators/sensors.py ---------------------------------------------------------------------- diff --git a/airflow/operators/sensors.py b/airflow/operators/sensors.py index c28e88b..4f276ad 100644 --- a/airflow/operators/sensors.py +++ b/airflow/operators/sensors.py @@ -405,6 +405,7 @@ class HdfsSensor(BaseSensorOperator): def filter_for_filesize(result, size=None): """ Will test the filepath result and test if its size is at least self.filesize + :param result: a list of dicts returned by Snakebite ls :param size: the file size in MB a file should be at least to trigger True :return: (bool) depending on the matching criteria @@ -420,10 +421,11 @@ class HdfsSensor(BaseSensorOperator): def filter_for_ignored_ext(result, ignored_ext, ignore_copying): """ Will filter if instructed to do so the result to remove matching criteria + :param result: (list) of dicts returned by Snakebite ls :param ignored_ext: (list) of ignored extensions :param ignore_copying: (bool) shall we ignore? 
- :return: + :return: (list) of dicts which were not removed """ if ignore_copying: regex_builder = "^.*\.(%s$)$" % '$|'.join(ignored_ext) http://git-wip-us.apache.org/repos/asf/incubator-airflow/blob/1ae7e5b9/docs/code.rst ---------------------------------------------------------------------- diff --git a/docs/code.rst b/docs/code.rst index c31061c..b17c3fe 100644 --- a/docs/code.rst +++ b/docs/code.rst @@ -145,13 +145,13 @@ Variable Description key within the JSON object ``{{ task_instance_key_str }}`` a unique, human-readable key to the task instance formatted ``{dag_id}_{task_id}_{ds}`` -``conf`` the full configuration object located at +``{{ conf }}`` the full configuration object located at ``airflow.configuration.conf`` which represents the content of your ``airflow.cfg`` -``run_id`` the ``run_id`` of the current DAG run -``dag_run`` a reference to the DagRun object -``test_mode`` whether the task instance was called using +``{{ run_id }}`` the ``run_id`` of the current DAG run +``{{ dag_run }}`` a reference to the DagRun object +``{{ test_mode }}`` whether the task instance was called using the CLI's test subcommand ================================= ====================================
