[ https://issues.apache.org/jira/browse/AIRFLOW-6515?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

t oo updated AIRFLOW-6515:
--------------------------
    Description: 
The log level should be ERROR on some of these calls (though the grep below also catches false positives, e.g. "Success criteria met. Exiting." is legitimately INFO even though it matches "exit"):

grep -iE 'log\.(info|warn).*(error|exceptio|fail|unab|couldn|lost|gone|missing|not fou|abort|exit|could not)' -R *
airflow/sensors/base_sensor_operator.py:        self.log.info("Success criteria met. Exiting.")
airflow/logging_config.py:        log.warning('Unable to load the config, contains a configuration error.')
airflow/operators/check_operator.py:            self.log.warning("The following %s tests out of %s failed:", j, n)
airflow/operators/sql_to_gcs.py:            self.log.warning('Using default schema due to missing name or type. Please '
airflow/operators/bash_operator.py:            self.log.info('Command exited with return code %s', self.sub_process.returncode)
airflow/serialization/serialized_objects.py:            LOG.warning('Failed to stringify.', exc_info=True)
airflow/providers/amazon/aws/operators/batch.py:            self.log.info("AWS Batch Job has failed")
airflow/providers/amazon/aws/hooks/s3.py:            self.log.info(e.response["Error"]["Message"])
airflow/providers/amazon/aws/hooks/s3.py:            self.log.info(e.response["Error"]["Message"])
airflow/utils/dag_processing.py:        self.log.info("Exiting gracefully upon receiving signal %s", signum)
airflow/utils/dag_processing.py:                self.log.info("Exiting dag parsing loop as all files "
airflow/utils/dag_processing.py:            self.log.info("Failing jobs without heartbeat after %s", limit_dttm)
airflow/utils/dag_processing.py:            self.log.info("Waiting up to %s seconds for processes to exit...", timeout)
airflow/utils/helpers.py:        log.info("Process %s (%s) terminated with exit code %s", p, p.pid, p.returncode)
airflow/models/dagrun.py:            self.log.info('Marking run %s failed', self)
airflow/models/dagrun.py:            self.log.info('Deadlock; marking run %s failed', self)
airflow/models/dagrun.py:                    self.log.warning("Failed to get task '{}' for dag '{}'. "
airflow/gcp/sensors/gcs.py:            self.log.warning("FAILURE: Inactivity Period passed, not enough objects found in %s", path)
airflow/gcp/operators/spanner.py:            self.log.info("The Cloud Spanner database was missing: "
airflow/gcp/hooks/kubernetes_engine.py:            self.log.info('Assuming Success: %s', error.message)
airflow/gcp/hooks/kubernetes_engine.py:            self.log.info('Assuming Success: %s', error.message)
airflow/gcp/hooks/cloud_memorystore.py:        self.log.info("Failovering Instance: %s", name)
airflow/gcp/hooks/cloud_memorystore.py:        self.log.info("Instance failovered: %s", name)
airflow/gcp/hooks/bigquery.py:            self.log.info(error_msg)
airflow/gcp/hooks/bigtable.py:            self.log.info("The instance '%s' does not exist in project '%s'. Exiting", instance_id,
airflow/contrib/sensors/bash_sensor.py:                self.log.info("Command exited with return code %s", sp.returncode)
airflow/contrib/sensors/ftp_sensor.py:                self.log.info('Ftp error encountered: %s', str(e))
airflow/contrib/operators/azure_container_instances_operator.py:            self.log.info("Container had exit code: %s", exit_code)
airflow/contrib/operators/azure_container_instances_operator.py:                    self.log.info("Container exited with detail_status %s", detail_status)
airflow/contrib/operators/azure_container_instances_operator.py:                    self.log.info("Azure provision failure")
airflow/contrib/operators/winrm_operator.py:            self.log.info("Hook not found, creating...")
airflow/contrib/operators/docker_swarm_operator.py:                self.log.info('Service status before exiting: %s', status)
airflow/contrib/auth/backends/ldap_auth.py:        log.warning("Unable to find group for %s %s", search_base, search_filter)
airflow/contrib/auth/backends/ldap_auth.py:        log.warning("""Missing attribute "%s" when looked-up in Ldap database.
airflow/contrib/auth/backends/ldap_auth.py:        log.warning("Parsing error when retrieving the user's group(s)."
airflow/contrib/utils/sendgrid.py:        log.warning('Failed to send out email with subject %s, status code: %s',
airflow/contrib/example_dags/example_kubernetes_operator.py:    log.warning("Could not import KubernetesPodOperator: " + str(e))
airflow/contrib/hooks/gdrive_hook.py:                self.log.info("Not found %s directory", current_folder)
airflow/contrib/hooks/sqoop_hook.py:        self.log.info("Command exited with return code %s", self.sp.returncode)
airflow/contrib/hooks/salesforce_hook.py:            log.warning("Could not convert field to timestamps: %s", column.name)
airflow/contrib/hooks/qubole_check_hook.py:            log.info("Qubole command not found")
airflow/contrib/hooks/spark_submit_hook.py:                    self.log.info("Exception when attempting to kill Spark on K8s:")
airflow/www/node_modules/eslint/lib/util/npm-util.js:        log.info("Could not read package.json file. Please check that the file contains valid JSON.");
airflow/www/node_modules/eslint/lib/config/config-initializer.js:            log.info("Local ESLint installation not found.");
airflow/www/api/experimental/endpoints.py:            _log.info(error_message)
airflow/www/api/experimental/endpoints.py:        _log.info(error_message)
airflow/www/api/experimental/endpoints.py:        _log.info(error_message)
airflow/executors/kubernetes_executor.py:            self.log.info('Event: %s Failed', pod_id)
airflow/executors/kubernetes_executor.py:            self.log.warning("could not get try_number as an int: %s", labels.get('try_number', '1'))
airflow/executors/kubernetes_executor.py:                    self.log.warning('ApiException when attempting to run task, re-queueing. '
airflow/executors/base_executor.py:            self.log.info("could not queue task %s", simple_task_instance.key)
airflow/executors/debug_executor.py:                self.log.info("Setting %s to %s", ti.key, State.UPSTREAM_FAILED)
airflow/executors/debug_executor.py:            self.log.info("Setting %s to %s", ti.key, State.UPSTREAM_FAILED)
airflow/hooks/hive_hooks.py:                self.log.info("Could not connect to %s:%s", conn.host, conn.port)
airflow/stats.py:    log.warning("Could not configure StatsClient: %s, using DummyStatsLogger instead.", e)
airflow/task/task_runner/cgroup_task_runner.py:            self.log.warning("Task failed with return code of 137. This may indicate "
airflow/jobs/local_task_job.py:                    self.log.info("Task exited with return code %s", return_code)
airflow/jobs/scheduler_job.py:        self.log.info("Exiting gracefully upon receiving signal %s", signum)
airflow/jobs/scheduler_job.py:                    self.log.warning("TaskInstance %s went missing from the database", ti)
airflow/jobs/scheduler_job.py:            self.log.info("Exited execute loop")
airflow/jobs/scheduler_job.py:                self.log.info("Exiting scheduler loop as all files"
airflow/jobs/backfill_job.py:        self.log.info("Backfill done. Exiting.")
airflow/kubernetes/pod_launcher.py:            self.log.info('Event with job id %s Failed', job_id)
tests/sensors/test_timeout_sensor.py:        self.log.info("Success criteria met. Exiting.")
tests/gcp/operators/test_cloud_sql_system_helper.py:            self.log.info("File doesn't exits. Creating dir...")
tests/gcp/operators/test_cloud_sql_system_helper.py:                self.log.info("Error while creating dir.")
tests/gcp/utils/gcp_authenticator.py:            self.log.info('Airflow DB Session error:' + str(ex))
tests/gcp/utils/gcp_authenticator.py:            self.log.info('Airflow DB Session error:' + str(ex))
tests/gcp/utils/gcp_authenticator.py:            self.log.info("The {} is missing".format(key_path))
tests/contrib/utils/logging_command_executor.py:                self.log.warning("Error when executing %s", " ".join(cmd))
tests/contrib/utils/logging_command_executor.py:            self.log.info("Error when executing '{}'".format(" ".join(cmd)))

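Separating the true positives from the false ones probably can't be done on message text alone. One way to narrow the list would be to flag only log.info/log.warning calls that occur inside an except handler, where ERROR (or at least exc_info) is usually warranted. A rough sketch of that idea, offered only as an illustration (this script is not part of Airflow):

import ast
import sys

SUSPECT_LEVELS = {"info", "warning", "warn"}

def is_log_call(node):
    """Match calls like log.info(...), self.log.warning(...), _log.info(...)."""
    if not (isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute)):
        return False
    if node.func.attr not in SUSPECT_LEVELS:
        return False
    target = node.func.value
    name = target.attr if isinstance(target, ast.Attribute) else getattr(target, "id", "")
    return "log" in name.lower()

for path in sys.argv[1:]:
    with open(path) as src:
        tree = ast.parse(src.read(), filename=path)
    # Only report INFO/WARN logging that happens while handling an exception.
    for handler in (n for n in ast.walk(tree) if isinstance(n, ast.ExceptHandler)):
        for node in ast.walk(handler):
            if is_log_call(node):
                print("%s:%d" % (path, node.lineno))

This would still miss wrong levels outside except blocks (e.g. "AWS Batch Job has failed"), so it narrows the grep rather than replaces it.
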
  was:
log level should be error on some (but there are false positives):

grep -iE 'log\.(info|warn).*(error|exceptio|fail|unab)' -R *
airflow/logging_config.py:        log.warning('Unable to load the config, contains a configuration error.')
airflow/operators/check_operator.py:            self.log.warning("The following %s tests out of %s failed:", j, n)
airflow/serialization/serialized_objects.py:            LOG.warning('Failed to stringify.', exc_info=True)
airflow/providers/amazon/aws/operators/batch.py:            self.log.info("AWS Batch Job has failed")
airflow/providers/amazon/aws/hooks/s3.py:            self.log.info(e.response["Error"]["Message"])
airflow/providers/amazon/aws/hooks/s3.py:            self.log.info(e.response["Error"]["Message"])
airflow/utils/dag_processing.py:            self.log.info("Failing jobs without heartbeat after %s", limit_dttm)
airflow/models/dagrun.py:            self.log.info('Marking run %s failed', self)
airflow/models/dagrun.py:            self.log.info('Deadlock; marking run %s failed', self)
airflow/models/dagrun.py:                    self.log.warning("Failed to get task '{}' for dag '{}'. "
airflow/gcp/sensors/gcs.py:            self.log.warning("FAILURE: Inactivity Period passed, not enough objects found in %s", path)
airflow/gcp/hooks/kubernetes_engine.py:            self.log.info('Assuming Success: %s', error.message)
airflow/gcp/hooks/kubernetes_engine.py:            self.log.info('Assuming Success: %s', error.message)
airflow/gcp/hooks/cloud_memorystore.py:        self.log.info("Failovering Instance: %s", name)
airflow/gcp/hooks/cloud_memorystore.py:        self.log.info("Instance failovered: %s", name)
airflow/gcp/hooks/bigquery.py:            self.log.info(error_msg)
airflow/contrib/sensors/ftp_sensor.py:                self.log.info('Ftp error encountered: %s', str(e))
airflow/contrib/operators/azure_container_instances_operator.py:                    self.log.info("Azure provision failure")
airflow/contrib/auth/backends/ldap_auth.py:        log.warning("Unable to find group for %s %s", search_base, search_filter)
airflow/contrib/auth/backends/ldap_auth.py:        log.warning("Parsing error when retrieving the user's group(s)."
airflow/contrib/utils/sendgrid.py:        log.warning('Failed to send out email with subject %s, status code: %s',
airflow/contrib/hooks/spark_submit_hook.py:                    self.log.info("Exception when attempting to kill Spark on K8s:")
airflow/www/api/experimental/endpoints.py:            _log.info(error_message)
airflow/www/api/experimental/endpoints.py:        _log.info(error_message)
airflow/www/api/experimental/endpoints.py:        _log.info(error_message)
airflow/executors/kubernetes_executor.py:            self.log.info('Event: %s Failed', pod_id)
airflow/executors/kubernetes_executor.py:                    self.log.warning('ApiException when attempting to run task, re-queueing. '
airflow/executors/debug_executor.py:                self.log.info("Setting %s to %s", ti.key, State.UPSTREAM_FAILED)
airflow/executors/debug_executor.py:            self.log.info("Setting %s to %s", ti.key, State.UPSTREAM_FAILED)
airflow/task/task_runner/cgroup_task_runner.py:            self.log.warning("Task failed with return code of 137. This may indicate "
airflow/kubernetes/pod_launcher.py:            self.log.info('Event with job id %s Failed', job_id)
tests/gcp/operators/test_cloud_sql_system_helper.py:                self.log.info("Error while creating dir.")
tests/gcp/utils/gcp_authenticator.py:            self.log.info('Airflow DB Session error:' + str(ex))
tests/gcp/utils/gcp_authenticator.py:            self.log.info('Airflow DB Session error:' + str(ex))
tests/contrib/utils/logging_command_executor.py:                self.log.warning("Error when executing %s", " ".join(cmd))
tests/contrib/utils/logging_command_executor.py:            self.log.info("Error when executing '{}'".format(" ".join(cmd)))


> log level of INFO/WARN when ERROR happened
> ------------------------------------------
>
>                 Key: AIRFLOW-6515
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-6515
>             Project: Apache Airflow
>          Issue Type: Bug
>          Components: logging
>    Affects Versions: 1.10.7
>            Reporter: t oo
>            Priority: Major
>



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
