saurabhladhe commented on issue #14672:
URL: https://github.com/apache/airflow/issues/14672#issuecomment-797687616


   @jedcunningham : 
   I checked multiple instances and am not seeing any evicted pods for the ones 
that got SIGTERM: 
   `16m         Normal    Scheduled                      pod/xyzypod.somestr    
    Successfully assigned <namespace>/xyz.a205feb0f53c40abab3dddc46939ce38 to 
aks-<namespace>
   16m         Normal    Pulled                         pod/xyzypod.somestr     
   Container image "airflow:2.0.1" already present on machine
   16m         Normal    Created                        pod/xyzypod.somestr     
   Created container base
   16m         Normal    Started                        pod/xyzypod.somestr     
   Started container base`
   and got SIGTERM in the logs, with a traceback related to SQLAlchemy:
   `[2021-03-12 18:46:30,373] {xyz.py:104} DEBUG - Processing finished for 
pipeline id: 570 (0:00:07.470379 s elapsed)
   [2021-03-12 18:46:30,373] {xyz.py:91} DEBUG - Processing pipeline id: 316
   [2021-03-12 18:46:34,417] {taskinstance.py:605} DEBUG - Refreshed 
TaskInstance <TaskInstance: somedagid.sometaskid 2021-03-01T01:14:00+00:00 
[failed]>
   [2021-03-12 18:46:34,419] {local_task_job.py:188} WARNING - State of this 
instance has been externally set to failed. Terminating instance.
   [2021-03-12 18:46:34,420] {process_utils.py:100} INFO - Sending 
Signals.SIGTERM to GPID 22
   [2021-03-12 18:46:35,793] {taskinstance.py:1239} ERROR - Received SIGTERM. 
Terminating subprocesses.
   [2021-03-12 18:46:35,794] {xyz.py:100} ERROR - Processing failed for 
pipeline id: 316 -- Reason: Task received SIGTERM signal
   Traceback (most recent call last):
     File "/opt/airflow/mnt/dags/gas/pentaho/tasks/xyz.py", line 96, in 
consume_context
       status_thresholds, context)
     File "/opt/airflow/mnt/dags/gas/pentaho/tasks/xyz.py", line 117, in 
_process_pipeline_id
       count = _count_available_records(read_hook, count_sql, pipeline_id)
     File "/opt/airflow/mnt/dags/gas/pentaho/tasks/xyz.py", line 233, in 
_count_available_records
       df = hook.get_pandas_df(sql, parameters={'pipeline_id': pipeline_id})
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/hooks/dbapi.py", line 
116, in get_pandas_df
       with closing(self.get_conn()) as conn:
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/providers/postgres/hooks/postgres.py",
 line 83, in get_conn
       conn = self.connection or self.get_connection(conn_id)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/hooks/base.py", line 
63, in get_connection
       conn = Connection.get_connection_from_secrets(conn_id)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/models/connection.py",
 line 351, in get_connection_from_secrets
       conn = secrets_backend.get_connection(conn_id=conn_id)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/utils/session.py", 
line 65, in wrapper
       return func(*args, session=session, **kwargs)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/secrets/metastore.py",
 line 37, in get_connection
       conn = session.query(Connection).filter(Connection.conn_id == 
conn_id).first()
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3429, in first
       ret = list(self[0:1])
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3203, in __getitem__
       return list(res)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3535, in __iter__
       return self._execute_and_instances(context)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3557, in _execute_and_instances
       querycontext, self._connection_from_session, close_with_result=True
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3572, in _get_bind_args
       mapper=self._bind_mapper(), clause=querycontext.statement, **kw
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/query.py", 
line 3550, in _connection_from_session
       conn = self.session.connection(**kw)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/session.py", 
line 1145, in connection
       execution_options=execution_options,
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/session.py", 
line 1151, in _connection_for_bind
       engine, execution_options
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/orm/session.py", 
line 433, in _connection_for_bind
       conn = bind._contextual_connect()
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", 
line 2302, in _contextual_connect
       self._wrap_pool_connect(self.pool.connect, None),
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/base.py", 
line 2336, in _wrap_pool_connect
       return fn()
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 364, in connect
       return _ConnectionFairy._checkout(self)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 778, in _checkout
       fairy = _ConnectionRecord.checkout(pool)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 495, in checkout
       rec = pool._do_get()
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/impl.py", 
line 241, in _do_get
       return self._create_connection()
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 309, in _create_connection
       return _ConnectionRecord(self)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 440, in __init__
       self.__connect(first_connect_check=True)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 661, in __connect
       pool.logger.debug("Error on connect(): %s", e)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/util/langhelpers.py",
 line 70, in __exit__
       with_traceback=exc_tb,
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/util/compat.py", 
line 182, in raise_
       raise exception
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/pool/base.py", 
line 656, in __connect
       connection = pool._invoke_creator(self)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/strategies.py",
 line 114, in connect
       return dialect.connect(*cargs, **cparams)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/sqlalchemy/engine/default.py",
 line 508, in connect
       return self.dbapi.connect(*cargs, **cparams)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/psycopg2/__init__.py", line 
127, in connect
       conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
     File 
"/home/airflow/.local/lib/python3.7/site-packages/airflow/models/taskinstance.py",
 line 1241, in signal_handler
       raise AirflowException("Task received SIGTERM signal")
   airflow.exceptions.AirflowException: Task received SIGTERM signal`
   
   


----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Reply via email to