[ https://issues.apache.org/jira/browse/AIRFLOW-3372?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16694503#comment-16694503 ]

MADHANKUMAR C commented on AIRFLOW-3372:
----------------------------------------

+*Below is the output I am getting while installing Airflow using Helm:*+

[root@kubernetes-cpal-master-0 kube-airflow]# make helm-install NAMESPACE=yournamespace HELM_VALUES=airflow/values.yaml
helm upgrade -f airflow/values.yaml \
 --install \
 --debug \
 airflow \
 ./airflow
[debug] Created tunnel using local port: '45999'

[debug] SERVER: "127.0.0.1:45999"

Release "airflow" does not exist. Installing it now.
[debug] CHART PATH: /home/centos/madhan3/kube-airflow/airflow

NAME: airflow
REVISION: 1
RELEASED: Wed Nov 21 09:52:49 2018
CHART: airflow-v0.2.1
USER-SUPPLIED VALUES:
airflow:
  config: {}
  dag_path: /dags
  fernet_key: ""
  image: rcmadhankumar/docker-airflow
  image_pull_policy: IfNotPresent
  imageTag: 1.9.0
  init_retry_loop: null
  scheduler_num_runs: "-1"
  service:
    type: ClusterIP
  url_prefix: /airflow
celery:
  num_workers: 1
dags:
  git_branch: master
  git_repo: null
  git_sync_debug: false
  git_sync_enabled: false
  load_examples: true
  pickle_dag: true
  poll_interval_sec: 60
flower:
  url_prefix: /flower
ingress:
  annotations:
    flower: null
    web: null
  enabled: false
  host: ""
  path:
    flower: /airflow/flower
    web: /airflow
persistence:
  accessMode: ReadWriteOnce
  enabled: false
  size: 1Gi
postgresql:
  enabled: true
  persistence:
    enabled: true
  postgresDatabase: airflow
  postgresPassword: airflow
  postgresUser: airflow
redis:
  enabled: true
  persistence:
    enabled: true
  redisPassword: redis

COMPUTED VALUES:
airflow:
  config: {}
  dag_path: /dags
  fernet_key: ""
  image: rcmadhankumar/docker-airflow
  image_pull_policy: IfNotPresent
  imageTag: 1.9.0
  init_retry_loop: null
  scheduler_num_runs: "-1"
  service:
    type: ClusterIP
  url_prefix: /airflow
celery:
  num_workers: 1
dags:
  git_branch: master
  git_repo: null
  git_sync_debug: false
  git_sync_enabled: false
  load_examples: true
  pickle_dag: true
  poll_interval_sec: 60
flower:
  url_prefix: /flower
ingress:
  annotations:
    flower: null
    web: null
  enabled: false
  host: ""
  path:
    flower: /airflow/flower
    web: /airflow
persistence:
  accessMode: ReadWriteOnce
  enabled: false
  size: 1Gi
postgresql:
  affinity: {}
  enabled: true
  global: {}
  image: postgres
  imageTag: 9.6.2
  metrics:
    enabled: false
    image: wrouesnel/postgres_exporter
    imagePullPolicy: IfNotPresent
    imageTag: v0.1.1
    resources:
      requests:
        cpu: 100m
        memory: 256Mi
  networkPolicy:
    allowExternal: true
    enabled: false
  nodeSelector: {}
  persistence:
    accessMode: ReadWriteOnce
    enabled: true
    mountPath: /var/lib/postgresql/data/pgdata
    size: 8Gi
    subPath: postgresql-db
  postgresDatabase: airflow
  postgresPassword: airflow
  postgresUser: airflow
  resources:
    requests:
      cpu: 100m
      memory: 256Mi
  service:
    externalIPs: []
    port: 5432
    type: ClusterIP
  tolerations: []
redis:
  enabled: true
  global: {}
  image: bitnami/redis:4.0.8-r0
  imagePullPolicy: IfNotPresent
  metrics:
    annotations:
      prometheus.io/port: "9121"
      prometheus.io/scrape: "true"
    enabled: false
    image: oliver006/redis_exporter
    imagePullPolicy: IfNotPresent
    imageTag: v0.11
    resources: {}
  networkPolicy:
    allowExternal: true
    enabled: false
  nodeSelector: {}
  persistence:
    accessMode: ReadWriteOnce
    enabled: true
    path: /bitnami
    size: 8Gi
    subPath: ""
  podAnnotations: {}
  podLabels: {}
  redisPassword: redis
  resources:
    requests:
      cpu: 100m
      memory: 256Mi
  securityContext:
    enabled: true
    fsGroup: 1001
    runAsUser: 1001
  service:
    annotations: {}
    loadBalancerIP: null
    serviceType: ClusterIP
  tolerations: []
  usePassword: true

HOOKS:
MANIFEST:

---
# Source: airflow/charts/postgresql/templates/secrets.yaml
apiVersion: v1
kind: Secret
metadata:
  name: airflow-postgresql
  labels:
    app: airflow-postgresql
    chart: "postgresql-0.8.12"
    release: "airflow"
    heritage: "Tiller"
type: Opaque
data:
  postgres-password: "YWlyZmxvdw=="
---
# Source: airflow/charts/redis/templates/secrets.yaml
apiVersion: v1
kind: Secret
metadata:
  name: airflow-redis
  labels:
    app: airflow-redis
    chart: "redis-1.1.12"
    release: "airflow"
    heritage: "Tiller"
type: Opaque
data:
  redis-password: "cmVkaXM="
---
# Source: airflow/charts/postgresql/templates/pvc.yaml
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
  name: airflow-postgresql
  labels:
    app: airflow-postgresql
    chart: "postgresql-0.8.12"
    release: "airflow"
    heritage: "Tiller"
spec:
  accessModes:
    - "ReadWriteOnce"
  resources:
    requests:
      storage: "8Gi"
---
# Source: airflow/charts/redis/templates/pvc.yaml
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
  name: airflow-redis
  labels:
    app: airflow-redis
    chart: "redis-1.1.12"
    release: "airflow"
    heritage: "Tiller"
spec:
  accessModes:
    - "ReadWriteOnce"
  resources:
    requests:
      storage: "8Gi"
---
# Source: airflow/charts/postgresql/templates/svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: airflow-postgresql
  labels:
    app: airflow-postgresql
    chart: "postgresql-0.8.12"
    release: "airflow"
    heritage: "Tiller"
spec:
  type: ClusterIP
  ports:
  - name: postgresql
    port: 5432
    targetPort: postgresql
  selector:
    app: airflow-postgresql
---
# Source: airflow/charts/redis/templates/svc.yaml
apiVersion: v1
kind: Service
metadata:
  name: airflow-redis
  labels:
    app: airflow-redis
    chart: "redis-1.1.12"
    release: "airflow"
    heritage: "Tiller"
  annotations:
spec:
  type: ClusterIP
  ports:
  - name: redis
    port: 6379
    targetPort: redis
  selector:
    app: airflow-redis
---
# Source: airflow/templates/services.yaml
apiVersion: v1
kind: Service
metadata:
  name: airflow-airflow-web
  labels:
    app: airflow-web
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  type: ClusterIP
  selector:
    app: airflow-web
    release: airflow
  ports:
  - name: web
    protocol: TCP
    port: 8080
---
# Source: airflow/templates/services.yaml
apiVersion: v1
kind: Service
metadata:
  name: airflow-airflow-flower
  labels:
    app: airflow-flower
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  type: ClusterIP
  selector:
    app: airflow-flower
    release: airflow
  ports:
  - name: flower
    protocol: TCP
    port: 5555
---
# Source: airflow/templates/services.yaml
# Headless service for stable DNS entries of StatefulSet members.
apiVersion: v1
kind: Service
metadata:
  name: airflow-airflow-worker
  labels:
    app: airflow-worker
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  ports:
  - name: worker
    protocol: TCP
    port: 8793
  clusterIP: None
  selector:
    app: airflow-worker
---
# Source: airflow/charts/postgresql/templates/deployment.yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: airflow-postgresql
  labels:
    app: airflow-postgresql
    chart: "postgresql-0.8.12"
    release: "airflow"
    heritage: "Tiller"
spec:
  template:
    metadata:
      labels:
        app: airflow-postgresql
    spec:
      containers:
      - name: airflow-postgresql
        image: "postgres:9.6.2"
        imagePullPolicy: ""
        env:
        - name: POSTGRES_USER
          value: "airflow"
        # Required for pg_isready in the health probes.
        - name: PGUSER
          value: "airflow"
        - name: POSTGRES_DB
          value: "airflow"
        - name: POSTGRES_INITDB_ARGS
          value: ""
        - name: PGDATA
          value: /var/lib/postgresql/data/pgdata
        - name: POSTGRES_PASSWORD
          valueFrom:
            secretKeyRef:
              name: airflow-postgresql
              key: postgres-password
        - name: POD_IP
          valueFrom: { fieldRef: { fieldPath: status.podIP } }
        ports:
        - name: postgresql
          containerPort: 5432
        livenessProbe:
          exec:
            command:
            - sh
            - -c
            - exec pg_isready --host $POD_IP
          initialDelaySeconds: 120
          timeoutSeconds: 5
          failureThreshold: 6
        readinessProbe:
          exec:
            command:
            - sh
            - -c
            - exec pg_isready --host $POD_IP
          initialDelaySeconds: 5
          timeoutSeconds: 3
          periodSeconds: 5
        resources:
          requests:
            cpu: 100m
            memory: 256Mi
        volumeMounts:
        - name: data
          mountPath: /var/lib/postgresql/data/pgdata
          subPath: postgresql-db
      volumes:
      - name: data
        persistentVolumeClaim:
          claimName: airflow-postgresql
---
# Source: airflow/charts/redis/templates/deployment.yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: airflow-redis
  labels:
    app: airflow-redis
    chart: "redis-1.1.12"
    release: "airflow"
    heritage: "Tiller"
spec:
  template:
    metadata:
      labels:
        app: airflow-redis
    spec:
      securityContext:
        fsGroup: 1001
        runAsUser: 1001
      containers:
      - name: airflow-redis
        image: "bitnami/redis:4.0.8-r0"
        imagePullPolicy: "IfNotPresent"
        env:
        - name: REDIS_PASSWORD
          valueFrom:
            secretKeyRef:
              name: airflow-redis
              key: redis-password
        ports:
        - name: redis
          containerPort: 6379
        livenessProbe:
          exec:
            command:
            - redis-cli
            - ping
          initialDelaySeconds: 30
          timeoutSeconds: 5
        readinessProbe:
          exec:
            command:
            - redis-cli
            - ping
          initialDelaySeconds: 5
          timeoutSeconds: 1
        resources:
          requests:
            cpu: 100m
            memory: 256Mi
        volumeMounts:
        - name: redis-data
          mountPath: /bitnami
          subPath:
      volumes:
      - name: redis-data
        persistentVolumeClaim:
          claimName: airflow-redis
---
# Source: airflow/templates/deployments-flower.yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: airflow-airflow-flower
  labels:
    app: airflow-flower
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  replicas: 1
  template:
    metadata:
      labels:
        app: airflow-flower
        release: airflow
    spec:
      restartPolicy: Always
      containers:
      - name: airflow-flower
        image: rcmadhankumar/docker-airflow:1.9.0
        imagePullPolicy: IfNotPresent
        env:
        - name: FLOWER_PORT
          value: "5555"
        - name: REDIS_HOST
          value: airflow-redis
        - name: REDIS_PASSWORD
          value: redis
        ports:
        - name: flower
          containerPort: 5555
          protocol: TCP
        args: ["flower"]
---
# Source: airflow/templates/deployments-scheduler.yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: airflow-airflow-scheduler
  labels:
    app: airflow-scheduler
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  replicas: 1
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxSurge: 1
      maxUnavailable: 1
  template:
    metadata:
      labels:
        app: airflow-scheduler
        release: airflow
    spec:
      restartPolicy: Always
      containers:
      - name: airflow-scheduler
        image: rcmadhankumar/docker-airflow:1.9.0
        imagePullPolicy: IfNotPresent
        env:
        - name: POSTGRES_HOST
          value: airflow-postgresql
        - name: POSTGRES_DB
          value: airflow
        - name: POSTGRES_USER
          value: airflow
        - name: POSTGRES_PASSWORD
          value: airflow
        args: ["scheduler", "-n", "-1", "-p"]
      volumes:
      - name: dags-data
        emptyDir: {}
---
# Source: airflow/templates/deployments-web.yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: airflow-airflow-web
  labels:
    app: airflow-web
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  replicas: 2
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxSurge: 1
      maxUnavailable: 2
  template:
    metadata:
      labels:
        app: airflow-web
        release: airflow
    spec:
      restartPolicy: Always
      containers:
      - name: airflow-web
        image: rcmadhankumar/docker-airflow:1.9.0
        imagePullPolicy: IfNotPresent
        ports:
        - name: web
          containerPort: 8080
          protocol: TCP
        env:
        - name: POSTGRES_HOST
          value: airflow-postgresql
        - name: POSTGRES_DB
          value: airflow
        - name: POSTGRES_USER
          value: airflow
        - name: POSTGRES_PASSWORD
          value: airflow
        - name: REDIS_HOST
          value: airflow-redis
        - name: REDIS_PASSWORD
          value: redis
        args: ["webserver"]
        livenessProbe:
          httpGet:
            path: "/health"
            port: web
---
# Source: airflow/templates/statefulsets-workers.yaml
# Workers are not in a Deployment but in a StatefulSet, to allow each worker to
# expose a mini-server that only serves logs, which will be used by the web server.

apiVersion: apps/v1beta1
kind: StatefulSet
metadata:
  name: airflow-airflow-worker
  labels:
    app: airflow-worker
    chart: airflow-v0.2.1
    release: airflow
    heritage: Tiller
spec:
  serviceName: "airflow-airflow-workers"
  updateStrategy:
    type: RollingUpdate
  # Use experimental burst mode for faster StatefulSet scaling
  # https://github.com/kubernetes/kubernetes/commit/c2c5051adf096ffd48bf1dcf5b11cb47e464ecdd
  podManagementPolicy: Parallel
  replicas: 1
  template:
    metadata:
      labels:
        app: airflow-worker
        release: airflow
    spec:
      restartPolicy: Always
      terminationGracePeriodSeconds: 30
      containers:
      - name: airflow-worker
        imagePullPolicy: IfNotPresent
        image: "rcmadhankumar/docker-airflow:1.9.0"
        env:
        - name: POSTGRES_HOST
          value: airflow-postgresql
        - name: POSTGRES_DB
          value: airflow
        - name: POSTGRES_USER
          value: airflow
        - name: POSTGRES_PASSWORD
          value: airflow
        - name: REDIS_HOST
          value: airflow-redis
        - name: REDIS_PASSWORD
          value: redis
        # volumes:
        # - /localpath/to/dags:/usr/local/airflow/dags
        args: ["worker"]
        ports:
        - name: wlog
          containerPort: 8793
          protocol: TCP
      volumes:
      - name: dags-data
        emptyDir: {}
LAST DEPLOYED: Wed Nov 21 09:52:49 2018
NAMESPACE: default
STATUS: DEPLOYED

RESOURCES:
==> v1/PersistentVolumeClaim
NAME AGE
airflow-postgresql 1s
airflow-redis 1s

==> v1/Service
airflow-postgresql 1s
airflow-redis 1s
airflow-airflow-web 1s
airflow-airflow-flower 1s
airflow-airflow-worker 1s

==> v1beta1/Deployment
airflow-postgresql 1s
airflow-redis 1s
airflow-airflow-flower 1s
airflow-airflow-scheduler 1s
airflow-airflow-web 1s

==> v1beta1/StatefulSet
airflow-airflow-worker 1s

==> v1/Pod(related)

NAME READY STATUS RESTARTS AGE
airflow-postgresql-9df799579-cxn8k 0/1 Pending 0 1s
airflow-redis-7d75b85f7-9pxsz 0/1 Pending 0 1s
airflow-airflow-flower-8fcbb5c64-9vwct 0/1 ContainerCreating 0 1s
airflow-airflow-scheduler-56d5f7cccd-pfl24 0/1 ContainerCreating 0 1s
airflow-airflow-web-7dcf747cb5-kb49s 0/1 ContainerCreating 0 1s
airflow-airflow-web-7dcf747cb5-v2n92 0/1 ContainerCreating 0 1s
airflow-airflow-worker-0 0/1 ContainerCreating 0 0s

==> v1/Secret

NAME AGE
airflow-postgresql 1s
airflow-redis 1s
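
Note that the rendered scheduler Deployment above only passes POSTGRES_HOST/POSTGRES_DB/POSTGRES_USER/POSTGRES_PASSWORD, yet the scheduler log quoted below shows Airflow starting with SequentialExecutor against the default SQLite metadata DB, which suggests those variables are not being translated into sql_alchemy_conn by the image entrypoint. As a minimal sketch only (not taken from this chart; it assumes the image runs a stock Airflow 1.9 that honours the standard AIRFLOW__SECTION__KEY environment overrides), the scheduler container env could point Airflow at the bundled PostgreSQL and Redis explicitly:

        env:
        # Hypothetical overrides, not part of the rendered manifest above;
        # hosts/credentials are the ones defined by this release.
        - name: AIRFLOW__CORE__EXECUTOR
          value: CeleryExecutor
        - name: AIRFLOW__CORE__SQL_ALCHEMY_CONN
          value: postgresql+psycopg2://airflow:airflow@airflow-postgresql:5432/airflow
        - name: AIRFLOW__CELERY__BROKER_URL
          value: redis://:redis@airflow-redis:6379/0
        - name: AIRFLOW__CELERY__CELERY_RESULT_BACKEND
          value: db+postgresql://airflow:airflow@airflow-postgresql:5432/airflow

If overrides like these take effect, running airflow initdb once against the same connection should create the missing log table that the traceback below complains about.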

 

> Unable to start airflow scheduler
> ---------------------------------
>
>                 Key: AIRFLOW-3372
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-3372
>             Project: Apache Airflow
>          Issue Type: Bug
>          Components: docker, kubernetes, scheduler
>    Affects Versions: 1.9.0
>         Environment: Kubernetes,docker
>            Reporter: MADHANKUMAR C
>            Priority: Blocker
>   Original Estimate: 48h
>  Remaining Estimate: 48h
>
> *I have installed Airflow in a Kubernetes cluster. When installing Airflow, I am unable to start the scheduler. Below is the log of the scheduler container.*
> [2018-11-20 12:02:40,860] {{__init__.py:51}} INFO - Using executor 
> SequentialExecutor
>  [2018-11-20 12:02:40,973] {{cli_action_loggers.py:69}} ERROR - Failed on 
> pre-execution callback using <function default_action_log at 0x7f26b730b620>
>  Traceback (most recent call last):
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 1182, in _execute_context
>  context)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/default.py", 
> line 470, in do_execute
>  cursor.execute(statement, parameters)
>  sqlite3.OperationalError: no such table: log
> The above exception was the direct cause of the following exception:
> Traceback (most recent call last):
>  File 
> "/usr/local/lib/python3.5/dist-packages/airflow/utils/cli_action_loggers.py", 
> line 67, in on_pre_execution
>  cb(**kwargs)
>  File 
> "/usr/local/lib/python3.5/dist-packages/airflow/utils/cli_action_loggers.py", 
> line 99, in default_action_log
>  session.commit()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 927, in commit
>  self.transaction.commit()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 467, in commit
>  self._prepare_impl()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 447, in _prepare_impl
>  self.session.flush()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 2209, in flush
>  self._flush(objects)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 2329, in _flush
>  transaction.rollback(_capture_exception=True)
>  File 
> "/usr/local/lib/python3.5/dist-packages/sqlalchemy/util/langhelpers.py", line 
> 66, in __exit__
>  compat.reraise(exc_type, exc_value, exc_tb)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/util/compat.py", 
> line 187, in reraise
>  raise value
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 2293, in _flush
>  flush_context.execute()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/unitofwork.py", 
> line 389, in execute
>  rec.execute(self)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/unitofwork.py", 
> line 548, in execute
>  uow
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/persistence.py", 
> line 181, in save_obj
>  mapper, table, insert)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/persistence.py", 
> line 835, in _emit_insert_statements
>  execute(statement, params)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 945, in execute
>  return meth(self, multiparams, params)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/sql/elements.py", 
> line 263, in _execute_on_connection
>  return connection._execute_clauseelement(self, multiparams, params)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 1053, in _execute_clauseelement
>  compiled_sql, distilled_params
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 1189, in _execute_context
>  context)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 1402, in _handle_dbapi_exception
>  exc_info
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/util/compat.py", 
> line 203, in raise_from_cause
>  reraise(type(exception), exception, tb=exc_tb, cause=cause)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/util/compat.py", 
> line 186, in reraise
>  raise value.with_traceback(tb)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/base.py", 
> line 1182, in _execute_context
>  context)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/engine/default.py", 
> line 470, in do_execute
>  cursor.execute(statement, parameters)
>  sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) no such table: 
> log [SQL: 'INSERT INTO log (dttm, dag_id, task_id, event, execution_date, 
> owner, extra) VALUES (?, ?, ?, ?, ?, ?, ?)'] [parameters: ('2018-11-20 
> 12:02:40.969353', None, None, 'cli_scheduler', None, 'airflow', '
> {"host_name": "airflow-airflow-scheduler-5b5f8b9549-89dmn", "full_command": 
> "[\'/usr/local/bin/airflow\', \'scheduler\', \'-n\', \'-1\', \'-p\']"}
> ')]
> [Airflow ASCII-art startup banner]
> [2018-11-20 12:02:40,977] {{jobs.py:580}} ERROR - Cannot use more than 1 thread when using sqlite. Setting max_threads to 1
>  /usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py:513: 
> SAWarning: Session's state has been changed on a non-active transaction - 
> this state will be discarded.
>  "Session's state has been changed on "
>  Traceback (most recent call last):
>  File "/usr/local/bin/airflow", line 32, in <module>
>  args.func(args)
>  File "/usr/local/lib/python3.5/dist-packages/airflow/utils/cli.py", line 74, 
> in wrapper
>  return f(*args, **kwargs)
>  File "/usr/local/lib/python3.5/dist-packages/airflow/bin/cli.py", line 925, 
> in scheduler
>  job.run()
>  File "/usr/local/lib/python3.5/dist-packages/airflow/jobs.py", line 196, in 
> run
>  session.commit()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 927, in commit
>  self.transaction.commit()
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 465, in commit
>  self._assert_active(prepared_ok=True)
>  File "/usr/local/lib/python3.5/dist-packages/sqlalchemy/orm/session.py", 
> line 276, in _assert_active
>  % self._rollback_exception
>  sqlalchemy.exc.InvalidRequestError: This Session's transaction has been 
> rolled back due to a previous exception during flush. To begin a new 
> transaction with this Session, first issue Session.rollback(). Original 
> exception was: (sqlite3.OperationalError) no such table: log [SQL: 'INSERT 
> INTO log (dttm, dag_id, task_id, event, execution_date, owner, extra) VALUES 
> (?, ?, ?, ?, ?, ?, ?)'] [parameters: ('2018-11-20 12:02:40.969353', None, 
> None, 'cli_scheduler', None, 'airflow', '
> {"host_name": "airflow-airflow-scheduler-5b5f8b9549-89dmn", "full_command": 
> "[\'/usr/local/bin/airflow\', \'scheduler\', \'-n\', \'-1\', \'-p\']"}
> ')]
> *The pod status is shown below:*
> [root@kubernetes-cpal-master-0 kube-airflow]# *kubectl get pod*
> NAME READY STATUS RESTARTS AGE
> airflow-airflow-flower-6668559cf7-ll5bn 1/1 Running 0 19m
> airflow-airflow-scheduler-5b5f8b9549-89dmn 0/1 CrashLoopBackOff 8 19m
> airflow-airflow-web-89d8fb554-9ztss 1/1 Running 0 19m
> airflow-airflow-web-89d8fb554-f4mbm 1/1 Running 0 19m
> airflow-airflow-worker-0 1/1 Running 0 19m
> airflow-postgresql-9df799579-swg8q 1/1 Running 0 19m
> airflow-redis-7d75b85f7-26lsx 1/1 Running 0 19m
> centos 0/1 CrashLoopBackOff 1278 29d
> logger-deepinsights-fluentd-759ffcfc5d-fjmml 1/1 Running 8 33d
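
A quick way to confirm which metadata DB the containers actually resolve is to read the effective sql_alchemy_conn inside one of the Running pods from the listing above (same image as the scheduler). This is only a sketch: it assumes the Airflow 1.9 configuration module is importable in the container; if it prints a sqlite:// URI instead of a postgresql:// one, the POSTGRES_* variables are not reaching Airflow's config.

# Hypothetical check against the running web pod from the listing above.
kubectl exec airflow-airflow-web-89d8fb554-9ztss -- \
  python -c "from airflow import configuration; print(configuration.conf.get('core', 'sql_alchemy_conn'))"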


