[ https://issues.apache.org/jira/browse/AIRFLOW-2105?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16366681#comment-16366681 ]

Yuliya Volkova commented on AIRFLOW-2105:
-----------------------------------------

[~paymahn], could you try running airflow upgradedb and see whether the error goes away?

Also, is this the first start after migrating to 1.9.0, or not?
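
For reference: if I read the traceback below correctly, the failure is in flask_admin's get_pk_from_identity, which unpacks identity_key(instance=obj) into exactly two names. SQLAlchemy 1.2 extended the identity key with a third element (the identity token), so the Flask-Admin 1.4.1 + SQLAlchemy 1.2.2 pairing shown in your virtualenv list would produce exactly this ValueError. Here is a minimal sketch of the mismatch outside Airflow; the model name is made up for the example:

{noformat}
# Illustrative only, not Airflow code. Under SQLAlchemy 1.2+,
# identity_key() returns a 3-tuple (class, pk tuple, identity_token),
# so a 2-name unpack like the one in Flask-Admin 1.4.x fails.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session
from sqlalchemy.orm.util import identity_key

Base = declarative_base()

class KnownEvent(Base):          # hypothetical stand-in model
    __tablename__ = 'known_event'
    id = Column(Integer, primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = Session(engine)
obj = KnownEvent(id=1)
session.add(obj)
session.commit()

print(identity_key(instance=obj))      # (KnownEvent, (1,), None) on 1.2.x
cls, key = identity_key(instance=obj)  # ValueError: too many values to unpack (expected 2)
{noformat}

If that is the cause, pinning SQLAlchemy below 1.2 (or moving to a Flask-Admin release that handles the longer tuple) should make the page render again, independent of the upgradedb question above.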

> Exception on known event creation
> ---------------------------------
>
>                 Key: AIRFLOW-2105
>                 URL: https://issues.apache.org/jira/browse/AIRFLOW-2105
>             Project: Apache Airflow
>          Issue Type: Bug
>    Affects Versions: 1.9.0
>            Reporter: Paymahn Moghadasian
>            Priority: Minor
>
> I tried to create a known event through the UI and was shown the following error:
> {noformat}
> -------------------------------------------------------------------------------
> Node: PaymahnSolvvy.local
> -------------------------------------------------------------------------------
> Traceback (most recent call last):
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/app.py", line 1988, in wsgi_app
>     response = self.full_dispatch_request()
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/app.py", line 1641, in full_dispatch_request
>     rv = self.handle_user_exception(e)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/app.py", line 1544, in handle_user_exception
>     reraise(exc_type, exc_value, tb)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/_compat.py", line 33, in reraise
>     raise value
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/app.py", line 1639, in full_dispatch_request
>     rv = self.dispatch_request()
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/app.py", line 1625, in dispatch_request
>     return self.view_functions[rule.endpoint](**req.view_args)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/base.py", line 69, in inner
>     return self._run_view(f, *args, **kwargs)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/base.py", line 368, in _run_view
>     return fn(self, *args, **kwargs)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/model/base.py", line 1947, in create_view
>     return_url=return_url)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/base.py", line 308, in render
>     return render_template(template, **kwargs)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/templating.py", line 134, in render_template
>     context, ctx.app)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask/templating.py", line 116, in _render
>     rv = template.render(context)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/jinja2/environment.py", line 989, in render
>     return self.environment.handle_exception(exc_info, True)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/jinja2/environment.py", line 754, in handle_exception
>     reraise(exc_type, exc_value, tb)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/jinja2/_compat.py", line 37, in reraise
>     raise value.with_traceback(tb)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/airflow/www/templates/airflow/model_create.html", line 18, in top-level template code
>     {% extends 'admin/model/create.html' %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/model/create.html", line 3, in top-level template code
>     {% from 'admin/lib.html' import extra with context %} {# backward compatible #}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/airflow/www/templates/admin/master.html", line 18, in top-level template code
>     {% extends 'admin/base.html' %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/base.html", line 30, in top-level template code
>     {% block page_body %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/airflow/www/templates/admin/master.html", line 104, in block "page_body"
>     {% block body %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/airflow/www/templates/airflow/model_create.html", line 28, in block "body"
>     {{ super() }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/model/create.html", line 22, in block "body"
>     {% block create_form %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/model/create.html", line 23, in block "create_form"
>     {{ lib.render_form(form, return_url, extra(), form_opts) }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/lib.html", line 202, in template
>     {% call form_tag(action=action) %}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/lib.html", line 182, in template
>     {{ caller() }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/lib.html", line 203, in template
>     {{ render_form_fields(form, form_opts=form_opts) }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/lib.html", line 175, in template
>     {{ render_field(form, f, kwargs) }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/templates/bootstrap3/admin/lib.html", line 130, in template
>     {{ field(**kwargs)|safe }}
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/wtforms/fields/core.py", line 153, in __call__
>     return self.meta.render_field(self, kwargs)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/wtforms/meta.py", line 56, in render_field
>     return field.widget(field, **render_kw)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/form/widgets.py", line 28, in __call__
>     return super(Select2Widget, self).__call__(field, **kwargs)
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/wtforms/widgets/core.py", line 287, in __call__
>     for val, label, selected in field.iter_choices():
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/contrib/sqla/fields.py", line 110, in iter_choices
>     for pk, obj in self._get_object_list():
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/contrib/sqla/fields.py", line 103, in _get_object_list
>     self._object_list = [(text_type(get_pk(obj)), obj) for obj in query]
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/contrib/sqla/fields.py", line 103, in <listcomp>
>     self._object_list = [(text_type(get_pk(obj)), obj) for obj in query]
>   File "/Users/paymahn/solvvy/scheduler/venv/lib/python3.6/site-packages/flask_admin/contrib/sqla/fields.py", line 300, in get_pk_from_identity
>     cls, key = identity_key(instance=obj)
> ValueError: too many values to unpack (expected 2)
> {noformat}
> My virtualenv looks like:
> {noformat}
> alembic==0.8.10
> apache-airflow==1.9.0
> asn1crypto==0.24.0
> bleach==2.1.2
> certifi==2018.1.18
> cffi==1.11.4
> chardet==3.0.4
> click==6.7
> configparser==3.5.0
> croniter==0.3.20
> cryptography==2.1.4
> dill==0.2.7.1
> docutils==0.14
> fernet==1.0.1
> Flask==0.11.1
> Flask-Admin==1.4.1
> Flask-Cache==0.13.1
> Flask-Login==0.2.11
> flask-swagger==0.2.13
> Flask-WTF==0.14
> funcsigs==1.0.0
> future==0.16.0
> gitdb2==2.0.3
> GitPython==2.1.8
> gunicorn==19.7.1
> html5lib==1.0.1
> idna==2.6
> itsdangerous==0.24
> Jinja2==2.8.1
> lockfile==0.12.2
> lxml==3.8.0
> Mako==1.0.7
> Markdown==2.6.11
> MarkupSafe==1.0
> numpy==1.14.0
> ordereddict==1.1
> pandas==0.22.0
> psutil==4.4.2
> psycopg2==2.7.4
> psycopg2-binary==2.7.4
> pyaes==1.6.1
> pycparser==2.18
> Pygments==2.2.0
> python-daemon==2.1.2
> python-dateutil==2.6.1
> python-editor==1.0.3
> python-nvd3==0.14.2
> python-slugify==1.1.4
> pytz==2018.3
> PyYAML==3.12
> requests==2.18.4
> setproctitle==1.1.10
> six==1.11.0
> smmap2==2.0.3
> SQLAlchemy==1.2.2
> tabulate==0.7.7
> thrift==0.11.0
> Unidecode==1.0.22
> urllib3==1.22
> webencodings==0.5.1
> Werkzeug==0.14.1
> WTForms==2.1
> zope.deprecation==4.3.0
> {noformat}
> My airflow.cfg looks like:
> {noformat}
> [core]
> # The home folder for airflow, default is ~/airflow
> airflow_home = /Users/paymahn/solvvy/scheduler/airflow_home
> # The folder where your airflow pipelines live, most likely a
> # subfolder in a code repository
> # This path must be absolute
> dags_folder = /Users/paymahn/solvvy/scheduler/airflow_home/dags
> # The folder where airflow should store its log files
> # This path must be absolute
> base_log_folder = /Users/paymahn/solvvy/scheduler/airflow_home/logs
> # Airflow can store logs remotely in AWS S3 or Google Cloud Storage. Users
> # must supply an Airflow connection id that provides access to the storage
> # location.
> remote_log_conn_id =
> encrypt_s3_logs = False
> # Logging level
> logging_level = INFO
> # Logging class
> # Specify the class that will specify the logging configuration
> # This class has to be on the python classpath
> # logging_config_class = my.path.default_local_settings.LOGGING_CONFIG
> logging_config_class =
> # Log format
> log_format = [%%(asctime)s] {%%(filename)s:%%(lineno)d} %%(levelname)s - %%(message)s
> simple_log_format = %%(asctime)s %%(levelname)s - %%(message)s
> # The executor class that airflow should use. Choices include
> # SequentialExecutor, LocalExecutor, CeleryExecutor, DaskExecutor
> executor = LocalExecutor
> # The SqlAlchemy connection string to the metadata database.
> # SqlAlchemy supports many different database engine, more information
> # their website
> # sql_alchemy_conn = sqlite:////Users/paymahn/solvvy/scheduler/airflow_home/airflow.db
> sql_alchemy_conn = postgresql+psycopg2://airflow:airflow@localhost/postgres
> # The SqlAlchemy pool size is the maximum number of database connections
> # in the pool.
> sql_alchemy_pool_size = 5
> # The SqlAlchemy pool recycle is the number of seconds a connection
> # can be idle in the pool before it is invalidated. This config does
> # not apply to sqlite.
> sql_alchemy_pool_recycle = 3600
> # The amount of parallelism as a setting to the executor. This defines
> # the max number of task instances that should run simultaneously
> # on this airflow installation
> parallelism = 32
> # The number of task instances allowed to run concurrently by the scheduler
> dag_concurrency = 16
> # Are DAGs paused by default at creation
> dags_are_paused_at_creation = True
> # When not using pools, tasks are run in the "default pool",
> # whose size is guided by this config element
> non_pooled_task_slot_count = 128
> # The maximum number of active DAG runs per DAG
> max_active_runs_per_dag = 16
> # Whether to load the examples that ship with Airflow. It's good to
> # get started, but you probably want to set this to False in a production
> # environment
> load_examples = True
> # Where your Airflow plugins are stored
> plugins_folder = /Users/paymahn/solvvy/scheduler/airflow_home/plugins
> # Secret key to save connection passwords in the db
> fernet_key = pvHY8FTnk9VcN-LF8nKzuAr2PVclfQwKm4fhKQo_66k=
> # Whether to disable pickling dags
> donot_pickle = False
> # How long before timing out a python file import while filling the DagBag
> dagbag_import_timeout = 30
> # The class to use for running task instances in a subprocess
> task_runner = BashTaskRunner
> # If set, tasks without a `run_as_user` argument will be run with this user
> # Can be used to de-elevate a sudo user running Airflow when executing tasks
> default_impersonation =
> # What security module to use (for example kerberos):
> security =
> # Turn unit test mode on (overwrites many configuration options with test
> # values at runtime)
> unit_test_mode = False
> # Name of handler to read task instance logs.
> # Default to use file task handler.
> task_log_reader = file.task
> # Whether to enable pickling for xcom (note that this is insecure and allows for
> # RCE exploits). This will be deprecated in Airflow 2.0 (be forced to False).
> enable_xcom_pickling = True
> # When a task is killed forcefully, this is the amount of time in seconds that
> # it has to cleanup after it is sent a SIGTERM, before it is SIGKILLED
> killed_task_cleanup_time = 60
> [cli]
> # In what way should the cli access the API. The LocalClient will use the
> # database directly, while the json_client will use the api running on the
> # webserver
> api_client = airflow.api.client.local_client
> endpoint_url = http://localhost:8080
> [api]
> # How to authenticate users of the API
> auth_backend = airflow.api.auth.backend.default
> [operators]
> # The default owner assigned to each new operator, unless
> # provided explicitly or passed via `default_args`
> default_owner = Airflow
> default_cpus = 1
> default_ram = 512
> default_disk = 512
> default_gpus = 0
> [webserver]
> # The base url of your website as airflow cannot guess what domain or
> # cname you are using. This is used in automated emails that
> # airflow sends to point links to the right web server
> base_url = http://localhost:8080
> # The ip specified when starting the web server
> web_server_host = 0.0.0.0
> # The port on which to run the web server
> web_server_port = 8080
> # Paths to the SSL certificate and key for the web server. When both are
> # provided SSL will be enabled. This does not change the web server port.
> web_server_ssl_cert =
> web_server_ssl_key =
> # Number of seconds the gunicorn webserver waits before timing out on a worker
> web_server_worker_timeout = 120
> # Number of workers to refresh at a time. When set to 0, worker refresh is
> # disabled. When nonzero, airflow periodically refreshes webserver workers by
> # bringing up new ones and killing old ones.
> worker_refresh_batch_size = 1
> # Number of seconds to wait before refreshing a batch of workers.
> worker_refresh_interval = 30
> # Secret key used to run your flask app
> secret_key = temporary_key
> # Number of workers to run the Gunicorn web server
> workers = 4
> # The worker class gunicorn should use. Choices include
> # sync (default), eventlet, gevent
> worker_class = sync
> # Log files for the gunicorn webserver. '-' means log to stderr.
> access_logfile = -
> error_logfile = -
> # Expose the configuration file in the web server
> expose_config = False
> # Set to true to turn on authentication:
> # http://pythonhosted.org/airflow/security.html#web-authentication
> authenticate = False
> # Filter the list of dags by owner name (requires authentication to be enabled)
> filter_by_owner = False
> # Filtering mode. Choices include user (default) and ldapgroup.
> # Ldap group filtering requires using the ldap backend
> #
> # Note that the ldap server needs the "memberOf" overlay to be set up
> # in order to user the ldapgroup mode.
> owner_mode = user
> # Default DAG view.  Valid values are:
> # tree, graph, duration, gantt, landing_times
> dag_default_view = tree
> # Default DAG orientation. Valid values are:
> # LR (Left->Right), TB (Top->Bottom), RL (Right->Left), BT (Bottom->Top)
> dag_orientation = LR
> # Puts the webserver in demonstration mode; blurs the names of Operators for
> # privacy.
> demo_mode = False
> # The amount of time (in secs) webserver will wait for initial handshake
> # while fetching logs from other worker machine
> log_fetch_timeout_sec = 5
> # By default, the webserver shows paused DAGs. Flip this to hide paused
> # DAGs by default
> hide_paused_dags_by_default = False
> # Consistent page size across all listing views in the UI
> page_size = 100
> [email]
> email_backend = airflow.utils.email.send_email_smtp
> [smtp]
> # If you want airflow to send emails on retries, failure, and you want to use
> # the airflow.utils.email.send_email_smtp function, you have to configure an
> # smtp server here
> smtp_host = localhost
> smtp_starttls = True
> smtp_ssl = False
> # Uncomment and set the user/pass settings if you want to use SMTP AUTH
> # smtp_user = airflow
> # smtp_password = airflow
> smtp_port = 25
> smtp_mail_from = airf...@example.com
> [celery]
> # This section only applies if you are using the CeleryExecutor in
> # [core] section above
> # The app name that will be used by celery
> celery_app_name = airflow.executors.celery_executor
> # The concurrency that will be used when starting workers with the
> # "airflow worker" command. This defines the number of task instances that
> # a worker will take, so size up your workers based on the resources on
> # your worker box and the nature of your tasks
> celeryd_concurrency = 16
> # When you start an airflow worker, airflow starts a tiny web server
> # subprocess to serve the workers local log files to the airflow main
> # web server, who then builds pages and sends them to users. This defines
> # the port on which the logs are served. It needs to be unused, and open
> # visible from the main web server to connect into the workers.
> worker_log_server_port = 8793
> # The Celery broker URL. Celery supports RabbitMQ, Redis and experimentally
> # a sqlalchemy database. Refer to the Celery documentation for more
> # information.
> broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
> # Another key Celery setting
> celery_result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
> # Celery Flower is a sweet UI for Celery. Airflow has a shortcut to start
> # it `airflow flower`. This defines the IP that Celery Flower runs on
> flower_host = 0.0.0.0
> # This defines the port that Celery Flower runs on
> flower_port = 5555
> # Default queue that tasks get assigned to and that worker listen on.
> default_queue = default
> # Import path for celery configuration options
> celery_config_options = airflow.config_templates.default_celery.DEFAULT_CELERY_CONFIG
> [dask]
> # This section only applies if you are using the DaskExecutor in
> # [core] section above
> # The IP address and port of the Dask cluster's scheduler.
> cluster_address = 127.0.0.1:8786
> [scheduler]
> # Task instances listen for external kill signal (when you clear tasks
> # from the CLI or the UI), this defines the frequency at which they should
> # listen (in seconds).
> job_heartbeat_sec = 5
> # The scheduler constantly tries to trigger new tasks (look at the
> # scheduler section in the docs for more information). This defines
> # how often the scheduler should run (in seconds).
> scheduler_heartbeat_sec = 5
> # after how much time should the scheduler terminate in seconds
> # -1 indicates to run continuously (see also num_runs)
> run_duration = -1
> # after how much time a new DAGs should be picked up from the filesystem
> min_file_process_interval = 0
> dag_dir_list_interval = 300
> # How often should stats be printed to the logs
> print_stats_interval = 30
> child_process_log_directory = /Users/paymahn/solvvy/scheduler/airflow_home/logs/scheduler
> # Local task jobs periodically heartbeat to the DB. If the job has
> # not heartbeat in this many seconds, the scheduler will mark the
> # associated task instance as failed and will re-schedule the task.
> scheduler_zombie_task_threshold = 300
> # Turn off scheduler catchup by setting this to False.
> # Default behavior is unchanged and
> # Command Line Backfills still work, but the scheduler
> # will not do scheduler catchup if this is False,
> # however it can be set on a per DAG basis in the
> # DAG definition (catchup)
> catchup_by_default = True
> # This changes the batch size of queries in the scheduling main loop.
> # This depends on query length limits and how long you are willing to hold locks.
> # 0 for no limit
> max_tis_per_query = 0
> # Statsd (https://github.com/etsy/statsd) integration settings
> statsd_on = False
> statsd_host = localhost
> statsd_port = 8125
> statsd_prefix = airflow
> # The scheduler can run multiple threads in parallel to schedule dags.
> # This defines how many threads will run.
> max_threads = 2
> authenticate = False
> [ldap]
> # set this to ldaps://<your.ldap.server>:<port>
> uri = 
> user_filter = objectClass=*
> user_name_attr = uid
> group_member_attr = memberOf
> superuser_filter = 
> data_profiler_filter = 
> bind_user = cn=Manager,dc=example,dc=com
> bind_password = insecure
> basedn = dc=example,dc=com
> cacert = /etc/ca/ldap_ca.crt
> search_scope = LEVEL
> [mesos]
> # Mesos master address which MesosExecutor will connect to.
> master = localhost:5050
> # The framework name which Airflow scheduler will register itself as on mesos
> framework_name = Airflow
> # Number of cpu cores required for running one task instance using
> # 'airflow run <dag_id> <task_id> <execution_date> --local -p <pickle_id>'
> # command on a mesos slave
> task_cpu = 1
> # Memory in MB required for running one task instance using
> # 'airflow run <dag_id> <task_id> <execution_date> --local -p <pickle_id>'
> # command on a mesos slave
> task_memory = 256
> # Enable framework checkpointing for mesos
> # See http://mesos.apache.org/documentation/latest/slave-recovery/
> checkpoint = False
> # Failover timeout in milliseconds.
> # When checkpointing is enabled and this option is set, Mesos waits
> # until the configured timeout for
> # the MesosExecutor framework to re-register after a failover. Mesos
> # shuts down running tasks if the
> # MesosExecutor framework fails to re-register within this timeframe.
> # failover_timeout = 604800
> # Enable framework authentication for mesos
> # See http://mesos.apache.org/documentation/latest/configuration/
> authenticate = False
> # Mesos credentials, if authentication is enabled
> # default_principal = admin
> # default_secret = admin
> [kerberos]
> ccache = /tmp/airflow_krb5_ccache
> # gets augmented with fqdn
> principal = airflow
> reinit_frequency = 3600
> kinit_path = kinit
> keytab = airflow.keytab
> [github_enterprise]
> api_rev = v3
> [admin]
> # UI to hide sensitive variable fields when set to True
> hide_sensitive_variable_fields = True
> {noformat}



