vatsrahul1001 opened a new pull request, #49371:
URL: https://github.com/apache/airflow/pull/49371

   Noticed that when running any DAG that uses `DatasetOrTimeSchedule`, we get the error below. This PR fixes it.
   
   ```
   Traceback (most recent call last):
     File "/usr/local/bin/airflow", line 10, in <module>
       sys.exit(main())
     File "/opt/airflow/airflow-core/src/airflow/__main__.py", line 55, in main
       args.func(args)
     File "/opt/airflow/airflow-core/src/airflow/cli/cli_config.py", line 48, in command
       return func(*args, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/utils/cli.py", line 111, in wrapper
       return f(*args, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/utils/providers_configuration_loader.py", line 55, in wrapped_function
       return func(*args, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/cli/commands/dag_processor_command.py", line 54, in dag_processor
       run_command_with_daemon_option(
     File "/opt/airflow/airflow-core/src/airflow/cli/commands/daemon_utils.py", line 86, in run_command_with_daemon_option
       callback()
     File "/opt/airflow/airflow-core/src/airflow/cli/commands/dag_processor_command.py", line 57, in <lambda>
       callback=lambda: run_job(job=job_runner.job, execute_callable=job_runner._execute),
     File "/opt/airflow/airflow-core/src/airflow/utils/session.py", line 101, in wrapper
       return func(*args, session=session, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/jobs/job.py", line 347, in run_job
       return execute_job(job, execute_callable=execute_callable)
     File "/opt/airflow/airflow-core/src/airflow/jobs/job.py", line 376, in execute_job
       ret = execute_callable()
     File "/opt/airflow/airflow-core/src/airflow/jobs/dag_processor_job_runner.py", line 61, in _execute
       self.processor.run()
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/manager.py", line 262, in run
       return self._run_parsing_loop()
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/manager.py", line 351, in _run_parsing_loop
       self._collect_results()
     File "/opt/airflow/airflow-core/src/airflow/utils/session.py", line 101, in wrapper
       return func(*args, session=session, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/manager.py", line 808, in _collect_results
       self._file_stats[file] = process_parse_results(
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/manager.py", line 1129, in process_parse_results
       update_dag_parsing_results_in_db(
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/collection.py", line 334, in update_dag_parsing_results_in_db
       for attempt in run_with_db_retries(logger=log):
     File "/usr/local/lib/python3.9/site-packages/tenacity/__init__.py", line 445, in __iter__
       do = self.iter(retry_state=retry_state)
     File "/usr/local/lib/python3.9/site-packages/tenacity/__init__.py", line 378, in iter
       result = action(retry_state)
     File "/usr/local/lib/python3.9/site-packages/tenacity/__init__.py", line 400, in <lambda>
       self._add_action_func(lambda rs: rs.outcome.result())
     File "/usr/local/lib/python3.9/concurrent/futures/_base.py", line 439, in result
       return self.__get_result()
     File "/usr/local/lib/python3.9/concurrent/futures/_base.py", line 391, in __get_result
       raise self._exception
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/collection.py", line 344, in update_dag_parsing_results_in_db
       DAG.bulk_write_to_db(bundle_name, bundle_version, dags, session=session)
     File "/opt/airflow/airflow-core/src/airflow/utils/session.py", line 99, in wrapper
       return func(*args, **kwargs)
     File "/opt/airflow/airflow-core/src/airflow/models/dag.py", line 1900, in bulk_write_to_db
       dag_op.update_dags(orm_dags, session=session)
     File "/opt/airflow/airflow-core/src/airflow/dag_processing/collection.py", line 469, in update_dags
       dm.timetable_summary = dag.timetable.summary
     File "/opt/airflow/airflow-core/src/airflow/serialization/serialized_objects.py", line 2096, in timetable
       return decode_timetable(self.data["dag"]["timetable"])
     File "/opt/airflow/airflow-core/src/airflow/serialization/serialized_objects.py", line 415, in decode_timetable
       return timetable_class.deserialize(var[Encoding.VAR])
     File "/opt/airflow/airflow-core/src/airflow/timetables/assets.py", line 60, in deserialize
       return cls(
   TypeError: __init__() got an unexpected keyword argument 'assets'
   
   ```
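   The failure mode visible in the traceback is a mismatch between the keyword that `deserialize` passes to `cls(...)` in `airflow/timetables/assets.py` and the parameters the constructor actually accepts: `assets` is passed but not accepted. The snippet below is a minimal, self-contained sketch of that pattern under illustrative names; it is not the actual Airflow implementation, and the real fix may adjust either side.
   
   ```python
   # Minimal sketch (illustrative names, not Airflow's code) of a deserialize()
   # classmethod passing a keyword argument the constructor does not accept.


   class Timetable:
       """Stand-in for a timetable base class."""


   class ExampleAssetOrTimeSchedule(Timetable):
       """Hypothetical schedule class used only to reproduce the error pattern."""

       def __init__(self, *, timetable, datasets):
           # Constructor still accepts the old keyword name...
           self.timetable = timetable
           self.datasets = datasets

       @classmethod
       def deserialize(cls, data):
           # ...while deserialization passes the new one, reproducing:
           # TypeError: __init__() got an unexpected keyword argument 'assets'
           return cls(timetable=data["timetable"], assets=data["assets"])

       @classmethod
       def deserialize_fixed(cls, data):
           # Sketch of a fix: keep the keyword in sync with the constructor signature.
           return cls(timetable=data["timetable"], datasets=data["assets"])


   if __name__ == "__main__":
       payload = {"timetable": "0 0 * * *", "assets": ["asset_a"]}
       try:
           ExampleAssetOrTimeSchedule.deserialize(payload)
       except TypeError as err:
           print(err)  # ... got an unexpected keyword argument 'assets'
       print(ExampleAssetOrTimeSchedule.deserialize_fixed(payload).datasets)  # ['asset_a']
   ```
   Whichever side the real fix changes (the constructor parameter or the keyword used during deserialization), the two just need to agree again.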
   
   ---
   **^ Add meaningful description above**
   Read the **[Pull Request Guidelines](https://github.com/apache/airflow/blob/main/contributing-docs/05_pull_requests.rst#pull-request-guidelines)** for more information.
   In case of fundamental code changes, an Airflow Improvement Proposal ([AIP](https://cwiki.apache.org/confluence/display/AIRFLOW/Airflow+Improvement+Proposals)) is needed.
   In case of a new dependency, check compliance with the [ASF 3rd Party License Policy](https://www.apache.org/legal/resolved.html#category-x).
   In case of backwards incompatible changes, please leave a note in a newsfragment file, named `{pr_number}.significant.rst` or `{issue_number}.significant.rst`, in [airflow-core/newsfragments](https://github.com/apache/airflow/tree/main/airflow-core/newsfragments).
   


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@airflow.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
