This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 9aa589ec90 Fix: git clone on Windows (#26105)
9aa589ec90 is described below

commit 9aa589ec90a6e56837495334f5eadf62158274e4
Author: Denis Kokorin <[email protected]>
AuthorDate: Mon Sep 19 14:09:54 2022 +0300

    Fix: git clone on Windows (#26105)
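
    Windows does not allow ':' in file names, so `git clone` failed on
    Windows while checking out the committed test log files whose run-id
    directories contain colons (for example
    "scheduled__2017-09-01T00:00:00+00:00"). This change removes those
    files, creates the expected logs at test time via a FileTaskHandler-based
    fixture instead, and adds a windows-latest CI job that performs a
    checkout so the regression is caught early.

    A minimal sketch of the failure mode follows; the is_windows_safe helper
    is illustrative only and is not part of this commit or of Airflow:

        import re

        # File-name characters that Windows rejects; ':' is among them.
        WINDOWS_RESERVED = re.compile(r'[<>:"|?*]')

        def is_windows_safe(path: str) -> bool:
            """Return True if no path component uses a reserved character."""
            return not any(WINDOWS_RESERVED.search(p) for p in path.split("/"))

        # One of the committed log paths that broke the checkout:
        assert not is_windows_safe(
            "tests/www/test_logs/dag_for_testing_log_view/"
            "scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log"
        )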
---
 .github/workflows/ci.yml                           | 12 +++++
 tests/www/test_logs/__init__.py                    | 16 -------
 .../task_for_testing_log_view/1.log                |  1 -
 .../2017-09-01T00.00.00+00.00/1.log                |  1 -
 .../attempt=1.log                                  |  1 -
 tests/www/views/test_views_log.py                  | 55 ++++++++++++++++++----
 6 files changed, 57 insertions(+), 29 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d8849e2c4f..c3c7352131 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -605,6 +605,18 @@ ${{ hashFiles('.pre-commit-config.yaml') }}"
           python -m pip install -r ./docker_tests/requirements.txt &&
          python -m pytest docker_tests/test_examples_of_prod_image_building.py -n auto --color=yes
 
+  test-git-clone-on-windows:
+    timeout-minutes: 5
+    name: "Test git clone on Windows"
+    runs-on: windows-latest
+    needs: [build-info]
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 2
+          persist-credentials: false
+
   wait-for-ci-images:
     timeout-minutes: 120
     name: "Wait for CI images"
diff --git a/tests/www/test_logs/__init__.py b/tests/www/test_logs/__init__.py
deleted file mode 100644
index 13a83393a9..0000000000
--- a/tests/www/test_logs/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git a/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log b/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log
deleted file mode 100644
index bc10ef7880..0000000000
--- a/tests/www/test_logs/dag_for_testing_log_view/scheduled__2017-09-01T00:00:00+00:00/task_for_testing_log_view/1.log
+++ /dev/null
@@ -1 +0,0 @@
-Log for testing.
diff --git a/tests/www/test_logs/dag_for_testing_log_view/task_for_testing_log_view/2017-09-01T00.00.00+00.00/1.log b/tests/www/test_logs/dag_for_testing_log_view/task_for_testing_log_view/2017-09-01T00.00.00+00.00/1.log
deleted file mode 100644
index bc10ef7880..0000000000
--- a/tests/www/test_logs/dag_for_testing_log_view/task_for_testing_log_view/2017-09-01T00.00.00+00.00/1.log
+++ /dev/null
@@ -1 +0,0 @@
-Log for testing.
diff --git a/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log b/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log
deleted file mode 100644
index bc10ef7880..0000000000
--- a/tests/www/test_logs/dag_id=dag_for_testing_log_view/run_id=scheduled__2017-09-01T00:00:00+00:00/task_id=task_for_testing_log_view/attempt=1.log
+++ /dev/null
@@ -1 +0,0 @@
-Log for testing.
diff --git a/tests/www/views/test_views_log.py b/tests/www/views/test_views_log.py
index 20c969eb05..0c55ac4f66 100644
--- a/tests/www/views/test_views_log.py
+++ b/tests/www/views/test_views_log.py
@@ -18,8 +18,10 @@
 from __future__ import annotations
 
 import copy
+import logging
 import logging.config
 import pathlib
+import shutil
 import sys
 import tempfile
 import unittest.mock
@@ -32,6 +34,7 @@ from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONF
 from airflow.models import DagBag, DagRun
 from airflow.models.tasklog import LogTemplate
 from airflow.utils import timezone
+from airflow.utils.log.file_task_handler import FileTaskHandler
 from airflow.utils.log.logging_mixin import ExternalLoggingMixin
 from airflow.utils.session import create_session
 from airflow.utils.state import DagRunState, TaskInstanceState
@@ -56,7 +59,12 @@ def backup_modules():
 
 
 @pytest.fixture(scope="module")
-def log_app(backup_modules):
+def log_path(tmp_path_factory):
+    return tmp_path_factory.mktemp("logs")
+
+
+@pytest.fixture(scope="module")
+def log_app(backup_modules, log_path):
     @dont_initialize_flask_app_submodules(
         skip_all_except=[
             "init_appbuilder",
@@ -84,9 +92,7 @@ def log_app(backup_modules):
 
     # Create a custom logging configuration
     logging_config = copy.deepcopy(DEFAULT_LOGGING_CONFIG)
-    logging_config['handlers']['task']['base_log_folder'] = str(
-        pathlib.Path(__file__, "..", "..", "test_logs").resolve(),
-    )
+    logging_config['handlers']['task']['base_log_folder'] = str(log_path)
 
     with tempfile.TemporaryDirectory() as settings_dir:
         local_settings = pathlib.Path(settings_dir, "airflow_local_settings.py")
@@ -165,6 +171,25 @@ def tis(dags, session):
     clear_db_runs()
 
 
+@pytest.fixture
+def create_expected_log_file(log_path, tis):
+    ti, _ = tis
+    handler = FileTaskHandler(log_path)
+
+    def create_expected_log_file(try_number):
+        ti.try_number = try_number - 1
+        handler.set_context(ti)
+        handler.emit(logging.makeLogRecord({"msg": "Log for testing."}))
+        handler.flush()
+
+    yield create_expected_log_file
+    # log_path fixture is used in log_app, so both have "module" scope. Because of that
+    # log_path isn't deleted automatically by pytest between tests
+    # We delete created log files manually to make sure tests do not reuse logs created by other tests
+    for sub_path in log_path.iterdir():
+        shutil.rmtree(sub_path)
+
+
 @pytest.fixture()
 def log_admin_client(log_app):
     return client_with_login(log_app, username="test", password="test")
@@ -213,13 +238,14 @@ def test_get_file_task_log(log_admin_client, tis, state, try_number, num_logs):
     assert f'log-group-{num_logs + 1}' not in data
 
 
-def test_get_logs_with_metadata_as_download_file(log_admin_client):
+def test_get_logs_with_metadata_as_download_file(log_admin_client, create_expected_log_file):
     url_template = (
         "get_logs_with_metadata?dag_id={}&"
         "task_id={}&execution_date={}&"
         "try_number={}&metadata={}&format=file"
     )
     try_number = 1
+    create_expected_log_file(try_number)
     date = DEFAULT_DATE.isoformat()
     url = url_template.format(
         DAG_ID,
@@ -245,7 +271,7 @@ DIFFERENT_LOG_FILENAME = "{{ ti.dag_id }}/{{ ti.run_id }}/{{ ti.task_id }}/{{ tr
 
 
 @pytest.fixture()
-def dag_run_with_log_filename():
+def dag_run_with_log_filename(tis):
     run_filters = [DagRun.dag_id == DAG_ID, DagRun.execution_date == DEFAULT_DATE]
     with create_session() as session:
         log_template = session.merge(
@@ -255,14 +281,20 @@ def dag_run_with_log_filename():
         run_query = session.query(DagRun).filter(*run_filters)
         run_query.update({"log_template_id": log_template.id})
         dag_run = run_query.one()
+    # Dag has been updated, replace Dag in Task Instance
+    ti, _ = tis
+    ti.dag_run = dag_run
     yield dag_run
     with create_session() as session:
         session.query(DagRun).filter(*run_filters).update({"log_template_id": None})
         session.query(LogTemplate).filter(LogTemplate.id == log_template.id).delete()
 
 
-def test_get_logs_for_changed_filename_format_db(log_admin_client, dag_run_with_log_filename):
+def test_get_logs_for_changed_filename_format_db(
+    log_admin_client, dag_run_with_log_filename, create_expected_log_file
+):
     try_number = 1
+    create_expected_log_file(try_number)
     url = (
         f"get_logs_with_metadata?dag_id={dag_run_with_log_filename.dag_id}&"
         f"task_id={TASK_ID}&"
@@ -315,14 +347,16 @@ def test_get_logs_with_metadata_as_download_large_file(_, log_admin_client):
 
 
 @pytest.mark.parametrize("metadata", ["null", "{}"])
-def test_get_logs_with_metadata(log_admin_client, metadata):
+def test_get_logs_with_metadata(log_admin_client, metadata, create_expected_log_file):
     url_template = "get_logs_with_metadata?dag_id={}&task_id={}&execution_date={}&try_number={}&metadata={}"
+    try_number = 1
+    create_expected_log_file(try_number)
     response = log_admin_client.get(
         url_template.format(
             DAG_ID,
             TASK_ID,
             urllib.parse.quote_plus(DEFAULT_DATE.isoformat()),
-            1,
+            try_number,
             metadata,
         ),
         data={"username": "test", "password": "test"},
@@ -403,13 +437,14 @@ def test_get_logs_response_with_ti_equal_to_none(log_admin_client):
     assert "*** Task instance did not exist in the DB\n" == data['message']
 
 
-def test_get_logs_with_json_response_format(log_admin_client):
+def test_get_logs_with_json_response_format(log_admin_client, create_expected_log_file):
     url_template = (
         "get_logs_with_metadata?dag_id={}&"
         "task_id={}&execution_date={}&"
         "try_number={}&metadata={}&format=json"
     )
     try_number = 1
+    create_expected_log_file(try_number)
     url = url_template.format(
         DAG_ID,
         TASK_ID,
