This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 95b5a0a2ac Refactor dataproc system tests (#40720)
95b5a0a2ac is described below
commit 95b5a0a2ac457ff5b0258845eb3e2c790cfaea41
Author: VladaZakharova <[email protected]>
AuthorDate: Wed Jul 24 16:01:54 2024 +0200
Refactor dataproc system tests (#40720)
---
airflow/providers/google/cloud/hooks/dataproc.py | 2 +-
.../providers/google/cloud/operators/dataproc.py | 2 +-
.../google/cloud/operators/dataproc_metastore.py | 10 +-
scripts/ci/pre_commit/check_system_tests.py | 3 +-
.../cloud/dataproc/example_dataproc_batch.py | 35 +-
.../dataproc/example_dataproc_batch_deferrable.py | 2 +-
.../dataproc/example_dataproc_batch_persistent.py | 23 +-
...proc_cluster_create_existing_stopped_cluster.py | 14 +-
.../example_dataproc_cluster_deferrable.py | 8 +-
.../dataproc/example_dataproc_cluster_diagnose.py | 11 +-
.../dataproc/example_dataproc_cluster_generator.py | 46 +-
.../example_dataproc_cluster_start_stop.py | 11 +-
.../dataproc/example_dataproc_cluster_update.py | 8 +-
.../google/cloud/dataproc/example_dataproc_gke.py | 16 +-
.../cloud/dataproc/example_dataproc_hadoop.py | 6 +-
.../google/cloud/dataproc/example_dataproc_hive.py | 6 +-
.../google/cloud/dataproc/example_dataproc_pig.py | 6 +-
.../cloud/dataproc/example_dataproc_presto.py | 6 +-
.../cloud/dataproc/example_dataproc_pyspark.py | 42 +-
.../cloud/dataproc/example_dataproc_spark.py | 7 +-
.../cloud/dataproc/example_dataproc_spark_async.py | 7 +-
.../dataproc/example_dataproc_spark_deferrable.py | 7 +-
.../cloud/dataproc/example_dataproc_spark_sql.py | 7 +-
.../cloud/dataproc/example_dataproc_sparkr.py | 40 +-
.../cloud/dataproc/example_dataproc_trino.py | 6 +-
.../cloud/dataproc/example_dataproc_workflow.py | 7 +-
.../example_dataproc_workflow_deferrable.py | 6 +-
.../google/cloud/dataproc/resources/__init__.py | 16 -
.../google/cloud/dataproc/resources/hello_world.R | 9 -
.../google/cloud/dataproc/resources/hello_world.py | 25 -
.../google/cloud/dataproc/resources/hive.sql | 2188 --------------------
.../google/cloud/dataproc/resources/pip-install.sh | 69 -
.../example_dataproc_metastore.py | 34 +-
.../example_dataproc_metastore_backup.py | 5 +-
...ple_dataproc_metastore_hive_partition_sensor.py | 5 +-
35 files changed, 264 insertions(+), 2431 deletions(-)
diff --git a/airflow/providers/google/cloud/hooks/dataproc.py
b/airflow/providers/google/cloud/hooks/dataproc.py
index fc7f92c906..2eb8d8952c 100644
--- a/airflow/providers/google/cloud/hooks/dataproc.py
+++ b/airflow/providers/google/cloud/hooks/dataproc.py
@@ -274,7 +274,7 @@ class DataprocHook(GoogleBaseHook):
self,
operation: Operation,
timeout: float | None = None,
- result_retry: AsyncRetry | _MethodDefault = DEFAULT,
+ result_retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
) -> Any:
"""Wait for a long-lasting operation to complete."""
try:
diff --git a/airflow/providers/google/cloud/operators/dataproc.py
b/airflow/providers/google/cloud/operators/dataproc.py
index 889f8b3e59..efaf0d6326 100644
--- a/airflow/providers/google/cloud/operators/dataproc.py
+++ b/airflow/providers/google/cloud/operators/dataproc.py
@@ -2995,7 +2995,7 @@ class
DataprocCreateBatchOperator(GoogleCloudBaseOperator):
metadata: Sequence[tuple[str, str]] = (),
gcp_conn_id: str = "google_cloud_default",
impersonation_chain: str | Sequence[str] | None = None,
- result_retry: AsyncRetry | _MethodDefault = DEFAULT,
+ result_retry: AsyncRetry | _MethodDefault | Retry = DEFAULT,
asynchronous: bool = False,
deferrable: bool = conf.getboolean("operators", "default_deferrable",
fallback=False),
polling_interval_seconds: int = 5,
diff --git a/airflow/providers/google/cloud/operators/dataproc_metastore.py
b/airflow/providers/google/cloud/operators/dataproc_metastore.py
index 2da7ce9269..aa7dbafd22 100644
--- a/airflow/providers/google/cloud/operators/dataproc_metastore.py
+++ b/airflow/providers/google/cloud/operators/dataproc_metastore.py
@@ -22,12 +22,12 @@ from __future__ import annotations
import time
from typing import TYPE_CHECKING, Sequence
+from google.api_core.exceptions import AlreadyExists
from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault
from google.api_core.retry import Retry, exponential_sleep_generator
from google.cloud.metastore_v1 import MetadataExport,
MetadataManagementActivity
from google.cloud.metastore_v1.types import Backup, MetadataImport, Service
from google.cloud.metastore_v1.types.metastore import DatabaseDumpSpec, Restore
-from googleapiclient.errors import HttpError
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator, BaseOperatorLink
@@ -242,9 +242,7 @@ class
DataprocMetastoreCreateBackupOperator(GoogleCloudBaseOperator):
)
backup = hook.wait_for_operation(self.timeout, operation)
self.log.info("Backup %s created successfully", self.backup_id)
- except HttpError as err:
- if err.resp.status not in (409, "409"):
- raise
+ except AlreadyExists:
self.log.info("Backup %s already exists", self.backup_id)
backup = hook.get_backup(
project_id=self.project_id,
@@ -448,9 +446,7 @@ class
DataprocMetastoreCreateServiceOperator(GoogleCloudBaseOperator):
)
service = hook.wait_for_operation(self.timeout, operation)
self.log.info("Service %s created successfully", self.service_id)
- except HttpError as err:
- if err.resp.status not in (409, "409"):
- raise
+ except AlreadyExists:
self.log.info("Instance %s already exists", self.service_id)
service = hook.get_service(
region=self.region,
diff --git a/scripts/ci/pre_commit/check_system_tests.py
b/scripts/ci/pre_commit/check_system_tests.py
index 4c82272ad7..89e2a9f24a 100755
--- a/scripts/ci/pre_commit/check_system_tests.py
+++ b/scripts/ci/pre_commit/check_system_tests.py
@@ -35,6 +35,7 @@ console = Console(color_system="standard", width=200)
errors: list[str] = []
WATCHER_APPEND_INSTRUCTION = "list(dag.tasks) >> watcher()"
+WATCHER_APPEND_INSTRUCTION_SHORT = " >> watcher()"
PYTEST_FUNCTION = """
from tests.system.utils import get_test_run # noqa: E402
@@ -52,7 +53,7 @@ PYTEST_FUNCTION_PATTERN = re.compile(
def _check_file(file: Path):
content = file.read_text()
if "from tests.system.utils.watcher import watcher" in content:
- index = content.find(WATCHER_APPEND_INSTRUCTION)
+ index = content.find(WATCHER_APPEND_INSTRUCTION_SHORT)
if index == -1:
errors.append(
f"[red]The example {file} imports tests.system.utils.watcher "
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
index a6001c43ff..852fa0914d 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch.py
@@ -23,7 +23,7 @@ from __future__ import annotations
import os
from datetime import datetime
-from google.api_core.retry_async import AsyncRetry
+from google.api_core.retry import Retry
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.dataproc import (
@@ -37,10 +37,10 @@ from airflow.providers.google.cloud.sensors.dataproc import
DataprocBatchSensor
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-DAG_ID = "dataproc_batch"
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-REGION = "europe-west1"
+DAG_ID = "dataproc_batch"
+REGION = "europe-west3"
BATCH_ID = f"batch-{ENV_ID}-{DAG_ID}".replace("_", "-")
BATCH_ID_2 = f"batch-{ENV_ID}-{DAG_ID}-2".replace("_", "-")
@@ -77,7 +77,7 @@ with DAG(
region=REGION,
batch=BATCH_CONFIG,
batch_id=BATCH_ID_2,
- result_retry=AsyncRetry(maximum=10.0, initial=10.0, multiplier=1.0),
+ result_retry=Retry(maximum=100.0, initial=10.0, multiplier=1.0),
)
create_batch_3 = DataprocCreateBatchOperator(
@@ -104,10 +104,6 @@ with DAG(
get_batch = DataprocGetBatchOperator(
task_id="get_batch", project_id=PROJECT_ID, region=REGION,
batch_id=BATCH_ID
)
-
- get_batch_2 = DataprocGetBatchOperator(
- task_id="get_batch_2", project_id=PROJECT_ID, region=REGION,
batch_id=BATCH_ID_2
- )
# [END how_to_cloud_dataproc_get_batch_operator]
# [START how_to_cloud_dataproc_list_batches_operator]
@@ -136,6 +132,14 @@ with DAG(
)
# [END how_to_cloud_dataproc_cancel_operation_operator]
+ batch_cancelled_sensor = DataprocBatchSensor(
+ task_id="batch_cancelled_sensor",
+ region=REGION,
+ project_id=PROJECT_ID,
+ batch_id=BATCH_ID_4,
+ poke_interval=10,
+ )
+
# [START how_to_cloud_dataproc_delete_batch_operator]
delete_batch = DataprocDeleteBatchOperator(
task_id="delete_batch", project_id=PROJECT_ID, region=REGION,
batch_id=BATCH_ID
@@ -153,25 +157,30 @@ with DAG(
delete_batch.trigger_rule = TriggerRule.ALL_DONE
delete_batch_2.trigger_rule = TriggerRule.ALL_DONE
delete_batch_3.trigger_rule = TriggerRule.ALL_DONE
- delete_batch_4.trigger_rule = TriggerRule.ALL_DONE
+ delete_batch_4.trigger_rule = TriggerRule.ALL_FAILED
(
# TEST SETUP
[create_batch, create_batch_2, create_batch_3]
# TEST BODY
>> batch_async_sensor
- >> [get_batch, get_batch_2, list_batches]
+ >> get_batch
+ >> list_batches
>> create_batch_4
>> cancel_operation
# TEST TEARDOWN
- >> [delete_batch, delete_batch_2, delete_batch_3, delete_batch_4]
+ >> [delete_batch, delete_batch_2, delete_batch_3]
+ >> batch_cancelled_sensor
+ >> delete_batch_4
)
from tests.system.utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "teardown" task with trigger rule is part of the DAG
- list(dag.tasks) >> watcher()
+
+ # Excluding sensor because we expect it to fail due to cancelled operation
+ [task for task in dag.tasks if task.task_id != "batch_cancelled_sensor"]
>> watcher()
from tests.system.utils import get_test_run # noqa: E402
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
index ad0d513a7a..1d2fec951e 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_deferrable.py
@@ -37,7 +37,7 @@ from tests.system.providers.google import
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "dataproc_batch_deferrable"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-REGION = "europe-west1"
+REGION = "europe-north1"
BATCH_ID = f"batch-{ENV_ID}-{DAG_ID}".replace("_", "-")
BATCH_CONFIG = {
"spark_batch": {
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
index 1cf300afe7..cb4e731c67 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_batch_persistent.py
@@ -28,17 +28,21 @@ from airflow.providers.google.cloud.operators.dataproc
import (
ClusterGenerator,
DataprocCreateBatchOperator,
DataprocCreateClusterOperator,
+ DataprocDeleteBatchOperator,
DataprocDeleteClusterOperator,
)
from airflow.providers.google.cloud.operators.gcs import
GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_batch_ps"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
-BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
-REGION = "europe-west1"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}".replace("-", "_")
+REGION = "europe-north1"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
BATCH_ID = f"batch-{ENV_ID}-{DAG_ID}".replace("_", "-")
CLUSTER_GENERATOR_CONFIG_FOR_PHS = ClusterGenerator(
@@ -98,6 +102,14 @@ with DAG(
)
# [END
how_to_cloud_dataproc_create_batch_operator_with_persistent_history_server]
+ delete_batch = DataprocDeleteBatchOperator(
+ task_id="delete_batch",
+ project_id=PROJECT_ID,
+ region=REGION,
+ batch_id=BATCH_ID,
+ trigger_rule=TriggerRule.ALL_DONE,
+ )
+
delete_cluster = DataprocDeleteClusterOperator(
task_id="delete_cluster",
project_id=PROJECT_ID,
@@ -117,6 +129,7 @@ with DAG(
# TEST BODY
>> create_batch
# TEST TEARDOWN
+ >> delete_batch
>> delete_cluster
>> delete_bucket
)
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
index 31d1c51962..10403cea3b 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_create_existing_stopped_cluster.py
@@ -32,14 +32,18 @@ from airflow.providers.google.cloud.operators.dataproc
import (
DataprocStopClusterOperator,
)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-DAG_ID = "example_dataproc_cluster_create_existing_stopped_cluster"
+DAG_ID = "dataproc_create_existing_stopped_cluster"
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEMS_TESTS_GCP_PROJECTS") or ""
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
+
+CLUSTER_NAME_BASE = f"{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+REGION = "europe-north1"
# Cluster definition
CLUSTER_CONFIG = {
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
index cd97ff0eb4..35adca660d 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_deferrable.py
@@ -33,12 +33,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_cluster_def"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+REGION = "europe-north1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
index 11acdd903b..a610049bf9 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_diagnose.py
@@ -33,12 +33,15 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_diagnose_cluster"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
+REGION = "europe-north1"
# Cluster definition
@@ -102,7 +105,7 @@ with DAG(
# TEST SETUP
create_cluster
# TEST BODY
- >> diagnose_cluster
+ >> [diagnose_cluster, diagnose_cluster_deferrable]
# TEST TEARDOWN
>> delete_cluster
)
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
index 769af2d27a..d64fda25e4 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_generator.py
@@ -24,7 +24,6 @@ from __future__ import annotations
import os
from datetime import datetime
-from pathlib import Path
from airflow.models.dag import DAG
from airflow.providers.google.cloud.operators.dataproc import (
@@ -32,25 +31,32 @@ from airflow.providers.google.cloud.operators.dataproc
import (
DataprocCreateClusterOperator,
DataprocDeleteClusterOperator,
)
-from airflow.providers.google.cloud.operators.gcs import
GCSCreateBucketOperator, GCSDeleteBucketOperator
-from airflow.providers.google.cloud.transfers.local_to_gcs import
LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.operators.gcs import (
+ GCSCreateBucketOperator,
+ GCSDeleteBucketOperator,
+ GCSSynchronizeBucketsOperator,
+)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_cluster_generation"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
-ZONE = "europe-west1-b"
-INIT_FILE_SRC = str(Path(__file__).parent / "resources" / "pip-install.sh")
+RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
+INIT_FILE = "pip-install.sh"
+GCS_INIT_FILE = f"gs://{RESOURCE_DATA_BUCKET}/dataproc/{INIT_FILE}"
-# Cluster definition: Generating Cluster Config for
DataprocCreateClusterOperator
-# [START how_to_cloud_dataproc_create_cluster_generate_cluster_config]
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
-INIT_FILE = "pip-install.sh"
+REGION = "us-east4"
+ZONE = "us-east4-a"
+# Cluster definition: Generating Cluster Config for
DataprocCreateClusterOperator
+# [START how_to_cloud_dataproc_create_cluster_generate_cluster_config]
CLUSTER_GENERATOR_CONFIG = ClusterGenerator(
project_id=PROJECT_ID,
zone=ZONE,
@@ -60,7 +66,7 @@ CLUSTER_GENERATOR_CONFIG = ClusterGenerator(
worker_disk_size=32,
num_workers=2,
storage_bucket=BUCKET_NAME,
- init_actions_uris=[f"gs://{BUCKET_NAME}/{INIT_FILE}"],
+ init_actions_uris=[GCS_INIT_FILE],
metadata={"PIP_PACKAGES": "pyyaml requests pandas openpyxl"},
num_preemptible_workers=1,
preemptibility="PREEMPTIBLE",
@@ -80,11 +86,13 @@ with DAG(
task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
)
- upload_file = LocalFilesystemToGCSOperator(
- task_id="upload_file",
- src=INIT_FILE_SRC,
- dst=INIT_FILE,
- bucket=BUCKET_NAME,
+ move_init_file = GCSSynchronizeBucketsOperator(
+ task_id="move_init_file",
+ source_bucket=RESOURCE_DATA_BUCKET,
+ source_object="dataproc",
+ destination_bucket=BUCKET_NAME,
+ destination_object="dataproc",
+ recursive=True,
)
# [START
how_to_cloud_dataproc_create_cluster_generate_cluster_config_operator]
@@ -114,7 +122,7 @@ with DAG(
(
# TEST SETUP
create_bucket
- >> upload_file
+ >> move_init_file
# TEST BODY
>> create_dataproc_cluster
# TEST TEARDOWN
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
index 1393183ba4..2af1352fdc 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_start_stop.py
@@ -32,14 +32,17 @@ from airflow.providers.google.cloud.operators.dataproc
import (
DataprocStopClusterOperator,
)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "dataproc_cluster_start_stop"
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-PROJECT_ID = os.environ.get("SYSTEMS_TESTS_GCP_PROJECTS") or ""
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+REGION = "europe-north1"
# Cluster definition
CLUSTER_CONFIG = {
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
index 6706e22cbe..610a19e17f 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_cluster_update.py
@@ -33,12 +33,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_update"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
-REGION = "europe-west1"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+REGION = "europe-north1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
index a9953621bd..550dec97c4 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py
@@ -41,13 +41,16 @@ from
airflow.providers.google.cloud.operators.kubernetes_engine import (
GKEDeleteClusterOperator,
)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
-DAG_ID = "dataproc-gke"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "default")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
+DAG_ID = "dataproc_gke"
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
REGION = "us-central1"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
GKE_CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}-gke".replace("_", "-")
WORKLOAD_POOL = f"{PROJECT_ID}.svc.id.goog"
GKE_CLUSTER_CONFIG = {
@@ -57,6 +60,7 @@ GKE_CLUSTER_CONFIG = {
},
"initial_node_count": 1,
}
+
GKE_NAMESPACE = os.environ.get("GKE_NAMESPACE", f"{CLUSTER_NAME}")
# [START how_to_cloud_dataproc_create_cluster_in_gke_config]
@@ -70,7 +74,8 @@ VIRTUAL_CLUSTER_CONFIG = {
"roles": ["DEFAULT"],
"node_pool_config": {
"config": {
- "preemptible": True,
+ "preemptible": False,
+ "machine_type": "e2-standard-4",
}
},
}
@@ -130,7 +135,6 @@ with DAG(
# TEST BODY
>> create_cluster_in_gke
# TEST TEARDOWN
- # >> delete_gke_cluster
>> delete_dataproc_cluster
>> delete_gke_cluster
)
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
index 5cc6274c04..f8f5cc3063 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_hadoop.py
@@ -34,12 +34,14 @@ from airflow.providers.google.cloud.operators.gcs import
GCSCreateBucketOperator
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_hadoop"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
OUTPUT_FOLDER = "wordcount"
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
index 2529ae232d..5e1189c7f9 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_hive.py
@@ -33,11 +33,13 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_hive"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
index cad53c5d76..9b76c10cbe 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_pig.py
@@ -34,10 +34,12 @@ from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "dataproc_pig"
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py
index c1217b0670..1c3cdf2082 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_presto.py
@@ -33,11 +33,13 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_presto"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py
index 2a8fb8fce7..4c6a64783a 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_pyspark.py
@@ -30,15 +30,27 @@ from airflow.providers.google.cloud.operators.dataproc
import (
DataprocDeleteClusterOperator,
DataprocSubmitJobOperator,
)
+from airflow.providers.google.cloud.operators.gcs import (
+ GCSCreateBucketOperator,
+ GCSDeleteBucketOperator,
+ GCSSynchronizeBucketsOperator,
+)
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_pyspark"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-JOB_FILE_URI =
"gs://airflow-system-tests-resources/dataproc/pyspark/dataproc-pyspark-job-pi.py"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
+BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
+JOB_FILE = "dataproc-pyspark-job-pi.py"
+GCS_JOB_FILE = f"gs://{BUCKET_NAME}/dataproc/{JOB_FILE}"
REGION = "europe-west1"
# Cluster definition
@@ -60,7 +72,7 @@ CLUSTER_CONFIG = {
PYSPARK_JOB = {
"reference": {"project_id": PROJECT_ID},
"placement": {"cluster_name": CLUSTER_NAME},
- "pyspark_job": {"main_python_file_uri": JOB_FILE_URI},
+ "pyspark_job": {"main_python_file_uri": GCS_JOB_FILE},
}
# [END how_to_cloud_dataproc_pyspark_config]
@@ -72,6 +84,19 @@ with DAG(
catchup=False,
tags=["example", "dataproc", "pyspark"],
) as dag:
+ create_bucket = GCSCreateBucketOperator(
+ task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
+ )
+
+ move_job_file = GCSSynchronizeBucketsOperator(
+ task_id="move_job_file",
+ source_bucket=RESOURCE_DATA_BUCKET,
+ source_object="dataproc/pyspark",
+ destination_bucket=BUCKET_NAME,
+ destination_object="dataproc",
+ recursive=True,
+ )
+
create_cluster = DataprocCreateClusterOperator(
task_id="create_cluster",
project_id=PROJECT_ID,
@@ -94,13 +119,20 @@ with DAG(
trigger_rule=TriggerRule.ALL_DONE,
)
+ delete_bucket = GCSDeleteBucketOperator(
+ task_id="delete_bucket", bucket_name=BUCKET_NAME,
trigger_rule=TriggerRule.ALL_DONE
+ )
+
(
# TEST SETUP
- create_cluster
+ create_bucket
+ >> move_job_file
+ >> create_cluster
# TEST BODY
>> pyspark_task
# TEST TEARDOWN
>> delete_cluster
+ >> delete_bucket
)
from tests.system.utils.watcher import watcher
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py
index 75d888b2a4..f939347baa 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark.py
@@ -33,11 +33,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_spark"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py
index 0f50e332a8..cd1060b5fe 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_async.py
@@ -34,11 +34,14 @@ from airflow.providers.google.cloud.sensors.dataproc import
DataprocJobSensor
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_spark_async"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py
index 35345625d9..d20aa0aa0e 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_deferrable.py
@@ -34,11 +34,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_spark_deferrable"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py
index 158a5ef917..0ca0d062d2 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_spark_sql.py
@@ -33,11 +33,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_spark_sql"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"dataproc-spark-sql-{ENV_ID}"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py
index 0927a5bdb7..5045490475 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_sparkr.py
@@ -30,15 +30,25 @@ from airflow.providers.google.cloud.operators.dataproc
import (
DataprocDeleteClusterOperator,
DataprocSubmitJobOperator,
)
+from airflow.providers.google.cloud.operators.gcs import (
+ GCSCreateBucketOperator,
+ GCSDeleteBucketOperator,
+ GCSSynchronizeBucketsOperator,
+)
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_sparkr"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-JOB_FILE_URI =
"gs://airflow-system-tests-resources/dataproc/sparkr/dataproc-sparkr-job.r"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
+JOB_FILE = "dataproc-sparkr-job.r"
+GCS_JOB_FILE = f"gs://{BUCKET_NAME}/dataproc/{JOB_FILE}"
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
# Cluster definition
@@ -60,7 +70,7 @@ CLUSTER_CONFIG = {
SPARKR_JOB = {
"reference": {"project_id": PROJECT_ID},
"placement": {"cluster_name": CLUSTER_NAME},
- "spark_r_job": {"main_r_file_uri": JOB_FILE_URI},
+ "spark_r_job": {"main_r_file_uri": GCS_JOB_FILE},
}
# [END how_to_cloud_dataproc_sparkr_config]
@@ -72,6 +82,19 @@ with DAG(
catchup=False,
tags=["example", "dataproc", "sparkr"],
) as dag:
+ create_bucket = GCSCreateBucketOperator(
+ task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
+ )
+
+ move_job_file = GCSSynchronizeBucketsOperator(
+ task_id="move_job_file",
+ source_bucket=RESOURCE_DATA_BUCKET,
+ source_object="dataproc/sparkr",
+ destination_bucket=BUCKET_NAME,
+ destination_object="dataproc",
+ recursive=True,
+ )
+
create_cluster = DataprocCreateClusterOperator(
task_id="create_cluster",
project_id=PROJECT_ID,
@@ -92,13 +115,20 @@ with DAG(
trigger_rule=TriggerRule.ALL_DONE,
)
+ delete_bucket = GCSDeleteBucketOperator(
+ task_id="delete_bucket", bucket_name=BUCKET_NAME,
trigger_rule=TriggerRule.ALL_DONE
+ )
+
(
# TEST SETUP
- create_cluster
+ create_bucket
+ >> move_job_file
+ >> create_cluster
# TEST BODY
>> sparkr_task
# TEST TEARDOWN
>> delete_cluster
+ >> delete_bucket
)
from tests.system.utils.watcher import watcher
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py
index efcf9d8bdc..936430ccf0 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_trino.py
@@ -33,11 +33,13 @@ from airflow.providers.google.cloud.operators.dataproc
import (
from airflow.utils.trigger_rule import TriggerRule
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_trino"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
REGION = "europe-west1"
# Cluster definition
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py
index 5a18e87084..334826b565 100644
--- a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py
+++ b/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow.py
@@ -31,12 +31,15 @@ from airflow.providers.google.cloud.operators.dataproc
import (
)
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_workflow"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
REGION = "europe-west1"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
+
CLUSTER_CONFIG = {
"master_config": {
"num_instances": 1,
diff --git
a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py
b/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py
index 1a72407914..ec765250a3 100644
---
a/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py
+++
b/tests/system/providers/google/cloud/dataproc/example_dataproc_workflow_deferrable.py
@@ -31,12 +31,14 @@ from airflow.providers.google.cloud.operators.dataproc
import (
)
from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
DAG_ID = "dataproc_workflow_def"
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
REGION = "europe-west1"
-CLUSTER_NAME = f"cluster-{ENV_ID}-{DAG_ID}".replace("_", "")
+CLUSTER_NAME_BASE = f"cluster-{DAG_ID}".replace("_", "-")
+CLUSTER_NAME_FULL = CLUSTER_NAME_BASE + f"-{ENV_ID}".replace("_", "-")
+CLUSTER_NAME = CLUSTER_NAME_BASE if len(CLUSTER_NAME_FULL) >= 33 else
CLUSTER_NAME_FULL
CLUSTER_CONFIG = {
"master_config": {
"num_instances": 1,
diff --git a/tests/system/providers/google/cloud/dataproc/resources/__init__.py
b/tests/system/providers/google/cloud/dataproc/resources/__init__.py
deleted file mode 100644
index 13a83393a9..0000000000
--- a/tests/system/providers/google/cloud/dataproc/resources/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
diff --git
a/tests/system/providers/google/cloud/dataproc/resources/hello_world.R
b/tests/system/providers/google/cloud/dataproc/resources/hello_world.R
deleted file mode 100644
index c6721b7b44..0000000000
--- a/tests/system/providers/google/cloud/dataproc/resources/hello_world.R
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/r
-if (nchar(Sys.getenv("SPARK_HOME")) < 1) {
-Sys.setenv(SPARK_HOME = "/home/spark")
-}
-library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))
-sparkR.session()
-# Create the SparkDataFrame
-df <- as.DataFrame(faithful)
-head(summarize(groupBy(df, df$waiting), count = n(df$waiting)))
diff --git
a/tests/system/providers/google/cloud/dataproc/resources/hello_world.py
b/tests/system/providers/google/cloud/dataproc/resources/hello_world.py
deleted file mode 100644
index bc4ea3e37e..0000000000
--- a/tests/system/providers/google/cloud/dataproc/resources/hello_world.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/python
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-from __future__ import annotations
-
-import pyspark
-
-sc = pyspark.SparkContext()
-rdd = sc.parallelize(["Hello,", "world!"])
-words = sorted(rdd.collect())
-print(words)
diff --git a/tests/system/providers/google/cloud/dataproc/resources/hive.sql
b/tests/system/providers/google/cloud/dataproc/resources/hive.sql
deleted file mode 100644
index 75e8ec2aa9..0000000000
--- a/tests/system/providers/google/cloud/dataproc/resources/hive.sql
+++ /dev/null
@@ -1,2188 +0,0 @@
-/*
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-*/
-
--- MySQL dump 10.13 Distrib 5.7.35, for Linux (x86_64)
---
--- Host: localhost Database: hive-7e773be5
--- ------------------------------------------------------
--- Server version 5.7.34-google-log
-
-/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
-/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
-/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
-/*!40101 SET NAMES utf8 */;
-/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
-/*!40103 SET TIME_ZONE='+00:00' */;
-/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
-/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS,
FOREIGN_KEY_CHECKS=0 */;
-/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
-/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-
---
--- Current Database: `hive-7e773be5`
---
-
-CREATE DATABASE /*!32312 IF NOT EXISTS*/ `hive-7e773be5` /*!40100 DEFAULT
CHARACTER SET utf8 */;
-
-USE `hive-7e773be5`;
-
---
--- Table structure for table `AUX_TABLE`
---
-
-DROP TABLE IF EXISTS `AUX_TABLE`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `AUX_TABLE` (
- `MT_KEY1` varchar(128) NOT NULL,
- `MT_KEY2` bigint(20) NOT NULL,
- `MT_COMMENT` varchar(255) DEFAULT NULL,
- PRIMARY KEY (`MT_KEY1`,`MT_KEY2`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `AUX_TABLE`
---
-
-LOCK TABLES `AUX_TABLE` WRITE;
-/*!40000 ALTER TABLE `AUX_TABLE` DISABLE KEYS */;
-INSERT INTO `AUX_TABLE` VALUES
('Cleaner',0,NULL),('CompactionHistory',0,NULL),('HouseKeeper',0,NULL),('Initiator',0,NULL),('MaterializationRebuild',0,NULL),('WriteSetCleaner',0,NULL);
-/*!40000 ALTER TABLE `AUX_TABLE` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `BUCKETING_COLS`
---
-
-DROP TABLE IF EXISTS `BUCKETING_COLS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `BUCKETING_COLS` (
- `SD_ID` bigint(20) NOT NULL,
- `BUCKET_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
- KEY `BUCKETING_COLS_N49` (`SD_ID`),
- CONSTRAINT `BUCKETING_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS`
(`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `BUCKETING_COLS`
---
-
-LOCK TABLES `BUCKETING_COLS` WRITE;
-/*!40000 ALTER TABLE `BUCKETING_COLS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `BUCKETING_COLS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `CDS`
---
-
-DROP TABLE IF EXISTS `CDS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `CDS` (
- `CD_ID` bigint(20) NOT NULL,
- PRIMARY KEY (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `CDS`
---
-
-LOCK TABLES `CDS` WRITE;
-/*!40000 ALTER TABLE `CDS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `CDS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `COLUMNS_V2`
---
-
-DROP TABLE IF EXISTS `COLUMNS_V2`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `COLUMNS_V2` (
- `CD_ID` bigint(20) NOT NULL,
- `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TYPE_NAME` mediumtext,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`CD_ID`,`COLUMN_NAME`),
- KEY `COLUMNS_V2_N49` (`CD_ID`),
- CONSTRAINT `COLUMNS_V2_FK1` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `COLUMNS_V2`
---
-
-LOCK TABLES `COLUMNS_V2` WRITE;
-/*!40000 ALTER TABLE `COLUMNS_V2` DISABLE KEYS */;
-/*!40000 ALTER TABLE `COLUMNS_V2` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `COMPACTION_QUEUE`
---
-
-DROP TABLE IF EXISTS `COMPACTION_QUEUE`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `COMPACTION_QUEUE` (
- `CQ_ID` bigint(20) NOT NULL,
- `CQ_DATABASE` varchar(128) NOT NULL,
- `CQ_TABLE` varchar(128) NOT NULL,
- `CQ_PARTITION` varchar(767) DEFAULT NULL,
- `CQ_STATE` char(1) NOT NULL,
- `CQ_TYPE` char(1) NOT NULL,
- `CQ_TBLPROPERTIES` varchar(2048) DEFAULT NULL,
- `CQ_WORKER_ID` varchar(128) DEFAULT NULL,
- `CQ_START` bigint(20) DEFAULT NULL,
- `CQ_RUN_AS` varchar(128) DEFAULT NULL,
- `CQ_HIGHEST_WRITE_ID` bigint(20) DEFAULT NULL,
- `CQ_META_INFO` varbinary(2048) DEFAULT NULL,
- `CQ_HADOOP_JOB_ID` varchar(32) DEFAULT NULL,
- PRIMARY KEY (`CQ_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `COMPACTION_QUEUE`
---
-
-LOCK TABLES `COMPACTION_QUEUE` WRITE;
-/*!40000 ALTER TABLE `COMPACTION_QUEUE` DISABLE KEYS */;
-/*!40000 ALTER TABLE `COMPACTION_QUEUE` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `COMPLETED_COMPACTIONS`
---
-
-DROP TABLE IF EXISTS `COMPLETED_COMPACTIONS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `COMPLETED_COMPACTIONS` (
- `CC_ID` bigint(20) NOT NULL,
- `CC_DATABASE` varchar(128) NOT NULL,
- `CC_TABLE` varchar(128) NOT NULL,
- `CC_PARTITION` varchar(767) DEFAULT NULL,
- `CC_STATE` char(1) NOT NULL,
- `CC_TYPE` char(1) NOT NULL,
- `CC_TBLPROPERTIES` varchar(2048) DEFAULT NULL,
- `CC_WORKER_ID` varchar(128) DEFAULT NULL,
- `CC_START` bigint(20) DEFAULT NULL,
- `CC_END` bigint(20) DEFAULT NULL,
- `CC_RUN_AS` varchar(128) DEFAULT NULL,
- `CC_HIGHEST_WRITE_ID` bigint(20) DEFAULT NULL,
- `CC_META_INFO` varbinary(2048) DEFAULT NULL,
- `CC_HADOOP_JOB_ID` varchar(32) DEFAULT NULL,
- PRIMARY KEY (`CC_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `COMPLETED_COMPACTIONS`
---
-
-LOCK TABLES `COMPLETED_COMPACTIONS` WRITE;
-/*!40000 ALTER TABLE `COMPLETED_COMPACTIONS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `COMPLETED_COMPACTIONS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `COMPLETED_TXN_COMPONENTS`
---
-
-DROP TABLE IF EXISTS `COMPLETED_TXN_COMPONENTS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `COMPLETED_TXN_COMPONENTS` (
- `CTC_TXNID` bigint(20) NOT NULL,
- `CTC_DATABASE` varchar(128) NOT NULL,
- `CTC_TABLE` varchar(256) DEFAULT NULL,
- `CTC_PARTITION` varchar(767) DEFAULT NULL,
- `CTC_TIMESTAMP` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
- `CTC_WRITEID` bigint(20) DEFAULT NULL,
- `CTC_UPDATE_DELETE` char(1) NOT NULL,
- KEY `COMPLETED_TXN_COMPONENTS_IDX`
(`CTC_DATABASE`,`CTC_TABLE`,`CTC_PARTITION`) USING BTREE
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `COMPLETED_TXN_COMPONENTS`
---
-
-LOCK TABLES `COMPLETED_TXN_COMPONENTS` WRITE;
-/*!40000 ALTER TABLE `COMPLETED_TXN_COMPONENTS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `COMPLETED_TXN_COMPONENTS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `CTLGS`
---
-
-DROP TABLE IF EXISTS `CTLGS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `CTLGS` (
- `CTLG_ID` bigint(20) NOT NULL,
- `NAME` varchar(256) DEFAULT NULL,
- `DESC` varchar(4000) DEFAULT NULL,
- `LOCATION_URI` varchar(4000) NOT NULL,
- PRIMARY KEY (`CTLG_ID`),
- UNIQUE KEY `UNIQUE_CATALOG` (`NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `CTLGS`
---
-
-LOCK TABLES `CTLGS` WRITE;
-/*!40000 ALTER TABLE `CTLGS` DISABLE KEYS */;
-INSERT INTO `CTLGS` VALUES (1,'hive','Default catalog for
Hive','gs://gcs-bucket-wj-dm-service-1-022faf65-f2f7-4f8c-b720-5aab8f43e94d/hive-warehouse');
-/*!40000 ALTER TABLE `CTLGS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `DATABASE_PARAMS`
---
-
-DROP TABLE IF EXISTS `DATABASE_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `DATABASE_PARAMS` (
- `DB_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(180) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`DB_ID`,`PARAM_KEY`),
- KEY `DATABASE_PARAMS_N49` (`DB_ID`),
- CONSTRAINT `DATABASE_PARAMS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS`
(`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `DATABASE_PARAMS`
---
-
-LOCK TABLES `DATABASE_PARAMS` WRITE;
-/*!40000 ALTER TABLE `DATABASE_PARAMS` DISABLE KEYS */;
-INSERT INTO `DATABASE_PARAMS` VALUES (1,'transient_lastDdlTime','1634563201');
-/*!40000 ALTER TABLE `DATABASE_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `DBS`
---
-
-DROP TABLE IF EXISTS `DBS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `DBS` (
- `DB_ID` bigint(20) NOT NULL,
- `DESC` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `DB_LOCATION_URI` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin NOT
NULL,
- `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `OWNER_TYPE` varchar(10) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `CTLG_NAME` varchar(256) NOT NULL DEFAULT 'hive',
- PRIMARY KEY (`DB_ID`),
- UNIQUE KEY `UNIQUE_DATABASE` (`NAME`,`CTLG_NAME`),
- KEY `CTLG_FK1` (`CTLG_NAME`),
- CONSTRAINT `CTLG_FK1` FOREIGN KEY (`CTLG_NAME`) REFERENCES `CTLGS` (`NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `DBS`
---
-
-LOCK TABLES `DBS` WRITE;
-/*!40000 ALTER TABLE `DBS` DISABLE KEYS */;
-INSERT INTO `DBS` VALUES (1,'Default Hive
database','gs://gcs-bucket-wj-dm-service-1-022faf65-f2f7-4f8c-b720-5aab8f43e94d/hive-warehouse','default','public','ROLE','hive');
-/*!40000 ALTER TABLE `DBS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `DB_PRIVS`
---
-
-DROP TABLE IF EXISTS `DB_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `DB_PRIVS` (
- `DB_GRANT_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `DB_ID` bigint(20) DEFAULT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `DB_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`DB_GRANT_ID`),
- UNIQUE KEY `DBPRIVILEGEINDEX`
(`AUTHORIZER`,`DB_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`DB_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
- KEY `DB_PRIVS_N49` (`DB_ID`),
- CONSTRAINT `DB_PRIVS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `DB_PRIVS`
---
-
-LOCK TABLES `DB_PRIVS` WRITE;
-/*!40000 ALTER TABLE `DB_PRIVS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `DB_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `DELEGATION_TOKENS`
---
-
-DROP TABLE IF EXISTS `DELEGATION_TOKENS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `DELEGATION_TOKENS` (
- `TOKEN_IDENT` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TOKEN` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- PRIMARY KEY (`TOKEN_IDENT`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `DELEGATION_TOKENS`
---
-
-LOCK TABLES `DELEGATION_TOKENS` WRITE;
-/*!40000 ALTER TABLE `DELEGATION_TOKENS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `DELEGATION_TOKENS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `FUNCS`
---
-
-DROP TABLE IF EXISTS `FUNCS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `FUNCS` (
- `FUNC_ID` bigint(20) NOT NULL,
- `CLASS_NAME` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `DB_ID` bigint(20) DEFAULT NULL,
- `FUNC_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `FUNC_TYPE` int(11) NOT NULL,
- `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `OWNER_TYPE` varchar(10) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`FUNC_ID`),
- UNIQUE KEY `UNIQUEFUNCTION` (`FUNC_NAME`,`DB_ID`),
- KEY `FUNCS_N49` (`DB_ID`),
- CONSTRAINT `FUNCS_FK1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `FUNCS`
---
-
-LOCK TABLES `FUNCS` WRITE;
-/*!40000 ALTER TABLE `FUNCS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `FUNCS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `FUNC_RU`
---
-
-DROP TABLE IF EXISTS `FUNC_RU`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `FUNC_RU` (
- `FUNC_ID` bigint(20) NOT NULL,
- `RESOURCE_TYPE` int(11) NOT NULL,
- `RESOURCE_URI` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`FUNC_ID`,`INTEGER_IDX`),
- CONSTRAINT `FUNC_RU_FK1` FOREIGN KEY (`FUNC_ID`) REFERENCES `FUNCS`
(`FUNC_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `FUNC_RU`
---
-
-LOCK TABLES `FUNC_RU` WRITE;
-/*!40000 ALTER TABLE `FUNC_RU` DISABLE KEYS */;
-/*!40000 ALTER TABLE `FUNC_RU` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `GLOBAL_PRIVS`
---
-
-DROP TABLE IF EXISTS `GLOBAL_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `GLOBAL_PRIVS` (
- `USER_GRANT_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `USER_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`USER_GRANT_ID`),
- UNIQUE KEY `GLOBALPRIVILEGEINDEX`
(`AUTHORIZER`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`USER_PRIV`,`GRANTOR`,`GRANTOR_TYPE`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `GLOBAL_PRIVS`
---
-
-LOCK TABLES `GLOBAL_PRIVS` WRITE;
-/*!40000 ALTER TABLE `GLOBAL_PRIVS` DISABLE KEYS */;
-INSERT INTO `GLOBAL_PRIVS` VALUES
(1,1634563201,1,'admin','ROLE','admin','ROLE','All','SQL');
-/*!40000 ALTER TABLE `GLOBAL_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `HIVE_LOCKS`
---
-
-DROP TABLE IF EXISTS `HIVE_LOCKS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `HIVE_LOCKS` (
- `HL_LOCK_EXT_ID` bigint(20) NOT NULL,
- `HL_LOCK_INT_ID` bigint(20) NOT NULL,
- `HL_TXNID` bigint(20) NOT NULL,
- `HL_DB` varchar(128) NOT NULL,
- `HL_TABLE` varchar(128) DEFAULT NULL,
- `HL_PARTITION` varchar(767) DEFAULT NULL,
- `HL_LOCK_STATE` char(1) NOT NULL,
- `HL_LOCK_TYPE` char(1) NOT NULL,
- `HL_LAST_HEARTBEAT` bigint(20) NOT NULL,
- `HL_ACQUIRED_AT` bigint(20) DEFAULT NULL,
- `HL_USER` varchar(128) NOT NULL,
- `HL_HOST` varchar(128) NOT NULL,
- `HL_HEARTBEAT_COUNT` int(11) DEFAULT NULL,
- `HL_AGENT_INFO` varchar(128) DEFAULT NULL,
- `HL_BLOCKEDBY_EXT_ID` bigint(20) DEFAULT NULL,
- `HL_BLOCKEDBY_INT_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`HL_LOCK_EXT_ID`,`HL_LOCK_INT_ID`),
- KEY `HIVE_LOCK_TXNID_INDEX` (`HL_TXNID`),
- KEY `HL_TXNID_IDX` (`HL_TXNID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `HIVE_LOCKS`
---
-
-LOCK TABLES `HIVE_LOCKS` WRITE;
-/*!40000 ALTER TABLE `HIVE_LOCKS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `HIVE_LOCKS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `IDXS`
---
-
-DROP TABLE IF EXISTS `IDXS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `IDXS` (
- `INDEX_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `DEFERRED_REBUILD` bit(1) NOT NULL,
- `INDEX_HANDLER_CLASS` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `INDEX_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `INDEX_TBL_ID` bigint(20) DEFAULT NULL,
- `LAST_ACCESS_TIME` int(11) NOT NULL,
- `ORIG_TBL_ID` bigint(20) DEFAULT NULL,
- `SD_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`INDEX_ID`),
- UNIQUE KEY `UNIQUEINDEX` (`INDEX_NAME`,`ORIG_TBL_ID`),
- KEY `IDXS_N51` (`SD_ID`),
- KEY `IDXS_N50` (`INDEX_TBL_ID`),
- KEY `IDXS_N49` (`ORIG_TBL_ID`),
- CONSTRAINT `IDXS_FK1` FOREIGN KEY (`ORIG_TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`),
- CONSTRAINT `IDXS_FK2` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
- CONSTRAINT `IDXS_FK3` FOREIGN KEY (`INDEX_TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `IDXS`
---
-
-LOCK TABLES `IDXS` WRITE;
-/*!40000 ALTER TABLE `IDXS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `IDXS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `INDEX_PARAMS`
---
-
-DROP TABLE IF EXISTS `INDEX_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `INDEX_PARAMS` (
- `INDEX_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`INDEX_ID`,`PARAM_KEY`),
- KEY `INDEX_PARAMS_N49` (`INDEX_ID`),
- CONSTRAINT `INDEX_PARAMS_FK1` FOREIGN KEY (`INDEX_ID`) REFERENCES `IDXS`
(`INDEX_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `INDEX_PARAMS`
---
-
-LOCK TABLES `INDEX_PARAMS` WRITE;
-/*!40000 ALTER TABLE `INDEX_PARAMS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `INDEX_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `I_SCHEMA`
---
-
-DROP TABLE IF EXISTS `I_SCHEMA`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `I_SCHEMA` (
- `SCHEMA_ID` bigint(20) NOT NULL,
- `SCHEMA_TYPE` int(11) NOT NULL,
- `NAME` varchar(256) DEFAULT NULL,
- `DB_ID` bigint(20) DEFAULT NULL,
- `COMPATIBILITY` int(11) NOT NULL,
- `VALIDATION_LEVEL` int(11) NOT NULL,
- `CAN_EVOLVE` bit(1) NOT NULL,
- `SCHEMA_GROUP` varchar(256) DEFAULT NULL,
- `DESCRIPTION` varchar(4000) DEFAULT NULL,
- PRIMARY KEY (`SCHEMA_ID`),
- KEY `DB_ID` (`DB_ID`),
- KEY `UNIQUE_NAME` (`NAME`),
- CONSTRAINT `I_SCHEMA_ibfk_1` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `I_SCHEMA`
---
-
-LOCK TABLES `I_SCHEMA` WRITE;
-/*!40000 ALTER TABLE `I_SCHEMA` DISABLE KEYS */;
-/*!40000 ALTER TABLE `I_SCHEMA` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `KEY_CONSTRAINTS`
---
-
-DROP TABLE IF EXISTS `KEY_CONSTRAINTS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `KEY_CONSTRAINTS` (
- `CHILD_CD_ID` bigint(20) DEFAULT NULL,
- `CHILD_INTEGER_IDX` int(11) DEFAULT NULL,
- `CHILD_TBL_ID` bigint(20) DEFAULT NULL,
- `PARENT_CD_ID` bigint(20) DEFAULT NULL,
- `PARENT_INTEGER_IDX` int(11) NOT NULL,
- `PARENT_TBL_ID` bigint(20) NOT NULL,
- `POSITION` bigint(20) NOT NULL,
- `CONSTRAINT_NAME` varchar(400) NOT NULL,
- `CONSTRAINT_TYPE` smallint(6) NOT NULL,
- `UPDATE_RULE` smallint(6) DEFAULT NULL,
- `DELETE_RULE` smallint(6) DEFAULT NULL,
- `ENABLE_VALIDATE_RELY` smallint(6) NOT NULL,
- `DEFAULT_VALUE` varchar(400) DEFAULT NULL,
- PRIMARY KEY (`CONSTRAINT_NAME`,`POSITION`),
- KEY `CONSTRAINTS_PARENT_TABLE_ID_INDEX` (`PARENT_TBL_ID`) USING BTREE,
- KEY `CONSTRAINTS_CONSTRAINT_TYPE_INDEX` (`CONSTRAINT_TYPE`) USING BTREE
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `KEY_CONSTRAINTS`
---
-
-LOCK TABLES `KEY_CONSTRAINTS` WRITE;
-/*!40000 ALTER TABLE `KEY_CONSTRAINTS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `KEY_CONSTRAINTS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `MASTER_KEYS`
---
-
-DROP TABLE IF EXISTS `MASTER_KEYS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `MASTER_KEYS` (
- `KEY_ID` int(11) NOT NULL AUTO_INCREMENT,
- `MASTER_KEY` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`KEY_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `MASTER_KEYS`
---
-
-LOCK TABLES `MASTER_KEYS` WRITE;
-/*!40000 ALTER TABLE `MASTER_KEYS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `MASTER_KEYS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `MATERIALIZATION_REBUILD_LOCKS`
---
-
-DROP TABLE IF EXISTS `MATERIALIZATION_REBUILD_LOCKS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `MATERIALIZATION_REBUILD_LOCKS` (
- `MRL_TXN_ID` bigint(20) NOT NULL,
- `MRL_DB_NAME` varchar(128) NOT NULL,
- `MRL_TBL_NAME` varchar(256) NOT NULL,
- `MRL_LAST_HEARTBEAT` bigint(20) NOT NULL,
- PRIMARY KEY (`MRL_TXN_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `MATERIALIZATION_REBUILD_LOCKS`
---
-
-LOCK TABLES `MATERIALIZATION_REBUILD_LOCKS` WRITE;
-/*!40000 ALTER TABLE `MATERIALIZATION_REBUILD_LOCKS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `MATERIALIZATION_REBUILD_LOCKS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `METASTORE_DB_PROPERTIES`
---
-
-DROP TABLE IF EXISTS `METASTORE_DB_PROPERTIES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `METASTORE_DB_PROPERTIES` (
- `PROPERTY_KEY` varchar(255) NOT NULL,
- `PROPERTY_VALUE` varchar(1000) NOT NULL,
- `DESCRIPTION` varchar(1000) DEFAULT NULL,
- PRIMARY KEY (`PROPERTY_KEY`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `METASTORE_DB_PROPERTIES`
---
-
-LOCK TABLES `METASTORE_DB_PROPERTIES` WRITE;
-/*!40000 ALTER TABLE `METASTORE_DB_PROPERTIES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `METASTORE_DB_PROPERTIES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `MIN_HISTORY_LEVEL`
---
-
-DROP TABLE IF EXISTS `MIN_HISTORY_LEVEL`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `MIN_HISTORY_LEVEL` (
- `MHL_TXNID` bigint(20) NOT NULL,
- `MHL_MIN_OPEN_TXNID` bigint(20) NOT NULL,
- PRIMARY KEY (`MHL_TXNID`),
- KEY `MIN_HISTORY_LEVEL_IDX` (`MHL_MIN_OPEN_TXNID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `MIN_HISTORY_LEVEL`
---
-
-LOCK TABLES `MIN_HISTORY_LEVEL` WRITE;
-/*!40000 ALTER TABLE `MIN_HISTORY_LEVEL` DISABLE KEYS */;
-/*!40000 ALTER TABLE `MIN_HISTORY_LEVEL` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `MV_CREATION_METADATA`
---
-
-DROP TABLE IF EXISTS `MV_CREATION_METADATA`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `MV_CREATION_METADATA` (
- `MV_CREATION_METADATA_ID` bigint(20) NOT NULL,
- `CAT_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TBL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TXN_LIST` text,
- `MATERIALIZATION_TIME` bigint(20) NOT NULL,
- PRIMARY KEY (`MV_CREATION_METADATA_ID`),
- KEY `MV_UNIQUE_TABLE` (`TBL_NAME`,`DB_NAME`) USING BTREE
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `MV_CREATION_METADATA`
---
-
-LOCK TABLES `MV_CREATION_METADATA` WRITE;
-/*!40000 ALTER TABLE `MV_CREATION_METADATA` DISABLE KEYS */;
-/*!40000 ALTER TABLE `MV_CREATION_METADATA` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `MV_TABLES_USED`
---
-
-DROP TABLE IF EXISTS `MV_TABLES_USED`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `MV_TABLES_USED` (
- `MV_CREATION_METADATA_ID` bigint(20) NOT NULL,
- `TBL_ID` bigint(20) NOT NULL,
- KEY `MV_TABLES_USED_FK1` (`MV_CREATION_METADATA_ID`),
- KEY `MV_TABLES_USED_FK2` (`TBL_ID`),
- CONSTRAINT `MV_TABLES_USED_FK1` FOREIGN KEY (`MV_CREATION_METADATA_ID`)
REFERENCES `MV_CREATION_METADATA` (`MV_CREATION_METADATA_ID`),
- CONSTRAINT `MV_TABLES_USED_FK2` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `MV_TABLES_USED`
---
-
-LOCK TABLES `MV_TABLES_USED` WRITE;
-/*!40000 ALTER TABLE `MV_TABLES_USED` DISABLE KEYS */;
-/*!40000 ALTER TABLE `MV_TABLES_USED` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NEXT_COMPACTION_QUEUE_ID`
---
-
-DROP TABLE IF EXISTS `NEXT_COMPACTION_QUEUE_ID`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NEXT_COMPACTION_QUEUE_ID` (
- `NCQ_NEXT` bigint(20) NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NEXT_COMPACTION_QUEUE_ID`
---
-
-LOCK TABLES `NEXT_COMPACTION_QUEUE_ID` WRITE;
-/*!40000 ALTER TABLE `NEXT_COMPACTION_QUEUE_ID` DISABLE KEYS */;
-INSERT INTO `NEXT_COMPACTION_QUEUE_ID` VALUES (1);
-/*!40000 ALTER TABLE `NEXT_COMPACTION_QUEUE_ID` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NEXT_LOCK_ID`
---
-
-DROP TABLE IF EXISTS `NEXT_LOCK_ID`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NEXT_LOCK_ID` (
- `NL_NEXT` bigint(20) NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NEXT_LOCK_ID`
---
-
-LOCK TABLES `NEXT_LOCK_ID` WRITE;
-/*!40000 ALTER TABLE `NEXT_LOCK_ID` DISABLE KEYS */;
-INSERT INTO `NEXT_LOCK_ID` VALUES (1);
-/*!40000 ALTER TABLE `NEXT_LOCK_ID` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NEXT_TXN_ID`
---
-
-DROP TABLE IF EXISTS `NEXT_TXN_ID`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NEXT_TXN_ID` (
- `NTXN_NEXT` bigint(20) NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NEXT_TXN_ID`
---
-
-LOCK TABLES `NEXT_TXN_ID` WRITE;
-/*!40000 ALTER TABLE `NEXT_TXN_ID` DISABLE KEYS */;
-INSERT INTO `NEXT_TXN_ID` VALUES (1);
-/*!40000 ALTER TABLE `NEXT_TXN_ID` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NEXT_WRITE_ID`
---
-
-DROP TABLE IF EXISTS `NEXT_WRITE_ID`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NEXT_WRITE_ID` (
- `NWI_DATABASE` varchar(128) NOT NULL,
- `NWI_TABLE` varchar(256) NOT NULL,
- `NWI_NEXT` bigint(20) NOT NULL,
- UNIQUE KEY `NEXT_WRITE_ID_IDX` (`NWI_DATABASE`,`NWI_TABLE`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NEXT_WRITE_ID`
---
-
-LOCK TABLES `NEXT_WRITE_ID` WRITE;
-/*!40000 ALTER TABLE `NEXT_WRITE_ID` DISABLE KEYS */;
-/*!40000 ALTER TABLE `NEXT_WRITE_ID` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NOTIFICATION_LOG`
---
-
-DROP TABLE IF EXISTS `NOTIFICATION_LOG`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NOTIFICATION_LOG` (
- `NL_ID` bigint(20) NOT NULL,
- `EVENT_ID` bigint(20) NOT NULL,
- `EVENT_TIME` int(11) NOT NULL,
- `EVENT_TYPE` varchar(32) NOT NULL,
- `CAT_NAME` varchar(256) DEFAULT NULL,
- `DB_NAME` varchar(128) DEFAULT NULL,
- `TBL_NAME` varchar(256) DEFAULT NULL,
- `MESSAGE` longtext,
- `MESSAGE_FORMAT` varchar(16) DEFAULT NULL,
- PRIMARY KEY (`NL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NOTIFICATION_LOG`
---
-
-LOCK TABLES `NOTIFICATION_LOG` WRITE;
-/*!40000 ALTER TABLE `NOTIFICATION_LOG` DISABLE KEYS */;
-/*!40000 ALTER TABLE `NOTIFICATION_LOG` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NOTIFICATION_SEQUENCE`
---
-
-DROP TABLE IF EXISTS `NOTIFICATION_SEQUENCE`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NOTIFICATION_SEQUENCE` (
- `NNI_ID` bigint(20) NOT NULL,
- `NEXT_EVENT_ID` bigint(20) NOT NULL,
- PRIMARY KEY (`NNI_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NOTIFICATION_SEQUENCE`
---
-
-LOCK TABLES `NOTIFICATION_SEQUENCE` WRITE;
-/*!40000 ALTER TABLE `NOTIFICATION_SEQUENCE` DISABLE KEYS */;
-INSERT INTO `NOTIFICATION_SEQUENCE` VALUES (1,1);
-/*!40000 ALTER TABLE `NOTIFICATION_SEQUENCE` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `NUCLEUS_TABLES`
---
-
-DROP TABLE IF EXISTS `NUCLEUS_TABLES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `NUCLEUS_TABLES` (
- `CLASS_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TABLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TYPE` varchar(4) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `OWNER` varchar(2) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `VERSION` varchar(20) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `INTERFACE_NAME` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- PRIMARY KEY (`CLASS_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `NUCLEUS_TABLES`
---
-
-LOCK TABLES `NUCLEUS_TABLES` WRITE;
-/*!40000 ALTER TABLE `NUCLEUS_TABLES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `NUCLEUS_TABLES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PARTITIONS`
---
-
-DROP TABLE IF EXISTS `PARTITIONS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PARTITIONS` (
- `PART_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `LAST_ACCESS_TIME` int(11) NOT NULL,
- `PART_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `SD_ID` bigint(20) DEFAULT NULL,
- `TBL_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`PART_ID`),
- UNIQUE KEY `UNIQUEPARTITION` (`PART_NAME`,`TBL_ID`),
- KEY `PARTITIONS_N49` (`TBL_ID`),
- KEY `PARTITIONS_N50` (`SD_ID`),
- CONSTRAINT `PARTITIONS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`),
- CONSTRAINT `PARTITIONS_FK2` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PARTITIONS`
---
-
-LOCK TABLES `PARTITIONS` WRITE;
-/*!40000 ALTER TABLE `PARTITIONS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PARTITIONS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PARTITION_EVENTS`
---
-
-DROP TABLE IF EXISTS `PARTITION_EVENTS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PARTITION_EVENTS` (
- `PART_NAME_ID` bigint(20) NOT NULL,
- `CAT_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `EVENT_TIME` bigint(20) NOT NULL,
- `EVENT_TYPE` int(11) NOT NULL,
- `PARTITION_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `TBL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- PRIMARY KEY (`PART_NAME_ID`),
- KEY `PARTITIONEVENTINDEX` (`PARTITION_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PARTITION_EVENTS`
---
-
-LOCK TABLES `PARTITION_EVENTS` WRITE;
-/*!40000 ALTER TABLE `PARTITION_EVENTS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PARTITION_EVENTS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PARTITION_KEYS`
---
-
-DROP TABLE IF EXISTS `PARTITION_KEYS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PARTITION_KEYS` (
- `TBL_ID` bigint(20) NOT NULL,
- `PKEY_COMMENT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PKEY_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PKEY_TYPE` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`TBL_ID`,`PKEY_NAME`),
- KEY `PARTITION_KEYS_N49` (`TBL_ID`),
- CONSTRAINT `PARTITION_KEYS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PARTITION_KEYS`
---
-
-LOCK TABLES `PARTITION_KEYS` WRITE;
-/*!40000 ALTER TABLE `PARTITION_KEYS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PARTITION_KEYS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PARTITION_KEY_VALS`
---
-
-DROP TABLE IF EXISTS `PARTITION_KEY_VALS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PARTITION_KEY_VALS` (
- `PART_ID` bigint(20) NOT NULL,
- `PART_KEY_VAL` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`PART_ID`,`INTEGER_IDX`),
- KEY `PARTITION_KEY_VALS_N49` (`PART_ID`),
- CONSTRAINT `PARTITION_KEY_VALS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES
`PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PARTITION_KEY_VALS`
---
-
-LOCK TABLES `PARTITION_KEY_VALS` WRITE;
-/*!40000 ALTER TABLE `PARTITION_KEY_VALS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PARTITION_KEY_VALS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PARTITION_PARAMS`
---
-
-DROP TABLE IF EXISTS `PARTITION_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PARTITION_PARAMS` (
- `PART_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`PART_ID`,`PARAM_KEY`),
- KEY `PARTITION_PARAMS_N49` (`PART_ID`),
- CONSTRAINT `PARTITION_PARAMS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES
`PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PARTITION_PARAMS`
---
-
-LOCK TABLES `PARTITION_PARAMS` WRITE;
-/*!40000 ALTER TABLE `PARTITION_PARAMS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PARTITION_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PART_COL_PRIVS`
---
-
-DROP TABLE IF EXISTS `PART_COL_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PART_COL_PRIVS` (
- `PART_COLUMN_GRANT_ID` bigint(20) NOT NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PART_ID` bigint(20) DEFAULT NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PART_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`PART_COLUMN_GRANT_ID`),
- KEY `PART_COL_PRIVS_N49` (`PART_ID`),
- KEY `PARTITIONCOLUMNPRIVILEGEINDEX`
(`AUTHORIZER`,`PART_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
- CONSTRAINT `PART_COL_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES
`PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PART_COL_PRIVS`
---
-
-LOCK TABLES `PART_COL_PRIVS` WRITE;
-/*!40000 ALTER TABLE `PART_COL_PRIVS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PART_COL_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PART_COL_STATS`
---
-
-DROP TABLE IF EXISTS `PART_COL_STATS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PART_COL_STATS` (
- `CS_ID` bigint(20) NOT NULL,
- `CAT_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TABLE_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARTITION_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT
NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PART_ID` bigint(20) NOT NULL,
- `LONG_LOW_VALUE` bigint(20) DEFAULT NULL,
- `LONG_HIGH_VALUE` bigint(20) DEFAULT NULL,
- `DOUBLE_HIGH_VALUE` double(53,4) DEFAULT NULL,
- `DOUBLE_LOW_VALUE` double(53,4) DEFAULT NULL,
- `BIG_DECIMAL_LOW_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE
latin1_bin DEFAULT NULL,
- `BIG_DECIMAL_HIGH_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE
latin1_bin DEFAULT NULL,
- `NUM_NULLS` bigint(20) NOT NULL,
- `NUM_DISTINCTS` bigint(20) DEFAULT NULL,
- `BIT_VECTOR` blob,
- `AVG_COL_LEN` double(53,4) DEFAULT NULL,
- `MAX_COL_LEN` bigint(20) DEFAULT NULL,
- `NUM_TRUES` bigint(20) DEFAULT NULL,
- `NUM_FALSES` bigint(20) DEFAULT NULL,
- `LAST_ANALYZED` bigint(20) NOT NULL,
- PRIMARY KEY (`CS_ID`),
- KEY `PART_COL_STATS_FK` (`PART_ID`),
- KEY `PCS_STATS_IDX`
(`CAT_NAME`,`DB_NAME`,`TABLE_NAME`,`COLUMN_NAME`,`PARTITION_NAME`) USING BTREE,
- CONSTRAINT `PART_COL_STATS_FK` FOREIGN KEY (`PART_ID`) REFERENCES
`PARTITIONS` (`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PART_COL_STATS`
---
-
-LOCK TABLES `PART_COL_STATS` WRITE;
-/*!40000 ALTER TABLE `PART_COL_STATS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PART_COL_STATS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `PART_PRIVS`
---
-
-DROP TABLE IF EXISTS `PART_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `PART_PRIVS` (
- `PART_GRANT_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PART_ID` bigint(20) DEFAULT NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PART_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`PART_GRANT_ID`),
- KEY `PARTPRIVILEGEINDEX`
(`AUTHORIZER`,`PART_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`PART_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
- KEY `PART_PRIVS_N49` (`PART_ID`),
- CONSTRAINT `PART_PRIVS_FK1` FOREIGN KEY (`PART_ID`) REFERENCES `PARTITIONS`
(`PART_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `PART_PRIVS`
---
-
-LOCK TABLES `PART_PRIVS` WRITE;
-/*!40000 ALTER TABLE `PART_PRIVS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `PART_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `REPL_TXN_MAP`
---
-
-DROP TABLE IF EXISTS `REPL_TXN_MAP`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `REPL_TXN_MAP` (
- `RTM_REPL_POLICY` varchar(256) NOT NULL,
- `RTM_SRC_TXN_ID` bigint(20) NOT NULL,
- `RTM_TARGET_TXN_ID` bigint(20) NOT NULL,
- PRIMARY KEY (`RTM_REPL_POLICY`,`RTM_SRC_TXN_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `REPL_TXN_MAP`
---
-
-LOCK TABLES `REPL_TXN_MAP` WRITE;
-/*!40000 ALTER TABLE `REPL_TXN_MAP` DISABLE KEYS */;
-/*!40000 ALTER TABLE `REPL_TXN_MAP` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ROLES`
---
-
-DROP TABLE IF EXISTS `ROLES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ROLES` (
- `ROLE_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `OWNER_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `ROLE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`ROLE_ID`),
- UNIQUE KEY `ROLEENTITYINDEX` (`ROLE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ROLES`
---
-
-LOCK TABLES `ROLES` WRITE;
-/*!40000 ALTER TABLE `ROLES` DISABLE KEYS */;
-INSERT INTO `ROLES` VALUES
(1,1634563201,'admin','admin'),(2,1634563201,'public','public');
-/*!40000 ALTER TABLE `ROLES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `ROLE_MAP`
---
-
-DROP TABLE IF EXISTS `ROLE_MAP`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `ROLE_MAP` (
- `ROLE_GRANT_ID` bigint(20) NOT NULL,
- `ADD_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `ROLE_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`ROLE_GRANT_ID`),
- UNIQUE KEY `USERROLEMAPINDEX`
(`PRINCIPAL_NAME`,`ROLE_ID`,`GRANTOR`,`GRANTOR_TYPE`),
- KEY `ROLE_MAP_N49` (`ROLE_ID`),
- CONSTRAINT `ROLE_MAP_FK1` FOREIGN KEY (`ROLE_ID`) REFERENCES `ROLES`
(`ROLE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `ROLE_MAP`
---
-
-LOCK TABLES `ROLE_MAP` WRITE;
-/*!40000 ALTER TABLE `ROLE_MAP` DISABLE KEYS */;
-/*!40000 ALTER TABLE `ROLE_MAP` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `RUNTIME_STATS`
---
-
-DROP TABLE IF EXISTS `RUNTIME_STATS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `RUNTIME_STATS` (
- `RS_ID` bigint(20) NOT NULL,
- `CREATE_TIME` bigint(20) NOT NULL,
- `WEIGHT` bigint(20) NOT NULL,
- `PAYLOAD` blob,
- PRIMARY KEY (`RS_ID`),
- KEY `IDX_RUNTIME_STATS_CREATE_TIME` (`CREATE_TIME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `RUNTIME_STATS`
---
-
-LOCK TABLES `RUNTIME_STATS` WRITE;
-/*!40000 ALTER TABLE `RUNTIME_STATS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `RUNTIME_STATS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SCHEMA_VERSION`
---
-
-DROP TABLE IF EXISTS `SCHEMA_VERSION`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SCHEMA_VERSION` (
- `SCHEMA_VERSION_ID` bigint(20) NOT NULL,
- `SCHEMA_ID` bigint(20) DEFAULT NULL,
- `VERSION` int(11) NOT NULL,
- `CREATED_AT` bigint(20) NOT NULL,
- `CD_ID` bigint(20) DEFAULT NULL,
- `STATE` int(11) NOT NULL,
- `DESCRIPTION` varchar(4000) DEFAULT NULL,
- `SCHEMA_TEXT` mediumtext,
- `FINGERPRINT` varchar(256) DEFAULT NULL,
- `SCHEMA_VERSION_NAME` varchar(256) DEFAULT NULL,
- `SERDE_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`SCHEMA_VERSION_ID`),
- KEY `CD_ID` (`CD_ID`),
- KEY `SERDE_ID` (`SERDE_ID`),
- KEY `UNIQUE_VERSION` (`SCHEMA_ID`,`VERSION`),
- CONSTRAINT `SCHEMA_VERSION_ibfk_1` FOREIGN KEY (`SCHEMA_ID`) REFERENCES
`I_SCHEMA` (`SCHEMA_ID`),
- CONSTRAINT `SCHEMA_VERSION_ibfk_2` FOREIGN KEY (`CD_ID`) REFERENCES `CDS`
(`CD_ID`),
- CONSTRAINT `SCHEMA_VERSION_ibfk_3` FOREIGN KEY (`SERDE_ID`) REFERENCES
`SERDES` (`SERDE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SCHEMA_VERSION`
---
-
-LOCK TABLES `SCHEMA_VERSION` WRITE;
-/*!40000 ALTER TABLE `SCHEMA_VERSION` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SCHEMA_VERSION` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SDS`
---
-
-DROP TABLE IF EXISTS `SDS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SDS` (
- `SD_ID` bigint(20) NOT NULL,
- `CD_ID` bigint(20) DEFAULT NULL,
- `INPUT_FORMAT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `IS_COMPRESSED` bit(1) NOT NULL,
- `IS_STOREDASSUBDIRECTORIES` bit(1) NOT NULL,
- `LOCATION` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `NUM_BUCKETS` int(11) NOT NULL,
- `OUTPUT_FORMAT` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `SERDE_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`SD_ID`),
- KEY `SDS_N49` (`SERDE_ID`),
- KEY `SDS_N50` (`CD_ID`),
- CONSTRAINT `SDS_FK1` FOREIGN KEY (`SERDE_ID`) REFERENCES `SERDES`
(`SERDE_ID`),
- CONSTRAINT `SDS_FK2` FOREIGN KEY (`CD_ID`) REFERENCES `CDS` (`CD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SDS`
---
-
-LOCK TABLES `SDS` WRITE;
-/*!40000 ALTER TABLE `SDS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SDS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SD_PARAMS`
---
-
-DROP TABLE IF EXISTS `SD_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SD_PARAMS` (
- `SD_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` mediumtext CHARACTER SET latin1 COLLATE latin1_bin,
- PRIMARY KEY (`SD_ID`,`PARAM_KEY`),
- KEY `SD_PARAMS_N49` (`SD_ID`),
- CONSTRAINT `SD_PARAMS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SD_PARAMS`
---
-
-LOCK TABLES `SD_PARAMS` WRITE;
-/*!40000 ALTER TABLE `SD_PARAMS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SD_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SEQUENCE_TABLE`
---
-
-DROP TABLE IF EXISTS `SEQUENCE_TABLE`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SEQUENCE_TABLE` (
- `SEQUENCE_NAME` varchar(255) CHARACTER SET latin1 COLLATE latin1_bin NOT
NULL,
- `NEXT_VAL` bigint(20) NOT NULL,
- PRIMARY KEY (`SEQUENCE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SEQUENCE_TABLE`
---
-
-LOCK TABLES `SEQUENCE_TABLE` WRITE;
-/*!40000 ALTER TABLE `SEQUENCE_TABLE` DISABLE KEYS */;
-INSERT INTO `SEQUENCE_TABLE` VALUES
('org.apache.hadoop.hive.metastore.model.MDatabase',6),('org.apache.hadoop.hive.metastore.model.MGlobalPrivilege',6),('org.apache.hadoop.hive.metastore.model.MNotificationLog',1),('org.apache.hadoop.hive.metastore.model.MRole',6);
-/*!40000 ALTER TABLE `SEQUENCE_TABLE` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SERDES`
---
-
-DROP TABLE IF EXISTS `SERDES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SERDES` (
- `SERDE_ID` bigint(20) NOT NULL,
- `NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `SLIB` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `DESCRIPTION` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `SERIALIZER_CLASS` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `DESERIALIZER_CLASS` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `SERDE_TYPE` int(11) DEFAULT NULL,
- PRIMARY KEY (`SERDE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SERDES`
---
-
-LOCK TABLES `SERDES` WRITE;
-/*!40000 ALTER TABLE `SERDES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SERDES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SERDE_PARAMS`
---
-
-DROP TABLE IF EXISTS `SERDE_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SERDE_PARAMS` (
- `SERDE_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` mediumtext CHARACTER SET latin1 COLLATE latin1_bin,
- PRIMARY KEY (`SERDE_ID`,`PARAM_KEY`),
- KEY `SERDE_PARAMS_N49` (`SERDE_ID`),
- CONSTRAINT `SERDE_PARAMS_FK1` FOREIGN KEY (`SERDE_ID`) REFERENCES `SERDES`
(`SERDE_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SERDE_PARAMS`
---
-
-LOCK TABLES `SERDE_PARAMS` WRITE;
-/*!40000 ALTER TABLE `SERDE_PARAMS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SERDE_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SKEWED_COL_NAMES`
---
-
-DROP TABLE IF EXISTS `SKEWED_COL_NAMES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SKEWED_COL_NAMES` (
- `SD_ID` bigint(20) NOT NULL,
- `SKEWED_COL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
- KEY `SKEWED_COL_NAMES_N49` (`SD_ID`),
- CONSTRAINT `SKEWED_COL_NAMES_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS`
(`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SKEWED_COL_NAMES`
---
-
-LOCK TABLES `SKEWED_COL_NAMES` WRITE;
-/*!40000 ALTER TABLE `SKEWED_COL_NAMES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SKEWED_COL_NAMES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SKEWED_COL_VALUE_LOC_MAP`
---
-
-DROP TABLE IF EXISTS `SKEWED_COL_VALUE_LOC_MAP`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SKEWED_COL_VALUE_LOC_MAP` (
- `SD_ID` bigint(20) NOT NULL,
- `STRING_LIST_ID_KID` bigint(20) NOT NULL,
- `LOCATION` varchar(4000) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`SD_ID`,`STRING_LIST_ID_KID`),
- KEY `SKEWED_COL_VALUE_LOC_MAP_N49` (`STRING_LIST_ID_KID`),
- KEY `SKEWED_COL_VALUE_LOC_MAP_N50` (`SD_ID`),
- CONSTRAINT `SKEWED_COL_VALUE_LOC_MAP_FK1` FOREIGN KEY (`SD_ID`) REFERENCES
`SDS` (`SD_ID`),
- CONSTRAINT `SKEWED_COL_VALUE_LOC_MAP_FK2` FOREIGN KEY (`STRING_LIST_ID_KID`)
REFERENCES `SKEWED_STRING_LIST` (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SKEWED_COL_VALUE_LOC_MAP`
---
-
-LOCK TABLES `SKEWED_COL_VALUE_LOC_MAP` WRITE;
-/*!40000 ALTER TABLE `SKEWED_COL_VALUE_LOC_MAP` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SKEWED_COL_VALUE_LOC_MAP` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SKEWED_STRING_LIST`
---
-
-DROP TABLE IF EXISTS `SKEWED_STRING_LIST`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SKEWED_STRING_LIST` (
- `STRING_LIST_ID` bigint(20) NOT NULL,
- PRIMARY KEY (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SKEWED_STRING_LIST`
---
-
-LOCK TABLES `SKEWED_STRING_LIST` WRITE;
-/*!40000 ALTER TABLE `SKEWED_STRING_LIST` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SKEWED_STRING_LIST` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SKEWED_STRING_LIST_VALUES`
---
-
-DROP TABLE IF EXISTS `SKEWED_STRING_LIST_VALUES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SKEWED_STRING_LIST_VALUES` (
- `STRING_LIST_ID` bigint(20) NOT NULL,
- `STRING_LIST_VALUE` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`STRING_LIST_ID`,`INTEGER_IDX`),
- KEY `SKEWED_STRING_LIST_VALUES_N49` (`STRING_LIST_ID`),
- CONSTRAINT `SKEWED_STRING_LIST_VALUES_FK1` FOREIGN KEY (`STRING_LIST_ID`)
REFERENCES `SKEWED_STRING_LIST` (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SKEWED_STRING_LIST_VALUES`
---
-
-LOCK TABLES `SKEWED_STRING_LIST_VALUES` WRITE;
-/*!40000 ALTER TABLE `SKEWED_STRING_LIST_VALUES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SKEWED_STRING_LIST_VALUES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SKEWED_VALUES`
---
-
-DROP TABLE IF EXISTS `SKEWED_VALUES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SKEWED_VALUES` (
- `SD_ID_OID` bigint(20) NOT NULL,
- `STRING_LIST_ID_EID` bigint(20) NOT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`SD_ID_OID`,`INTEGER_IDX`),
- KEY `SKEWED_VALUES_N50` (`SD_ID_OID`),
- KEY `SKEWED_VALUES_N49` (`STRING_LIST_ID_EID`),
- CONSTRAINT `SKEWED_VALUES_FK1` FOREIGN KEY (`SD_ID_OID`) REFERENCES `SDS`
(`SD_ID`),
- CONSTRAINT `SKEWED_VALUES_FK2` FOREIGN KEY (`STRING_LIST_ID_EID`) REFERENCES
`SKEWED_STRING_LIST` (`STRING_LIST_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SKEWED_VALUES`
---
-
-LOCK TABLES `SKEWED_VALUES` WRITE;
-/*!40000 ALTER TABLE `SKEWED_VALUES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SKEWED_VALUES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `SORT_COLS`
---
-
-DROP TABLE IF EXISTS `SORT_COLS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `SORT_COLS` (
- `SD_ID` bigint(20) NOT NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `ORDER` int(11) NOT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`SD_ID`,`INTEGER_IDX`),
- KEY `SORT_COLS_N49` (`SD_ID`),
- CONSTRAINT `SORT_COLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `SORT_COLS`
---
-
-LOCK TABLES `SORT_COLS` WRITE;
-/*!40000 ALTER TABLE `SORT_COLS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `SORT_COLS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TABLE_PARAMS`
---
-
-DROP TABLE IF EXISTS `TABLE_PARAMS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TABLE_PARAMS` (
- `TBL_ID` bigint(20) NOT NULL,
- `PARAM_KEY` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `PARAM_VALUE` mediumtext CHARACTER SET latin1 COLLATE latin1_bin,
- PRIMARY KEY (`TBL_ID`,`PARAM_KEY`),
- KEY `TABLE_PARAMS_N49` (`TBL_ID`),
- CONSTRAINT `TABLE_PARAMS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TABLE_PARAMS`
---
-
-LOCK TABLES `TABLE_PARAMS` WRITE;
-/*!40000 ALTER TABLE `TABLE_PARAMS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TABLE_PARAMS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TAB_COL_STATS`
---
-
-DROP TABLE IF EXISTS `TAB_COL_STATS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TAB_COL_STATS` (
- `CS_ID` bigint(20) NOT NULL,
- `CAT_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `DB_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TABLE_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `COLUMN_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `TBL_ID` bigint(20) NOT NULL,
- `LONG_LOW_VALUE` bigint(20) DEFAULT NULL,
- `LONG_HIGH_VALUE` bigint(20) DEFAULT NULL,
- `DOUBLE_HIGH_VALUE` double(53,4) DEFAULT NULL,
- `DOUBLE_LOW_VALUE` double(53,4) DEFAULT NULL,
- `BIG_DECIMAL_LOW_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE
latin1_bin DEFAULT NULL,
- `BIG_DECIMAL_HIGH_VALUE` varchar(4000) CHARACTER SET latin1 COLLATE
latin1_bin DEFAULT NULL,
- `NUM_NULLS` bigint(20) NOT NULL,
- `NUM_DISTINCTS` bigint(20) DEFAULT NULL,
- `BIT_VECTOR` blob,
- `AVG_COL_LEN` double(53,4) DEFAULT NULL,
- `MAX_COL_LEN` bigint(20) DEFAULT NULL,
- `NUM_TRUES` bigint(20) DEFAULT NULL,
- `NUM_FALSES` bigint(20) DEFAULT NULL,
- `LAST_ANALYZED` bigint(20) NOT NULL,
- PRIMARY KEY (`CS_ID`),
- KEY `TAB_COL_STATS_FK` (`TBL_ID`),
- KEY `TAB_COL_STATS_IDX` (`CAT_NAME`,`DB_NAME`,`TABLE_NAME`,`COLUMN_NAME`)
USING BTREE,
- CONSTRAINT `TAB_COL_STATS_FK` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TAB_COL_STATS`
---
-
-LOCK TABLES `TAB_COL_STATS` WRITE;
-/*!40000 ALTER TABLE `TAB_COL_STATS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TAB_COL_STATS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TBLS`
---
-
-DROP TABLE IF EXISTS `TBLS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TBLS` (
- `TBL_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `DB_ID` bigint(20) DEFAULT NULL,
- `LAST_ACCESS_TIME` int(11) NOT NULL,
- `OWNER` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `OWNER_TYPE` varchar(10) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `RETENTION` int(11) NOT NULL,
- `SD_ID` bigint(20) DEFAULT NULL,
- `TBL_NAME` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `TBL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `VIEW_EXPANDED_TEXT` mediumtext,
- `VIEW_ORIGINAL_TEXT` mediumtext,
- `IS_REWRITE_ENABLED` bit(1) NOT NULL DEFAULT b'0',
- PRIMARY KEY (`TBL_ID`),
- UNIQUE KEY `UNIQUETABLE` (`TBL_NAME`,`DB_ID`),
- KEY `TBLS_N50` (`SD_ID`),
- KEY `TBLS_N49` (`DB_ID`),
- CONSTRAINT `TBLS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES `SDS` (`SD_ID`),
- CONSTRAINT `TBLS_FK2` FOREIGN KEY (`DB_ID`) REFERENCES `DBS` (`DB_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TBLS`
---
-
-LOCK TABLES `TBLS` WRITE;
-/*!40000 ALTER TABLE `TBLS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TBLS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TBL_COL_PRIVS`
---
-
-DROP TABLE IF EXISTS `TBL_COL_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TBL_COL_PRIVS` (
- `TBL_COLUMN_GRANT_ID` bigint(20) NOT NULL,
- `COLUMN_NAME` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `TBL_COL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `TBL_ID` bigint(20) DEFAULT NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`TBL_COLUMN_GRANT_ID`),
- KEY `TABLECOLUMNPRIVILEGEINDEX`
(`AUTHORIZER`,`TBL_ID`,`COLUMN_NAME`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_COL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
- KEY `TBL_COL_PRIVS_N49` (`TBL_ID`),
- CONSTRAINT `TBL_COL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TBL_COL_PRIVS`
---
-
-LOCK TABLES `TBL_COL_PRIVS` WRITE;
-/*!40000 ALTER TABLE `TBL_COL_PRIVS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TBL_COL_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TBL_PRIVS`
---
-
-DROP TABLE IF EXISTS `TBL_PRIVS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TBL_PRIVS` (
- `TBL_GRANT_ID` bigint(20) NOT NULL,
- `CREATE_TIME` int(11) NOT NULL,
- `GRANT_OPTION` smallint(6) NOT NULL,
- `GRANTOR` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `GRANTOR_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `PRINCIPAL_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `PRINCIPAL_TYPE` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin
DEFAULT NULL,
- `TBL_PRIV` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `TBL_ID` bigint(20) DEFAULT NULL,
- `AUTHORIZER` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- PRIMARY KEY (`TBL_GRANT_ID`),
- KEY `TBL_PRIVS_N49` (`TBL_ID`),
- KEY `TABLEPRIVILEGEINDEX`
(`AUTHORIZER`,`TBL_ID`,`PRINCIPAL_NAME`,`PRINCIPAL_TYPE`,`TBL_PRIV`,`GRANTOR`,`GRANTOR_TYPE`),
- CONSTRAINT `TBL_PRIVS_FK1` FOREIGN KEY (`TBL_ID`) REFERENCES `TBLS`
(`TBL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TBL_PRIVS`
---
-
-LOCK TABLES `TBL_PRIVS` WRITE;
-/*!40000 ALTER TABLE `TBL_PRIVS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TBL_PRIVS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TXNS`
---
-
-DROP TABLE IF EXISTS `TXNS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TXNS` (
- `TXN_ID` bigint(20) NOT NULL,
- `TXN_STATE` char(1) NOT NULL,
- `TXN_STARTED` bigint(20) NOT NULL,
- `TXN_LAST_HEARTBEAT` bigint(20) NOT NULL,
- `TXN_USER` varchar(128) NOT NULL,
- `TXN_HOST` varchar(128) NOT NULL,
- `TXN_AGENT_INFO` varchar(128) DEFAULT NULL,
- `TXN_META_INFO` varchar(128) DEFAULT NULL,
- `TXN_HEARTBEAT_COUNT` int(11) DEFAULT NULL,
- `TXN_TYPE` int(11) DEFAULT NULL,
- PRIMARY KEY (`TXN_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TXNS`
---
-
-LOCK TABLES `TXNS` WRITE;
-/*!40000 ALTER TABLE `TXNS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TXNS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TXN_COMPONENTS`
---
-
-DROP TABLE IF EXISTS `TXN_COMPONENTS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TXN_COMPONENTS` (
- `TC_TXNID` bigint(20) NOT NULL,
- `TC_DATABASE` varchar(128) NOT NULL,
- `TC_TABLE` varchar(128) DEFAULT NULL,
- `TC_PARTITION` varchar(767) DEFAULT NULL,
- `TC_OPERATION_TYPE` char(1) NOT NULL,
- `TC_WRITEID` bigint(20) DEFAULT NULL,
- KEY `TC_TXNID_INDEX` (`TC_TXNID`),
- CONSTRAINT `TXN_COMPONENTS_ibfk_1` FOREIGN KEY (`TC_TXNID`) REFERENCES
`TXNS` (`TXN_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TXN_COMPONENTS`
---
-
-LOCK TABLES `TXN_COMPONENTS` WRITE;
-/*!40000 ALTER TABLE `TXN_COMPONENTS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TXN_COMPONENTS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TXN_TO_WRITE_ID`
---
-
-DROP TABLE IF EXISTS `TXN_TO_WRITE_ID`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TXN_TO_WRITE_ID` (
- `T2W_TXNID` bigint(20) NOT NULL,
- `T2W_DATABASE` varchar(128) NOT NULL,
- `T2W_TABLE` varchar(256) NOT NULL,
- `T2W_WRITEID` bigint(20) NOT NULL,
- UNIQUE KEY `TBL_TO_TXN_ID_IDX` (`T2W_DATABASE`,`T2W_TABLE`,`T2W_TXNID`),
- UNIQUE KEY `TBL_TO_WRITE_ID_IDX` (`T2W_DATABASE`,`T2W_TABLE`,`T2W_WRITEID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TXN_TO_WRITE_ID`
---
-
-LOCK TABLES `TXN_TO_WRITE_ID` WRITE;
-/*!40000 ALTER TABLE `TXN_TO_WRITE_ID` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TXN_TO_WRITE_ID` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TYPES`
---
-
-DROP TABLE IF EXISTS `TYPES`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TYPES` (
- `TYPES_ID` bigint(20) NOT NULL,
- `TYPE_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT
NULL,
- `TYPE1` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `TYPE2` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- PRIMARY KEY (`TYPES_ID`),
- UNIQUE KEY `UNIQUE_TYPE` (`TYPE_NAME`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TYPES`
---
-
-LOCK TABLES `TYPES` WRITE;
-/*!40000 ALTER TABLE `TYPES` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TYPES` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `TYPE_FIELDS`
---
-
-DROP TABLE IF EXISTS `TYPE_FIELDS`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `TYPE_FIELDS` (
- `TYPE_NAME` bigint(20) NOT NULL,
- `COMMENT` varchar(256) CHARACTER SET latin1 COLLATE latin1_bin DEFAULT NULL,
- `FIELD_NAME` varchar(128) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `FIELD_TYPE` varchar(767) CHARACTER SET latin1 COLLATE latin1_bin NOT NULL,
- `INTEGER_IDX` int(11) NOT NULL,
- PRIMARY KEY (`TYPE_NAME`,`FIELD_NAME`),
- KEY `TYPE_FIELDS_N49` (`TYPE_NAME`),
- CONSTRAINT `TYPE_FIELDS_FK1` FOREIGN KEY (`TYPE_NAME`) REFERENCES `TYPES`
(`TYPES_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `TYPE_FIELDS`
---
-
-LOCK TABLES `TYPE_FIELDS` WRITE;
-/*!40000 ALTER TABLE `TYPE_FIELDS` DISABLE KEYS */;
-/*!40000 ALTER TABLE `TYPE_FIELDS` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `VERSION`
---
-
-DROP TABLE IF EXISTS `VERSION`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `VERSION` (
- `VER_ID` bigint(20) NOT NULL,
- `SCHEMA_VERSION` varchar(127) NOT NULL,
- `VERSION_COMMENT` varchar(255) DEFAULT NULL,
- PRIMARY KEY (`VER_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `VERSION`
---
-
-LOCK TABLES `VERSION` WRITE;
-/*!40000 ALTER TABLE `VERSION` DISABLE KEYS */;
-INSERT INTO `VERSION` VALUES (1,'3.1.0','Hive release version 3.1.0');
-/*!40000 ALTER TABLE `VERSION` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WM_MAPPING`
---
-
-DROP TABLE IF EXISTS `WM_MAPPING`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WM_MAPPING` (
- `MAPPING_ID` bigint(20) NOT NULL,
- `RP_ID` bigint(20) NOT NULL,
- `ENTITY_TYPE` varchar(128) NOT NULL,
- `ENTITY_NAME` varchar(128) NOT NULL,
- `POOL_ID` bigint(20) DEFAULT NULL,
- `ORDERING` int(11) DEFAULT NULL,
- PRIMARY KEY (`MAPPING_ID`),
- UNIQUE KEY `UNIQUE_WM_MAPPING` (`RP_ID`,`ENTITY_TYPE`,`ENTITY_NAME`),
- KEY `WM_MAPPING_FK2` (`POOL_ID`),
- CONSTRAINT `WM_MAPPING_FK1` FOREIGN KEY (`RP_ID`) REFERENCES
`WM_RESOURCEPLAN` (`RP_ID`),
- CONSTRAINT `WM_MAPPING_FK2` FOREIGN KEY (`POOL_ID`) REFERENCES `WM_POOL`
(`POOL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WM_MAPPING`
---
-
-LOCK TABLES `WM_MAPPING` WRITE;
-/*!40000 ALTER TABLE `WM_MAPPING` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WM_MAPPING` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WM_POOL`
---
-
-DROP TABLE IF EXISTS `WM_POOL`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WM_POOL` (
- `POOL_ID` bigint(20) NOT NULL,
- `RP_ID` bigint(20) NOT NULL,
- `PATH` varchar(767) NOT NULL,
- `ALLOC_FRACTION` double DEFAULT NULL,
- `QUERY_PARALLELISM` int(11) DEFAULT NULL,
- `SCHEDULING_POLICY` varchar(767) DEFAULT NULL,
- PRIMARY KEY (`POOL_ID`),
- UNIQUE KEY `UNIQUE_WM_POOL` (`RP_ID`,`PATH`),
- CONSTRAINT `WM_POOL_FK1` FOREIGN KEY (`RP_ID`) REFERENCES `WM_RESOURCEPLAN`
(`RP_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WM_POOL`
---
-
-LOCK TABLES `WM_POOL` WRITE;
-/*!40000 ALTER TABLE `WM_POOL` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WM_POOL` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WM_POOL_TO_TRIGGER`
---
-
-DROP TABLE IF EXISTS `WM_POOL_TO_TRIGGER`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WM_POOL_TO_TRIGGER` (
- `POOL_ID` bigint(20) NOT NULL,
- `TRIGGER_ID` bigint(20) NOT NULL,
- PRIMARY KEY (`POOL_ID`,`TRIGGER_ID`),
- KEY `WM_POOL_TO_TRIGGER_FK2` (`TRIGGER_ID`),
- CONSTRAINT `WM_POOL_TO_TRIGGER_FK1` FOREIGN KEY (`POOL_ID`) REFERENCES
`WM_POOL` (`POOL_ID`),
- CONSTRAINT `WM_POOL_TO_TRIGGER_FK2` FOREIGN KEY (`TRIGGER_ID`) REFERENCES
`WM_TRIGGER` (`TRIGGER_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WM_POOL_TO_TRIGGER`
---
-
-LOCK TABLES `WM_POOL_TO_TRIGGER` WRITE;
-/*!40000 ALTER TABLE `WM_POOL_TO_TRIGGER` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WM_POOL_TO_TRIGGER` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WM_RESOURCEPLAN`
---
-
-DROP TABLE IF EXISTS `WM_RESOURCEPLAN`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WM_RESOURCEPLAN` (
- `RP_ID` bigint(20) NOT NULL,
- `NAME` varchar(128) NOT NULL,
- `QUERY_PARALLELISM` int(11) DEFAULT NULL,
- `STATUS` varchar(20) NOT NULL,
- `DEFAULT_POOL_ID` bigint(20) DEFAULT NULL,
- PRIMARY KEY (`RP_ID`),
- UNIQUE KEY `UNIQUE_WM_RESOURCEPLAN` (`NAME`),
- KEY `WM_RESOURCEPLAN_FK1` (`DEFAULT_POOL_ID`),
- CONSTRAINT `WM_RESOURCEPLAN_FK1` FOREIGN KEY (`DEFAULT_POOL_ID`) REFERENCES
`WM_POOL` (`POOL_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WM_RESOURCEPLAN`
---
-
-LOCK TABLES `WM_RESOURCEPLAN` WRITE;
-/*!40000 ALTER TABLE `WM_RESOURCEPLAN` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WM_RESOURCEPLAN` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WM_TRIGGER`
---
-
-DROP TABLE IF EXISTS `WM_TRIGGER`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WM_TRIGGER` (
- `TRIGGER_ID` bigint(20) NOT NULL,
- `RP_ID` bigint(20) NOT NULL,
- `NAME` varchar(128) NOT NULL,
- `TRIGGER_EXPRESSION` varchar(1024) DEFAULT NULL,
- `ACTION_EXPRESSION` varchar(1024) DEFAULT NULL,
- `IS_IN_UNMANAGED` bit(1) NOT NULL DEFAULT b'0',
- PRIMARY KEY (`TRIGGER_ID`),
- UNIQUE KEY `UNIQUE_WM_TRIGGER` (`RP_ID`,`NAME`),
- CONSTRAINT `WM_TRIGGER_FK1` FOREIGN KEY (`RP_ID`) REFERENCES
`WM_RESOURCEPLAN` (`RP_ID`)
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WM_TRIGGER`
---
-
-LOCK TABLES `WM_TRIGGER` WRITE;
-/*!40000 ALTER TABLE `WM_TRIGGER` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WM_TRIGGER` ENABLE KEYS */;
-UNLOCK TABLES;
-
---
--- Table structure for table `WRITE_SET`
---
-
-DROP TABLE IF EXISTS `WRITE_SET`;
-/*!40101 SET @saved_cs_client = @@character_set_client */;
-/*!40101 SET character_set_client = utf8 */;
-CREATE TABLE `WRITE_SET` (
- `WS_DATABASE` varchar(128) NOT NULL,
- `WS_TABLE` varchar(128) NOT NULL,
- `WS_PARTITION` varchar(767) DEFAULT NULL,
- `WS_TXNID` bigint(20) NOT NULL,
- `WS_COMMIT_ID` bigint(20) NOT NULL,
- `WS_OPERATION_TYPE` char(1) NOT NULL
-) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-/*!40101 SET character_set_client = @saved_cs_client */;
-
---
--- Dumping data for table `WRITE_SET`
---
-
-LOCK TABLES `WRITE_SET` WRITE;
-/*!40000 ALTER TABLE `WRITE_SET` DISABLE KEYS */;
-/*!40000 ALTER TABLE `WRITE_SET` ENABLE KEYS */;
-UNLOCK TABLES;
-/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
-
-/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
-/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
-/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
-/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
-/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
-/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-
--- Dump completed on 2021-10-18 13:43:15
diff --git
a/tests/system/providers/google/cloud/dataproc/resources/pip-install.sh
b/tests/system/providers/google/cloud/dataproc/resources/pip-install.sh
deleted file mode 100644
index a9839b5b80..0000000000
--- a/tests/system/providers/google/cloud/dataproc/resources/pip-install.sh
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-set -exo pipefail
-
-PACKAGES=$(/usr/share/google/get_metadata_value attributes/PIP_PACKAGES ||
true)
-readonly PACKAGES
-
-function err() {
- echo "[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*" >&2
- exit 1
-}
-
-function run_with_retry() {
- local -r cmd=("$@")
- for ((i = 0; i < 10; i++)); do
- if "${cmd[@]}"; then
- return 0
- fi
- sleep 5
- done
- err "Failed to run command: ${cmd[*]}"
-}
-
-function install_pip() {
- if command -v pip >/dev/null; then
- echo "pip is already installed."
- return 0
- fi
-
- if command -v easy_install >/dev/null; then
- echo "Installing pip with easy_install..."
- run_with_retry easy_install pip
- return 0
- fi
-
- echo "Installing python-pip..."
- run_with_retry apt update
- run_with_retry apt install python-pip -y
-}
-
-function main() {
- if [[ -z "${PACKAGES}" ]]; then
- echo "ERROR: Must specify PIP_PACKAGES metadata key"
- exit 1
- fi
-
- install_pip
- # shellcheck disable=SC2086
- run_with_retry pip install --upgrade ${PACKAGES}
-}
-
-main
diff --git
a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
index 9c9d3b1d56..0d0c41f52f 100644
---
a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
+++
b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py
@@ -24,7 +24,6 @@ from __future__ import annotations
import datetime
import os
-from pathlib import Path
from google.protobuf.field_mask_pb2 import FieldMask
@@ -37,26 +36,29 @@ from
airflow.providers.google.cloud.operators.dataproc_metastore import (
DataprocMetastoreGetServiceOperator,
DataprocMetastoreUpdateServiceOperator,
)
-from airflow.providers.google.cloud.operators.gcs import
GCSCreateBucketOperator, GCSDeleteBucketOperator
-from airflow.providers.google.cloud.transfers.local_to_gcs import
LocalFilesystemToGCSOperator
+from airflow.providers.google.cloud.operators.gcs import (
+ GCSCreateBucketOperator,
+ GCSDeleteBucketOperator,
+ GCSSynchronizeBucketsOperator,
+)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "dataproc_metastore"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "")
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or
DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
SERVICE_ID = f"{DAG_ID}-service-{ENV_ID}".replace("_", "-")
METADATA_IMPORT_ID = f"{DAG_ID}-metadata-{ENV_ID}".replace("_", "-")
REGION = "europe-west1"
+RESOURCE_DATA_BUCKET = "airflow-system-tests-resources"
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
TIMEOUT = 2400
DB_TYPE = "MYSQL"
DESTINATION_GCS_FOLDER = f"gs://{BUCKET_NAME}/>"
-
-HIVE_FILE_SRC = str(Path(__file__).parent.parent / "dataproc" / "resources" /
"hive.sql")
-HIVE_FILE = "data/hive.sql"
-GCS_URI = f"gs://{BUCKET_NAME}/data/hive.sql"
+HIVE_FILE = "hive.sql"
+GCS_URI = f"gs://{BUCKET_NAME}/dataproc/{HIVE_FILE}"
# Service definition
# Docs:
https://cloud.google.com/dataproc-metastore/docs/reference/rest/v1/projects.locations.services#Service
@@ -98,11 +100,13 @@ with DAG(
task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
)
- upload_file = LocalFilesystemToGCSOperator(
- task_id="upload_file",
- src=HIVE_FILE_SRC,
- dst=HIVE_FILE,
- bucket=BUCKET_NAME,
+ move_file = GCSSynchronizeBucketsOperator(
+ task_id="move_file",
+ source_bucket=RESOURCE_DATA_BUCKET,
+ source_object="dataproc/hive",
+ destination_bucket=BUCKET_NAME,
+ destination_object="dataproc",
+ recursive=True,
)
# [START how_to_cloud_dataproc_metastore_create_service_operator]
@@ -177,7 +181,7 @@ with DAG(
(
create_bucket
- >> upload_file
+ >> move_file
>> create_service
>> get_service
>> update_service
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
index 1dbfbf4090..6a5a7566b3 100644
--- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
+++ b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_backup.py
@@ -35,11 +35,12 @@ from airflow.providers.google.cloud.operators.dataproc_metastore import (
DataprocMetastoreRestoreServiceOperator,
)
from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "dataproc_metastore_backup"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "")
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
SERVICE_ID = f"{DAG_ID}-service-{ENV_ID}".replace("_", "-")
BACKUP_ID = f"{DAG_ID}-backup-{ENV_ID}".replace("_", "-")
diff --git a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
index 5c2be86bd8..07677e11cb 100644
--- a/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
+++ b/tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore_hive_partition_sensor.py
@@ -42,10 +42,11 @@ from airflow.providers.google.cloud.operators.gcs import GCSDeleteBucketOperator
 from airflow.providers.google.cloud.sensors.dataproc_metastore import MetastoreHivePartitionSensor
 from airflow.providers.google.cloud.transfers.gcs_to_gcs import GCSToGCSOperator
 from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
DAG_ID = "hive_partition_sensor"
-PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "demo-project")
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "demo-env")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
REGION = "europe-west1"
NETWORK = "default"