abdulbasitds commented on a change in pull request #6007: [AIRFLOW-2310] Enable AWS Glue Job Integration URL: https://github.com/apache/airflow/pull/6007#discussion_r397142449
########## File path: airflow/providers/amazon/aws/hooks/glue.py ########## @@ -0,0 +1,214 @@ +# -*- coding: utf-8 -*- +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from airflow.exceptions import AirflowException +from airflow.contrib.hooks.aws_hook import AwsHook +import os.path +import time + + +class AwsGlueJobHook(AwsHook): + """ + Interact with AWS Glue - create job, trigger, crawler + + :param job_name: unique job name per AWS account + :type str + :param desc: job description + :type str + :param concurrent_run_limit: The maximum number of concurrent runs allowed for a job + :type int + :param script_location: path to etl script either on s3 or local + :type str + :param conns: A list of connections used by the job + :type list + :param retry_limit: Maximum number of times to retry this job if it fails + :type int + :param num_of_dpus: Number of AWS Glue DPUs to allocate to this Job + :type int + :param region_name: aws region name (example: us-east-1) + :type region_name: str + :param s3_bucket: S3 bucket where logs and local etl script will be uploaded + :type str + :param iam_role_name: AWS IAM Role for Glue Job + :type str + """ + + def __init__(self, + job_name=None, + desc=None, + concurrent_run_limit=None, + 
script_location=None, + conns=None, + retry_limit=None, + num_of_dpus=None, + aws_conn_id='aws_default', + region_name=None, + iam_role_name=None, + s3_bucket=None, *args, **kwargs): + self.job_name = job_name + self.desc = desc + self.concurrent_run_limit = concurrent_run_limit or 1 + self.script_location = script_location + self.conns = conns or ["s3"] + self.retry_limit = retry_limit or 0 + self.num_of_dpus = num_of_dpus or 10 + self.aws_conn_id = aws_conn_id + self.region_name = region_name + self.s3_bucket = s3_bucket + self.role_name = iam_role_name + self.S3_PROTOCOL = "s3://" + self.S3_ARTIFACTS_PREFIX = 'artifacts/glue-scripts/' + self.S3_GLUE_LOGS = 'logs/glue-logs/' + super(AwsGlueJobHook, self).__init__(*args, **kwargs) + + def get_conn(self): + conn = self.get_client_type('glue', self.region_name) + return conn Review comment: I am sorry, but I didn't understand what I should do for this. If it's really necessary, could you please explain how I should do it, as these are changes from another pull request. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: [email protected] With regards, Apache Git Services
