samredai commented on a change in pull request #3691: URL: https://github.com/apache/iceberg/pull/3691#discussion_r766795029
########## File path: python/src/iceberg/io/s3.py ##########
@@ -0,0 +1,185 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Union
+from urllib.parse import urlparse
+
+import boto3
+from botocore.errorfactory import ClientError
+
+from iceberg.io.base import FileIO, InputFile, OutputFile
+
+
+class S3InputFile(InputFile):
+    def __init__(self, location: str):
+        super().__init__(location=location)
+        self._s3_client = None
+
+    def __len__(self, s3_client=None):
+        s3 = self._create_s3_session() if not s3_client else s3_client
+        s3_url = urlparse(self.location)
+        resource = s3.head_object(Bucket=s3_url.netloc, Key=s3_url.path.lstrip("/"))
+        return resource["ContentLength"]
+
+    def _create_s3_session(self):
+        return boto3.Session().client("s3")
+
+    def exists(self, s3_client=None):
+        s3 = self._create_s3_session() if not s3_client else s3_client
+        s3_url = urlparse(self.location)
+        try:
+            s3.head_object(Bucket=s3_url.netloc, Key=s3_url.path.lstrip("/"))
+        except ClientError as e:
+            if "Not Found" in str(e):
+                return False
+            else:
+                raise
+
+        return True
+
+    def __call__(self, s3_client=None):
+        """Allows injecting an s3 client before the context is opened"""
+        self._s3_client = s3_client
+        return self
+
+    def __enter__(self):
+        """Enter context for S3InputFile
+
+        This sets a botocore.response.StreamingBody instance for the
+        location to self.input_stream. If an `s3_client` is not provided through
+        a call during context open, a boto3 session using the environment AWS
+        credentials will be created.
+
+        Example:
+            >>> input_file = S3InputFile(location="s3://foo/bar.json")
+            with input_file as f:
+                data = input_file.input_stream.read()
+
+            >>> import boto3
+            AWS_ACCESS_KEY_ID = "..."
+            AWS_SECRET_ACCESS_KEY = "..."
+            s3_client = boto3.Session(
+                aws_access_key_id=AWS_ACCESS_KEY_ID,
+                aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
+            ).client("s3")
+            input_file = S3InputFile(location="s3://baz/qux.json")
+            with input_file(s3_client=s3_client) as f:
+                data = input_file.input_stream.read()
+        """
+        super().__enter__()
+        s3 = self._create_s3_session() if not self._s3_client else self._s3_client
+        s3_url = urlparse(self.location)
+        resource = s3.get_object(Bucket=s3_url.netloc, Key=s3_url.path.lstrip("/"))
+        return resource["Body"]

Review comment:
   Indeed, it does not include any seek functionality, which is surprising. It does let you set a range in the `get_object` call, so we could create our own reader as you suggested. There are plenty of solid open-source libraries that do this already, such as [smart_open](https://github.com/RaRe-Technologies/smart_open). On the other hand, I feel like some of these libraries could be FileIO implementations in their own right.
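For reference, rolling our own reader on top of ranged `get_object` calls could look roughly like the sketch below. This is purely illustrative and not part of this PR; `S3RangeReader` and its structure are hypothetical, and it only shows the read/seek plumbing, not credentials or retries.

```python
import io
from urllib.parse import urlparse

import boto3


class S3RangeReader(io.RawIOBase):
    """Hypothetical seekable reader built on ranged S3 GETs (sketch only)."""

    def __init__(self, location: str, s3_client=None):
        self._s3 = s3_client or boto3.Session().client("s3")
        url = urlparse(location)
        self._bucket, self._key = url.netloc, url.path.lstrip("/")
        # One HEAD request up front to learn the object size for seek/EOF handling
        self._length = self._s3.head_object(Bucket=self._bucket, Key=self._key)["ContentLength"]
        self._pos = 0

    def readable(self):
        return True

    def seekable(self):
        return True

    def tell(self):
        return self._pos

    def seek(self, offset, whence=io.SEEK_SET):
        if whence == io.SEEK_SET:
            self._pos = offset
        elif whence == io.SEEK_CUR:
            self._pos += offset
        elif whence == io.SEEK_END:
            self._pos = self._length + offset
        return self._pos

    def read(self, size=-1):
        if size == 0 or self._pos >= self._length:
            return b""
        # HTTP Range headers are inclusive on both ends, e.g. "bytes=0-1023"
        end = self._length - 1 if size < 0 else min(self._pos + size, self._length) - 1
        response = self._s3.get_object(
            Bucket=self._bucket, Key=self._key, Range=f"bytes={self._pos}-{end}"
        )
        data = response["Body"].read()
        self._pos += len(data)
        return data
```

Something like `S3RangeReader("s3://foo/bar.parquet")` would then behave like a regular binary file object for callers that need `seek()`/`read()`.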
A `SmartOpenFileIO` would allow for streaming to and from "S3, GCS, Azure Blob Storage, HDFS, WebHDFS, HTTP, HTTPS, SFTP, or local filesystem", all with file-like objects.
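To make that concrete, a rough sketch of what a `SmartOpenFileIO` might look like is below. It is hypothetical: it assumes the `InputFile`/`OutputFile`/`FileIO` base classes expose simple `open()`/`create()`/`new_input()`/`new_output()` hooks, which is not exactly the context-manager interface in this PR, so treat it as a shape rather than a drop-in implementation.

```python
# Hypothetical sketch, not part of this PR. Requires `pip install smart_open`.
from smart_open import open as smart_open_open

from iceberg.io.base import FileIO, InputFile, OutputFile


class SmartOpenInputFile(InputFile):
    def open(self):
        # smart_open returns a file-like object for s3://, gs://, azure://,
        # hdfs://, webhdfs://, http(s)://, sftp://, and local paths alike
        return smart_open_open(self.location, "rb")


class SmartOpenOutputFile(OutputFile):
    def create(self):
        return smart_open_open(self.location, "wb")


class SmartOpenFileIO(FileIO):
    def new_input(self, location: str) -> InputFile:
        return SmartOpenInputFile(location=location)

    def new_output(self, location: str) -> OutputFile:
        return SmartOpenOutputFile(location=location)
```

The appealing part is that every scheme goes through the same code path, so one FileIO implementation would cover all of those backends without scheme-specific clients.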
