#  Copyright 2021 Collate
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#  http://www.apache.org/licenses/LICENSE-2.0
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
"""
|
|
|
|
Utils module to convert different file types from s3 buckets into a dataframe
|
|
|
|
"""
|
|
|
|
|
2022-11-18 16:01:25 +05:30
|
|
|

import gzip
import json
import os
import traceback
from typing import Any

import pandas as pd
from pyarrow import fs
from pyarrow.parquet import ParquetFile

from metadata.utils.logger import utils_logger

logger = utils_logger()


def _get_json_text(key: str, text: bytes) -> str:
    if key.endswith(".gz"):
        # gzip.decompress returns bytes; decode it so both branches
        # honor the annotated return type of str
        return gzip.decompress(text).decode("utf-8")
    return text.decode("utf-8")
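

# Example (illustrative sketch): for a gzipped key the helper decompresses
# before decoding, so both of these return the string '{"a": 1}':
#
#   _get_json_text("data.json", b'{"a": 1}')
#   _get_json_text("data.json.gz", gzip.compress(b'{"a": 1}'))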


def read_csv_from_s3(
    client: Any,
    key: str,
    bucket_name: str,
    sep: str = ",",
):
    """
    Read the csv file from the s3 bucket and return a list of dataframes
    """
    try:
        stream = client.get_object(Bucket=bucket_name, Key=key)["Body"]
        chunk_list = []
        # read the csv in chunks to keep memory bounded on large files
        with pd.read_csv(stream, sep=sep, chunksize=200000) as reader:
            for chunks in reader:
                chunk_list.append(chunks)
        return chunk_list
    except Exception as exc:
        logger.debug(traceback.format_exc())
        logger.warning(f"Error reading CSV from s3 - {exc}")
        return None
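

# Example usage (illustrative sketch; assumes a boto3 s3 client, and the
# bucket/key names are hypothetical):
#
#   import boto3
#
#   s3_client = boto3.client("s3")
#   chunks = read_csv_from_s3(s3_client, key="data/orders.csv", bucket_name="my-bucket")
#   if chunks:
#       print(chunks[0].head())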


def read_tsv_from_s3(
    client: Any,
    key: str,
    bucket_name: str,
):
    """
    Read the tsv file from the s3 bucket and return a list of dataframes
    """
    try:
        return read_csv_from_s3(client, key, bucket_name, sep="\t")
    except Exception as exc:
        logger.debug(traceback.format_exc())
        logger.warning(f"Error reading TSV from s3 - {exc}")
        return None
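

# Sketch (hypothetical key): read_tsv_from_s3(s3_client, "data/orders.tsv", "my-bucket")
# behaves exactly like the CSV reader above, just with sep="\t".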


def read_json_from_s3(client: Any, key: str, bucket_name: str, sample_size: int = 100):
    """
    Read the json file from the s3 bucket and return a list of dataframes
    """
    obj = client.get_object(Bucket=bucket_name, Key=key)
    json_text = obj["Body"].read()
    data = json.loads(_get_json_text(key, json_text))
    if isinstance(data, list):
        # a top-level array maps directly to rows; sample to limit size
        return [pd.DataFrame.from_dict(data[:sample_size])]
    # a top-level object maps each field to a column; wrapping values in
    # pd.Series pads columns of unequal length with NaN
    return [
        pd.DataFrame.from_dict(
            {column: pd.Series(value) for column, value in data.items()}
        )
    ]
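

# Example usage (illustrative sketch, reusing the hypothetical s3_client from
# the sketch above). A top-level JSON array yields row-wise dataframes, while
# a top-level object yields one column per field:
#
#   dfs = read_json_from_s3(s3_client, key="data/users.json", bucket_name="my-bucket")
#   print(dfs[0].columns)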


def read_parquet_from_s3(client: Any, key: str, bucket_name: str):
    """
    Read the parquet file from the s3 bucket and return a list of dataframes
    """
    # pyarrow reads from s3 directly, reusing the region of the boto3 client
    s3_file = fs.S3FileSystem(region=client.meta.region_name)
    return [
        ParquetFile(s3_file.open_input_file(os.path.join(bucket_name, key)))
        .read()
        .to_pandas()
    ]
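

# Example usage (illustrative sketch; assumes AWS credentials are available to
# pyarrow, and the bucket/key names are hypothetical):
#
#   dfs = read_parquet_from_s3(s3_client, key="data/events.parquet", bucket_name="my-bucket")
#   print(dfs[0].shape)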