# -*- coding: utf-8 -*-
"""Helpers for uploading CI artifacts (reports, logs, builds) to S3."""

import hashlib
import logging
import os
import re
import shutil
import time
from multiprocessing.dummy import Pool

import boto3  # type: ignore

from compress_files import compress_file_fast
from env_helper import S3_TEST_REPORTS_BUCKET, S3_BUILDS_BUCKET, RUNNER_TEMP, CI


def _md5(fname):
    """Return the hex MD5 digest of the file at *fname*, read in 4 KiB chunks."""
    hash_md5 = hashlib.md5()
    with open(fname, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    logging.debug("MD5 for %s is %s", fname, hash_md5.hexdigest())
    return hash_md5.hexdigest()


def _flatten_list(lst):
    """Recursively flatten arbitrarily nested lists into one flat list.

    Non-list elements are kept as-is; order is preserved.
    """
    result = []
    for elem in lst:
        if isinstance(elem, list):
            result += _flatten_list(elem)
        else:
            result.append(elem)
    return result


class S3Helper:
    """Thin wrapper around a boto3 S3 client pinned to region us-east-1."""

    def __init__(self, host):
        # *host* is the S3 endpoint URL (e.g. a MinIO or AWS endpoint).
        self.session = boto3.session.Session(region_name='us-east-1')
        self.client = self.session.client('s3', endpoint_url=host)

    def _upload_file_to_s3(self, bucket_name, file_path, s3_path):
        """Upload *file_path* to *bucket_name* at key *s3_path*.

        Files under 64 MiB get an explicit ContentType picked from the key's
        extension; larger files go through a separate (compression) path.
        """
        logging.debug("Start uploading %s to bucket=%s path=%s",
                      file_path, bucket_name, s3_path)
        metadata = {}
        if os.path.getsize(file_path) < 64 * 1024 * 1024:
            # Small file: choose a ContentType so browsers render it inline.
            if s3_path.endswith(("txt", "log", "err", "out")):
                metadata['ContentType'] = "text/plain; charset=utf-8"
                logging.info("Content type %s for file path %s",
                             "text/plain; charset=utf-8", file_path)
            elif s3_path.endswith("html"):
                metadata['ContentType'] = "text/html; charset=utf-8"
                logging.info("Content type %s for file path %s",
                             "text/html; charset=utf-8", file_path)
            elif s3_path.endswith("css"):
                metadata['ContentType'] = "text/css; charset=utf-8"
                logging.info("Content type %s for file path %s",
                             "text/css; charset=utf-8", file_path)
            elif s3_path.endswith("js"):
                metadata['ContentType'] = "text/javascript; charset=utf-8"
                # Fix: original logged "text/css" here (copy-paste error),
                # while actually setting text/javascript.
                logging.info("Content type %s for file path %s",
                             "text/javascript; charset=utf-8", file_path)
            else:
                # Typo fix: "provied" -> "provided".
                logging.info("No content type provided for %s", file_path)
        else:
            # NOTE(review): the original source is truncated at this point.
            # The large-file branch began with
            #   re.search(r'\.(txt|log|err|out)$', s3_path) or re.search(r'\.log\..*(?...
            # — presumably compressing matching log files (compress_file_fast)
            # before upload, but the remainder of this method (including the
            # actual upload call) is not visible and could not be recovered.
            # TODO: restore the rest of this method from version control.
            raise NotImplementedError(
                "large-file upload path truncated in source; restore from VCS"
            )