diff --git a/allure-docker-api/app.py b/allure-docker-api/app.py
index dd713d9..2f8e226 100644
--- a/allure-docker-api/app.py
+++ b/allure-docker-api/app.py
@@ -12,8 +12,12 @@
 import shutil
 import tempfile
 import subprocess
+import io
+import mimetypes
 import zipfile
 import waitress
+from storage import get_storage
+import scripts
 from werkzeug.utils import secure_filename
 from flask import (
     Flask, jsonify, render_template, redirect,
@@ -145,8 +148,11 @@ def __str__(self):
 STATIC_CONTENT = os.environ['STATIC_CONTENT']
 PROJECTS_DIRECTORY = os.environ['STATIC_CONTENT_PROJECTS']
 EMAILABLE_REPORT_FILE_NAME = os.environ['EMAILABLE_REPORT_FILE_NAME']
+STORAGE_TYPE = os.getenv('STORAGE_TYPE', 'local')
+BUCKET_NAME = os.getenv('BUCKET_NAME')
 ORIGIN = 'api'
 SECURITY_SPECS_PATH = 'swagger/security_specs'
+ALLURE_RESOURCES = '/app/resources'
 REPORT_INDEX_FILE = 'index.html'
 DEFAULT_TEMPLATE = 'default.html'
@@ -157,6 +163,15 @@ def __str__(self):
 EMAILABLE_REPORT_TITLE = "Emailable Report"
 API_RESPONSE_LESS_VERBOSE = 0
 
+# define storage
+storage = get_storage(STORAGE_TYPE, BUCKET_NAME)
+scripts.setup_storage(storage)
+
+if "ALLURE_RESOURCES" in os.environ:
+    ALLURE_RESOURCES = os.environ['ALLURE_RESOURCES']
+    LOGGER.info('Overriding path for allure resources. ALLURE_RESOURCES=%s',
+                ALLURE_RESOURCES)
+
 if "EMAILABLE_REPORT_CSS_CDN" in os.environ:
     EMAILABLE_REPORT_CSS = os.environ['EMAILABLE_REPORT_CSS_CDN']
     LOGGER.info('Overriding CSS for Emailable Report. EMAILABLE_REPORT_CSS_CDN=%s',
@@ -905,7 +920,7 @@ def send_results_endpoint(): #pylint: disable=too-many-branches
             raise Exception('Problems with files: {}'.format(failed_files))
 
         if API_RESPONSE_LESS_VERBOSE != 1:
-            files = os.listdir(results_project)
+            files = storage.listdir(results_project)
             current_files_count = len(files)
             sent_files_count = len(validated_results)
             processed_files_count = len(processed_files)
@@ -970,7 +985,7 @@ def generate_report_endpoint():
         results_project = '{}/results'.format(project_path)
 
         if API_RESPONSE_LESS_VERBOSE != 1:
-            files = os.listdir(results_project)
+            files = storage.listdir(results_project)
 
         execution_name = request.args.get('execution_name')
         if execution_name is None or not execution_name:
@@ -984,22 +999,27 @@ def generate_report_endpoint():
         if execution_type is None or not execution_type:
             execution_type = ''
 
-        check_process(KEEP_HISTORY_PROCESS, project_id)
+        # check_process(KEEP_HISTORY_PROCESS, project_id)
         check_process(GENERATE_REPORT_PROCESS, project_id)
 
         exec_store_results_process = '1'
-        call([KEEP_HISTORY_PROCESS, project_id, ORIGIN])
-        response = subprocess.Popen([
-            GENERATE_REPORT_PROCESS, exec_store_results_process,
-            project_id, ORIGIN, execution_name, execution_from, execution_type],
-            stdout=subprocess.PIPE).communicate()[0]
+        scripts.keep_allure_history(project_id)
+        LOGGER.info('Executing generate-report for project_id %s', project_id)
+        build_order = 'latest'
+
+        result = scripts.generate_allure_report(
+            exec_store_results_process, project_id, origin=ORIGIN,
+            execution_name=execution_name, execution_from=execution_from,
+            execution_type=execution_type)
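+        # generate_allure_report() returns the new build order directly (the
+        # old shell script printed "BUILD_ORDER:<n>" on stdout instead); the
+        # 'latest' fallback below is used when nothing is returned.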
+
         call([RENDER_EMAIL_REPORT_PROCESS, project_id, ORIGIN])
 
-        build_order = 'latest'
-        for line in response.decode("utf-8").split("\n"):
-            if line.startswith("BUILD_ORDER"):
-                build_order = line[line.index(':') + 1: len(line)]
+        if result:
+            build_order = result
 
         report_url = url_for('get_reports_endpoint', project_id=project_id,
                              path='{}/index.html'.format(build_order), _external=True)
 
@@ -1102,7 +1116,7 @@ def clean_results_endpoint():
         check_process(GENERATE_REPORT_PROCESS, project_id)
         check_process(CLEAN_RESULTS_PROCESS, project_id)
 
-        call([CLEAN_RESULTS_PROCESS, project_id, ORIGIN])
+        scripts.clean_allure_results(project_id)
     except Exception as ex:
         body = {
             'meta_data': {
@@ -1139,39 +1153,7 @@ def emailable_report_render_endpoint():
             return resp
 
         check_process(GENERATE_REPORT_PROCESS, project_id)
-
-        project_path = get_project_path(project_id)
-        tcs_latest_report_project = "{}/reports/latest/data/test-cases/*.json".format(project_path)
-
-        files = glob.glob(tcs_latest_report_project)
-        files.sort(key=os.path.getmtime, reverse=True)
-        test_cases = []
-        for file_name in files:
-            with open(file_name) as file:
-                json_string = file.read()
-                LOGGER.debug("----TestCase-JSON----")
-                LOGGER.debug(json_string)
-                test_case = json.loads(json_string)
-                if test_case["hidden"] is False:
-                    test_cases.append(test_case)
-
-        server_url = url_for('latest_report_endpoint', project_id=project_id, _external=True)
-
-        if "SERVER_URL" in os.environ:
-            server_url = os.environ['SERVER_URL']
-
-        report = render_template(DEFAULT_TEMPLATE, css=EMAILABLE_REPORT_CSS,
-                                 title=EMAILABLE_REPORT_TITLE, projectId=project_id,
-                                 serverUrl=server_url, testCases=test_cases)
-
-        emailable_report_path = '{}/reports/{}'.format(project_path, EMAILABLE_REPORT_FILE_NAME)
-        file = None
-        try:
-            file = open(emailable_report_path, "w")
-            file.write(report)
-        finally:
-            if file is not None:
-                file.close()
+        report = render_emailable_report(project_id=project_id)
     except Exception as ex:
         body = {
             'meta_data': {
@@ -1201,9 +1183,12 @@ def emailable_report_export_endpoint():
             return resp
 
         check_process(GENERATE_REPORT_PROCESS, project_id)
 
         project_path = get_project_path(project_id)
         emailable_report_path = '{}/reports/{}'.format(project_path, EMAILABLE_REPORT_FILE_NAME)
+        if storage.type == 's3':
+            storage.get_files(emailable_report_path, os.path.join("/tmp/allure-results", project_id))
+            emailable_report_path = os.path.join("/tmp/allure-results", project_id, EMAILABLE_REPORT_FILE_NAME)
 
         report = send_file(emailable_report_path, as_attachment=True)
     except Exception as ex:
@@ -1325,7 +1309,7 @@ def delete_project_endpoint(project_id):
             return resp
 
         project_path = get_project_path(project_id)
-        shutil.rmtree(project_path)
+        storage.rmdir(project_path)
     except Exception as ex:
         body = {
             'meta_data': {
@@ -1362,13 +1346,13 @@ def get_project_endpoint(project_id):
 
         project_reports_path = '{}/reports'.format(get_project_path(project_id))
         reports_entity = []
-        for file in os.listdir(project_reports_path):
+        for file in storage.listdir(project_reports_path):
             file_path = '{}/{}/index.html'.format(project_reports_path, file)
-            is_file = os.path.isfile(file_path)
+            is_file = storage.isfile(file_path)
             if is_file is True:
                 report = url_for('get_reports_endpoint', project_id=project_id,
                                  path='{}/index.html'.format(file), _external=True)
-                reports_entity.append([report, os.path.getmtime(file_path), file])
+                reports_entity.append([report, storage.getmtime(file_path), file])
 
         reports_entity.sort(key=lambda reports_entity: reports_entity[1], reverse=True)
         reports = []
@@ -1416,7 +1400,7 @@ def get_project_endpoint(project_id):
 @jwt_required
 def get_projects_endpoint():
     try:
-        projects_dirs = os.listdir(PROJECTS_DIRECTORY)
+        projects_dirs = storage.listdir(PROJECTS_DIRECTORY)
         projects = get_projects(projects_dirs)
 
         body = {
@@ -1450,7 +1434,7 @@ def get_projects_search_endpoint():
             raise Exception("'id' query parameter is required")
 
         project_id = project_id.lower()
-        projects_filtered = get_projects_filtered_by_id(project_id, os.listdir(PROJECTS_DIRECTORY))
+        projects_filtered = get_projects_filtered_by_id(project_id, storage.listdir(PROJECTS_DIRECTORY))
         projects = get_projects(projects_filtered)
 
         if len(projects) == 0:
@@ -1482,14 +1466,52 @@ def get_projects_search_endpoint():
 @jwt_required
 def get_reports_endpoint(project_id, path):
     try:
-        project_path = '{}/reports/{}'.format(project_id, path)
-        return send_from_directory(PROJECTS_DIRECTORY, project_path)
+        object_path = '{}/{}/reports/{}'.format(PROJECTS_DIRECTORY, project_id, path)
+        mime_type, _ = mimetypes.guess_type(object_path)
+
+        if not mime_type:
+            mime_type = 'application/octet-stream'
+        pattern = r'^(latest|\d+)\/(?!.*\/)(styles\.css|app\.js)$'
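+
+        # When OPTIMIZE_STORAGE is enabled, each report's app.js and styles.css
+        # are symlinks to shared copies under ALLURE_RESOURCES (see scripts.py),
+        # so those two files are served from the local resources directory.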
+        if OPTIMIZE_STORAGE == 1 and re.match(pattern, path):
+            with open(transform_path(path), 'rb') as f:
+                content = f.read()
+        else:
+            content = storage.read_file(object_path)
+
+        return send_file(
+            io.BytesIO(content),
+            mimetype=mime_type,
+            as_attachment=False,
+            download_name=path.split('/')[-1]
+        )
+
+    # TODO: rewrite exception logic
     except Exception:
         if request.args.get('redirect') == 'false':
+            project_path = '{}/reports/{}'.format(project_id, path)
             return send_from_directory(PROJECTS_DIRECTORY, project_path)
         return redirect(url_for('get_project_endpoint',
                                 project_id=project_id, _external=True))
 
+def transform_path(path):
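+    # Maps 'latest/styles.css' or e.g. '12/app.js' onto the shared copies under
+    # ALLURE_RESOURCES; paths that do not match are returned unchanged.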
+    pattern = r'(latest|\d+)/(styles\.css|app\.js)$'
+    transformations = {
+        'styles.css': f'{ALLURE_RESOURCES}/styles.css',
+        'app.js': f'{ALLURE_RESOURCES}/app.js',
+    }
+    def replace(match):
+        filename = match.group(2)
+        return transformations.get(filename.replace('\\', ''), filename)
+
+    transformed_path = re.sub(pattern, replace, path)
+    return transformed_path
+
 def validate_files_array(files):
     if not files:
         raise Exception("'files[]' array is empty")
@@ -1536,9 +1553,11 @@ def validate_json_results(results):
 def send_files_results(results_project, validated_results, processed_files, failed_files):
     for file in validated_results:
         try:
+            LOGGER.info('Writing file: %s', file.filename)
             file_name = secure_filename(file.filename)
-            file.save("{}/{}".format(results_project, file_name))
+            storage.save_file(file, "{}/{}".format(results_project, file_name), 'wb')
         except Exception as ex:
+            LOGGER.info('Failed to write file %s: %s', file.filename, ex)
             error = {}
             error['message'] = str(ex)
             error['file_name'] = file_name
@@ -1552,8 +1572,8 @@ def send_json_results(results_project, validated_results, processed_files, failed_files):
         content_base64 = result.get('content_base64')
         file = None
         try:
-            file = open("%s/%s" % (results_project, file_name), "wb")
-            file.write(content_base64)
+            LOGGER.info('Saving JSON result: %s', file_name)
+            storage.save_json(content_base64, "%s/%s" % (results_project, file_name))
         except Exception as ex:
             error = {}
             error['message'] = str(ex)
@@ -1594,23 +1616,23 @@ def create_project(json_body):
     latest_report_project = '{}/reports/latest'.format(project_path)
     results_project = '{}/results'.format(project_path)
 
-    if not os.path.exists(latest_report_project):
-        os.makedirs(latest_report_project)
+    if not storage.exists(latest_report_project):
+        storage.mkdir(latest_report_project)
 
-    if not os.path.exists(results_project):
-        os.makedirs(results_project)
+    if not storage.exists(results_project):
+        storage.mkdir(results_project)
 
     return project_id
 
 def is_existent_project(project_id):
     if not project_id.strip():
         return False
-    return os.path.isdir(get_project_path(project_id))
+    return storage.isdir(get_project_path(project_id))
 
 def get_projects(projects_dirs):
     projects = {}
     for project_name in projects_dirs:
-        is_dir = os.path.isdir('{}/{}'.format(PROJECTS_DIRECTORY, project_name))
+        is_dir = storage.isdir('{}/{}'.format(PROJECTS_DIRECTORY, project_name))
         if is_dir is True:
             project = {}
             project['uri'] = url_for('get_project_endpoint',
@@ -1657,6 +1679,35 @@ def check_process(process_file, project_id):
     if proccount > 0:
         raise Exception("Processing files for project_id '{}'. Try later!".format(project_id))
 
+def render_emailable_report(project_id):
+    project_path = get_project_path(project_id)
+    tcs_latest_report_project = "{}/reports/latest/data/test-cases/*.json".format(project_path)
+
+    files = storage.glob(tcs_latest_report_project)
+    files.sort(key=lambda x: storage.getmtime(x), reverse=True)
+    test_cases = []
+    for file_name in files:
+        file = storage.read_file(file_name)
+        LOGGER.debug("----TestCase-JSON----")
+        LOGGER.debug(file)
+        test_case = json.loads(file)
+        if test_case["hidden"] is False:
+            test_cases.append(test_case)
+
+    server_url = url_for('latest_report_endpoint', project_id=project_id, _external=True)
+
+    if "SERVER_URL" in os.environ:
+        server_url = os.environ['SERVER_URL']
+
+    report = render_template(DEFAULT_TEMPLATE, css=EMAILABLE_REPORT_CSS,
+                             title=EMAILABLE_REPORT_TITLE, projectId=project_id,
+                             serverUrl=server_url, testCases=test_cases)
+
+    emailable_report_path = '{}/reports/{}'.format(project_path, EMAILABLE_REPORT_FILE_NAME)
+    storage.write_file(report, emailable_report_path, 'w')
+    return report
+
+
 if __name__ == '__main__':
     if DEV_MODE == 1:
         LOGGER.info('Starting in DEV_MODE')
diff --git a/allure-docker-api/scripts.py b/allure-docker-api/scripts.py
new file mode 100644
index 0000000..16d56c1
--- /dev/null
+++ b/allure-docker-api/scripts.py
@@ -0,0 +1,180 @@
+import os
+import re
+import json
+import subprocess
+
+storage = None
+
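+# app.py injects the storage backend (LocalStorage or S3Storage) once at
+# startup via setup_storage(); every function below shares that instance.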
+def setup_storage(storage_instance):
+    global storage
+    storage = storage_instance
+
+def keep_allure_history(project_id):
+    static_content_projects = os.getenv('STATIC_CONTENT_PROJECTS')
+    keep_history = os.getenv('KEEP_HISTORY', 'false').lower()  # default to 'false' if not set
+    project_reports_directory = os.path.join(static_content_projects, project_id, 'reports')
+    project_results_history = os.path.join(static_content_projects, project_id, 'results', 'history')
+    project_latest_report = os.path.join(project_reports_directory, 'latest', 'history')
+
+    # Create history in results directory
+    if keep_history in ["true", "1"]:
+        print(f"Creating history on results directory for project_id: {project_id} ...")
+        storage.mkdir(project_results_history)
+        if storage.exists(project_latest_report):
+            print("Copying history from previous results...")
+            storage.copy_dir(project_latest_report, project_results_history)
+    else:
+        # Remove the history directory if it exists
+        if storage.isdir(project_results_history):
+            print(f"Removing history directory from results for project_id: {project_id} ...")
+            storage.rmdir(project_results_history)
+
+def generate_allure_report(exec_store_results_process, project_id, origin='api',
+                           execution_name='Automatic Execution', execution_from='',
+                           execution_type='another'):
+    STATIC_CONTENT_PROJECTS = os.getenv('STATIC_CONTENT_PROJECTS')
+    EMAILABLE_REPORT_FILE_NAME = os.getenv('EMAILABLE_REPORT_FILE_NAME')
+    EXECUTOR_FILENAME = os.getenv('EXECUTOR_FILENAME')
+    OPTIMIZE_STORAGE = os.getenv('OPTIMIZE_STORAGE')
+    KEEP_HISTORY = os.getenv('KEEP_HISTORY')
+    ALLURE_RESOURCES = os.getenv('ALLURE_RESOURCES')
+    ROOT = os.getenv('ROOT')
+    PROJECT_REPORTS = os.path.join(STATIC_CONTENT_PROJECTS, project_id, 'reports')
+    STORAGE_TYPE = os.getenv('STORAGE_TYPE', 'local')
+
+    # Get the last report directory, similar logic to the Bash script
+    last_report_path_directory = None
+    if storage.listdir(PROJECT_REPORTS):
+        directories = [d for d in storage.listdir(PROJECT_REPORTS) if storage.isdir(os.path.join(PROJECT_REPORTS, d))]
+        if 'latest' in directories:
+            directories.remove('latest')
+        directories = sorted(
+            directories, reverse=True,
+            key=lambda x: storage.getmtime(os.path.join(PROJECT_REPORTS, x, 'dummyfile')) if STORAGE_TYPE == 's3' else storage.getmtime(os.path.join(PROJECT_REPORTS, x))
+        )
+        if directories:
+            last_report_path_directory = os.path.join(PROJECT_REPORTS, directories[0])
+
+    LAST_REPORT_DIRECTORY = os.path.basename(last_report_path_directory) if last_report_path_directory else None
+
+    if STORAGE_TYPE == 's3':
+        TEMP_PROJECT_DIRECTORY = os.path.join("/tmp/allure-results", project_id)
+        RESULTS_DIRECTORY = os.path.join(TEMP_PROJECT_DIRECTORY, 'results')
+        PROJECT_REPORTS = os.path.join(TEMP_PROJECT_DIRECTORY, 'reports')
+    else:
+        RESULTS_DIRECTORY = os.path.join(STATIC_CONTENT_PROJECTS, project_id, 'results')
+
+    # The executor file is written locally in both modes, so make sure the
+    # (possibly /tmp staging) directory exists before writing to it.
+    os.makedirs(RESULTS_DIRECTORY, exist_ok=True)
+
+    EXECUTOR_PATH = os.path.join(RESULTS_DIRECTORY, EXECUTOR_FILENAME)
+    print(f"Creating {EXECUTOR_FILENAME} for project_id: {project_id}")
+
+    build_order = None
+    if LAST_REPORT_DIRECTORY != "latest":
+        build_order = int(LAST_REPORT_DIRECTORY) + 1 if LAST_REPORT_DIRECTORY else 1
+        EXECUTOR_JSON = {
+            "reportName": project_id,
+            "buildName": f"{project_id} #{build_order}",
+            "buildOrder": build_order,
+            "name": execution_name,
+            "reportUrl": f"../{build_order}/index.html",
+            "buildUrl": execution_from,
+            "type": execution_type
+        }
+        if exec_store_results_process == "1":
+            with open(EXECUTOR_PATH, 'w') as file:
+                json.dump(EXECUTOR_JSON, file)
+        else:
+            with open(EXECUTOR_PATH, 'w') as file:
+                file.write('')
+    else:
+        with open(EXECUTOR_PATH, 'w') as file:
+            file.write('')
+
+    if STORAGE_TYPE == 's3':
+        storage.get_files(os.path.join(STATIC_CONTENT_PROJECTS, project_id, 'results'), TEMP_PROJECT_DIRECTORY)
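+
+    # The Allure CLI reads and writes the local filesystem only, so for S3 the
+    # results are staged into /tmp, the report is generated there, and the
+    # 'latest' report is uploaded back to the bucket via put_files() below.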
+    subprocess.run(['allure', 'generate', '--clean', RESULTS_DIRECTORY, '-o', os.path.join(PROJECT_REPORTS, 'latest')])
+
+    if OPTIMIZE_STORAGE == "1":
+        create_or_update_symlink(ALLURE_RESOURCES + '/app.js', os.path.join(PROJECT_REPORTS, 'latest', 'app.js'))
+        create_or_update_symlink(ALLURE_RESOURCES + '/styles.css', os.path.join(PROJECT_REPORTS, 'latest', 'styles.css'))
+
+    if STORAGE_TYPE == 's3':
+        storage.put_files(os.path.join(PROJECT_REPORTS, 'latest'), os.path.join(STATIC_CONTENT_PROJECTS, project_id, 'reports', 'latest'))
+
+    if KEEP_HISTORY in ["TRUE", "true", "1"]:
+        if exec_store_results_process == "1" and build_order:
+            store_allure_report(project_id, str(build_order))
+
+    keep_allure_latest_history(project_id)
+
+    return build_order
+
+
+def store_allure_report(project_id, build_order):
+    static_content_projects = os.environ.get('STATIC_CONTENT_PROJECTS')
+    project_reports_directory = os.path.join(static_content_projects, project_id, 'reports')
+    project_latest_report = os.path.join(project_reports_directory, 'latest')
+
+    # Check if the latest report directory is not empty
+    if storage.listdir(project_latest_report):
+        print(f"Storing report history for PROJECT_ID: {project_id}")
+        new_report_directory = os.path.join(project_reports_directory, build_order)
+        storage.mkdir(new_report_directory)
+        storage.copy_dir(project_latest_report, new_report_directory)
+
+def keep_allure_latest_history(project_id):
+    keep_history = os.environ.get('KEEP_HISTORY', '')
+    keep_history_latest = os.environ.get('KEEP_HISTORY_LATEST', '')
+    static_content_projects = os.environ.get('STATIC_CONTENT_PROJECTS')
+    email_report_file_name = os.environ.get('EMAILABLE_REPORT_FILE_NAME')
+
+    if keep_history.lower() == 'true' or keep_history == '1':
+        project_reports_directory = os.path.join(static_content_projects, project_id, 'reports')
+        keep_latest = 20
+        if re.match('^[0-9]+$', keep_history_latest):
+            keep_latest = int(keep_history_latest)
+
+        report_files = [f for f in storage.listdir(project_reports_directory) if f != 'latest' and f != '0' and email_report_file_name not in f]
+
+        current_size = len(report_files)
+
+        if current_size > keep_latest:
+            size_to_remove = current_size - keep_latest
+            print(f"Keeping latest {keep_latest} history reports for PROJECT_ID: {project_id}")
+
+            files_to_remove = sorted(report_files, key=int)[:size_to_remove]  # numeric sort: '10' > '2'
+
+            for file in files_to_remove:
+                storage.rmdir(os.path.join(project_reports_directory, file))
+                print(f"Removed: {file}")
+
+
+def clean_allure_results(project_id):
+    print(f"Cleaning results for PROJECT_ID: {project_id}")
+    project_results_directory = os.path.join(os.environ['STATIC_CONTENT_PROJECTS'], str(project_id), 'results')
+
+    # Check if the directory is not empty
+    files = storage.listdir(project_results_directory)
+    if files:
+        # Loop through files in the directory and remove each file
+        for filename in files:
+            file_path = os.path.join(project_results_directory, filename)
+            if storage.isfile(file_path):  # ensure it's a file and not a directory
+                storage.remove(file_path)
+
+    print(f"Results cleaned for PROJECT_ID: {project_id}")
+
+
+def create_or_update_symlink(source, target):
+    if os.path.exists(target) or os.path.islink(target):
+        os.remove(target)
+    os.symlink(source, target)
diff --git a/allure-docker-api/storage.py b/allure-docker-api/storage.py
new file mode 100644
index 0000000..f2b7c13
--- /dev/null
+++ b/allure-docker-api/storage.py
@@ -0,0 +1,193 @@
+from abc import ABC, abstractmethod
+import s3fs
+import os
+import shutil
+import glob
+
+class StorageInterface(ABC):
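+    # Filesystem-style contract shared by LocalStorage and S3Storage; app.py
+    # and scripts.py talk to storage exclusively through these methods.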
+    @abstractmethod
+    def isdir(self, path):
+        pass
+
+    @abstractmethod
+    def isfile(self, path):
+        pass
+
+    @abstractmethod
+    def getmtime(self, path):
+        pass
+
+    @abstractmethod
+    def listdir(self, path):
+        pass
+
+    @abstractmethod
+    def mkdir(self, path):
+        pass
+
+    @abstractmethod
+    def remove(self, path):
+        pass
+
+    @abstractmethod
+    def rmdir(self, path):
+        pass
+
+    @abstractmethod
+    def exists(self, path):
+        pass
+
+    @abstractmethod
+    def save_file(self, file, path, method):
+        pass
+
+    @abstractmethod
+    def save_json(self, content, path):
+        pass
+
+    @abstractmethod
+    def read_file(self, path):
+        pass
+
+    @abstractmethod
+    def copy_dir(self, source_path, dest_path):
+        pass
+
+class LocalStorage(StorageInterface):
+    def __init__(self):
+        self.type = 'local'
+
+    def isdir(self, path):
+        return os.path.isdir(path)
+
+    def isfile(self, path):
+        return os.path.isfile(path)
+
+    def getmtime(self, path):
+        return os.path.getmtime(path)
+
+    def listdir(self, path):
+        return os.listdir(path)
+
+    def glob(self, pathname):
+        return glob.glob(pathname)
+
+    def mkdir(self, path):
+        return os.makedirs(path, exist_ok=True)
+
+    def rmdir(self, path):
+        shutil.rmtree(path)
+
+    def remove(self, path):
+        os.remove(path)
+
+    def exists(self, path):
+        return os.path.exists(path)
+
+    def save_file(self, file, path, method):
+        file.save(path)
+
+    def write_file(self, file, path, method):
+        with open(path, method) as f:
+            if hasattr(file, 'read'):
+                f.write(file.read())
+            else:
+                f.write(file)
+
+    def save_json(self, content, path):
+        with open(path, "wb") as file:
+            file.write(content)
+
+    def read_file(self, path):
+        with open(path, 'rb') as f:
+            content = f.read()
+        return content
+
+    def copy_dir(self, source_path, dest_path):
+        shutil.copytree(source_path, dest_path, dirs_exist_ok=True)
+
+
+class S3Storage(StorageInterface):
+    def __init__(self, bucket_name):
+        self.fs = s3fs.S3FileSystem()
+        self.bucket_name = bucket_name
+        self.type = 's3'
+        self.init_root_folder()
+        self.write_file = self.save_file
+
+    def isdir(self, path):
+        return self.fs.isdir(f"{self.bucket_name}{path}")
+
+    def isfile(self, path):
+        return self.fs.isfile(f"{self.bucket_name}{path}")
+
+    def getmtime(self, path):
+        if path.startswith(self.bucket_name):
+            path = path[len(self.bucket_name):]
+        info = self.fs.info(f"{self.bucket_name}{path}")
+        return info['LastModified'].timestamp()
+
+    def listdir(self, path):
+        return [item.split('/')[-1] for item in self.fs.ls(f"{self.bucket_name}{path}") if not item.endswith('/')]
+
+    def glob(self, pathname):
+        return self.fs.glob(f"{self.bucket_name}{pathname}")
+
+    def mkdir(self, path):
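+        # S3 has no real directories: touching an empty 'dummyfile' object makes
+        # the prefix appear in listings and gives getmtime() an object to stat.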
+        self.fs.touch(f"{self.bucket_name}{path}/dummyfile")
+
+    def rmdir(self, path):
+        self.fs.rm(f"{self.bucket_name}{path}", recursive=True)
+
+    def remove(self, path):
+        return self.fs.rm(f"{self.bucket_name}{path}")
+
+    def exists(self, path):
+        return self.fs.exists(f"{self.bucket_name}{path}")
+
+    def save_file(self, file, path, method):
+        with self.fs.open(f"{self.bucket_name}{path}", method) as f:
+            if hasattr(file, 'read'):
+                f.write(file.read())
+            else:
+                f.write(file)
+
+    def save_json(self, content, path):
+        with self.fs.open(f"{self.bucket_name}{path}", "wb") as f:
+            f.write(content)
+
+    def read_file(self, path):
+        if path.startswith(self.bucket_name):
+            path = path[len(self.bucket_name):]
+        with self.fs.open(f"{self.bucket_name}{path}", 'rb') as f:
+            content = f.read()
+        return content
+
+    def copy_dir(self, source_path, dest_path):
+        self.fs.copy(f"{self.bucket_name}{source_path}/", f"{self.bucket_name}{dest_path}/", recursive=True)
+
+    # s3 specific methods
+
+    def get_files(self, source_path, dest_path):
+        self.fs.get(f"{self.bucket_name}{source_path}", f"{dest_path}/", recursive=True)
+
+    def put_files(self, source_path, dest_path):
+        self.fs.put(f"{source_path}/", f"{self.bucket_name}{dest_path}", recursive=True)
+
+    def init_root_folder(self):
+        base_dir = os.environ['STATIC_CONTENT_PROJECTS']
+        if not self.exists(base_dir):
+            self.mkdir(base_dir)
+
+
+def get_storage(storage_type, bucket_name=None):
+    if storage_type == 'local':
+        return LocalStorage()
+    elif storage_type == 's3':
+        return S3Storage(bucket_name=bucket_name)
+    else:
+        raise ValueError("Unsupported storage type")
diff --git a/docker-custom/Dockerfile.bionic-custom b/docker-custom/Dockerfile.bionic-custom
index 57d8904..51e732b 100644
--- a/docker-custom/Dockerfile.bionic-custom
+++ b/docker-custom/Dockerfile.bionic-custom
@@ -16,7 +16,7 @@ RUN apk update
 RUN apk add build-base
 RUN pip install -U pylint
 RUN pip install --upgrade pip setuptools wheel waitress && \
-    pip install -Iv Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0
+    pip install -Iv Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 s3fs==2024.3.1
 
 ENV ROOT_DIR=/code
 RUN mkdir -p $ROOT_DIR
@@ -62,7 +62,7 @@ RUN apt-get update && \
     ln -s `which python3` /usr/bin/python && \
     pip3 install --upgrade pip && \
    python${PYTHON_VERSION} -m pip install --upgrade pip setuptools wheel waitress && \
-    python${PYTHON_VERSION} -m pip install -v Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 && \
+    python${PYTHON_VERSION} -m pip install -v Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 s3fs==2024.3.1 && \
     curl ${ALLURE_REPO}/${ALLURE_RELEASE}/allure-commandline-${ALLURE_RELEASE}.zip -L -o /tmp/allure-commandline.zip && \
     unzip -q /tmp/allure-commandline.zip -d / && \
     apt-get remove -y unzip && \
diff --git a/docker/Dockerfile.bionic b/docker/Dockerfile.bionic
index 6b87181..014c4f5 100644
--- a/docker/Dockerfile.bionic
+++ b/docker/Dockerfile.bionic
@@ -17,7 +17,7 @@ RUN apk update
 RUN apk add build-base
 RUN pip install -U pylint
 RUN pip install --upgrade pip setuptools wheel waitress && \
-    pip install -Iv Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0
+    pip install -Iv Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 s3fs==2024.3.1
 
 ENV ROOT_DIR=/code
 RUN mkdir -p $ROOT_DIR
@@ -69,7 +69,7 @@ RUN apt-get update && \
     ln -s `which python3` /usr/bin/python && \
     pip3 install --upgrade pip && \
     python${PYTHON_VERSION} -m pip install --upgrade pip setuptools wheel waitress && \
-    python${PYTHON_VERSION} -m pip install -v Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 && \
+    python${PYTHON_VERSION} -m pip install -v Flask==3.0.2 Flask-JWT-Extended==4.6.0 flask-swagger-ui==4.11.1 requests==2.31.0 s3fs==2024.3.1 && \
     curl ${ALLURE_REPO}/${ALLURE_RELEASE}/allure-commandline-${ALLURE_RELEASE}.zip -L -o /tmp/allure-commandline.zip && \
     unzip -q /tmp/allure-commandline.zip -d / && \
     apt-get remove -y unzip && \