From 4a0ac3613a9d77cbef626b476557d4dda9768e46 Mon Sep 17 00:00:00 2001 From: Eh Uh Date: Mon, 17 Apr 2023 15:51:13 +0200 Subject: [PATCH 001/210] syntax --- sedbackend/apps/core/db.py | 3 ++- sedbackend/apps/cvs/simulation/storage.py | 18 ++++++------------ 2 files changed, 8 insertions(+), 13 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index cc147989..c882a9f5 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,7 +10,8 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' port = 3306 diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index d31a57fa..ea5d451b 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -45,12 +45,9 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr if file_extension == '.xlsx': try: - # Workaround because current python version doesn't support - tmp_xlsx = tempfile.TemporaryFile() - # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_xlsx.write(dsm_file.file.read()) - tmp_xlsx.seek( - 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_xlsx.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_xlsx.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 dsm = get_dsm_from_excel(tmp_xlsx) if dsm is None: @@ -61,12 +58,9 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr tmp_xlsx.close() elif file_extension == '.csv': try: - # Workaround because current python version doesn't support - tmp_csv = tempfile.TemporaryFile() - # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_csv.write(dsm_file.file.read()) - tmp_csv.seek( - 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_csv = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_csv.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_csv.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 # This should hopefully open up the file for the processor. dsm = get_dsm_from_csv(tmp_csv) From 20575998f10b8ad5e6b7858a9fa2709b3fc67870 Mon Sep 17 00:00:00 2001 From: Eh Uh Date: Tue, 18 Apr 2023 12:18:54 +0200 Subject: [PATCH 002/210] Prepped for potential file upload with sim --- sedbackend/apps/cvs/simulation/storage.py | 62 +++++++++++++++++++++++ 1 file changed, 62 insertions(+) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index ea5d451b..7e2195d5 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -37,6 +37,68 @@ 'minutes': TimeFormat.MINUTES }) +#TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. 
+def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_settings: models.EditSimSettings, + vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: + design_results = [] + + if not check_sim_settings(sim_settings): + raise e.BadlyFormattedSettingsException + interarrival = sim_settings.interarrival_time + flow_time = sim_settings.flow_time + runtime = sim_settings.end_time - sim_settings.start_time + non_tech_add = sim_settings.non_tech_add + discount_rate = sim_settings.discount_rate + process = sim_settings.flow_process + time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) + + for vcs_id in vcs_ids: + for design_group_id in design_group_ids: + res = get_sim_data(db_connection, vcs_id, design_group_id) + if res is None or res == []: + raise e.VcsFailedException + + if not check_entity_rate(res, process): + raise e.RateWrongOrderException + + design_ids = [design.id for design in design_impl.get_all_designs(project_id, design_group_id)] + + if design_ids is None or []: + raise e.DesignIdsNotFoundException + + for design_id in design_ids: + # get_design(design_id) + processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, + design_id) # BUG probably. Populate processes changes the order of the processes. + + dsm = {} #TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id + + sim = des.Des() + + try: + results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, + non_tech_add, dsm, time_unit, + discount_rate, runtime) + + except Exception as exc: + tb = sys.exc_info()[2] + logger.debug( + f'{exc.__class__}, {exc}, {exc.with_traceback(tb)}') + print(f'{exc.__class__}, {exc}') + raise e.SimulationFailedException + + design_res = models.Simulation( + time=results.timesteps[-1], + mean_NPV=results.mean_npv(), + max_NPVs=results.all_max_npv(), + mean_payback_time=results.mean_npv_payback_time(), + all_npvs=results.npvs + ) + + design_results.append(design_res) + logger.debug('Returning the results') + return design_results + def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_params: models.FileParams, From 213cae680e5455479946d8b6e40213da5b4f55fa Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 25 Apr 2023 18:55:55 +0200 Subject: [PATCH 003/210] Began adding functionality to upload files --- sedbackend/apps/cvs/life_cycle/storage.py | 8 ++++++++ sql/V220608_cvs.sql | 10 ++++++++++ 2 files changed, 18 insertions(+) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 33612e01..f53da2de 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -5,6 +5,8 @@ from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions from mysql.connector import Error +from sedbackend.apps.core.files import models as file_models +from sedbackend.apps.core.files import implementation as file_impl CVS_NODES_TABLE = 'cvs_nodes' CVS_NODES_COLUMNS = ['cvs_nodes.id', 'vcs', 'from', 'to', 'pos_x', 'pos_y'] @@ -257,3 +259,9 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i update_node(db_connection, project_id, node.id, updated_node) return True + + +def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, + vcs_id: int, file: file_models.StoredFilePost) -> 
bool: + + pass \ No newline at end of file diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index 3dd8940e..a0805890 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -331,4 +331,14 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` FOREIGN KEY(`value_driver`) REFERENCES `seddb`.`cvs_value_drivers`(`id`) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` +( + `vcs_id` INT UNSIGNED NOT NULL, + `file_id` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs_id`), + FOREIGN KEY (`vcs_id`) + REFERENCES `seddb`.`cvs_vcss`(`id`) + ON DELETE CASCADE, ); \ No newline at end of file From 72d13bf78f1155103e2f8643914ed1dbeb333659 Mon Sep 17 00:00:00 2001 From: Eh Uh Date: Tue, 25 Apr 2023 17:51:54 +0200 Subject: [PATCH 004/210] Added simple storage methods for fetching files --- sedbackend/apps/cvs/life_cycle/storage.py | 34 ++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index f53da2de..d029ae49 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,5 @@ from fastapi.logger import logger +from fastapi.responses import FileResponse from mysql.connector.pooling import PooledMySQLConnection from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, Sort @@ -8,6 +9,7 @@ from sedbackend.apps.core.files import models as file_models from sedbackend.apps.core.files import implementation as file_impl + CVS_NODES_TABLE = 'cvs_nodes' CVS_NODES_COLUMNS = ['cvs_nodes.id', 'vcs', 'from', 'to', 'pos_x', 'pos_y'] @@ -17,6 +19,8 @@ CVS_START_STOP_NODES_TABLE = 'cvs_start_stop_nodes' CVS_START_STOP_NODES_COLUMNS = CVS_NODES_COLUMNS + ['type'] +CVS_DSM_FILES_TABLE = 'cvs_dsm_files' +CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] # TODO error handling @@ -264,4 +268,32 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file: file_models.StoredFilePost) -> bool: - pass \ No newline at end of file + #TODO + # * ensure that the file is what it says + # * Make sure that all fields (all processes in vcs) in the file exists + # * Check file size + + stored_file = file_impl.impl_save_file(file) + + insert_statement = MySQLStatementBuilder(db_connection) + insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ + .set_values([vcs_id, stored_file.id])\ + .execute(fetch_type=FetchType.FETCH_NONE) + + return True + +def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> FileResponse: + + select_statement = MySQLStatementBuilder(db_connection) + file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS)\ + .where('vcs_id = %s', [vcs_id]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + file_path = file_impl.impl_get_file_path(file_res['file_id'], user_id) + resp = FileResponse( + path=file_path.path, + filename=file_path.filename + ) + + return resp + From 1cd640c5e5b9992888747f990d11e412f6b8bf6b Mon Sep 17 00:00:00 2001 From: EppChops Date: Thu, 4 May 2023 14:08:45 +0200 Subject: [PATCH 005/210] Fixed test that fails --- tests/apps/cvs/simulation/test_sim_multiprocessing.py | 7 ++++--- tests/apps/cvs/simulation/testutils.py | 2 +- tests/apps/cvs/testutils.py | 9 ++++++--- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git 
a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index e37822f0..620a5942 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -212,13 +212,15 @@ def test_run_mc_sim_both_flows(client, std_headers, std_user): tu.delete_vd_from_user(current_user.id) -''' + def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) + first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) + if first_tech_process is None: + raise Exception("Cannot find first technical process") settings.monte_carlo = False #Act @@ -240,4 +242,3 @@ def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): tu.delete_VCS_with_ids(project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) -''' \ No newline at end of file diff --git a/tests/apps/cvs/simulation/testutils.py b/tests/apps/cvs/simulation/testutils.py index 6f4f1f27..5d8dfae5 100644 --- a/tests/apps/cvs/simulation/testutils.py +++ b/tests/apps/cvs/simulation/testutils.py @@ -11,7 +11,7 @@ def setup_single_simulation(user_id) -> Tuple[CVSProject, VCS, DesignGroup, List project = tu.seed_random_project(user_id) vcs = tu.seed_random_vcs(project.id) design_group = tu.seed_random_design_group(project.id) - tu.seed_random_formulas(project.id, vcs.id, design_group.id, user_id, 10) #Also creates the vcs rows + tu.seed_random_formulas(project.id, vcs.id, design_group.id, user_id, 15) #Also creates the vcs rows design = tu.seed_random_designs(project.id, design_group.id, 1) settings = tu.seed_simulation_settings(project.id, [vcs.id], [design[0].id]) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 76b3c7eb..a6962dcc 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -153,7 +153,10 @@ def random_table_row( subprocess = random_subprocess(project_id, vcs_id) subprocess_id = subprocess.id else: - iso_process_id = random.randint(1, 25) + if random.randint(1, 5) == 1: #Give 1/5 chance to produce non-tech process + iso_process_id = random.randint(1, 14) + else: + iso_process_id = random.randint(15, 25) if stakeholder is None: stakeholder = tu.random_str(5, 50) @@ -556,10 +559,10 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int) rows.reverse() # reverse back to find first technical process for row in rows: if row.iso_process is not None: - if row.iso_process.category == 'Technical processes': + if row.iso_process.category == 'Technical processes' and row.id != last_id: return row else: - if row.subprocess.parent_process.category == 'Technical processes': + if row.subprocess.parent_process.category == 'Technical processes' and row.id != last_id: return row From 8482abc3d4c5b9fc50912f51b7c5454141d26718 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 9 May 2023 17:47:02 +0200 Subject: [PATCH 006/210] Added some checks --- requirements.txt | 1 + sedbackend/apps/cvs/life_cycle/exceptions.py | 4 ++++ sedbackend/apps/cvs/life_cycle/storage.py | 6 ++++++ 3 files changed, 11 insertions(+) diff --git a/requirements.txt b/requirements.txt index 70b5087d..d31d3c6d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,6 
+11,7 @@ python-multipart==0.0.6 starlette==0.26.1 uvicorn==0.21.1 openpyxl==3.1.2 +python-magic==0.4.27 pytest==7.3.1 httpx==0.24.0 \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index fbac10e7..99488567 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -17,3 +17,7 @@ class InvalidNodeType(Exception): class NodeFailedToUpdateException(Exception): pass + + +class InvalidFileTypeException(Exception): + pass \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index d029ae49..2b1f1b9c 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,3 +1,4 @@ +import magic from fastapi.logger import logger from fastapi.responses import FileResponse from mysql.connector.pooling import PooledMySQLConnection @@ -268,6 +269,11 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file: file_models.StoredFilePost) -> bool: + if file.extension != ".csv": + raise exceptions.InvalidFileTypeException + + mime = magic.from_buffer(open(file.file_object, "rb").read(2048), mime=True) + print(mime) #TODO # * ensure that the file is what it says # * Make sure that all fields (all processes in vcs) in the file exists From 9b02b67561240958f0b40b964adbb8eda3625340 Mon Sep 17 00:00:00 2001 From: EppChops Date: Wed, 10 May 2023 23:06:44 +0200 Subject: [PATCH 007/210] Added api call for saving files --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/life_cycle/implementation.py | 16 ++++++++++++++++ sedbackend/apps/cvs/life_cycle/router.py | 15 +++++++++++++++ sedbackend/apps/cvs/life_cycle/storage.py | 7 ++++--- sql/V220608_cvs.sql | 2 +- 5 files changed, 38 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 425398de..08cb86a8 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,8 +10,8 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' +#host = 'localhost' database = 'seddb' port = 3306 diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 0ad37444..198dce90 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -6,6 +6,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions +from sedbackend.apps.core.files import models as file_models def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: @@ -155,3 +156,18 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: status_code=status.HTTP_400_BAD_REQUEST, detail=f'Project with id={project_id} is not a part of vcs with id={vcs_id}.', ) + +def save_dsm_file(project_id: int, vcs_id: int, + file: file_models.StoredFilePost) -> bool: + try: + with get_connection() as con: + result = storage.save_dsm_file(con, project_id, vcs_id, file) + con.commit() + return result + except exceptions.InvalidFileTypeException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, 
+ detail='Wrong filetype' + ) + + diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index ec4fa9f9..49dcb019 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -1,8 +1,12 @@ from fastapi import APIRouter, Depends +from fastapi.datastructures import UploadFile from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.cvs.life_cycle import models, implementation +from sedbackend.apps.core.files import models as file_models +from sedbackend.apps.core.users.models import User +from sedbackend.apps.core.authentication.utils import get_current_active_user from sedbackend.apps.cvs.project.router import CVS_APP_SID router = APIRouter() @@ -56,3 +60,14 @@ async def get_bpmn(native_project_id: int, vcs_id: int) -> models.BPMNGet: ) async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: return implementation.update_bpmn(native_project_id, vcs_id, bpmn) + + +@router.post( + '/project/{native_project_id}/vcs/{vcs_id}/upload-dsm', + summary="Upload DSM file", + response_model=bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] +) +async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: + model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id) + return implementation.save_dsm_file(native_project_id, vcs_id, model_file) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 2b1f1b9c..51a5bc6f 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,3 @@ -import magic from fastapi.logger import logger from fastapi.responses import FileResponse from mysql.connector.pooling import PooledMySQLConnection @@ -9,6 +8,7 @@ from mysql.connector import Error from sedbackend.apps.core.files import models as file_models from sedbackend.apps.core.files import implementation as file_impl +import magic CVS_NODES_TABLE = 'cvs_nodes' @@ -272,8 +272,9 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, if file.extension != ".csv": raise exceptions.InvalidFileTypeException - mime = magic.from_buffer(open(file.file_object, "rb").read(2048), mime=True) - print(mime) + #mime = magic.from_buffer(open(file.file_object, "rb").read(2048), mime=True) + #print(mime) + #logger.debug(mime) #TODO # * ensure that the file is what it says # * Make sure that all fields (all processes in vcs) in the file exists diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index a0805890..fd70f045 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -340,5 +340,5 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` PRIMARY KEY (`vcs_id`), FOREIGN KEY (`vcs_id`) REFERENCES `seddb`.`cvs_vcss`(`id`) - ON DELETE CASCADE, + ON DELETE CASCADE ); \ No newline at end of file From b468f9bf8d1698880cf7d8d0dae882b1c474241b Mon Sep 17 00:00:00 2001 From: EppChops Date: Fri, 12 May 2023 13:27:30 +0200 Subject: [PATCH 008/210] Can now fetch files --- sedbackend/apps/cvs/life_cycle/implementation.py | 12 ++++++++++++ sedbackend/apps/cvs/life_cycle/router.py | 12 +++++++++++- sedbackend/apps/cvs/life_cycle/storage.py | 4 ++-- 3 files changed, 25 insertions(+), 3 deletions(-) diff --git 
a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 198dce90..410e5ba1 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -1,5 +1,6 @@ from fastapi import HTTPException from starlette import status +from fastapi.responses import FileResponse from sedbackend.apps.core.authentication import exceptions as auth_ex from sedbackend.apps.core.db import get_connection @@ -171,3 +172,14 @@ def save_dsm_file(project_id: int, vcs_id: int, ) +def get_dsm_file(project_id: int, vcs_id: int, user_id: int) -> FileResponse: + try: + with get_connection() as con: + res = storage.get_dsm_file(con, project_id, vcs_id, user_id) + con.commit() + return res + except Exception: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Something wrong" + ) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 49dcb019..b1f58f1a 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -1,5 +1,6 @@ from fastapi import APIRouter, Depends from fastapi.datastructures import UploadFile +from fastapi.responses import FileResponse from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel @@ -70,4 +71,13 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) ) async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id) - return implementation.save_dsm_file(native_project_id, vcs_id, model_file) \ No newline at end of file + return implementation.save_dsm_file(native_project_id, vcs_id, model_file) + +@router.get( + '/project/{native_project_id}/vcs/{vcs_id}/get-dsm', + summary="Fetch DSM file", + response_class=FileResponse, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> FileResponse: + return implementation.get_dsm_file(native_project_id, vcs_id, user.id) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 51a5bc6f..a05d3269 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -292,9 +292,9 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> FileResponse: select_statement = MySQLStatementBuilder(db_connection) - file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS)\ + file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ .where('vcs_id = %s', [vcs_id]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) file_path = file_impl.impl_get_file_path(file_res['file_id'], user_id) resp = FileResponse( From 92c4571632e47d9ceab8055bbbbdc8d56b606eac Mon Sep 17 00:00:00 2001 From: EppChops Date: Fri, 12 May 2023 16:49:32 +0200 Subject: [PATCH 009/210] Safer save files --- docker-compose.yml | 2 +- sedbackend/apps/core/db.py | 5 +++-- sedbackend/apps/core/files/storage.py | 
2 ++ sedbackend/apps/cvs/life_cycle/storage.py | 22 ++++++++++++++++++---- 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 22b4209a..2cbbba5a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: networks: - sedlab ports: - - "3001:3306" + - "3001:3006" build: context: . dockerfile: Dockerfile-mysql-server diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 08cb86a8..c882a9f5 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,12 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' -#host = 'localhost' +#host = 'core-db' +host = 'localhost' database = 'seddb' port = 3306 + try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( user=user, diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index 75e3e873..229f6dce 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -8,7 +8,9 @@ import sedbackend.apps.core.files.exceptions as exc from sedbackend.libs.mysqlutils import MySQLStatementBuilder, exclude_cols, FetchType +FILES_CHOPS_TEMP_DIR = f'{os.path.abspath(os.sep)}/home/chops/' FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' +FILES_RELATIVE_UPLOAD_DIR = FILES_CHOPS_TEMP_DIR + "/sed_lab/uploaded_files/" FILES_TABLE = 'files' FILES_COLUMNS = ['id', 'temp', 'uuid', 'filename', 'insert_timestamp', 'directory', 'owner_id', 'extension'] diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index a05d3269..f0006e31 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -272,15 +272,29 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, if file.extension != ".csv": raise exceptions.InvalidFileTypeException - #mime = magic.from_buffer(open(file.file_object, "rb").read(2048), mime=True) - #print(mime) - #logger.debug(mime) + with file.file_object as f: + f.seek(0) + tmp_file = f.read() + #TODO check file size + #TODO Check fields in file + mime = magic.from_buffer(tmp_file) + print(mime) + logger.debug(mime) + if mime != "CSV text": #TODO doesn't work with windows if we create the file in excel. + raise exceptions.InvalidFileTypeException + + + f.seek(0) + + + stored_file = file_impl.impl_save_file(file) #TODO # * ensure that the file is what it says # * Make sure that all fields (all processes in vcs) in the file exists # * Check file size + # * If file exists, then remove the previous file and replace it. 
- stored_file = file_impl.impl_save_file(file) + insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ From 908571b483ff2d4eacdc0d93bdc36dfa96b011df Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 16 May 2023 14:21:57 +0200 Subject: [PATCH 010/210] checks on DSM file now in place in save_dsm --- sedbackend/apps/cvs/life_cycle/exceptions.py | 6 +++++ .../apps/cvs/life_cycle/implementation.py | 12 ++++++++- sedbackend/apps/cvs/life_cycle/storage.py | 26 ++++++++++++++++--- 3 files changed, 39 insertions(+), 5 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index 99488567..e27dc2a6 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -20,4 +20,10 @@ class NodeFailedToUpdateException(Exception): class InvalidFileTypeException(Exception): + pass + +class TooLargeFileException(Exception): + pass + +class ProcessesDoesNotMatchVcsException(Exception): pass \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 410e5ba1..40e7f672 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -167,9 +167,19 @@ def save_dsm_file(project_id: int, vcs_id: int, return result except exceptions.InvalidFileTypeException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, detail='Wrong filetype' ) + except exceptions.TooLargeFileException: + raise HTTPException( + status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, + detail='File too large' + ) + except exceptions.ProcessesDoesNotMatchVcsException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Processes in DSM does not match processes in VCS' + ) def get_dsm_file(project_id: int, vcs_id: int, user_id: int) -> FileResponse: diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index f0006e31..5040f524 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -4,10 +4,11 @@ from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.life_cycle import exceptions, models -from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions +from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, implementation as vcs_impl from mysql.connector import Error from sedbackend.apps.core.files import models as file_models from sedbackend.apps.core.files import implementation as file_impl +import pandas as pd import magic @@ -23,6 +24,8 @@ CVS_DSM_FILES_TABLE = 'cvs_dsm_files' CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] +MAX_FILE_SIZE = 100 * 10**8 #100 MB + # TODO error handling def populate_process_node(db_connection, project_id, result) -> models.ProcessNodeGet: @@ -283,15 +286,30 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, if mime != "CSV text": #TODO doesn't work with windows if we create the file in excel. 
raise exceptions.InvalidFileTypeException + if f.tell() > MAX_FILE_SIZE: + raise exceptions.TooLargeFileException f.seek(0) + dsm_file = pd.read_csv(f) + print(dsm_file) + print(dsm_file['processes'].values) + vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) #Question: Should we demand that it is in the exact same order + # or is it enough that it exist in the vcs? + print(vcs_table) + + vcs_processes = [row.iso_process.name if row.iso_process is not None else \ + row.subprocess.name for row in vcs_table] + for process in dsm_file['processes'].values: + if process not in vcs_processes: + raise exceptions.ProcessesDoesNotMatchVcsException + f.seek(0) stored_file = file_impl.impl_save_file(file) #TODO - # * ensure that the file is what it says - # * Make sure that all fields (all processes in vcs) in the file exists - # * Check file size + # * ensure that the file is what it says DONE + # * Make sure that all fields (all processes in vcs) in the file exists DONE ish + # * Check file size DONE # * If file exists, then remove the previous file and replace it. From dde88ff9c377100b5bb81a94921ef720dfd78aed Mon Sep 17 00:00:00 2001 From: EppChops Date: Mon, 22 May 2023 15:09:29 +0200 Subject: [PATCH 011/210] dependenciesfor files --- sedbackend/apps/core/authentication/utils.py | 2 +- sedbackend/apps/core/files/dependencies.py | 30 +++++++++++++++++++ sedbackend/apps/core/files/exceptions.py | 3 ++ sedbackend/apps/core/files/implementation.py | 5 ++++ sedbackend/apps/core/files/models.py | 1 + sedbackend/apps/core/files/router.py | 18 +++++++++-- sedbackend/apps/core/files/storage.py | 22 ++++++++++++-- sedbackend/apps/cvs/life_cycle/exceptions.py | 3 ++ .../apps/cvs/life_cycle/implementation.py | 15 +++++++++- sedbackend/apps/cvs/life_cycle/storage.py | 23 ++++++++++++-- 10 files changed, 112 insertions(+), 10 deletions(-) create mode 100644 sedbackend/apps/core/files/dependencies.py diff --git a/sedbackend/apps/core/authentication/utils.py b/sedbackend/apps/core/authentication/utils.py index 00fe53cd..0a238bf5 100644 --- a/sedbackend/apps/core/authentication/utils.py +++ b/sedbackend/apps/core/authentication/utils.py @@ -40,7 +40,7 @@ async def verify_scopes(security_scopes: SecurityScopes, token: str = Depends(oa raise credentials_exception logger.debug(f"VERIFY SCOPE: Required scopes: {security_scopes.scopes}, user scopes: {token_data.scopes}") - + print(f"VERIFY SCOPE: Required scopes: {security_scopes.scopes}, user scopes: {token_data.scopes}") for scope in security_scopes.scopes: if scope not in token_data.scopes: logger.warning(f'VERIFY SCOPE: User "{token_data.username}" attempted to access an endpoint without the appropriate scope.') diff --git a/sedbackend/apps/core/files/dependencies.py b/sedbackend/apps/core/files/dependencies.py new file mode 100644 index 00000000..dc7a338e --- /dev/null +++ b/sedbackend/apps/core/files/dependencies.py @@ -0,0 +1,30 @@ +from typing import Any, List + +from fastapi import HTTPException, Request, status +from fastapi.logger import logger + +from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.core.files.implementation import impl_get_file_path + +class FileAccessChecker: + def __init__(self, allowed_levels: List[AccessLevel]) -> None: + self.access_levels = allowed_levels + + def __call__(self, file_id: int, request: Request): + logger.debug(f'Does user with id {request.state.user_id}' + f'have the appropriate access levels ({self.access_levels})?') + user_id = request.state.user_id + + file_path = 
impl_get_file_path(file_id, user_id) + + #Should have accesslevel on the files... + # or check simply for admin access + + if user_id == file_path.owner_id or AccessLevel.ADMIN in self.access_levels: + logger.debug(f'Yes, user {user_id} has correct access level') + return True + + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User does not have the necessary access level" + ) \ No newline at end of file diff --git a/sedbackend/apps/core/files/exceptions.py b/sedbackend/apps/core/files/exceptions.py index 0aa2f6a1..52c5f9a9 100644 --- a/sedbackend/apps/core/files/exceptions.py +++ b/sedbackend/apps/core/files/exceptions.py @@ -8,3 +8,6 @@ class FileNotFoundException(Exception): class FileParsingException(Exception): pass + +class FileNotDeletedException(Exception): + pass \ No newline at end of file diff --git a/sedbackend/apps/core/files/implementation.py b/sedbackend/apps/core/files/implementation.py index 62a18aa5..e112eb63 100644 --- a/sedbackend/apps/core/files/implementation.py +++ b/sedbackend/apps/core/files/implementation.py @@ -39,6 +39,11 @@ def impl_delete_file(file_id: int, current_user_id: int) -> bool: status_code=status.HTTP_403_FORBIDDEN, detail=f"User does not have access to a file with id = {file_id}" ) + except exc.FileNotDeletedException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"File could not be deleted" + ) def impl_get_file_path(file_id: int, current_user_id: int) -> models.StoredFilePath: diff --git a/sedbackend/apps/core/files/models.py b/sedbackend/apps/core/files/models.py index 4c2cd9fb..6991f1c8 100644 --- a/sedbackend/apps/core/files/models.py +++ b/sedbackend/apps/core/files/models.py @@ -47,6 +47,7 @@ class StoredFilePath(BaseModel): id: int filename: str path: str + owner_id: int extension: str diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index 68910132..770d6249 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -1,8 +1,10 @@ -from fastapi import APIRouter, Depends +from fastapi import APIRouter, Depends, Security from fastapi.responses import FileResponse import sedbackend.apps.core.files.implementation as impl -from sedbackend.apps.core.authentication.utils import get_current_active_user +from sedbackend.apps.core.projects.dependencies import ProjectAccessChecker +import sedbackend.apps.core.projects.models as models +from sedbackend.apps.core.authentication.utils import get_current_active_user, verify_scopes from sedbackend.apps.core.users.models import User @@ -22,3 +24,15 @@ async def get_file(file_id: int, current_user: User = Depends(get_current_active filename=stored_file_path.filename ) return resp + +@router.delete("/{file_id}/delete", + summary="Delete file", + response_model=bool, + dependencies=[Security(verify_scopes, scopes=["admin"])]) +async def delete_file(file_id: int, current_user: User = Depends(get_current_active_user)): + """ + Delete a file. + Only accessible to admins and the owner of the file. 
+ """ + return impl.impl_delete_file(file_id, current_user.id) + \ No newline at end of file diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index 229f6dce..69eab185 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -12,7 +12,8 @@ FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' FILES_RELATIVE_UPLOAD_DIR = FILES_CHOPS_TEMP_DIR + "/sed_lab/uploaded_files/" FILES_TABLE = 'files' -FILES_COLUMNS = ['id', 'temp', 'uuid', 'filename', 'insert_timestamp', 'directory', 'owner_id', 'extension'] +FILES_COLUMNS = ['id', 'temp', 'uuid', 'filename', + 'insert_timestamp', 'directory', 'owner_id', 'extension'] def db_save_file(con: PooledMySQLConnection, file: models.StoredFilePost) -> models.StoredFileEntry: @@ -34,6 +35,20 @@ def db_save_file(con: PooledMySQLConnection, file: models.StoredFilePost) -> mod def db_delete_file(con: PooledMySQLConnection, file_id: int, current_user_id: int) -> bool: + stored_file_path = db_get_file_path(con, file_id, current_user_id) + + if stored_file_path.owner_id != current_user_id: #perhaps check admin scopes + pass #Raise permissions exception + try: + os.remove(stored_file_path.path) + delete_stmnt = MySQLStatementBuilder(con) + delete_stmnt.delete(FILES_TABLE)\ + .where('id=?', [file_id])\ + .execute(fetch_type=FetchType.FETCH_NONE) + + except Exception as e: + raise exc.FileNotDeletedException + return True @@ -53,7 +68,7 @@ def db_get_file_entry(con: PooledMySQLConnection, file_id: int, current_user_id: def db_get_file_path(con: PooledMySQLConnection, file_id: int, current_user_id: int) -> models.StoredFilePath: select_stmnt = MySQLStatementBuilder(con) res = select_stmnt\ - .select(FILES_TABLE, ['filename', 'uuid', 'directory', 'extension'])\ + .select(FILES_TABLE, ['filename', 'uuid', 'directory', 'owner_id', 'extension'])\ .where('id=?', [file_id])\ .execute(dictionary=True, fetch_type=FetchType.FETCH_ONE) @@ -61,7 +76,8 @@ def db_get_file_path(con: PooledMySQLConnection, file_id: int, current_user_id: raise exc.FileNotFoundException('File not found in DB') path = res['directory'] + res['uuid'] - stored_path = models.StoredFilePath(id=file_id, filename=res['filename'], path=path, extension=res['extension']) + stored_path = models.StoredFilePath( + id=file_id, filename=res['filename'], path=path, owner_id=res['owner_id'], extension=res['extension']) return stored_path diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index e27dc2a6..f97d658e 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -26,4 +26,7 @@ class TooLargeFileException(Exception): pass class ProcessesDoesNotMatchVcsException(Exception): + pass + +class FileDeletionFailedException(Exception): pass \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 40e7f672..76fa166c 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -192,4 +192,17 @@ def get_dsm_file(project_id: int, vcs_id: int, user_id: int) -> FileResponse: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Something wrong" - ) \ No newline at end of file + ) + + +def delete_dsm_file(project_id: int, vcs_id: int, user_id: int) -> bool: + try: + with get_connection() as con: + res = storage.delete_dsm_file(con, project_id, 
vcs_id, user_id) + con.commit() + return res + except exceptions.FileDeletionFailedException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'"File could not be deleted' + ) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 5040f524..63584908 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -278,12 +278,10 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, with file.file_object as f: f.seek(0) tmp_file = f.read() - #TODO check file size - #TODO Check fields in file mime = magic.from_buffer(tmp_file) print(mime) logger.debug(mime) - if mime != "CSV text": #TODO doesn't work with windows if we create the file in excel. + if mime != "CSV text" and mime != "ASCII text": #TODO doesn't work with windows if we create the file in excel. raise exceptions.InvalidFileTypeException if f.tell() > MAX_FILE_SIZE: @@ -336,3 +334,22 @@ def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: return resp + +def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> bool: + select_statement = MySQLStatementBuilder(db_connection) + file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ + .where('vcs_id = %s', [vcs_id]) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + + file_impl.impl_delete_file(file_res['file_id'], user_id) + + delete_stmt = MySQLStatementBuilder(db_connection) + _, rows = delete_stmt.delete(CVS_DSM_FILES_TABLE) \ + .where('vcs_id = %s', [vcs_id]) \ + .execute(return_affected_rows=True) + + if len(rows) == 0: + raise exceptions.FileDeletionFailedException + + return True \ No newline at end of file From a8ba51c288378135204b250e976dd5a0927e7d07 Mon Sep 17 00:00:00 2001 From: johnmartins Date: Mon, 22 May 2023 16:32:49 +0200 Subject: [PATCH 012/210] Bumped version to 1.0.4 --- sedbackend/main.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/main.py b/sedbackend/main.py index 97f151af..86193b76 100644 --- a/sedbackend/main.py +++ b/sedbackend/main.py @@ -15,7 +15,7 @@ app = FastAPI( title="SED lab API", description="The SED lab API contains all HTTP operations available within the SED lab application.", - version="1.0.3", + version="1.0.4", ) app.include_router(api.router, prefix="/api") From 8d602c2a3ab06412ad5dc21dbf1b2166ef031be1 Mon Sep 17 00:00:00 2001 From: Julian Martinsson Bonde Date: Mon, 22 May 2023 16:34:34 +0200 Subject: [PATCH 013/210] File access control (#86) * added subproject requirement to uploaded files * Adjusted file saving mechanism * Added comment for clarity * Implemented FileAccessControl based on subproject access * Improved robustness of MySQLStatementBuilder --- sedbackend/apps/core/files/dependencies.py | 28 +++++++++++ sedbackend/apps/core/files/exceptions.py | 4 ++ sedbackend/apps/core/files/implementation.py | 12 +++++ sedbackend/apps/core/files/models.py | 9 ++-- sedbackend/apps/core/files/router.py | 8 ++- sedbackend/apps/core/files/storage.py | 50 ++++++++++++++++--- .../apps/core/measurements/implementation.py | 4 +- sedbackend/apps/core/measurements/router.py | 4 +- sedbackend/apps/core/projects/dependencies.py | 14 ++++-- .../apps/core/projects/implementation.py | 15 +++++- sedbackend/apps/core/projects/storage.py | 16 +++++- sedbackend/libs/mysqlutils/builder.py | 5 ++ sedbackend/libs/mysqlutils/exceptions.py | 2 + sql/V230522_file_access.sql 
| 17 +++++++ 14 files changed, 165 insertions(+), 23 deletions(-) create mode 100644 sedbackend/apps/core/files/dependencies.py create mode 100644 sedbackend/libs/mysqlutils/exceptions.py create mode 100644 sql/V230522_file_access.sql diff --git a/sedbackend/apps/core/files/dependencies.py b/sedbackend/apps/core/files/dependencies.py new file mode 100644 index 00000000..54d2e3d7 --- /dev/null +++ b/sedbackend/apps/core/files/dependencies.py @@ -0,0 +1,28 @@ +from typing import List + +from fastapi import Request +from fastapi.logger import logger +from fastapi import HTTPException, status + +from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker +from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.core.projects.implementation import impl_get_subproject_by_id +from sedbackend.apps.core.files.implementation import impl_get_file_mapped_subproject_id +from sedbackend.apps.core.users.implementation import impl_get_user_with_id + + +class FileAccessChecker: + def __init__(self, allowed_levels: List[AccessLevel]): + self.access_levels = allowed_levels + + def __call__(self, file_id: int, request: Request): + logger.debug(f'Is user with id {request.state.user_id} ' + f'allowed to access file with id {file_id}?') + user_id = request.state.user_id + + # Get subproject ID + subproject_id = impl_get_file_mapped_subproject_id(file_id) + + # Run subproject access check + subproject = impl_get_subproject_by_id(subproject_id) + return SubProjectAccessChecker.check_user_subproject_access(subproject, AccessLevel.list_can_read(), user_id) diff --git a/sedbackend/apps/core/files/exceptions.py b/sedbackend/apps/core/files/exceptions.py index 0aa2f6a1..105c25a1 100644 --- a/sedbackend/apps/core/files/exceptions.py +++ b/sedbackend/apps/core/files/exceptions.py @@ -8,3 +8,7 @@ class FileNotFoundException(Exception): class FileParsingException(Exception): pass + + +class SubprojectMappingNotFound(Exception): + pass diff --git a/sedbackend/apps/core/files/implementation.py b/sedbackend/apps/core/files/implementation.py index 62a18aa5..8e29ebb3 100644 --- a/sedbackend/apps/core/files/implementation.py +++ b/sedbackend/apps/core/files/implementation.py @@ -102,3 +102,15 @@ def impl_get_file(file_id: int, current_user_id: int): status_code=status.HTTP_403_FORBIDDEN, detail="User does not have access to requested file." 
) + + +def impl_get_file_mapped_subproject_id(file_id): + try: + with get_connection() as con: + subproject_id = storage.db_get_file_mapped_subproject_id(con, file_id) + return subproject_id + except exc.SubprojectMappingNotFound: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"No subproject mapping found for file with id = {file_id}" + ) diff --git a/sedbackend/apps/core/files/models.py b/sedbackend/apps/core/files/models.py index 4c2cd9fb..44d25b2f 100644 --- a/sedbackend/apps/core/files/models.py +++ b/sedbackend/apps/core/files/models.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Optional from datetime import datetime import os from tempfile import SpooledTemporaryFile @@ -12,15 +12,17 @@ class StoredFilePost(BaseModel): owner_id: int extension: str file_object: Any + subproject_id: int @staticmethod - def import_fastapi_file(file: UploadFile, current_user_id: int): + def import_fastapi_file(file: UploadFile, current_user_id: int, subproject_id: int): filename = file.filename extension = os.path.splitext(file.filename)[1] return StoredFilePost(filename=filename, extension=extension, owner_id=current_user_id, - file_object=file.file) + file_object=file.file, + subproject_id=subproject_id) class StoredFileEntry(BaseModel): @@ -30,6 +32,7 @@ class StoredFileEntry(BaseModel): insert_timestamp: datetime owner_id: int extension: str + subproject_id: int class StoredFile(BaseModel): diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index 68910132..f1b71bd9 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -2,7 +2,9 @@ from fastapi.responses import FileResponse import sedbackend.apps.core.files.implementation as impl +from sedbackend.apps.core.files.dependencies import FileAccessChecker from sedbackend.apps.core.authentication.utils import get_current_active_user +from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.core.users.models import User @@ -10,8 +12,10 @@ @router.get("/{file_id}/download", - summary="Download file", - response_class=FileResponse) + summary="Download file", + response_class=FileResponse, + dependencies=[Depends(FileAccessChecker(AccessLevel.list_can_read()))] + ) async def get_file(file_id: int, current_user: User = Depends(get_current_active_user)): """ Download an uploaded file diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index 75e3e873..47e19a04 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -3,6 +3,7 @@ import os from mysql.connector.pooling import PooledMySQLConnection +from fastapi.logger import logger import sedbackend.apps.core.files.models as models import sedbackend.apps.core.files.exceptions as exc @@ -10,7 +11,9 @@ FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' FILES_TABLE = 'files' +FILES_TO_SUBPROJECTS_MAP_TABLE = 'files_subprojects_map' FILES_COLUMNS = ['id', 'temp', 'uuid', 'filename', 'insert_timestamp', 'directory', 'owner_id', 'extension'] +FILES_TO_SUBPROJECTS_MAP_COLUMNS = ['id', 'file_id', 'subproject_id'] def db_save_file(con: PooledMySQLConnection, file: models.StoredFilePost) -> models.StoredFileEntry: @@ -28,6 +31,12 @@ def db_save_file(con: PooledMySQLConnection, file: models.StoredFilePost) -> mod file_id = insert_stmnt.last_insert_id + # Store mapping between file id and subproject id in database + insert_mapping_stmnt = MySQLStatementBuilder(con) + 
insert_mapping_stmnt.insert(FILES_TO_SUBPROJECTS_MAP_TABLE, ['file_id', 'subproject_id'])\ + .set_values([file_id, file.subproject_id])\ + .execute() + return db_get_file_entry(con, file_id, file.owner_id) @@ -36,15 +45,30 @@ def db_delete_file(con: PooledMySQLConnection, file_id: int, current_user_id: in def db_get_file_entry(con: PooledMySQLConnection, file_id: int, current_user_id: int) -> models.StoredFileEntry: - select_stmnt = MySQLStatementBuilder(con) - res = select_stmnt.select(FILES_TABLE, exclude_cols(FILES_COLUMNS, ['uuid', 'directory']))\ - .where('id = ?', [file_id])\ - .execute(dictionary=True, fetch_type=FetchType.FETCH_ONE) + res_dict = None + with con.cursor(prepared=True) as cursor: + # This expression uses two tables (files and files_to_subprojects_map) + query = f"SELECT {', '.join(['f.id', 'f.temp', 'f.uuid', 'f.filename', 'f.insert_timestamp', 'f.directory', 'f.owner_id', 'f.extension'])}, fsm.`subproject_id` " \ + f"FROM `{FILES_TABLE}` f " \ + f"INNER JOIN {FILES_TO_SUBPROJECTS_MAP_TABLE} fsm ON (f.id = fsm.file_id) " \ + f"WHERE f.`id` = ?" + values = [file_id] - if res is None: - raise exc.FileNotFoundException + # Log for sanity-check + logger.debug(f"db_get_file_entry query: '{query}' with values: {values}") + + # Execute query + cursor.execute(query, values) + + # Handle results + results = cursor.fetchone() + + if results is None: + raise exc.FileNotFoundException - stored_file = models.StoredFileEntry(**res) + res_dict = dict(zip(cursor.column_names, results)) + + stored_file = models.StoredFileEntry(**res_dict) return stored_file @@ -71,3 +95,15 @@ def db_put_file_temp(con: PooledMySQLConnection, file_id: int, temp: bool, curre def db_put_filename(con: PooledMySQLConnection, file_id: int, filename_new: str, current_user_id: int) \ -> models.StoredFileEntry: pass + + +def db_get_file_mapped_subproject_id(con: PooledMySQLConnection, file_id) -> int: + select_stmnt = MySQLStatementBuilder(con) + res = select_stmnt.select(FILES_TO_SUBPROJECTS_MAP_TABLE, ['subproject_id'])\ + .where('file_id=?', [file_id])\ + .execute(dictionary=True, fetch_type=FetchType.FETCH_ONE) + + if res is None: + raise exc.SubprojectMappingNotFound('Mapping could not be found.') + + return res['subproject_id'] diff --git a/sedbackend/apps/core/measurements/implementation.py b/sedbackend/apps/core/measurements/implementation.py index 281d5227..d6ec4dc5 100644 --- a/sedbackend/apps/core/measurements/implementation.py +++ b/sedbackend/apps/core/measurements/implementation.py @@ -119,9 +119,9 @@ def impl_post_measurement_result(measurement_id: int, mr: models.MeasurementResu return res -def impl_post_upload_set(file, current_user_id: int, csv_delimiter: Optional[str] = None) -> List: +def impl_post_upload_set(file, current_user_id: int, subproject_id: int, csv_delimiter: Optional[str] = None) -> List: try: - stored_file_post = models_files.StoredFilePost.import_fastapi_file(file, current_user_id) + stored_file_post = models_files.StoredFilePost.import_fastapi_file(file, current_user_id, subproject_id) with get_connection() as con: file_entry = storage_files.db_save_file(con, stored_file_post) file_path = storage_files.db_get_file_path(con, file_entry.id, current_user_id) diff --git a/sedbackend/apps/core/measurements/router.py b/sedbackend/apps/core/measurements/router.py index 99b42c32..cfdf6b47 100644 --- a/sedbackend/apps/core/measurements/router.py +++ b/sedbackend/apps/core/measurements/router.py @@ -30,9 +30,9 @@ async def get_measurement_sets(subproject_id: Optional[int] = None): 
response_model=List[str], description="Upload a measurement set using a CSV or Excel file. Leaving csv_delimiter as None will " "result in the value being inferred automatically.") -async def post_upload_set(file: UploadFile = File(...), current_user: User = Depends(get_current_active_user), +async def post_upload_set(subproject_id: int, file: UploadFile = File(...), current_user: User = Depends(get_current_active_user), csv_delimiter: Optional[str] = None): - return impl.impl_post_upload_set(file, current_user.id, csv_delimiter=csv_delimiter) + return impl.impl_post_upload_set(file, current_user.id, subproject_id, csv_delimiter=csv_delimiter) @router.get("/sets/{measurement_set_id}", diff --git a/sedbackend/apps/core/projects/dependencies.py b/sedbackend/apps/core/projects/dependencies.py index f6936dfd..8043ee77 100644 --- a/sedbackend/apps/core/projects/dependencies.py +++ b/sedbackend/apps/core/projects/dependencies.py @@ -3,7 +3,7 @@ from fastapi import HTTPException, Request, status from fastapi.logger import logger -from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.core.projects.models import AccessLevel, SubProject from sedbackend.apps.core.projects.implementation import impl_get_project, impl_get_subproject_native @@ -53,18 +53,23 @@ def __call__(self, native_project_id: int, request: Request): # Get subproject subproject = impl_get_subproject_native(self.application_sid, native_project_id) + return SubProjectAccessChecker.check_user_subproject_access(subproject, self.access_levels, user_id) + + @staticmethod + def check_user_subproject_access(subproject: SubProject, access_levels: List[AccessLevel], user_id: int): if subproject.project_id is not None: # Get project project = impl_get_project(subproject.project_id) # <-- This can throw # Check user access level in that project access = project.participants_access[user_id] - if access in self.access_levels: + if access in access_levels: logger.debug(f"Yes, user {user_id} has access level {access}") return True else: # Fallback solution: Check if user is the owner/creator of the subproject. - if request.state.user_id == subproject.owner_id: - logger.debug("User is owner of subproject.") + if user_id == subproject.owner_id: + logger.debug(f"User with id {user_id} is the owner of subproject with id {subproject.id} " + f"(owner_id = {subproject.owner_id}).") return True logger.debug(f"No, user {user_id} does not have the minimum required access level") @@ -73,3 +78,4 @@ def __call__(self, native_project_id: int, request: Request): detail="User does not have the necessary access level", ) + diff --git a/sedbackend/apps/core/projects/implementation.py b/sedbackend/apps/core/projects/implementation.py index fe170af5..f7b85d01 100644 --- a/sedbackend/apps/core/projects/implementation.py +++ b/sedbackend/apps/core/projects/implementation.py @@ -158,12 +158,23 @@ def impl_get_subproject_native(application_sid: str, native_project_id: int) -> except exc.SubProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail="Sub project not found" + detail="Sub-project not found." ) except ApplicationNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail="No such application" + detail="No such application." 
+ ) + + +def impl_get_subproject_by_id(subproject_id: int) -> models.SubProject: + try: + with get_connection() as con: + return storage.db_get_subproject_with_id(con, subproject_id) + except exc.SubProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sub-project not found." ) diff --git a/sedbackend/apps/core/projects/storage.py b/sedbackend/apps/core/projects/storage.py index be4b712f..1f2ad264 100644 --- a/sedbackend/apps/core/projects/storage.py +++ b/sedbackend/apps/core/projects/storage.py @@ -225,7 +225,6 @@ def db_get_subprojects(connection: PooledMySQLConnection, project_id: int) \ def db_get_subproject(connection, project_id, subproject_id) -> models.SubProject: - db_get_project_exists(connection, project_id) # Raises exception if project does not exist select_stmnt = MySQLStatementBuilder(connection) @@ -242,6 +241,21 @@ def db_get_subproject(connection, project_id, subproject_id) -> models.SubProjec return sub_project +def db_get_subproject_with_id(connection, subproject_id) -> models.SubProject: + select_stmnt = MySQLStatementBuilder(connection) + res = select_stmnt\ + .select(SUBPROJECTS_TABLE, SUBPROJECT_COLUMNS)\ + .where("id = %s", [subproject_id])\ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + if res is None: + raise exc.SubProjectNotFoundException + + sub_project = models.SubProject(**res) + + return sub_project + + def db_get_subproject_native(connection, application_sid, native_project_id) -> models.SubProject: get_application(application_sid) # Raises exception of application does not exist diff --git a/sedbackend/libs/mysqlutils/builder.py b/sedbackend/libs/mysqlutils/builder.py index 658ea5fc..24256862 100644 --- a/sedbackend/libs/mysqlutils/builder.py +++ b/sedbackend/libs/mysqlutils/builder.py @@ -1,4 +1,5 @@ from .statements import * +from .exceptions import InvalidStatementConfiguration from typing import Any, List, Optional, Tuple from fastapi.logger import logger from enum import Enum @@ -127,6 +128,10 @@ def execute(self, if fetch_type is None: fetch_type = FetchType.FETCH_NONE + if fetch_type is FetchType.FETCH_NONE and dictionary is True: + logger.error('FetchType cannot be None if dictionary is set to True.') + raise InvalidStatementConfiguration('FetchType cannot be None if dictionary is set to True.') + if no_logs is False: logger.debug(f'Executing query "{self.query}" with values "{self.values}". 
fetch_type={fetch_type}') diff --git a/sedbackend/libs/mysqlutils/exceptions.py b/sedbackend/libs/mysqlutils/exceptions.py new file mode 100644 index 00000000..ed12071d --- /dev/null +++ b/sedbackend/libs/mysqlutils/exceptions.py @@ -0,0 +1,2 @@ +class InvalidStatementConfiguration(Exception): + pass diff --git a/sql/V230522_file_access.sql b/sql/V230522_file_access.sql new file mode 100644 index 00000000..73f58466 --- /dev/null +++ b/sql/V230522_file_access.sql @@ -0,0 +1,17 @@ +CREATE TABLE IF NOT EXISTS `seddb`.`files_subprojects_map` ( + `id` INT UNSIGNED NOT NULL AUTO_INCREMENT, + `file_id` INT UNSIGNED NOT NULL, + `subproject_id` INT UNSIGNED NOT NULL, + PRIMARY KEY (`id`), + UNIQUE INDEX `id_UNIQUE` (`id` ASC) VISIBLE, + CONSTRAINT `remove_subproject_to_file_map_on_file_removal` + FOREIGN KEY (`file_id`) + REFERENCES `seddb`.`files` (`id`) + ON DELETE CASCADE + ON UPDATE NO ACTION, + CONSTRAINT `remove_subproject_to_file_map_on_subproject_removal` + FOREIGN KEY (`subproject_id`) + REFERENCES `seddb`.`projects_subprojects` (`id`) + ON DELETE CASCADE + ON UPDATE NO ACTION + ); \ No newline at end of file From c93854f491f38935ebf2a505a3c6f603afb75120 Mon Sep 17 00:00:00 2001 From: johnmartins Date: Mon, 22 May 2023 17:17:45 +0200 Subject: [PATCH 014/210] Fixed static acces_level issue in FileAccessChecker --- sedbackend/apps/core/files/dependencies.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/core/files/dependencies.py b/sedbackend/apps/core/files/dependencies.py index 54d2e3d7..1126a312 100644 --- a/sedbackend/apps/core/files/dependencies.py +++ b/sedbackend/apps/core/files/dependencies.py @@ -25,4 +25,4 @@ def __call__(self, file_id: int, request: Request): # Run subproject access check subproject = impl_get_subproject_by_id(subproject_id) - return SubProjectAccessChecker.check_user_subproject_access(subproject, AccessLevel.list_can_read(), user_id) + return SubProjectAccessChecker.check_user_subproject_access(subproject, self.access_levels, user_id) From 5a6f46dbf8638287c8658b9ca4ea98a4d4019ed2 Mon Sep 17 00:00:00 2001 From: Julian Martinsson Bonde Date: Tue, 23 May 2023 13:05:06 +0200 Subject: [PATCH 015/210] Mysql statement builder refactor (#88) * Removed mysqlutils and instead imported mysql-statement-builder package * Added mysql-statement-builder to requirements.txt * Reconnected mysqlsb logger with fastapi logger --- requirements.txt | 1 + .../apps/core/authentication/storage.py | 2 +- sedbackend/apps/core/files/storage.py | 2 +- sedbackend/apps/core/individuals/storage.py | 2 +- sedbackend/apps/core/measurements/storage.py | 2 +- sedbackend/apps/core/projects/storage.py | 2 +- sedbackend/apps/core/users/storage.py | 2 +- sedbackend/apps/cvs/design/storage.py | 2 +- sedbackend/apps/cvs/life_cycle/storage.py | 2 +- .../apps/cvs/link_design_lifecycle/storage.py | 2 +- sedbackend/apps/cvs/market_input/storage.py | 2 +- sedbackend/apps/cvs/project/storage.py | 2 +- sedbackend/apps/cvs/simulation/storage.py | 2 +- sedbackend/apps/cvs/vcs/storage.py | 2 +- sedbackend/libs/mysqlutils/__init__.py | 3 - sedbackend/libs/mysqlutils/builder.py | 202 ------------------ sedbackend/libs/mysqlutils/exceptions.py | 2 - sedbackend/libs/mysqlutils/statements.py | 79 ------- sedbackend/libs/mysqlutils/utils.py | 19 -- sedbackend/setup.py | 4 + 20 files changed, 18 insertions(+), 318 deletions(-) delete mode 100644 sedbackend/libs/mysqlutils/__init__.py delete mode 100644 sedbackend/libs/mysqlutils/builder.py delete mode 100644 
sedbackend/libs/mysqlutils/exceptions.py delete mode 100644 sedbackend/libs/mysqlutils/statements.py delete mode 100644 sedbackend/libs/mysqlutils/utils.py diff --git a/requirements.txt b/requirements.txt index 70b5087d..03e18a36 100644 --- a/requirements.txt +++ b/requirements.txt @@ -11,6 +11,7 @@ python-multipart==0.0.6 starlette==0.26.1 uvicorn==0.21.1 openpyxl==3.1.2 +mysql-statement-builder==0.* pytest==7.3.1 httpx==0.24.0 \ No newline at end of file diff --git a/sedbackend/apps/core/authentication/storage.py b/sedbackend/apps/core/authentication/storage.py index 4da6d0ac..57888662 100644 --- a/sedbackend/apps/core/authentication/storage.py +++ b/sedbackend/apps/core/authentication/storage.py @@ -6,7 +6,7 @@ from sedbackend.apps.core.authentication.models import UserAuth, SSOResolutionData from sedbackend.apps.core.users.exceptions import UserNotFoundException from sedbackend.apps.core.authentication.exceptions import InvalidNonceException, FaultyNonceOperation -from sedbackend.libs.mysqlutils.builder import MySQLStatementBuilder, FetchType +from mysqlsb.builder import MySQLStatementBuilder, FetchType from mysql.connector.pooling import PooledMySQLConnection diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index 47e19a04..9de8ac33 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -7,7 +7,7 @@ import sedbackend.apps.core.files.models as models import sedbackend.apps.core.files.exceptions as exc -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, exclude_cols, FetchType +from mysqlsb import MySQLStatementBuilder, exclude_cols, FetchType FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' FILES_TABLE = 'files' diff --git a/sedbackend/apps/core/individuals/storage.py b/sedbackend/apps/core/individuals/storage.py index 3c51f183..57d5f81b 100644 --- a/sedbackend/apps/core/individuals/storage.py +++ b/sedbackend/apps/core/individuals/storage.py @@ -5,7 +5,7 @@ import sedbackend.apps.core.individuals.models as models import sedbackend.apps.core.individuals.exceptions as ex -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, exclude_cols +from mysqlsb import MySQLStatementBuilder, FetchType, exclude_cols INDIVIDUALS_TABLE = 'individuals' INDIVIDUALS_COLUMNS = ['id', 'name', 'is_archetype'] diff --git a/sedbackend/apps/core/measurements/storage.py b/sedbackend/apps/core/measurements/storage.py index 014dfcbe..2b208afd 100644 --- a/sedbackend/apps/core/measurements/storage.py +++ b/sedbackend/apps/core/measurements/storage.py @@ -3,7 +3,7 @@ from fastapi.logger import logger -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, exclude_cols +from mysqlsb import MySQLStatementBuilder, FetchType, exclude_cols import sedbackend.apps.core.measurements.models as models import sedbackend.apps.core.measurements.exceptions as exc diff --git a/sedbackend/apps/core/projects/storage.py b/sedbackend/apps/core/projects/storage.py index 1f2ad264..b1cd7066 100644 --- a/sedbackend/apps/core/projects/storage.py +++ b/sedbackend/apps/core/projects/storage.py @@ -1,7 +1,7 @@ from typing import List, Optional from fastapi.logger import logger -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType +from mysqlsb import MySQLStatementBuilder, FetchType from mysql.connector.pooling import PooledMySQLConnection from sedbackend.apps.core.applications.state import get_application diff --git a/sedbackend/apps/core/users/storage.py 
b/sedbackend/apps/core/users/storage.py index 409f39c2..c544fcf4 100644 --- a/sedbackend/apps/core/users/storage.py +++ b/sedbackend/apps/core/users/storage.py @@ -5,7 +5,7 @@ import sedbackend.apps.core.users.exceptions as exc import sedbackend.apps.core.users.models as models from sedbackend.apps.core.authentication.utils import get_password_hash -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType +from mysqlsb import MySQLStatementBuilder, FetchType from mysql.connector.errors import Error as SQLError USERS_COLUMNS_SAFE = ['id', 'username', 'email', 'full_name', 'scopes', 'disabled'] # Safe, as it does not contain passwords diff --git a/sedbackend/apps/cvs/design/storage.py b/sedbackend/apps/cvs/design/storage.py index 878382aa..88efb340 100644 --- a/sedbackend/apps/cvs/design/storage.py +++ b/sedbackend/apps/cvs/design/storage.py @@ -6,7 +6,7 @@ from sedbackend.apps.cvs.vcs.models import ValueDriver from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.vcs.storage import CVS_VALUE_DRIVER_COLUMNS, CVS_VALUE_DRIVER_TABLE, populate_value_driver -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, Sort +from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.design import models, exceptions DESIGN_GROUPS_TABLE = 'cvs_design_groups' diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 33612e01..f09d6e78 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,7 +1,7 @@ from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, Sort +from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions from mysql.connector import Error diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 10baaf3c..f8fa467f 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -8,7 +8,7 @@ from sedbackend.apps.cvs.project.implementation import get_cvs_project from sedbackend.apps.cvs.vcs.implementation import get_vcs from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions -from sedbackend.libs.mysqlutils.builder import FetchType, MySQLStatementBuilder +from mysqlsb import FetchType, MySQLStatementBuilder from sedbackend.apps.cvs.market_input import implementation as market_impl from sedbackend.apps.cvs.design import implementation as design_impl diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 9f25012d..4ddfb802 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -3,7 +3,7 @@ from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, FetchType, Sort +from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.market_input import models, exceptions from sedbackend.apps.cvs.vcs import storage as vcs_storage, implementation as vcs_impl from sedbackend.apps.cvs.project import exceptions as project_exceptions diff --git a/sedbackend/apps/cvs/project/storage.py 
b/sedbackend/apps/cvs/project/storage.py index 2e115441..0f4ff9f1 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -5,7 +5,7 @@ from sedbackend.apps.core.users.storage import db_get_user_safe_with_id from sedbackend.apps.cvs.project import models as models, exceptions as exceptions from sedbackend.libs.datastructures.pagination import ListChunk -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, Sort, FetchType +from mysqlsb import MySQLStatementBuilder, Sort, FetchType import sedbackend.apps.core.projects.models as proj_models import sedbackend.apps.core.projects.storage as proj_storage diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 03846fb2..aacafd17 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -14,7 +14,7 @@ from typing import List from sedbackend.apps.cvs.design.implementation import get_design -from sedbackend.libs.mysqlutils.builder import FetchType, MySQLStatementBuilder +from mysqlsb import FetchType, MySQLStatementBuilder from sedbackend.libs.formula_parser.parser import NumericStringParser from sedbackend.libs.formula_parser import expressions as expr diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 6ee28fd3..bcb7816f 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -9,7 +9,7 @@ from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models from sedbackend.libs.datastructures.pagination import ListChunk from sedbackend.apps.core.users import exceptions as user_exceptions -from sedbackend.libs.mysqlutils import MySQLStatementBuilder, Sort, FetchType +from mysqlsb import MySQLStatementBuilder, Sort, FetchType DEBUG_ERROR_HANDLING = True # Set to false in production diff --git a/sedbackend/libs/mysqlutils/__init__.py b/sedbackend/libs/mysqlutils/__init__.py deleted file mode 100644 index 093a91c4..00000000 --- a/sedbackend/libs/mysqlutils/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .builder import MySQLStatementBuilder, FetchType -from .statements import Sort -from .utils import exclude_cols diff --git a/sedbackend/libs/mysqlutils/builder.py b/sedbackend/libs/mysqlutils/builder.py deleted file mode 100644 index 24256862..00000000 --- a/sedbackend/libs/mysqlutils/builder.py +++ /dev/null @@ -1,202 +0,0 @@ -from .statements import * -from .exceptions import InvalidStatementConfiguration -from typing import Any, List, Optional, Tuple -from fastapi.logger import logger -from enum import Enum - - -class FetchType(Enum): - """ - Used to determine how many rows should be fetched by a MySQL statement/query - """ - - FETCH_ONE = "Fetch_One" - FETCH_ALL = "Fetch_All" - FETCH_NONE = "Fetch_None" - - -class MySQLStatementBuilder: - """ - Assists in building simple MySQL queries and statements. Does not need to be closed. - It automatically closes the MySQL cursor. 
- """ - - def __init__(self, connection): - self.con = connection - self.query = "" - self.values = [] - self.last_insert_id = None - self.default_fetchtype = FetchType.FETCH_NONE - - def insert(self, table: str, columns: List[str]): - """ - Create a prepared insert statement - - :param table: - :param columns: - :return: - """ - self.query += create_insert_statement(table, columns) - return self - - def set_values(self, values: List[str]): - self.query += create_prepared_values_statement(len(values)) - self.values.extend(values) - return self - - def select(self, table: str, columns: List[str]): - """ - Create a select statement - - :param table: - :param columns: - :return: - """ - - self.query += create_select_statement(table, columns) - return self - - def count(self, table: str): - self.query += create_count_statement(table) - self.default_fetchtype = FetchType.FETCH_ONE - return self - - def update(self, table: str, set_statement, values): - self.query += create_update_statement(table, set_statement) - self.values.extend(values) - return self - - def delete(self, table: str): - self.query += create_delete_statement(table) - return self - - def order_by(self, columns: List[str], order: Sort = None): - self.query += create_order_by_statement(columns, order) - return self - - def offset(self, offset_count: int): - self.query += create_offset_statement(offset_count) - return self - - def limit(self, limit_count: int): - self.query += create_limit_statement(limit_count) - return self - - def inner_join(self, target_table, join_statement): - self.query += create_inner_join_statement(target_table, join_statement) - return self - - def where(self, condition, condition_values: List[Any]): - """ - Create prepared WHERE statement - :param condition: Should be a prepared condition. Use %s or ? to represent variables - :param condition_values: List of condition variables (switches out the %s and ? prepared placeholders) - :return: - """ - - self.query += create_prepared_where_statement(condition) - self.values.extend(condition_values) - return self - - @staticmethod - def placeholder_array(number_of_elements): - """ - Creates an array with N elements, where each element is "%s" - :param number_of_elements: - :return: - """ - placeholder_array = ['%s'] * number_of_elements # Make an array with N '%s' elements - return f'({",".join(placeholder_array)})' # Return that as a SQL array in string format - - def execute(self, - fetch_type: Optional[FetchType] = None, - dictionary: bool = False, - return_affected_rows: bool = False, - no_logs: bool = False): - """ - Executes constructed MySQL query. Does not need to be closed (closes automatically). - - :param no_logs: If performing sensitive operations, then logs should not be saved. Setting this to True will ensure the operation is not recorded in detail. - :param dictionary: boolean. Default is False. 
Converts response to dictionaries - :param fetch_type: FetchType.FETCH_NONE by default - :param return_affected_rows: When deleting rows, the amount of rows deleted may be returned if this is true - :return: None by default, but can be changed by setting keyword param "fetch_type" - """ - if fetch_type is None and self.default_fetchtype is not None: - fetch_type = self.default_fetchtype - - if fetch_type is None: - fetch_type = FetchType.FETCH_NONE - - if fetch_type is FetchType.FETCH_NONE and dictionary is True: - logger.error('FetchType cannot be None if dictionary is set to True.') - raise InvalidStatementConfiguration('FetchType cannot be None if dictionary is set to True.') - - if no_logs is False: - logger.debug(f'Executing query "{self.query}" with values "{self.values}". fetch_type={fetch_type}') - - with self.con.cursor(prepared=True) as cursor: - cursor.execute(self.query, self.values) - self.last_insert_id = cursor.lastrowid - - # Determine what the query should return - if fetch_type is FetchType.FETCH_ONE: - res = cursor.fetchone() - - # This is awful. But, since we can't combine prepared cursors with buffered cursors this is necessary - if res is not None: - while cursor.fetchone() is not None: - pass - - elif fetch_type is FetchType.FETCH_ALL: - res = cursor.fetchall() - elif fetch_type is FetchType.FETCH_NONE: - res = None - else: - res = None - - # Convert result to dictionary (or, array of dictionaries) if requested. Skip if there isn't a result - if dictionary is True and res is not None: - - # Format response depending on fetch type - if fetch_type in [FetchType.FETCH_ALL]: - dict_array = [] - - for row in res: - dict_array.append(dict(zip(cursor.column_names, row))) - - res = dict_array - - elif fetch_type is FetchType.FETCH_ONE: - res = dict(zip(cursor.column_names, res)) - - # Finally, return results - if return_affected_rows is True: - return res, cursor.rowcount - else: - return res - - def execute_procedure(self, procedure: str, args: List) -> List[List[Any]]: - """ - Execute a stored procedure. May return multiple result sets depending on the procedure. - :param procedure: Name of stored procedure - :param args: List of arguments - :return: List of result sets - """ - logger.debug(f'executing stored procedure "{procedure}" with arguments {args}') - - with self.con.cursor(dictionary=True) as cursor: - cursor.callproc(procedure, args=args) - - result_sets = [] - - for recordset in cursor.stored_results(): - column_names = recordset.column_names - res_list = [] - for row in recordset: - row_dict = dict(zip(column_names, row)) - res_list.append(row_dict) - - result_sets.append(res_list) - - return result_sets diff --git a/sedbackend/libs/mysqlutils/exceptions.py b/sedbackend/libs/mysqlutils/exceptions.py deleted file mode 100644 index ed12071d..00000000 --- a/sedbackend/libs/mysqlutils/exceptions.py +++ /dev/null @@ -1,2 +0,0 @@ -class InvalidStatementConfiguration(Exception): - pass diff --git a/sedbackend/libs/mysqlutils/statements.py b/sedbackend/libs/mysqlutils/statements.py deleted file mode 100644 index 2b58eea6..00000000 --- a/sedbackend/libs/mysqlutils/statements.py +++ /dev/null @@ -1,79 +0,0 @@ -from typing import List -from enum import Enum - - -class Sort(Enum): - ASCENDING = 'ASC' - DESCENDING = 'DESC' - - -def create_insert_statement(table: str, columns: List[str], backticks=True): - - if backticks: - insert_cols_str = ', '.join(wrap_in_backticks(columns)) # `col1`, `col2`, `col3`, .. - else: - insert_cols_str = ', '.join(columns) # col1, col2, col3, .. 
- - query = f"INSERT INTO {table} ({insert_cols_str}) " - return query - - -def create_select_statement(table, columns: List[str]): - return f"SELECT {','.join(wrap_in_backticks(columns))} FROM {table} " # SELECT col1, col2 FROM table - - -def create_count_statement(table): - return f"SELECT COUNT(*) as count FROM {table} " - - -def create_delete_statement(table: str): - return f"DELETE FROM {table} " - - -def create_update_statement(table: str, set_statement: str): - return f"UPDATE {table} SET {set_statement} " - - -def create_prepared_values_statement(count: int): - placeholder_array = ['%s'] * count - placeholder_str = ', '.join(placeholder_array) - return f"VALUES ({placeholder_str}) " - - -def create_prepared_where_statement(condition): - return f"WHERE {condition} " - - -def create_limit_statement(n): - return f"LIMIT {n} " - - -def create_offset_statement(n): - return f"OFFSET {n} " - - -def create_order_by_statement(columns: List[str], order: Sort = None): - cols_str = ', '.join(wrap_in_backticks(columns)) - if order: - return f"ORDER BY {cols_str} {order.value} " - else: - return f"ORDER BY {cols_str} " - - -def create_inner_join_statement(target_table, join_statement): - return f"INNER JOIN {target_table} ON {join_statement} " - - -def wrap_in_backticks(array: List[str]): - """ - Wraps each element in back-ticks. This is useful for escaping reserved key-words, - and future-proofing column/table names. - :param array: Array of table/column names - :return: - """ - new_array = [] - for element in array: - element = element.replace('.', '`.`') - new_array.append("`{}`".format(element)) - - return new_array diff --git a/sedbackend/libs/mysqlutils/utils.py b/sedbackend/libs/mysqlutils/utils.py deleted file mode 100644 index 289d9ca7..00000000 --- a/sedbackend/libs/mysqlutils/utils.py +++ /dev/null @@ -1,19 +0,0 @@ -from typing import List - - -def exclude_cols(column_list: List[str], exclude_list: List[str]): - """ - Takes a list of strings, and excludes all entries in the exlclude list. - Returns a copy of the list, but without the excluded entries. - Does not change the inserted list. 
- :return: - """ - column_list_copy = column_list[:] - - for exclude_col in exclude_list: - if exclude_col in column_list: - column_list_copy.remove(exclude_col) - else: - raise ValueError("Excluded column could not be found in column list.") - - return column_list_copy diff --git a/sedbackend/setup.py b/sedbackend/setup.py index c649fdb8..af94fc31 100644 --- a/sedbackend/setup.py +++ b/sedbackend/setup.py @@ -3,10 +3,14 @@ from logging.handlers import TimedRotatingFileHandler import tempfile +import mysqlsb from fastapi import Request from fastapi.logger import logger from starlette.responses import Response +# Set database logger +mysqlsb.Configuration.logger = logger + def config_default_logging(): """ From 8f8ee3a63d17917b914a04880d3570d09f229a77 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 23 May 2023 19:02:54 +0200 Subject: [PATCH 016/210] Added file deletion functionality --- sedbackend/apps/core/files/exceptions.py | 4 ++++ sedbackend/apps/core/files/implementation.py | 5 +++++ sedbackend/apps/core/files/router.py | 13 +++++++++++++ sedbackend/apps/core/files/storage.py | 13 +++++++++++++ 4 files changed, 35 insertions(+) diff --git a/sedbackend/apps/core/files/exceptions.py b/sedbackend/apps/core/files/exceptions.py index 105c25a1..24b18e7d 100644 --- a/sedbackend/apps/core/files/exceptions.py +++ b/sedbackend/apps/core/files/exceptions.py @@ -12,3 +12,7 @@ class FileParsingException(Exception): class SubprojectMappingNotFound(Exception): pass + + +class FileNotDeletedException(Exception): + pass \ No newline at end of file diff --git a/sedbackend/apps/core/files/implementation.py b/sedbackend/apps/core/files/implementation.py index 8e29ebb3..d0bc8b88 100644 --- a/sedbackend/apps/core/files/implementation.py +++ b/sedbackend/apps/core/files/implementation.py @@ -39,6 +39,11 @@ def impl_delete_file(file_id: int, current_user_id: int) -> bool: status_code=status.HTTP_403_FORBIDDEN, detail=f"User does not have access to a file with id = {file_id}" ) + except exc.FileNotDeletedException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"File could not be deleted" + ) def impl_get_file_path(file_id: int, current_user_id: int) -> models.StoredFilePath: diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index f1b71bd9..235fc5d4 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -26,3 +26,16 @@ async def get_file(file_id: int, current_user: User = Depends(get_current_active filename=stored_file_path.filename ) return resp + + +@router.delete("/{file_id}/delete", + summary="Delete file", + response_model=bool, + dependencies=[Depends(FileAccessChecker(AccessLevel.list_are_admins()))]) +async def delete_file(file_id: int, current_user: User = Depends(get_current_active_user)): + """ + Delete a file. + Only accessible to admins and the owner of the file. 
+ """ + return impl.impl_delete_file(file_id, current_user.id) + \ No newline at end of file diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index 9de8ac33..fd458edd 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -7,6 +7,7 @@ import sedbackend.apps.core.files.models as models import sedbackend.apps.core.files.exceptions as exc +import sedbackend.apps.core.files.implementation as impl from mysqlsb import MySQLStatementBuilder, exclude_cols, FetchType FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' @@ -41,6 +42,18 @@ def db_save_file(con: PooledMySQLConnection, file: models.StoredFilePost) -> mod def db_delete_file(con: PooledMySQLConnection, file_id: int, current_user_id: int) -> bool: + stored_file_path = impl.impl_get_file_path(file_id, current_user_id) + + try: + os.remove(stored_file_path.path) + delete_stmnt = MySQLStatementBuilder(con) + delete_stmnt.delete(FILES_TABLE) \ + .where('id=?', [file_id]) \ + .execute(fetch_type=FetchType.FETCH_NONE) + + except Exception: + raise exc.FileNotDeletedException + return True From 6c5b51d7f3460a39db57d42928f2c54674bef442 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 23 May 2023 19:05:46 +0200 Subject: [PATCH 017/210] removed unneccessary code --- sedbackend/apps/core/files/models.py | 1 - sedbackend/apps/core/files/router.py | 12 ------------ 2 files changed, 13 deletions(-) diff --git a/sedbackend/apps/core/files/models.py b/sedbackend/apps/core/files/models.py index 6991f1c8..4c2cd9fb 100644 --- a/sedbackend/apps/core/files/models.py +++ b/sedbackend/apps/core/files/models.py @@ -47,7 +47,6 @@ class StoredFilePath(BaseModel): id: int filename: str path: str - owner_id: int extension: str diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index 770d6249..d5290eec 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -24,15 +24,3 @@ async def get_file(file_id: int, current_user: User = Depends(get_current_active filename=stored_file_path.filename ) return resp - -@router.delete("/{file_id}/delete", - summary="Delete file", - response_model=bool, - dependencies=[Security(verify_scopes, scopes=["admin"])]) -async def delete_file(file_id: int, current_user: User = Depends(get_current_active_user)): - """ - Delete a file. - Only accessible to admins and the owner of the file. 
- """ - return impl.impl_delete_file(file_id, current_user.id) - \ No newline at end of file From 99e30ed6f163e00d31fb750803895ee85dd58b5a Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 23 May 2023 20:09:50 +0200 Subject: [PATCH 018/210] Connected with sed-labs api --- sedbackend/apps/core/files/router.py | 8 ++++---- sedbackend/apps/cvs/life_cycle/router.py | 8 ++++++-- sedbackend/apps/cvs/life_cycle/storage.py | 1 + 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index bda62f85..785f1bb1 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -12,10 +12,10 @@ @router.get("/{file_id}/download", - summary="Download file", - response_class=FileResponse, - dependencies=[Depends(FileAccessChecker(AccessLevel.list_can_read()))] - ) + summary="Download file", + response_class=FileResponse, + dependencies=[Depends(FileAccessChecker(AccessLevel.list_can_read()))] + ) async def get_file(file_id: int, current_user: User = Depends(get_current_active_user)): """ Download an uploaded file diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index b1f58f1a..e934a76b 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -3,7 +3,9 @@ from fastapi.responses import FileResponse from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker +from sedbackend.apps.core.files.dependencies import FileAccessChecker from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.core.projects.implementation import impl_get_subproject_native from sedbackend.apps.cvs.life_cycle import models, implementation from sedbackend.apps.core.files import models as file_models from sedbackend.apps.core.users.models import User @@ -70,14 +72,16 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: - model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id) + subproject = impl_get_subproject_native(CVS_APP_SID, native_project_id) + print(subproject) + model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id, subproject.id) return implementation.save_dsm_file(native_project_id, vcs_id, model_file) @router.get( '/project/{native_project_id}/vcs/{vcs_id}/get-dsm', summary="Fetch DSM file", response_class=FileResponse, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] + dependencies=[Depends(FileAccessChecker(AccessLevel.list_can_read()))] ) async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> FileResponse: return implementation.get_dsm_file(native_project_id, vcs_id, user.id) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 81fa68e8..bac1ef6e 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -326,6 +326,7 @@ def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: .where('vcs_id = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + print(file_res) file_path = 
file_impl.impl_get_file_path(file_res['file_id'], user_id) resp = FileResponse( path=file_path.path, From 8f84c8a918a55072a411b39a7ac8cffb77b9f268 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 25 May 2023 13:40:42 +0200 Subject: [PATCH 019/210] desim-tool version 0.3.3 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 70b5087d..d3964c2a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.3.1 +desim-tool==0.3.3 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From 720b2629f33100682eba2e71c7748feddbf10c1b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 28 May 2023 20:15:25 +0200 Subject: [PATCH 020/210] replaced generic exception --- sedbackend/apps/cvs/simulation/exceptions.py | 6 +++++- tests/apps/cvs/simulation/test_sim_multiprocessing.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/exceptions.py b/sedbackend/apps/cvs/simulation/exceptions.py index 321021e5..0b526be0 100644 --- a/sedbackend/apps/cvs/simulation/exceptions.py +++ b/sedbackend/apps/cvs/simulation/exceptions.py @@ -51,4 +51,8 @@ class FlowProcessNotFoundException(Exception): pass class SimSettingsNotFoundException(Exception): - pass \ No newline at end of file + pass + + +class NoTechnicalProcessException(Exception): + pass diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index 620a5942..abbda075 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -2,6 +2,7 @@ import tests.apps.cvs.testutils as tu import testutils as sim_tu import sedbackend.apps.core.users.implementation as impl_users +import sedbackend.apps.cvs.simulation.exceptions as sim_exceptions def test_run_single_monte_carlo_sim(client, std_headers, std_user): #Setup @@ -220,7 +221,7 @@ def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) if first_tech_process is None: - raise Exception("Cannot find first technical process") + raise sim_exceptions.NoTechnicalProcessException settings.monte_carlo = False #Act From 5824bca87e64aef59336c928abd747ddefb8f829 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 30 May 2023 13:34:53 +0200 Subject: [PATCH 021/210] Added tests for dsm files --- sedbackend/apps/cvs/life_cycle/exceptions.py | 12 +- .../apps/cvs/life_cycle/implementation.py | 32 +-- sedbackend/apps/cvs/life_cycle/router.py | 13 +- sedbackend/apps/cvs/life_cycle/storage.py | 49 +---- sql/V220608_cvs.sql | 9 - sql/V230529_cvs_dsm_files.sql | 12 + .../__init__.py | 0 .../files/input-example.xlsx | Bin tests/apps/cvs/life_cycle/files/input.csv | 3 + .../test_bpmn.py | 0 tests/apps/cvs/life_cycle/test_dsm_files.py | 206 ++++++++++++++++++ tests/apps/cvs/simulation/files/input.csv | 5 - tests/apps/cvs/testutils.py | 7 +- 13 files changed, 259 insertions(+), 89 deletions(-) create mode 100644 sql/V230529_cvs_dsm_files.sql rename tests/apps/cvs/{life_cycle_bpmn => life_cycle}/__init__.py (100%) rename tests/apps/cvs/{simulation => life_cycle}/files/input-example.xlsx (100%) create mode 100644 tests/apps/cvs/life_cycle/files/input.csv rename tests/apps/cvs/{life_cycle_bpmn => life_cycle}/test_bpmn.py (100%) create mode 
100644 tests/apps/cvs/life_cycle/test_dsm_files.py delete mode 100644 tests/apps/cvs/simulation/files/input.csv diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index f97d658e..1fe3a402 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -22,11 +22,15 @@ class NodeFailedToUpdateException(Exception): class InvalidFileTypeException(Exception): pass -class TooLargeFileException(Exception): + +class FileSizeException(Exception): pass -class ProcessesDoesNotMatchVcsException(Exception): + +class ProcessesVcsMatchException(Exception): pass -class FileDeletionFailedException(Exception): - pass \ No newline at end of file + +class FileNotFoundException(Exception): + def __init__(self, vcs_id: int = None): + self.vcs_id = vcs_id diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 76fa166c..4922ae0e 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -158,9 +158,10 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: detail=f'Project with id={project_id} is not a part of vcs with id={vcs_id}.', ) -def save_dsm_file(project_id: int, vcs_id: int, + +def save_dsm_file(project_id: int, vcs_id: int, file: file_models.StoredFilePost) -> bool: - try: + try: with get_connection() as con: result = storage.save_dsm_file(con, project_id, vcs_id, file) con.commit() @@ -170,39 +171,26 @@ def save_dsm_file(project_id: int, vcs_id: int, status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, detail='Wrong filetype' ) - except exceptions.TooLargeFileException: + except exceptions.FileSizeException: raise HTTPException( status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, detail='File too large' ) - except exceptions.ProcessesDoesNotMatchVcsException: + except exceptions.ProcessesVcsMatchException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail='Processes in DSM does not match processes in VCS' ) - - -def get_dsm_file(project_id: int, vcs_id: int, user_id: int) -> FileResponse: - try: - with get_connection() as con: - res = storage.get_dsm_file(con, project_id, vcs_id, user_id) - con.commit() - return res - except Exception: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Something wrong" - ) -def delete_dsm_file(project_id: int, vcs_id: int, user_id: int) -> bool: +def get_dsm_file_id(project_id: int, vcs_id: int, user_id: int) -> int: try: with get_connection() as con: - res = storage.delete_dsm_file(con, project_id, vcs_id, user_id) + res = storage.get_dsm_file_id(con, project_id, vcs_id, user_id) con.commit() return res - except exceptions.FileDeletionFailedException: + except exceptions.FileNotFoundException as e: raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'"File could not be deleted' + status_code=status.HTTP_404_NOT_FOUND, + detail=f"File for vcs with id {e.vcs_id} could not be found" ) diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index e934a76b..4071b7cb 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -3,7 +3,6 @@ from fastapi.responses import FileResponse from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker -from sedbackend.apps.core.files.dependencies import FileAccessChecker from 
sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.core.projects.implementation import impl_get_subproject_native from sedbackend.apps.cvs.life_cycle import models, implementation @@ -78,10 +77,10 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, return implementation.save_dsm_file(native_project_id, vcs_id, model_file) @router.get( - '/project/{native_project_id}/vcs/{vcs_id}/get-dsm', - summary="Fetch DSM file", - response_class=FileResponse, - dependencies=[Depends(FileAccessChecker(AccessLevel.list_can_read()))] + '/project/{native_project_id}/vcs/{vcs_id}/get-dsm-id', + summary="Fetch DSM file id", + response_model=int, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> FileResponse: - return implementation.get_dsm_file(native_project_id, vcs_id, user.id) \ No newline at end of file +async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> int: + return implementation.get_dsm_file_id(native_project_id, vcs_id, user.id) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index bac1ef6e..946778e0 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -285,32 +285,22 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, raise exceptions.InvalidFileTypeException if f.tell() > MAX_FILE_SIZE: - raise exceptions.TooLargeFileException + raise exceptions.FileSizeException f.seek(0) dsm_file = pd.read_csv(f) - print(dsm_file) - print(dsm_file['processes'].values) - vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) #Question: Should we demand that it is in the exact same order - # or is it enough that it exist in the vcs? - print(vcs_table) + vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) + vcs_processes = [row.iso_process.name if row.iso_process is not None else \ row.subprocess.name for row in vcs_table] for process in dsm_file['processes'].values: if process not in vcs_processes: - raise exceptions.ProcessesDoesNotMatchVcsException + raise exceptions.ProcessesVcsMatchException f.seek(0) stored_file = file_impl.impl_save_file(file) - #TODO - # * ensure that the file is what it says DONE - # * Make sure that all fields (all processes in vcs) in the file exists DONE ish - # * Check file size DONE - # * If file exists, then remove the previous file and replace it. 
- - insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ @@ -319,38 +309,15 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, return True -def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> FileResponse: - select_statement = MySQLStatementBuilder(db_connection) - file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where('vcs_id = %s', [vcs_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - print(file_res) - file_path = file_impl.impl_get_file_path(file_res['file_id'], user_id) - resp = FileResponse( - path=file_path.path, - filename=file_path.filename - ) +def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> int: - return resp - - -def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> bool: select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ .where('vcs_id = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - - file_impl.impl_delete_file(file_res['file_id'], user_id) - - delete_stmt = MySQLStatementBuilder(db_connection) - _, rows = delete_stmt.delete(CVS_DSM_FILES_TABLE) \ - .where('vcs_id = %s', [vcs_id]) \ - .execute(return_affected_rows=True) - if len(rows) == 0: - raise exceptions.FileDeletionFailedException + if file_res == None: + raise exceptions.FileNotFoundException - return True \ No newline at end of file + return file_res['file_id'] diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index fd70f045..e107290d 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -333,12 +333,3 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` ON DELETE CASCADE ); -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` -( - `vcs_id` INT UNSIGNED NOT NULL, - `file_id` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_id`), - FOREIGN KEY (`vcs_id`) - REFERENCES `seddb`.`cvs_vcss`(`id`) - ON DELETE CASCADE -); \ No newline at end of file diff --git a/sql/V230529_cvs_dsm_files.sql b/sql/V230529_cvs_dsm_files.sql new file mode 100644 index 00000000..46f6a674 --- /dev/null +++ b/sql/V230529_cvs_dsm_files.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` +( + `vcs_id` INT UNSIGNED NOT NULL, + `file_id` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs_id`), + FOREIGN KEY (`vcs_id`) + REFERENCES `seddb`.`cvs_vcss`(`id`) + ON DELETE CASCADE, + FOREIGN KEY(`file_id`) + REFERENCES `seddb`.`files`(`id`) + ON DELETE CASCADE +); \ No newline at end of file diff --git a/tests/apps/cvs/life_cycle_bpmn/__init__.py b/tests/apps/cvs/life_cycle/__init__.py similarity index 100% rename from tests/apps/cvs/life_cycle_bpmn/__init__.py rename to tests/apps/cvs/life_cycle/__init__.py diff --git a/tests/apps/cvs/simulation/files/input-example.xlsx b/tests/apps/cvs/life_cycle/files/input-example.xlsx similarity index 100% rename from tests/apps/cvs/simulation/files/input-example.xlsx rename to tests/apps/cvs/life_cycle/files/input-example.xlsx diff --git a/tests/apps/cvs/life_cycle/files/input.csv b/tests/apps/cvs/life_cycle/files/input.csv new file mode 100644 index 00000000..c3070499 --- /dev/null +++ b/tests/apps/cvs/life_cycle/files/input.csv @@ -0,0 +1,3 @@ +processes,"Architectural design","Verification" +"Architectural design",0, 1 +"Verification",0, 0 diff --git 
a/tests/apps/cvs/life_cycle_bpmn/test_bpmn.py b/tests/apps/cvs/life_cycle/test_bpmn.py similarity index 100% rename from tests/apps/cvs/life_cycle_bpmn/test_bpmn.py rename to tests/apps/cvs/life_cycle/test_bpmn.py diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py new file mode 100644 index 00000000..65873471 --- /dev/null +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -0,0 +1,206 @@ +import pytest +import os +from pathlib import Path +import tests.apps.cvs.testutils as tu +import sedbackend.apps.core.users.implementation as impl_users +import sedbackend.apps.cvs.life_cycle.implementation as impl_life_cycle +import sedbackend.apps.core.files.implementation as impl_files + + +def test_upload_dsm_file(client, std_headers, std_user): + #Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + row1 = tu.vcs_model.VcsRowPost( + index=0, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=17, + subprocess=None + ) + row2 = tu.vcs_model.VcsRowPost( + index=1, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=20, + subprocess=None + ) + + rows = [row1, row2] + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + + #Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + headers=std_headers, + files=_file) + + #Assert + assert res.status_code == 200 + + #Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + + +def test_upload_invalid_file_extension(client, std_headers, std_user): + #Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + row1 = tu.vcs_model.VcsRowPost( + index=0, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=17, + subprocess=None + ) + row2 = tu.vcs_model.VcsRowPost( + index=1, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=20, + subprocess=None + ) + row3 = tu.vcs_model.VcsRowPost( + index=2, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=22, + subprocess=None + ) + row4 = tu.vcs_model.VcsRowPost( + index=3, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=24, + subprocess=None + ) + + rows = [row1, row2, row3, row4] + + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input-example.xlsx') + _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} + + #Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + headers=std_headers, + files=_file) + + #Assert + assert res.status_code == 
415 #InvalidFileTypeException + + + #Cleanup + tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) + + +def test_upload_invalid_dsm_file(client, std_headers, std_user): + #Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + row1 = tu.vcs_model.VcsRowPost( + index=0, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=17, + subprocess=None + ) + row2 = tu.vcs_model.VcsRowPost( + index=1, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=15, + subprocess=None + ) + + rows = [row1, row2] + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + + #Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + headers=std_headers, + files=_file) + + #Assert + assert res.status_code == 400 #Bad request, should throw ProcessesVcsMatchException + + #Cleanup + tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + + +def test_get_dsm_file_id(client, std_headers, std_user): + #Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + row1 = tu.vcs_model.VcsRowPost( + index=0, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=17, + subprocess=None + ) + row2 = tu.vcs_model.VcsRowPost( + index=1, + stakeholder=tu.tu.random_str(5,50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5,50), + iso_process=20, + subprocess=None + ) + + rows = [row1, row2] + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + + #Act + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + headers=std_headers, + files=_file) + + res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/get-dsm-id', + headers=std_headers) + + #Assert + assert res.status_code == 200 + + #Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) \ No newline at end of file diff --git a/tests/apps/cvs/simulation/files/input.csv b/tests/apps/cvs/simulation/files/input.csv deleted file mode 100644 index 3c7ab96b..00000000 --- a/tests/apps/cvs/simulation/files/input.csv +++ /dev/null @@ -1,5 +0,0 @@ -processes,"Architectural design","Verification","Validation","Maintenance" -"Architectural design",0, 1, 0, 0 -"Verification",0, 0, 1, 0 -"Validation",0, 0.2, 0, 0.8 -"Maintenance",0, 0, 0, 0 \ No newline at end of file diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index a6962dcc..e73840c3 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -2,6 +2,7 @@ from typing import List, Tuple import random +from sedbackend.apps.core.files import implementation as 
impl_files import sedbackend.apps.cvs.simulation.implementation as sim_impl import sedbackend.apps.cvs.simulation.models as sim_model from sedbackend.apps.cvs.simulation.models import NonTechCost @@ -9,7 +10,7 @@ import sedbackend.apps.cvs.design.models as design_model import sedbackend.apps.cvs.link_design_lifecycle.implementation as connect_impl import sedbackend.apps.cvs.link_design_lifecycle.models as connect_model -import sedbackend.apps.cvs.life_cycle.implementation +import sedbackend.apps.cvs.life_cycle.implementation as impl_life_cycle import sedbackend.apps.cvs.life_cycle.models import sedbackend.apps.cvs.project.implementation import sedbackend.apps.cvs.project.models @@ -347,6 +348,10 @@ def delete_multiple_bpmn_nodes(nodes, project_id, vcs_id, user_id): delete_bpmn_node(node.id, project_id, vcs_id, user_id) +def delete_dsm_file_from_vcs_id(proj_id, vcs_id, user_id): + file_id = impl_life_cycle.get_dsm_file_id(proj_id, vcs_id, user_id) + impl_files.impl_delete_file(file_id, user_id) + # ====================================================================================================================== # Designs # ====================================================================================================================== From 4d237f85eb2387c600774a9369db990f8b246e50 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 30 May 2023 17:10:38 +0200 Subject: [PATCH 022/210] Reverted dev state to normal state --- docker-compose.yml | 2 +- sedbackend/apps/core/authentication/utils.py | 1 - sedbackend/apps/core/db.py | 3 +-- sedbackend/apps/core/files/router.py | 2 +- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 2cbbba5a..22b4209a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ services: networks: - sedlab ports: - - "3001:3006" + - "3001:3306" build: context: . 
dockerfile: Dockerfile-mysql-server diff --git a/sedbackend/apps/core/authentication/utils.py b/sedbackend/apps/core/authentication/utils.py index 0a238bf5..12e88977 100644 --- a/sedbackend/apps/core/authentication/utils.py +++ b/sedbackend/apps/core/authentication/utils.py @@ -40,7 +40,6 @@ async def verify_scopes(security_scopes: SecurityScopes, token: str = Depends(oa raise credentials_exception logger.debug(f"VERIFY SCOPE: Required scopes: {security_scopes.scopes}, user scopes: {token_data.scopes}") - print(f"VERIFY SCOPE: Required scopes: {security_scopes.scopes}, user scopes: {token_data.scopes}") for scope in security_scopes.scopes: if scope not in token_data.scopes: logger.warning(f'VERIFY SCOPE: User "{token_data.username}" attempted to access an endpoint without the appropriate scope.') diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index c882a9f5..cc147989 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,8 +10,7 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' port = 3306 diff --git a/sedbackend/apps/core/files/router.py b/sedbackend/apps/core/files/router.py index bda62f85..235fc5d4 100644 --- a/sedbackend/apps/core/files/router.py +++ b/sedbackend/apps/core/files/router.py @@ -1,4 +1,4 @@ -from fastapi import APIRouter, Depends, Security +from fastapi import APIRouter, Depends from fastapi.responses import FileResponse import sedbackend.apps.core.files.implementation as impl From dc1aeb720a6cb111875620a2bbec79cc579742d8 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 30 May 2023 19:01:04 +0200 Subject: [PATCH 023/210] Added tests for files --- sedbackend/apps/core/files/storage.py | 2 - tests/apps/core/files/test_files.py | 142 ++++++++++++++++++++++++++ tests/apps/core/files/testutils.py | 8 ++ 3 files changed, 150 insertions(+), 2 deletions(-) create mode 100644 tests/apps/core/files/test_files.py create mode 100644 tests/apps/core/files/testutils.py diff --git a/sedbackend/apps/core/files/storage.py b/sedbackend/apps/core/files/storage.py index ee46f62e..a8ba568e 100644 --- a/sedbackend/apps/core/files/storage.py +++ b/sedbackend/apps/core/files/storage.py @@ -10,9 +10,7 @@ import sedbackend.apps.core.files.implementation as impl from mysqlsb import MySQLStatementBuilder, exclude_cols, FetchType -FILES_CHOPS_TEMP_DIR = f'{os.path.abspath(os.sep)}/home/chops/' FILES_RELATIVE_UPLOAD_DIR = f'{os.path.abspath(os.sep)}sed_lab/uploaded_files/' -FILES_RELATIVE_UPLOAD_DIR = FILES_CHOPS_TEMP_DIR + "/sed_lab/uploaded_files/" FILES_TABLE = 'files' FILES_TO_SUBPROJECTS_MAP_TABLE = 'files_subprojects_map' FILES_COLUMNS = ['id', 'temp', 'uuid', 'filename', 'insert_timestamp', 'directory', 'owner_id', 'extension'] diff --git a/tests/apps/core/files/test_files.py b/tests/apps/core/files/test_files.py new file mode 100644 index 00000000..dff0a9bc --- /dev/null +++ b/tests/apps/core/files/test_files.py @@ -0,0 +1,142 @@ +import tempfile +import tests.apps.core.projects.testutils as tu_proj +import tests.apps.core.users.testutils as tu_users +import tests.apps.core.files.testutils as tu + +import sedbackend.apps.core.files.implementation as impl +import sedbackend.apps.core.files.models as models +import sedbackend.apps.core.users.implementation as impl_users +from sedbackend.apps.core.projects.models import AccessLevel + + +def test_get_file(client, std_headers, std_user): + #Setup + current_user = 
impl_users.impl_get_user_with_username(std_user.username) + project = tu_proj.seed_random_project(current_user.id) + subp = tu_proj.seed_random_subproject(current_user.id, project.id) + + + tmp_file = tempfile.SpooledTemporaryFile() + tmp_file.write(b"Hello World!") + + post_file = models.StoredFilePost( + filename="hello", + owner_id=current_user.id, + extension=".txt", + file_object=tmp_file, + subproject_id=subp.id + ) + saved_file = impl.impl_save_file(post_file) + + #Act + res = client.get(f"/api/core/files/{saved_file.id}/download", + headers=std_headers) + + #Assert + assert res.status_code == 200 + + #Cleanup + tu.delete_files([saved_file], [current_user]) + tu_proj.delete_subprojects([subp]) + tu_proj.delete_projects([project]) + + +def test_delete_file_admin(client, admin_headers, admin_user): + #Setup + std_user = tu_users.seed_random_user(admin=False, disabled=False) + adm_user = impl_users.impl_get_user_with_username(admin_user.username) + project = tu_proj.seed_random_project(std_user.id, {adm_user.id: AccessLevel.ADMIN}) + subp = tu_proj.seed_random_subproject(std_user.id, project.id) + + + tmp_file = tempfile.SpooledTemporaryFile() + tmp_file.write(b"Hello World!") + + post_file = models.StoredFilePost( + filename="hello", + owner_id=std_user.id, + extension=".txt", + file_object=tmp_file, + subproject_id=subp.id + ) + saved_file = impl.impl_save_file(post_file) + + #Act + res = client.delete(f"/api/core/files/{saved_file.id}/delete", + headers=admin_headers) + + #Assert + assert res.status_code == 200 + + #Cleanup + tu_proj.delete_subprojects([subp]) + tu_proj.delete_projects([project]) + tu_users.delete_users([std_user]) + + + + +def test_delete_file_standard(client, std_headers, std_user): + #Setup + file_owner = tu_users.seed_random_user(admin=False, disabled=False) + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu_proj.seed_random_project(file_owner.id, {current_user.id: AccessLevel.READONLY}) + subp = tu_proj.seed_random_subproject(file_owner.id, project.id) + + + tmp_file = tempfile.SpooledTemporaryFile() + tmp_file.write(b"Hello World!") + + post_file = models.StoredFilePost( + filename="hello", + owner_id=file_owner.id, + extension=".txt", + file_object=tmp_file, + subproject_id=subp.id + ) + saved_file = impl.impl_save_file(post_file) + + #Act + res = client.delete(f"/api/core/files/{saved_file.id}/delete", + headers=std_headers) + + #Assert + assert res.status_code == 403 #403 forbidden, should not be able to access resource + + #Cleanup + tu_proj.delete_subprojects([subp]) + tu_proj.delete_projects([project]) + tu_users.delete_users([file_owner]) + + + +def test_delete_file_owner(client, std_headers, std_user): + #Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu_proj.seed_random_project(current_user.id) + subp = tu_proj.seed_random_subproject(current_user.id, project.id) + + + tmp_file = tempfile.SpooledTemporaryFile() + tmp_file.write(b"Hello World!") + + post_file = models.StoredFilePost( + filename="hello", + owner_id=current_user.id, + extension=".txt", + file_object=tmp_file, + subproject_id=subp.id + ) + saved_file = impl.impl_save_file(post_file) + + #Act + res = client.delete(f"/api/core/files/{saved_file.id}/delete", + headers=std_headers) + + #Assert + assert res.status_code == 200 + + #Cleanup + tu_proj.delete_subprojects([subp]) + tu_proj.delete_projects([project]) + \ No newline at end of file diff --git a/tests/apps/core/files/testutils.py 
b/tests/apps/core/files/testutils.py new file mode 100644 index 00000000..49dd78f6 --- /dev/null +++ b/tests/apps/core/files/testutils.py @@ -0,0 +1,8 @@ +from typing import List +import sedbackend.apps.core.files.implementation as impl +import sedbackend.apps.core.files.models as models +import sedbackend.apps.core.users.models as user_models + +def delete_files(files: List[models.StoredFileEntry], users: List[user_models.User]): + for i,file in enumerate(files): + impl.impl_delete_file(file.id, users[i].id) \ No newline at end of file From a933b8e6383b9c4cde95f89fc47f6a456a928082 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 30 May 2023 19:15:44 +0200 Subject: [PATCH 024/210] Removed features meant for life_cycle on fork repo --- sedbackend/apps/cvs/life_cycle/exceptions.py | 13 --- .../apps/cvs/life_cycle/implementation.py | 51 ---------- sedbackend/apps/cvs/life_cycle/router.py | 24 ----- sedbackend/apps/cvs/life_cycle/storage.py | 96 ------------------- sql/V220608_cvs.sql | 10 -- 5 files changed, 194 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index f97d658e..fbac10e7 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -17,16 +17,3 @@ class InvalidNodeType(Exception): class NodeFailedToUpdateException(Exception): pass - - -class InvalidFileTypeException(Exception): - pass - -class TooLargeFileException(Exception): - pass - -class ProcessesDoesNotMatchVcsException(Exception): - pass - -class FileDeletionFailedException(Exception): - pass \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 76fa166c..0ad37444 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -1,13 +1,11 @@ from fastapi import HTTPException from starlette import status -from fastapi.responses import FileResponse from sedbackend.apps.core.authentication import exceptions as auth_ex from sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.core.files import models as file_models def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: @@ -157,52 +155,3 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: status_code=status.HTTP_400_BAD_REQUEST, detail=f'Project with id={project_id} is not a part of vcs with id={vcs_id}.', ) - -def save_dsm_file(project_id: int, vcs_id: int, - file: file_models.StoredFilePost) -> bool: - try: - with get_connection() as con: - result = storage.save_dsm_file(con, project_id, vcs_id, file) - con.commit() - return result - except exceptions.InvalidFileTypeException: - raise HTTPException( - status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, - detail='Wrong filetype' - ) - except exceptions.TooLargeFileException: - raise HTTPException( - status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, - detail='File too large' - ) - except exceptions.ProcessesDoesNotMatchVcsException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail='Processes in DSM does not match processes in VCS' - ) - - -def get_dsm_file(project_id: int, vcs_id: int, user_id: int) -> FileResponse: - try: - with 
get_connection() as con: - res = storage.get_dsm_file(con, project_id, vcs_id, user_id) - con.commit() - return res - except Exception: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Something wrong" - ) - - -def delete_dsm_file(project_id: int, vcs_id: int, user_id: int) -> bool: - try: - with get_connection() as con: - res = storage.delete_dsm_file(con, project_id, vcs_id, user_id) - con.commit() - return res - except exceptions.FileDeletionFailedException: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'"File could not be deleted' - ) diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index b1f58f1a..71632a6c 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -1,13 +1,8 @@ from fastapi import APIRouter, Depends -from fastapi.datastructures import UploadFile -from fastapi.responses import FileResponse from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.cvs.life_cycle import models, implementation -from sedbackend.apps.core.files import models as file_models -from sedbackend.apps.core.users.models import User -from sedbackend.apps.core.authentication.utils import get_current_active_user from sedbackend.apps.cvs.project.router import CVS_APP_SID router = APIRouter() @@ -62,22 +57,3 @@ async def get_bpmn(native_project_id: int, vcs_id: int) -> models.BPMNGet: async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: return implementation.update_bpmn(native_project_id, vcs_id, bpmn) - -@router.post( - '/project/{native_project_id}/vcs/{vcs_id}/upload-dsm', - summary="Upload DSM file", - response_model=bool, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] -) -async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: - model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id) - return implementation.save_dsm_file(native_project_id, vcs_id, model_file) - -@router.get( - '/project/{native_project_id}/vcs/{vcs_id}/get-dsm', - summary="Fetch DSM file", - response_class=FileResponse, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] -) -async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> FileResponse: - return implementation.get_dsm_file(native_project_id, vcs_id, user.id) \ No newline at end of file diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 81fa68e8..cf6a25bb 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -6,10 +6,6 @@ from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, implementation as vcs_impl from mysql.connector import Error -from sedbackend.apps.core.files import models as file_models -from sedbackend.apps.core.files import implementation as file_impl -import pandas as pd -import magic CVS_NODES_TABLE = 'cvs_nodes' @@ -21,12 +17,6 @@ CVS_START_STOP_NODES_TABLE = 'cvs_start_stop_nodes' CVS_START_STOP_NODES_COLUMNS = CVS_NODES_COLUMNS + ['type'] -CVS_DSM_FILES_TABLE = 'cvs_dsm_files' -CVS_DSM_FILES_COLUMNS = ['vcs_id', 
'file_id'] - -MAX_FILE_SIZE = 100 * 10**8 #100 MB - -# TODO error handling def populate_process_node(db_connection, project_id, result) -> models.ProcessNodeGet: logger.debug(f'Populating model for process node with id={result["id"]} ') @@ -267,89 +257,3 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i update_node(db_connection, project_id, node.id, updated_node) return True - - -def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, - vcs_id: int, file: file_models.StoredFilePost) -> bool: - - if file.extension != ".csv": - raise exceptions.InvalidFileTypeException - - with file.file_object as f: - f.seek(0) - tmp_file = f.read() - mime = magic.from_buffer(tmp_file) - print(mime) - logger.debug(mime) - if mime != "CSV text" and mime != "ASCII text": #TODO doesn't work with windows if we create the file in excel. - raise exceptions.InvalidFileTypeException - - if f.tell() > MAX_FILE_SIZE: - raise exceptions.TooLargeFileException - - f.seek(0) - dsm_file = pd.read_csv(f) - print(dsm_file) - print(dsm_file['processes'].values) - vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) #Question: Should we demand that it is in the exact same order - # or is it enough that it exist in the vcs? - print(vcs_table) - - vcs_processes = [row.iso_process.name if row.iso_process is not None else \ - row.subprocess.name for row in vcs_table] - - for process in dsm_file['processes'].values: - if process not in vcs_processes: - raise exceptions.ProcessesDoesNotMatchVcsException - - f.seek(0) - stored_file = file_impl.impl_save_file(file) - #TODO - # * ensure that the file is what it says DONE - # * Make sure that all fields (all processes in vcs) in the file exists DONE ish - # * Check file size DONE - # * If file exists, then remove the previous file and replace it. 
- - - - insert_statement = MySQLStatementBuilder(db_connection) - insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .set_values([vcs_id, stored_file.id])\ - .execute(fetch_type=FetchType.FETCH_NONE) - - return True - -def get_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> FileResponse: - - select_statement = MySQLStatementBuilder(db_connection) - file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where('vcs_id = %s', [vcs_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - file_path = file_impl.impl_get_file_path(file_res['file_id'], user_id) - resp = FileResponse( - path=file_path.path, - filename=file_path.filename - ) - - return resp - - -def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> bool: - select_statement = MySQLStatementBuilder(db_connection) - file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where('vcs_id = %s', [vcs_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - - file_impl.impl_delete_file(file_res['file_id'], user_id) - - delete_stmt = MySQLStatementBuilder(db_connection) - _, rows = delete_stmt.delete(CVS_DSM_FILES_TABLE) \ - .where('vcs_id = %s', [vcs_id]) \ - .execute(return_affected_rows=True) - - if len(rows) == 0: - raise exceptions.FileDeletionFailedException - - return True \ No newline at end of file diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index fd70f045..64e422a4 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -332,13 +332,3 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` REFERENCES `seddb`.`cvs_value_drivers`(`id`) ON DELETE CASCADE ); - -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` -( - `vcs_id` INT UNSIGNED NOT NULL, - `file_id` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_id`), - FOREIGN KEY (`vcs_id`) - REFERENCES `seddb`.`cvs_vcss`(`id`) - ON DELETE CASCADE -); \ No newline at end of file From a1b7270e2d7ba70e87da5ff537fdcb2dad96efe9 Mon Sep 17 00:00:00 2001 From: EppChops Date: Tue, 30 May 2023 19:39:13 +0200 Subject: [PATCH 025/210] Fixed merge conflicts --- .../apps/cvs/life_cycle/implementation.py | 2 +- sedbackend/apps/cvs/life_cycle/router.py | 4 ++ sedbackend/apps/cvs/life_cycle/storage.py | 61 ++++++++++++------- 3 files changed, 43 insertions(+), 24 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index d59d795f..038e1706 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -6,7 +6,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions - +from sedbackend.apps.core.files import models as file_models def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: try: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 538b0bd0..3da781a7 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -1,10 +1,14 @@ from fastapi import APIRouter, Depends +from fastapi import UploadFile +from sedbackend.apps.core.authentication.utils import get_current_active_user +from sedbackend.apps.core.users.models import User from 
sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.core.projects.implementation import impl_get_subproject_native from sedbackend.apps.cvs.life_cycle import models, implementation from sedbackend.apps.cvs.project.router import CVS_APP_SID +from sedbackend.apps.core.files import models as file_models router = APIRouter() diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 5c43a5c0..7f7ccd55 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -5,8 +5,10 @@ from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, implementation as vcs_impl +from sedbackend.apps.core.files import models as file_models, implementation as file_impl from mysql.connector import Error - +import magic +import pandas as pd CVS_NODES_TABLE = 'cvs_nodes' CVS_NODES_COLUMNS = ['cvs_nodes.id', 'vcs', 'from', 'to', 'pos_x', 'pos_y'] @@ -17,6 +19,11 @@ CVS_START_STOP_NODES_TABLE = 'cvs_start_stop_nodes' CVS_START_STOP_NODES_COLUMNS = CVS_NODES_COLUMNS + ['type'] +CVS_DSM_FILES_TABLE = 'cvs_dsm_files' +CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] + +MAX_FILE_SIZE = 100*10**6 # 100MB + def populate_process_node(db_connection, project_id, result) -> models.ProcessNodeGet: logger.debug(f'Populating model for process node with id={result["id"]} ') @@ -28,12 +35,14 @@ def populate_process_node(db_connection, project_id, result) -> models.ProcessNo to_node=result['to'], pos_x=result['pos_x'], pos_y=result['pos_y'], - vcs_row=vcs_storage.get_vcs_row(db_connection, project_id, result['vcs_row']) + vcs_row=vcs_storage.get_vcs_row( + db_connection, project_id, result['vcs_row']) ) def populate_start_stop_node(result) -> models.StartStopNodeGet: - logger.debug(f'Populating model for start/stop node with id={result["id"]}') + logger.debug( + f'Populating model for start/stop node with id={result["id"]}') return models.StartStopNodeGet( id=result['id'], vcs_id=result['vcs'], @@ -60,7 +69,8 @@ def get_node(db_connection: PooledMySQLConnection, project_id: int, node_id: int logger.debug(f'Error msg: {e.msg}') raise exceptions.NodeNotFoundException - vcs_storage.get_vcs(db_connection, result['vcs'], project_id) # Check if vcs exists and matches project id + # Check if vcs exists and matches project id + vcs_storage.get_vcs(db_connection, result['vcs'], project_id) return result @@ -173,7 +183,8 @@ def create_start_stop_node(db_connection: PooledMySQLConnection, node: models.St def delete_node(db_connection: PooledMySQLConnection, project_id: int, node_id: int) -> bool: logger.debug(f'Delete node with id={node_id}.') - get_node(db_connection, project_id, node_id) # Check if node exists and matches project id + # Check if node exists and matches project id + get_node(db_connection, project_id, node_id) delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement.delete(CVS_NODES_TABLE) \ @@ -209,7 +220,8 @@ def update_node(db_connection: PooledMySQLConnection, project_id: int, node_id: def get_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> models.BPMNGet: logger.debug(f'Get BPMN for vcs with id={vcs_id}.') - vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id + # Check if vcs exists 
and matches project id + vcs_storage.get_vcs(db_connection, project_id, vcs_id) where_statement = f'vcs = %s' where_values = [vcs_id] @@ -222,7 +234,8 @@ def get_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) .where(where_statement, where_values) \ .order_by(['cvs_nodes.id'], Sort.ASCENDING) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - process_nodes = [populate_process_node(db_connection, project_id, result) for result in process_nodes_result] + process_nodes = [populate_process_node( + db_connection, project_id, result) for result in process_nodes_result] select_statement = MySQLStatementBuilder(db_connection) # start_stop_nodes_result = \ @@ -247,7 +260,8 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i bpmn: models.BPMNGet) -> bool: logger.debug(f'Updating bpmn with vcs id={vcs_id}.') - vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id + # Check if vcs exists and matches project id + vcs_storage.get_vcs(db_connection, project_id, vcs_id) for node in bpmn.nodes: updated_node = models.NodePost( @@ -258,36 +272,37 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i return True -def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, + +def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file: file_models.StoredFilePost) -> bool: - + if file.extension != ".csv": raise exceptions.InvalidFileTypeException - + with file.file_object as f: f.seek(0) tmp_file = f.read() mime = magic.from_buffer(tmp_file) print(mime) logger.debug(mime) - if mime != "CSV text" and mime != "ASCII text": #TODO doesn't work with windows if we create the file in excel. + # TODO doesn't work with windows if we create the file in excel. 
+ if mime != "CSV text" and mime != "ASCII text": raise exceptions.InvalidFileTypeException - + if f.tell() > MAX_FILE_SIZE: raise exceptions.FileSizeException - + f.seek(0) dsm_file = pd.read_csv(f) vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) - - - vcs_processes = [row.iso_process.name if row.iso_process is not None else \ - row.subprocess.name for row in vcs_table] - + + vcs_processes = [row.iso_process.name if row.iso_process is not None else + row.subprocess.name for row in vcs_table] + for process in dsm_file['processes'].values: if process not in vcs_processes: raise exceptions.ProcessesVcsMatchException - + f.seek(0) stored_file = file_impl.impl_save_file(file) @@ -295,7 +310,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ .set_values([vcs_id, stored_file.id])\ .execute(fetch_type=FetchType.FETCH_NONE) - + return True @@ -305,8 +320,8 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ .where('vcs_id = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - + if file_res == None: raise exceptions.FileNotFoundException - + return file_res['file_id'] From f1d3f33724598a0c64620c7829415861e0f5106d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 8 Jun 2023 13:54:18 +0200 Subject: [PATCH 026/210] minimize sim db calls iteration 1 --- sedbackend/apps/cvs/simulation/storage.py | 53 +++++++++++++---------- 1 file changed, 30 insertions(+), 23 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 03846fb2..e26a65f2 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -11,7 +11,7 @@ from desim.simulation import Process import os -from typing import List +from typing import Optional, List from sedbackend.apps.cvs.design.implementation import get_design from sedbackend.libs.mysqlutils.builder import FetchType, MySQLStatementBuilder @@ -21,7 +21,8 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import implementation as vcs_impl -from sedbackend.apps.cvs.design import implementation as design_impl +from sedbackend.apps.cvs.design import implementation as design_impl, models as design_models +from sedbackend.apps.cvs.market_input import models as mi_models SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', @@ -102,7 +103,7 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr for design_id in design_ids: processes, non_tech_processes = populate_processes( - non_tech_add, res, db_connection, vcs_id, design_id) + non_tech_add, res, db_connection, design_id) sim = des.Des() try: @@ -144,23 +145,24 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) for vcs_id in vcs_ids: + market_values = get_market_values(db_connection, vcs_id) # 1 * vcs for design_group_id in design_group_ids: - res = get_sim_data(db_connection, vcs_id, design_group_id) - if res is None or res == []: + sim_data = get_sim_data(db_connection, vcs_id, design_group_id) # 1 * vcs * design_group + if sim_data is None or sim_data == []: raise e.VcsFailedException - if not 
check_entity_rate(res, process): + if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_impl.get_all_designs(project_id, design_group_id)] + designs = [design for design in design_impl.get_all_designs(project_id, design_group_id)] # 1 * vcs * design_group - if design_ids is None or []: + if designs is None or []: raise e.DesignIdsNotFoundException - for design_id in design_ids: - # get_design(design_id) - processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, - design_id) # BUG probably. Populate processes changes the order of the processes. + for design in designs: + processes, non_tech_processes = populate_processes(non_tech_add, sim_data, db_connection, # 1 * vcs * design_group + design.id, + market_values, design.vd_design_values) # BUG probably. Populate processes changes the order of the processes. dsm = create_simple_dsm(processes) # TODO Change to using BPMN @@ -194,7 +196,7 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, simSettings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool = False, user_id: int = None) -> List[ - models.Simulation]: + models.Simulation]: design_results = [] if not check_sim_settings(simSettings): @@ -210,6 +212,7 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s runs = simSettings.runs for vcs_id in vcs_ids: + market_values = get_market_values(db_connection, vcs_id) for design_group_id in design_group_ids: res = get_sim_data(db_connection, vcs_id, design_group_id) if res is None or res == []: @@ -226,7 +229,7 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s for design_id in design_ids: get_design(design_id) processes, non_tech_processes = populate_processes( - non_tech_add, res, db_connection, vcs_id, design_id) + non_tech_add, res, db_connection, market_values, design_id) logger.debug('Fetched Processes and non-techproc') # TODO Change to using BPMN AND move out of the for loop dsm = create_simple_dsm(processes) @@ -234,7 +237,8 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s sim = des.Des() try: - results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, non_tech_processes, + results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, + non_tech_processes, non_tech_add, dsm, time_unit, discount_rate, runtime, runs) except Exception as exc: @@ -258,15 +262,19 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s return design_results -def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: PooledMySQLConnection, vcs: int, - design: int): +def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: PooledMySQLConnection, design: int, + mi_values=None, + vd_values=None): + if vd_values is None: + vd_values = [] + if mi_values is None: + mi_values = [] nsp = NumericStringParser() technical_processes = [] non_tech_processes = [] - mi_values = get_market_values(db_connection, vcs) + for row in db_results: - vd_values = get_vd_design_values(db_connection, row['id'], design) if row['category'] != 'Technical processes': try: non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values, mi_values)), @@ -292,7 +300,7 @@ def 
populate_processes(non_tech_add: NonTechCost, db_results, db_connection: Poo nsp.eval(expr.replace_all( 'time', time, revenue_formula)), row['iso_name'], non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) + row['time_unit'].lower()) ) if p.time < 0: raise e.NegativeTimeException(row['id']) @@ -314,7 +322,7 @@ def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: Poo nsp.eval(expr.replace_all( 'time', time, revenue_formula)), row['sub_name'], non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) + row['time_unit'].lower()) ) if p.time < 0: @@ -346,7 +354,6 @@ def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, design: int): - select_statement = MySQLStatementBuilder(db_connection) res = select_statement \ .select('cvs_vd_design_values', ['cvs_value_drivers.id', 'design', 'name', 'value', 'unit']) \ @@ -369,7 +376,7 @@ def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: in if res is None: raise e.SimSettingsNotFoundException - + return populate_sim_settings(res) From 0df20d4407bfadaccc0e2f3ce24a1bfbd76f6422 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 12 Jun 2023 16:26:25 +0200 Subject: [PATCH 027/210] iteration 2 --- sedbackend/apps/cvs/design/implementation.py | 4 +- sedbackend/apps/cvs/design/models.py | 4 +- sedbackend/apps/cvs/design/router.py | 4 +- sedbackend/apps/cvs/design/storage.py | 36 ++++++-- sedbackend/apps/cvs/market_input/models.py | 8 ++ sedbackend/apps/cvs/simulation/storage.py | 89 +++++++++++++++++--- tests/apps/cvs/design/test_design.py | 8 +- tests/apps/cvs/testutils.py | 2 +- 8 files changed, 129 insertions(+), 26 deletions(-) diff --git a/sedbackend/apps/cvs/design/implementation.py b/sedbackend/apps/cvs/design/implementation.py index 8d02c420..b90a8298 100644 --- a/sedbackend/apps/cvs/design/implementation.py +++ b/sedbackend/apps/cvs/design/implementation.py @@ -158,10 +158,10 @@ def get_design(design_id: int) -> models.Design: ) -def get_all_designs(project_id: int, design_group_id: int) -> List[models.Design]: +def get_designs(project_id: int, design_group_id: int) -> List[models.Design]: try: with get_connection() as con: - res = storage.get_all_designs(con, project_id, design_group_id) + res = storage.get_designs(con, project_id, design_group_id) con.commit() return res except exceptions.DesignGroupNotFoundException: diff --git a/sedbackend/apps/cvs/design/models.py b/sedbackend/apps/cvs/design/models.py index a6145872..c0397c13 100644 --- a/sedbackend/apps/cvs/design/models.py +++ b/sedbackend/apps/cvs/design/models.py @@ -36,10 +36,12 @@ def __eq__(self, other: Any) -> bool: return self.vd_id == other.vd_id + class Design(BaseModel): id: int name: str - vd_design_values: List[ValueDriverDesignValue] + design_group_id: Optional[int] = None + vd_design_values: Optional[List[ValueDriverDesignValue]] class DesignPut(BaseModel): diff --git a/sedbackend/apps/cvs/design/router.py b/sedbackend/apps/cvs/design/router.py index 62f6b137..86758c2f 100644 --- a/sedbackend/apps/cvs/design/router.py +++ b/sedbackend/apps/cvs/design/router.py @@ -79,8 +79,8 @@ async def edit_design_group(native_project_id: int, design_group_id: int, response_model=List[models.Design], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_designs(native_project_id: int, design_group_id: int) -> List[models.Design]: - return 
implementation.get_all_designs(native_project_id, design_group_id) +async def get_designs(native_project_id: int, design_group_id: int) -> List[models.Design]: + return implementation.get_designs(native_project_id, design_group_id) @router.put( diff --git a/sedbackend/apps/cvs/design/storage.py b/sedbackend/apps/cvs/design/storage.py index 878382aa..d2821a83 100644 --- a/sedbackend/apps/cvs/design/storage.py +++ b/sedbackend/apps/cvs/design/storage.py @@ -158,13 +158,21 @@ def populate_design_group(db_result) -> models.DesignGroup: ) -def populate_design(db_result) -> models.Design: +def populate_design_with_values(db_result) -> models.Design: return models.Design( id=db_result['id'], name=db_result['name'], + design_group_id=db_result['design_group'], vd_design_values=db_result['vd_values'] ) +def populate_design(db_result) -> models.Design: + return models.Design( + id=db_result['id'], + name=db_result['name'], + design_group_id=db_result['design_group'] + ) + def get_design(db_connection: PooledMySQLConnection, design_id: int): logger.debug(f'Get design with id = {design_id}') @@ -180,10 +188,10 @@ def get_design(db_connection: PooledMySQLConnection, design_id: int): vd_design_values = get_all_vd_design_values(db_connection, result['id']) result.update({'vd_values': vd_design_values}) - return populate_design(result) + return populate_design_with_values(result) -def get_all_designs(db_connection: PooledMySQLConnection, project_id: int, design_group_id: int) -> List[models.Design]: +def get_designs(db_connection: PooledMySQLConnection, project_id: int, design_group_id: int) -> List[models.Design]: logger.debug(f'Get all designs in design group with id = {design_group_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project @@ -202,11 +210,29 @@ def get_all_designs(db_connection: PooledMySQLConnection, project_id: int, desig for result in res: vd_design_values = get_all_vd_design_values(db_connection, result['id']) result.update({'vd_values': vd_design_values}) - designs.append(populate_design(result)) + designs.append(populate_design_with_values(result)) return designs +def get_all_designs(db_connection: PooledMySQLConnection, design_group_ids: List[int]) -> List[models.Design]: + logger.debug(f'Get all designs in design groups with ids = {design_group_ids}') + + try: + query = f'SELECT cvs_designs.id, cvs_designs.design_group, cvs_designs.name \ + FROM cvs_designs \ + WHERE cvs_designs.design_group IN ({",".join([str(dg) for dg in design_group_ids])})' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + except Error as e: + logger.debug(f'Error msg: {e.msg}') + raise exceptions.DesignGroupNotFoundException + + return [populate_design(result) for result in res] + + def create_design(db_connection: PooledMySQLConnection, design_group_id: int, design: models.DesignPost) -> bool: logger.debug(f'Create a design for design group with id = {design_group_id}') @@ -277,7 +303,7 @@ def edit_designs(db_connection: PooledMySQLConnection, project_id: int, design_g logger.debug(f'Edit designs with design group id = {design_group_id}') # Check if design group exists and matches project - curr_designs = get_all_designs(db_connection, project_id, design_group_id) + curr_designs = get_designs(db_connection, project_id, design_group_id) for design in curr_designs: if design.id not in [d.id for d in designs]: 
delete_design(db_connection, design.id) diff --git a/sedbackend/apps/cvs/market_input/models.py b/sedbackend/apps/cvs/market_input/models.py index 866edb1c..f12f7817 100644 --- a/sedbackend/apps/cvs/market_input/models.py +++ b/sedbackend/apps/cvs/market_input/models.py @@ -21,3 +21,11 @@ class MarketInputValue(BaseModel): vcs_id: int market_input_id: int value: float + + +class MarketInputValueSim(BaseModel): + vcs_id: int + market_input_id: int + value: float + name: str + unit: str diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index e26a65f2..0d675211 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -7,12 +7,14 @@ from fastapi.logger import logger from desim import interface as des -from desim.data import NonTechCost, TimeFormat, SimResults +from desim.data import NonTechCost, TimeFormat from desim.simulation import Process import os from typing import Optional, List from sedbackend.apps.cvs.design.implementation import get_design +from sedbackend.apps.cvs.design.models import ValueDriverDesignValue +from sedbackend.apps.cvs.design.storage import get_all_designs from sedbackend.libs.mysqlutils.builder import FetchType, MySQLStatementBuilder @@ -21,7 +23,7 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import implementation as vcs_impl -from sedbackend.apps.cvs.design import implementation as design_impl, models as design_models +from sedbackend.apps.cvs.design import implementation as design_impl from sedbackend.apps.cvs.market_input import models as mi_models SIM_SETTINGS_TABLE = "cvs_simulation_settings" @@ -144,25 +146,39 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se process = sim_settings.flow_process time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) + all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) # 1 + + all_market_inputs = get_all_market_values(db_connection, vcs_ids) # 1 + + all_designs = get_all_designs(db_connection, design_group_ids) # 1 + + logger.debug(f'Designs: {len(all_designs)}') + + all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) # 1 + + logger.debug(f'vd values: {len(all_vd_design_values)}') + for vcs_id in vcs_ids: - market_values = get_market_values(db_connection, vcs_id) # 1 * vcs + market_values = [mi for mi in all_market_inputs if mi['vcs'] == vcs_id] for design_group_id in design_group_ids: - sim_data = get_sim_data(db_connection, vcs_id, design_group_id) # 1 * vcs * design_group + sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] if sim_data is None or sim_data == []: raise e.VcsFailedException if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - designs = [design for design in design_impl.get_all_designs(project_id, design_group_id)] # 1 * vcs * design_group + designs = [design for design in all_designs if design.design_group_id == design_group_id] if designs is None or []: raise e.DesignIdsNotFoundException for design in designs: - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, db_connection, # 1 * vcs * design_group + vd_values = [vd for vd in all_vd_design_values if vd['design'] == design.id] + logger.debug(f'vd values 2: {len(vd_values)}') # returns for last design 0 + processes, non_tech_processes = 
populate_processes(non_tech_add, sim_data, db_connection, design.id, - market_values, design.vd_design_values) # BUG probably. Populate processes changes the order of the processes. + market_values, vd_values) dsm = create_simple_dsm(processes) # TODO Change to using BPMN @@ -221,7 +237,7 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s if not check_entity_rate(res, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_impl.get_all_designs(project_id, design_group_id)] + design_ids = [design.id for design in design_impl.get_designs(project_id, design_group_id)] if design_ids is None or []: raise e.DesignIdsNotFoundException @@ -265,8 +281,6 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: PooledMySQLConnection, design: int, mi_values=None, vd_values=None): - if vd_values is None: - vd_values = [] if mi_values is None: mi_values = [] nsp = NumericStringParser() @@ -275,6 +289,8 @@ def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: Poo non_tech_processes = [] for row in db_results: + # vd_values = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] + vd_values = get_vd_design_values(db_connection, row['id'], design) # TODO fix this if row['category'] != 'Technical processes': try: non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values, mi_values)), @@ -352,8 +368,26 @@ def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group return res +def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], design_group_ids: List[int]): + query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ + cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ + subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ + LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ + LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ + OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ + LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ + WHERE cvs_vcs_rows.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])}) \ + AND cvs_design_mi_formulas.design_group \ + IN ({",".join([str(dg) for dg in design_group_ids])}) ORDER BY `index`' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + return res + + def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, - design: int): + design: int) -> List[ValueDriverDesignValue]: select_statement = MySQLStatementBuilder(db_connection) res = select_statement \ .select('cvs_vd_design_values', ['cvs_value_drivers.id', 'design', 'name', 'value', 'unit']) \ @@ -363,6 +397,20 @@ def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, .where('vcs_row = %s and design = %s', [vcs_row_id, design]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + logger.debug(f'Fetched {len(res)} value driver design values') + return res + + +def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): + select_statement = MySQLStatementBuilder(db_connection) + res = select_statement \ + 
.select('cvs_vd_design_values', ['cvs_value_drivers.id', 'design', 'name', 'value', 'unit', 'vcs_row']) \ + .inner_join('cvs_value_drivers', 'cvs_vd_design_values.value_driver = cvs_value_drivers.id') \ + .inner_join('cvs_vcs_need_drivers', 'cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id') \ + .inner_join('cvs_stakeholder_needs', 'cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need') \ + .where('design IN (%s)', [','.join([str(design) for design in designs])]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + return res @@ -454,10 +502,29 @@ def get_market_values(db_connection: PooledMySQLConnection, vcs: int): .inner_join('cvs_market_inputs', 'cvs_market_input_values.market_input = cvs_market_inputs.id') \ .where('vcs = %s', [vcs]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + return res + +def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[int]): + select_statement = MySQLStatementBuilder(db_connection) + res = select_statement \ + .select('cvs_market_input_values', ['id', 'name', 'value', 'unit', 'vcs']) \ + .inner_join('cvs_market_inputs', 'cvs_market_input_values.market_input = cvs_market_inputs.id') \ + .where('vcs IN (%s)', [','.join([str(vcs) for vcs in vcs_ids])]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) return res +def populate_market_input_values(res) -> mi_models.MarketInputValueSim: + return mi_models.MarketInputValueSim( + vcs_id=res['vcs'], + market_input_id=res['market_input'], + value=res['value'], + name=res['name'], + unit=res['unit'] + ) + + def parse_formula(formula: str, vd_values, mi_values) -> str: new_formula = formula vd_names = expr.get_prefix_variables('VD', new_formula) diff --git a/tests/apps/cvs/design/test_design.py b/tests/apps/cvs/design/test_design.py index 4da5ba32..d7c90c40 100644 --- a/tests/apps/cvs/design/test_design.py +++ b/tests/apps/cvs/design/test_design.py @@ -25,7 +25,7 @@ def test_create_design(client, std_headers, std_user): # Assert assert res.status_code == 200 # 200 OK - designs = impl_design.get_all_designs(project.id, design_group.id) + designs = impl_design.get_designs(project.id, design_group.id) assert designs[0].name == "new design" assert len(designs) == 1 assert len(designs[0].vd_design_values) == len(design_group.vds) @@ -54,7 +54,7 @@ def test_create_design_no_values(client, std_headers, std_user): # Assert assert res.status_code == 200 # 200 OK - designs = impl_design.get_all_designs(project.id, design_group.id) + designs = impl_design.get_designs(project.id, design_group.id) assert len(designs) == 1 # Cleanup @@ -86,7 +86,7 @@ def test_edit_designs(client, std_headers, std_user): # Assert assert res.status_code == 200 # 200 OK - designs = impl_design.get_all_designs(project.id, design_group.id) + designs = impl_design.get_designs(project.id, design_group.id) assert designs[0].name == "new design" assert len(designs) == 1 assert len(designs[0].vd_design_values) == len(design_group.vds) @@ -111,7 +111,7 @@ def test_delete_designs(client, std_headers, std_user): # Assert assert res.status_code == 200 # 200 OK - designs = impl_design.get_all_designs(project.id, design_group.id) + designs = impl_design.get_designs(project.id, design_group.id) assert len(designs) == 0 # Cleanup diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index a6962dcc..46d6d33f 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -398,7 +398,7 @@ def seed_random_designs(project_id: int, dg_id: int, amount: int = 
10): design_impl.edit_designs(project_id, dg_id, [design_model.DesignPut(name=tu.random_str(5, 50)) for _ in range(amount)]) - return design_impl.get_all_designs(project_id, dg_id) + return design_impl.get_designs(project_id, dg_id) # ====================================================================================================================== From c6efffe22584c529c7519dab9ab4adef0b0dd055 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 13 Jun 2023 10:11:28 +0200 Subject: [PATCH 028/210] simulation minimized to 4 db calls --- sedbackend/apps/cvs/simulation/storage.py | 64 +++++++++++------------ 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 0d675211..977d63f8 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -146,17 +146,13 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se process = sim_settings.flow_process time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) - all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) # 1 + all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) - all_market_inputs = get_all_market_values(db_connection, vcs_ids) # 1 + all_market_inputs = get_all_market_values(db_connection, vcs_ids) - all_designs = get_all_designs(db_connection, design_group_ids) # 1 + all_designs = get_all_designs(db_connection, design_group_ids) - logger.debug(f'Designs: {len(all_designs)}') - - all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) # 1 - - logger.debug(f'vd values: {len(all_vd_design_values)}') + all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) for vcs_id in vcs_ids: market_values = [mi for mi in all_market_inputs if mi['vcs'] == vcs_id] @@ -168,16 +164,15 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - designs = [design for design in all_designs if design.design_group_id == design_group_id] + designs = [design.id for design in all_designs if design.design_group_id == design_group_id] if designs is None or []: raise e.DesignIdsNotFoundException for design in designs: - vd_values = [vd for vd in all_vd_design_values if vd['design'] == design.id] - logger.debug(f'vd values 2: {len(vd_values)}') # returns for last design 0 - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, db_connection, - design.id, + vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] + processes, non_tech_processes = populate_processes(non_tech_add, sim_data, + design, market_values, vd_values) dsm = create_simple_dsm(processes) # TODO Change to using BPMN @@ -278,7 +273,7 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s return design_results -def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: PooledMySQLConnection, design: int, +def populate_processes(non_tech_add: NonTechCost, db_results, design: int, mi_values=None, vd_values=None): if mi_values is None: @@ -289,13 +284,12 @@ def populate_processes(non_tech_add: NonTechCost, db_results, db_connection: Poo non_tech_processes = [] for row in db_results: - # vd_values = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] - vd_values = 
get_vd_design_values(db_connection, row['id'], design) # TODO fix this + vd_values_row = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] if row['category'] != 'Technical processes': try: - non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values, mi_values)), + non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), revenue=nsp.eval( - parse_formula(row['revenue'], vd_values, mi_values)), + parse_formula(row['revenue'], vd_values_row, mi_values)), name=row['iso_name']) except Exception as exc: logger.debug(f'{exc.__class__}, {exc}') @@ -402,14 +396,16 @@ def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): - select_statement = MySQLStatementBuilder(db_connection) - res = select_statement \ - .select('cvs_vd_design_values', ['cvs_value_drivers.id', 'design', 'name', 'value', 'unit', 'vcs_row']) \ - .inner_join('cvs_value_drivers', 'cvs_vd_design_values.value_driver = cvs_value_drivers.id') \ - .inner_join('cvs_vcs_need_drivers', 'cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id') \ - .inner_join('cvs_stakeholder_needs', 'cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need') \ - .where('design IN (%s)', [','.join([str(design) for design in designs])]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + query = f'SELECT cvs_value_drivers.id, design, name, value, unit, vcs_row \ + FROM cvs_vd_design_values \ + INNER JOIN cvs_value_drivers ON cvs_vd_design_values.value_driver = cvs_value_drivers.id \ + INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id \ + INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ + WHERE design IN ({",".join([str(design) for design in designs])})' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] return res @@ -506,12 +502,16 @@ def get_market_values(db_connection: PooledMySQLConnection, vcs: int): def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[int]): - select_statement = MySQLStatementBuilder(db_connection) - res = select_statement \ - .select('cvs_market_input_values', ['id', 'name', 'value', 'unit', 'vcs']) \ - .inner_join('cvs_market_inputs', 'cvs_market_input_values.market_input = cvs_market_inputs.id') \ - .where('vcs IN (%s)', [','.join([str(vcs) for vcs in vcs_ids])]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + query = f'SELECT id, name, value, unit, vcs \ + FROM cvs_market_input_values \ + INNER JOIN cvs_market_inputs ON cvs_market_input_values.market_input = cvs_market_inputs.id \ + WHERE cvs_market_input_values.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])})' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + return res From eac7a7a894aa0c27054b00bebae7ee63939fe629 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 13 Jun 2023 11:24:20 +0200 Subject: [PATCH 029/210] minimize db calls for multiprocessing and error handling --- sedbackend/apps/cvs/simulation/exceptions.py | 12 ++ .../apps/cvs/simulation/implementation.py | 51 +++++-- sedbackend/apps/cvs/simulation/router.py | 21 +-- 
sedbackend/apps/cvs/simulation/storage.py | 133 ++++++++++-------- .../simulation/test_sim_multiprocessing.py | 1 - tests/apps/cvs/simulation/test_simulation.py | 1 - 6 files changed, 131 insertions(+), 88 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/exceptions.py b/sedbackend/apps/cvs/simulation/exceptions.py index 0b526be0..c19363df 100644 --- a/sedbackend/apps/cvs/simulation/exceptions.py +++ b/sedbackend/apps/cvs/simulation/exceptions.py @@ -56,3 +56,15 @@ class SimSettingsNotFoundException(Exception): class NoTechnicalProcessException(Exception): pass + + +class CouldNotFetchSimulationDataException(Exception): + pass + + +class CouldNotFetchMarketInputValuesException(Exception): + pass + + +class CouldNotFetchValueDriverDesignValuesException(Exception): + pass diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index d7c853e5..f016727e 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -14,19 +14,18 @@ from sedbackend.apps.cvs.simulation.exceptions import BadlyFormattedSettingsException, DSMFileNotFoundException, \ DesignIdsNotFoundException, FormulaEvalException, NegativeTimeException, ProcessNotFoundException, \ RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ - SimSettingsNotFoundException - + SimSettingsNotFoundException, CouldNotFetchSimulationDataException, CouldNotFetchMarketInputValuesException, \ + CouldNotFetchValueDriverDesignValuesException + from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions -def run_simulation(project_id: int, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], - normalized_npv: bool, user_id: int) -> List[models.Simulation]: +def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], + design_group_ids: List[int]) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, project_id, sim_settings, vcs_ids, design_group_ids, - normalized_npv, user_id) + result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( @@ -83,6 +82,21 @@ def run_simulation(project_id: int, sim_settings: models.EditSimSettings, vcs_id status_code=status.HTTP_400_BAD_REQUEST, detail=f'Settings are not correct' ) + except CouldNotFetchSimulationDataException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch simulation data' + ) + except CouldNotFetchMarketInputValuesException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch market input values' + ) + except CouldNotFetchValueDriverDesignValuesException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch value driver design values' + ) def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, @@ -143,13 +157,11 @@ def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.Fi ) -def run_sim_monte_carlo(project_id: int, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], - normalized_npv: bool, user_id: int = None) -> List[models.Simulation]: +def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], 
design_group_ids: List[int], + normalized_npv: bool) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_sim_monte_carlo(con, project_id, sim_settings, vcs_ids, - design_group_ids, normalized_npv, user_id) + result = storage.run_sim_monte_carlo(con, sim_settings, vcs_ids, design_group_ids, normalized_npv) return result except vcs_exceptions.GenericDatabaseException: raise HTTPException( @@ -186,6 +198,21 @@ def run_sim_monte_carlo(project_id: int, sim_settings: models.EditSimSettings, v status_code=status.HTTP_400_BAD_REQUEST, detail=f'Settings are not correct' ) + except CouldNotFetchSimulationDataException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch simulation data' + ) + except CouldNotFetchMarketInputValuesException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch market input values' + ) + except CouldNotFetchValueDriverDesignValuesException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not fetch value driver design values' + ) def get_sim_settings(project_id: int) -> models.SimSettings: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 615ec7ae..74dbf9d1 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -1,6 +1,4 @@ -from operator import mod -from fastapi import Depends, APIRouter, UploadFile, File, HTTPException - +from fastapi import Depends, APIRouter from typing import List, Optional from sedbackend.apps.core.authentication.utils import get_current_active_user from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker @@ -18,12 +16,9 @@ response_model=List[models.Simulation], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_simulation(native_project_id: int, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], - normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> List[models.Simulation]: - return implementation.run_simulation(native_project_id, sim_settings, vcs_ids, design_group_ids, normalized_npv, - user.id) +async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], + design_group_ids: List[int]) -> List[models.Simulation]: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids) # Temporary disabled ''' @@ -50,12 +45,10 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil response_model=List[models.Simulation], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_sim_monte_carlo(native_project_id: int, sim_settings: models.EditSimSettings, vcs_ids: List[int], +async def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> List[models.Simulation]: - return implementation.run_sim_monte_carlo(native_project_id, sim_settings, vcs_ids, - design_group_ids, normalized_npv, user.id) + normalized_npv: Optional[bool] = False) -> List[models.Simulation]: + return implementation.run_sim_monte_carlo(sim_settings, vcs_ids, design_group_ids, normalized_npv) @router.get( diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 977d63f8..bfc158e9 100644 
--- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -3,6 +3,7 @@ from fastapi import UploadFile from mysql.connector.pooling import PooledMySQLConnection import pandas as pd +from mysql.connector import Error from fastapi.logger import logger @@ -11,8 +12,7 @@ from desim.simulation import Process import os -from typing import Optional, List -from sedbackend.apps.cvs.design.implementation import get_design +from typing import List from sedbackend.apps.cvs.design.models import ValueDriverDesignValue from sedbackend.apps.cvs.design.storage import get_all_designs @@ -23,7 +23,6 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import implementation as vcs_impl -from sedbackend.apps.cvs.design import implementation as design_impl from sedbackend.apps.cvs.market_input import models as mi_models SIM_SETTINGS_TABLE = "cvs_simulation_settings" @@ -131,9 +130,9 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr return design_results -def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_settings: models.EditSimSettings, +def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], normalized_npv: bool, user_id: int) -> List[models.Simulation]: + design_group_ids: List[int]) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -148,14 +147,14 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) - all_market_inputs = get_all_market_values(db_connection, vcs_ids) + all_market_values = get_all_market_values(db_connection, vcs_ids) all_designs = get_all_designs(db_connection, design_group_ids) all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) for vcs_id in vcs_ids: - market_values = [mi for mi in all_market_inputs if mi['vcs'] == vcs_id] + market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] if sim_data is None or sim_data == []: @@ -171,9 +170,8 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se for design in designs: vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, - design, - market_values, vd_values) + processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, + vd_values) dsm = create_simple_dsm(processes) # TODO Change to using BPMN @@ -204,9 +202,8 @@ def run_simulation(db_connection: PooledMySQLConnection, project_id: int, sim_se return design_results -def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, simSettings: models.EditSimSettings, - vcs_ids: List[int], - design_group_ids: List[int], normalized_npv: bool = False, user_id: int = None) -> List[ +def run_sim_monte_carlo(db_connection: PooledMySQLConnection, simSettings: models.EditSimSettings, vcs_ids: List[int], + design_group_ids: List[int], normalized_npv: bool = False) -> List[ models.Simulation]: design_results = [] @@ -222,27 +219,34 @@ def run_sim_monte_carlo(db_connection: PooledMySQLConnection, project_id: int, s 
time_unit = TIME_FORMAT_DICT.get(simSettings.time_unit) runs = simSettings.runs + all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) + + all_market_values = get_all_market_values(db_connection, vcs_ids) + + all_designs = get_all_designs(db_connection, design_group_ids) + + all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) + for vcs_id in vcs_ids: - market_values = get_market_values(db_connection, vcs_id) + market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] for design_group_id in design_group_ids: - res = get_sim_data(db_connection, vcs_id, design_group_id) - if res is None or res == []: + sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] + if sim_data is None or sim_data == []: raise e.VcsFailedException - if not check_entity_rate(res, process): + if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_impl.get_designs(project_id, design_group_id)] + designs = [design.id for design in all_designs if design.design_group_id == design_group_id] - if design_ids is None or []: + if designs is None or designs == []: raise e.DesignIdsNotFoundException - for design_id in design_ids: - get_design(design_id) - processes, non_tech_processes = populate_processes( - non_tech_add, res, db_connection, market_values, design_id) - logger.debug('Fetched Processes and non-techproc') - # TODO Change to using BPMN AND move out of the for loop + for design in designs: + vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] + processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, + vd_values) + dsm = create_simple_dsm(processes) sim = des.Des() @@ -363,20 +367,24 @@ def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], design_group_ids: List[int]): - query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ - cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ - subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ - LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ - LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ - OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ - LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ - WHERE cvs_vcs_rows.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])}) \ - AND cvs_design_mi_formulas.design_group \ - IN ({",".join([str(dg) for dg in design_group_ids])}) ORDER BY `index`' - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query) - res = cursor.fetchall() - res = [dict(zip(cursor.column_names, row)) for row in res] + try: + query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ + cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ + subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ + LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ + LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ + OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ + LEFT OUTER JOIN cvs_design_mi_formulas ON 
cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ + WHERE cvs_vcs_rows.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])}) \ + AND cvs_design_mi_formulas.design_group \ + IN ({",".join([str(dg) for dg in design_group_ids])}) ORDER BY `index`' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + except Error as error: + logger.debug(f'Error msg: {error.msg}') + raise e.CouldNotFetchSimulationDataException return res @@ -396,17 +404,20 @@ def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): - query = f'SELECT cvs_value_drivers.id, design, name, value, unit, vcs_row \ - FROM cvs_vd_design_values \ - INNER JOIN cvs_value_drivers ON cvs_vd_design_values.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ - WHERE design IN ({",".join([str(design) for design in designs])})' - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query) - res = cursor.fetchall() - res = [dict(zip(cursor.column_names, row)) for row in res] - + try: + query = f'SELECT cvs_value_drivers.id, design, name, value, unit, vcs_row \ + FROM cvs_vd_design_values \ + INNER JOIN cvs_value_drivers ON cvs_vd_design_values.value_driver = cvs_value_drivers.id \ + INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id \ + INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ + WHERE design IN ({",".join([str(design) for design in designs])})' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + except Error as error: + logger.debug(f'Error msg: {error.msg}') + raise e.CouldNotFetchValueDriverDesignValuesException return res @@ -502,16 +513,18 @@ def get_market_values(db_connection: PooledMySQLConnection, vcs: int): def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[int]): - - query = f'SELECT id, name, value, unit, vcs \ - FROM cvs_market_input_values \ - INNER JOIN cvs_market_inputs ON cvs_market_input_values.market_input = cvs_market_inputs.id \ - WHERE cvs_market_input_values.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])})' - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query) - res = cursor.fetchall() - res = [dict(zip(cursor.column_names, row)) for row in res] - + try: + query = f'SELECT id, name, value, unit, vcs \ + FROM cvs_market_input_values \ + INNER JOIN cvs_market_inputs ON cvs_market_input_values.market_input = cvs_market_inputs.id \ + WHERE cvs_market_input_values.vcs IN ({",".join([str(vcs) for vcs in vcs_ids])})' + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query) + res = cursor.fetchall() + res = [dict(zip(cursor.column_names, row)) for row in res] + except Error as error: + logger.debug(f'Error msg: {error.msg}') + raise e.CouldNotFetchMarketInputValuesException return res diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index abbda075..d1189967 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ 
b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -235,7 +235,6 @@ def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): #Assert assert res.status_code == 400 - assert res.json() == {'detail': 'Wrong order of rate of entities. Per project assigned after per product'} #RateWrongOrderException #Cleanup diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 43d288ef..464b5ca8 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -72,7 +72,6 @@ def test_run_sim_invalid_designs(client, std_headers, std_user): #Assert assert res.status_code == 400 - # assert res.json() == {'detail': 'Could not find design'} #The error from get_design() in design.implementation #Cleanup tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) From 0d1489b3ca23c1785c05ebfacb7bf2afe7ed7bd7 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 13 Jun 2023 11:41:04 +0200 Subject: [PATCH 030/210] fixed failing multiprocessing test --- sedbackend/apps/cvs/design/models.py | 1 - sedbackend/apps/cvs/market_input/models.py | 8 -------- sedbackend/apps/cvs/simulation/implementation.py | 12 +++++++++++- sedbackend/apps/cvs/simulation/storage.py | 10 ---------- tests/apps/cvs/projects/test_projects.py | 2 +- 5 files changed, 12 insertions(+), 21 deletions(-) diff --git a/sedbackend/apps/cvs/design/models.py b/sedbackend/apps/cvs/design/models.py index c0397c13..b0fc4855 100644 --- a/sedbackend/apps/cvs/design/models.py +++ b/sedbackend/apps/cvs/design/models.py @@ -36,7 +36,6 @@ def __eq__(self, other: Any) -> bool: return self.vd_id == other.vd_id - class Design(BaseModel): id: int name: str diff --git a/sedbackend/apps/cvs/market_input/models.py b/sedbackend/apps/cvs/market_input/models.py index f12f7817..866edb1c 100644 --- a/sedbackend/apps/cvs/market_input/models.py +++ b/sedbackend/apps/cvs/market_input/models.py @@ -21,11 +21,3 @@ class MarketInputValue(BaseModel): vcs_id: int market_input_id: int value: float - - -class MarketInputValueSim(BaseModel): - vcs_id: int - market_input_id: int - value: float - name: str - unit: str diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index f016727e..3eaf785f 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -15,7 +15,7 @@ DesignIdsNotFoundException, FormulaEvalException, NegativeTimeException, ProcessNotFoundException, \ RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ SimSettingsNotFoundException, CouldNotFetchSimulationDataException, CouldNotFetchMarketInputValuesException, \ - CouldNotFetchValueDriverDesignValuesException + CouldNotFetchValueDriverDesignValuesException, NoTechnicalProcessException from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions @@ -97,6 +97,11 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not fetch value driver design values' ) + except NoTechnicalProcessException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'No technical processes found' + ) def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, @@ -213,6 +218,11 @@ def run_sim_monte_carlo(sim_settings: 
models.EditSimSettings, vcs_ids: List[int] status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not fetch value driver design values' ) + except NoTechnicalProcessException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'No technical processes found' + ) def get_sim_settings(project_id: int) -> models.SimSettings: diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index bfc158e9..806a2b8c 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -528,16 +528,6 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in return res -def populate_market_input_values(res) -> mi_models.MarketInputValueSim: - return mi_models.MarketInputValueSim( - vcs_id=res['vcs'], - market_input_id=res['market_input'], - value=res['value'], - name=res['name'], - unit=res['unit'] - ) - - def parse_formula(formula: str, vd_values, mi_values) -> str: new_formula = formula vd_names = expr.get_prefix_variables('VD', new_formula) diff --git a/tests/apps/cvs/projects/test_projects.py b/tests/apps/cvs/projects/test_projects.py index 3477966f..dee97379 100644 --- a/tests/apps/cvs/projects/test_projects.py +++ b/tests/apps/cvs/projects/test_projects.py @@ -91,7 +91,7 @@ def test_create_too_long_name_project(client, std_headers): json={ "name": testutils.random_str(255, 300), "description": testutils.random_str(20, 200), - "currency": testutils.random_str(0,10) + "currency": testutils.random_str(0, 10) }) #Assert From cc9bb67c0b42b03232662cfaaefc7cb7424c8183 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 29 Jun 2023 14:13:11 +0200 Subject: [PATCH 031/210] db update --- sedbackend/apps/core/db.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e97a10bd..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,12 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 - +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 5cfda817310815a2ee7c51fb994e710e146e3771 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 29 Jun 2023 14:17:31 +0200 Subject: [PATCH 032/210] requirements update --- requirements.txt | 2 +- sedbackend/apps/core/db.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a3a6b05b..98517043 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,6 @@ starlette==0.26.1 uvicorn==0.21.1 openpyxl==3.1.2 mysql-statement-builder==0.* -python-magic-bin==0.4.14 +python-magic==0.4.27 pytest==7.3.1 httpx==0.24.0 \ No newline at end of file diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..cc147989 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -14,6 +14,7 @@ database = 'seddb' port = 3306 + try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( user=user, From eaeb402e1e36246b362e4d8709ad0f3327745963 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 10:39:37 +0200 Subject: [PATCH 033/210] delete dsm before saving --- sedbackend/apps/core/db.py | 1 - .../apps/cvs/life_cycle/implementation.py | 8 ++++---- sedbackend/apps/cvs/life_cycle/router.py | 7 ++++--- sedbackend/apps/cvs/life_cycle/storage.py | 20 +++++++++++++------ 
tests/apps/cvs/testutils.py | 2 +- 5 files changed, 23 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index cc147989..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -14,7 +14,6 @@ database = 'seddb' port = 3306 - try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( user=user, diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 038e1706..3deebf10 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -158,10 +158,10 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: def save_dsm_file(project_id: int, vcs_id: int, - file: file_models.StoredFilePost) -> bool: + file: file_models.StoredFilePost, user_id: int) -> bool: try: with get_connection() as con: - result = storage.save_dsm_file(con, project_id, vcs_id, file) + result = storage.save_dsm_file(con, project_id, vcs_id, file, user_id) con.commit() return result except exceptions.InvalidFileTypeException: @@ -181,10 +181,10 @@ def save_dsm_file(project_id: int, vcs_id: int, ) -def get_dsm_file_id(project_id: int, vcs_id: int, user_id: int) -> int: +def get_dsm_file_id(project_id: int, vcs_id: int) -> int: try: with get_connection() as con: - res = storage.get_dsm_file_id(con, project_id, vcs_id, user_id) + res = storage.get_dsm_file_id(con, project_id, vcs_id) con.commit() return res except exceptions.FileNotFoundException as e: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 3da781a7..27e71d04 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -63,6 +63,7 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) return implementation.update_bpmn(native_project_id, vcs_id, bpmn) +# TODO only call one implementation function @router.post( '/project/{native_project_id}/vcs/{vcs_id}/upload-dsm', summary="Upload DSM file", @@ -73,7 +74,7 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, subproject = impl_get_subproject_native(CVS_APP_SID, native_project_id) print(subproject) model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id, subproject.id) - return implementation.save_dsm_file(native_project_id, vcs_id, model_file) + return implementation.save_dsm_file(native_project_id, vcs_id, model_file, user.id) @router.get( @@ -82,6 +83,6 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, response_model=int, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_dsm_file(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> int: - return implementation.get_dsm_file_id(native_project_id, vcs_id, user.id) +async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: + return implementation.get_dsm_file_id(native_project_id, vcs_id) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 7f7ccd55..0f2494c7 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,11 +1,10 @@ from fastapi.logger import logger -from fastapi.responses import FileResponse from mysql.connector.pooling import PooledMySQLConnection from mysqlsb import MySQLStatementBuilder, FetchType, Sort from 
sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, implementation as vcs_impl -from sedbackend.apps.core.files import models as file_models, implementation as file_impl +from sedbackend.apps.core.files import models as file_models, storage as file_storage from mysql.connector import Error import magic import pandas as pd @@ -274,11 +273,18 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, - vcs_id: int, file: file_models.StoredFilePost) -> bool: + vcs_id: int, file: file_models.StoredFilePost, user_id) -> bool: if file.extension != ".csv": raise exceptions.InvalidFileTypeException + try: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + if file_id is not None: + file_storage.db_delete_file(db_connection, file_id, user_id) + except exceptions.FileNotFoundException: + pass + with file.file_object as f: f.seek(0) tmp_file = f.read() @@ -294,7 +300,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, f.seek(0) dsm_file = pd.read_csv(f) - vcs_table = vcs_impl.get_vcs_table(project_id, vcs_id) + vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) vcs_processes = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in vcs_table] @@ -304,7 +310,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, raise exceptions.ProcessesVcsMatchException f.seek(0) - stored_file = file_impl.impl_save_file(file) + stored_file = file_storage.db_save_file(db_connection, file) insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ @@ -314,7 +320,9 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, return True -def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> int: +def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> int: + + vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index df56ce95..9da7755d 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -349,7 +349,7 @@ def delete_multiple_bpmn_nodes(nodes, project_id, vcs_id, user_id): def delete_dsm_file_from_vcs_id(proj_id, vcs_id, user_id): - file_id = impl_life_cycle.get_dsm_file_id(proj_id, vcs_id, user_id) + file_id = impl_life_cycle.get_dsm_file_id(proj_id, vcs_id) impl_files.impl_delete_file(file_id, user_id) # ====================================================================================================================== From 324ce088809f83f48701a6cad191e185892316e7 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 10:50:43 +0200 Subject: [PATCH 034/210] removed overuse of impl functions --- .../apps/cvs/life_cycle/implementation.py | 20 +++++++++++++++---- sedbackend/apps/cvs/life_cycle/router.py | 8 +++----- sedbackend/apps/cvs/life_cycle/storage.py | 18 +++++++++++------ 3 files changed, 31 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 
3deebf10..d93e9f2b 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -1,12 +1,14 @@ -from fastapi import HTTPException +from fastapi import HTTPException, UploadFile from starlette import status +from sedbackend.apps.core.applications.exceptions import ApplicationNotFoundException from sedbackend.apps.core.authentication import exceptions as auth_ex from sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions from sedbackend.apps.core.files import models as file_models +import sedbackend.apps.core.projects.exceptions as core_project_exceptions def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: try: @@ -157,11 +159,11 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: ) -def save_dsm_file(project_id: int, vcs_id: int, - file: file_models.StoredFilePost, user_id: int) -> bool: +def save_dsm_file(application_sid: str, project_id: int, vcs_id: int, + file: UploadFile, user_id: int) -> bool: try: with get_connection() as con: - result = storage.save_dsm_file(con, project_id, vcs_id, file, user_id) + result = storage.save_dsm_file(con, application_sid, project_id, vcs_id, file, user_id) con.commit() return result except exceptions.InvalidFileTypeException: @@ -179,6 +181,16 @@ def save_dsm_file(project_id: int, vcs_id: int, status_code=status.HTTP_400_BAD_REQUEST, detail='Processes in DSM do not match processes in VCS' ) + except core_project_exceptions.SubProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sub-project not found." + ) + except ApplicationNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such application." 
+ ) def get_dsm_file_id(project_id: int, vcs_id: int) -> int: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 27e71d04..e8e9bc25 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -70,11 +70,9 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: - subproject = impl_get_subproject_native(CVS_APP_SID, native_project_id) - print(subproject) - model_file = file_models.StoredFilePost.import_fastapi_file(file, user.id, subproject.id) - return implementation.save_dsm_file(native_project_id, vcs_id, model_file, user.id) +async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, + user: User = Depends(get_current_active_user)) -> bool: + return implementation.save_dsm_file(CVS_APP_SID, native_project_id, vcs_id, file, user.id) @router.get( diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 0f2494c7..0c1cf5c5 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,10 +1,12 @@ +from fastapi import UploadFile from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.life_cycle import exceptions, models -from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, implementation as vcs_impl +from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions from sedbackend.apps.core.files import models as file_models, storage as file_storage +from sedbackend.apps.core.projects import storage as core_project_storage from mysql.connector import Error import magic import pandas as pd @@ -272,10 +274,14 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i return True -def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, - vcs_id: int, file: file_models.StoredFilePost, user_id) -> bool: +def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project_id: int, + vcs_id: int, file: UploadFile, user_id) -> bool: - if file.extension != ".csv": + subproject = core_project_storage.db_get_subproject_native(db_connection, application_sid, project_id) + + model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) + + if model_file.extension != ".csv": raise exceptions.InvalidFileTypeException try: @@ -285,7 +291,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, except exceptions.FileNotFoundException: pass - with file.file_object as f: + with model_file.file_object as f: f.seek(0) tmp_file = f.read() mime = magic.from_buffer(tmp_file) @@ -310,7 +316,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, raise exceptions.ProcessesVcsMatchException f.seek(0) - stored_file = file_storage.db_save_file(db_connection, file) + stored_file = file_storage.db_save_file(db_connection, model_file) insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ From c44487ed8176fab31449aa5e9a21d4d784f5aaa9 Mon Sep 17 00:00:00 2001 From: Oscar 
Bennet Date: Fri, 30 Jun 2023 11:58:10 +0200 Subject: [PATCH 035/210] changed format of dsm file --- sedbackend/apps/cvs/life_cycle/storage.py | 15 ++++++++++++--- tests/apps/cvs/life_cycle/files/input.csv | 8 +++++--- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 0c1cf5c5..f50801ce 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -289,13 +289,21 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project if file_id is not None: file_storage.db_delete_file(db_connection, file_id, user_id) except exceptions.FileNotFoundException: - pass + pass # File doesn't exist, so we don't need to delete it + except Exception: + try: + # File does not exist in persistent storage but exists in database + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ + .where('vcs_id = %s', [vcs_id]) \ + .execute(return_affected_rows=True) + except: + pass with model_file.file_object as f: f.seek(0) tmp_file = f.read() mime = magic.from_buffer(tmp_file) - print(mime) logger.debug(mime) # TODO doesn't work with windows if we create the file in excel. if mime != "CSV text" and mime != "ASCII text": @@ -306,12 +314,13 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project f.seek(0) dsm_file = pd.read_csv(f) + logger.debug(f'File content: {dsm_file}') vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) vcs_processes = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in vcs_table] - for process in dsm_file['processes'].values: + for process in dsm_file['Processes'].values[1:-1]: if process not in vcs_processes: raise exceptions.ProcessesVcsMatchException diff --git a/tests/apps/cvs/life_cycle/files/input.csv b/tests/apps/cvs/life_cycle/files/input.csv index c3070499..56b973fd 100644 --- a/tests/apps/cvs/life_cycle/files/input.csv +++ b/tests/apps/cvs/life_cycle/files/input.csv @@ -1,3 +1,5 @@ -processes,"Architectural design","Verification" -"Architectural design",0, 1 -"Verification",0, 0 +Processes, "Start", "Architectural design","Verification","End" +"Start", "X", 1, 0, 0 +"Architectural design", 0, "X", 1, 0 +"Verification", 0, 0, "X", 1 +"End", 0, 0, 0, "X" \ No newline at end of file From 1da52c9d17ec8678b460b260f86109139db15614 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 14:08:33 +0200 Subject: [PATCH 036/210] delete dsm file when deleting vcs --- sedbackend/apps/cvs/vcs/implementation.py | 15 +++++++++++-- sedbackend/apps/cvs/vcs/router.py | 4 ++-- sedbackend/apps/cvs/vcs/storage.py | 12 +++++++--- .../test_connect_vcs_design.py | 20 ++++++++--------- tests/apps/cvs/life_cycle/test_dsm_files.py | 8 +++---- .../simulation/test_sim_multiprocessing.py | 14 ++++++------ tests/apps/cvs/simulation/test_simulation.py | 22 +++++++++---------- tests/apps/cvs/testutils.py | 12 ++-------- 8 files changed, 58 insertions(+), 49 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 03da646c..a0d22e7c 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -9,6 +9,7 @@ import sedbackend.apps.cvs.project.exceptions as project_exceptions from sedbackend.apps.cvs.vcs import models, storage, exceptions from sedbackend.libs.datastructures.pagination import 
ListChunk +from sedbackend.apps.core.files import exceptions as file_ex # ====================================================================================================================== @@ -108,10 +109,10 @@ def edit_vcs(project_id: int, vcs_id: int, vcs_post: models.VCSPost) -> models.V ) -def delete_vcs(project_id: int, vcs_id: int) -> bool: +def delete_vcs(user_id: int, project_id: int, vcs_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_vcs(con, project_id, vcs_id) + res = storage.delete_vcs(con, user_id, project_id, vcs_id) con.commit() return res except exceptions.VCSNotFoundException: @@ -134,6 +135,16 @@ def delete_vcs(project_id: int, vcs_id: int) -> bool: status_code=status.HTTP_400_BAD_REQUEST, detail=f'VCS with id={vcs_id} does not belong to project with id={project_id}.', ) + except file_ex.FileNotDeletedException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"File could not be deleted" + ) + except file_ex.PathMismatchException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Path to file does not match internal path' + ) # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index e90afd93..8449a93b 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -64,8 +64,8 @@ async def edit_vcs(native_project_id: int, vcs_id: int, vcs_post: models.VCSPost response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def delete_vcs(native_project_id: int, vcs_id: int) -> bool: - return implementation.delete_vcs(native_project_id, vcs_id) +async def delete_vcs(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> bool: + return implementation.delete_vcs(user.id, native_project_id, vcs_id) # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 07a8043d..d66fc174 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -5,8 +5,10 @@ from sedbackend.apps.cvs.project import exceptions as project_exceptions from sedbackend.apps.cvs.project.storage import get_cvs_project from sedbackend.apps.cvs.vcs import models, exceptions -from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models +from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models, \ + exceptions as life_cycle_exceptions from sedbackend.libs.datastructures.pagination import ListChunk +from sedbackend.apps.core.files import storage as file_storage from mysqlsb import MySQLStatementBuilder, Sort, FetchType DEBUG_ERROR_HANDLING = True # Set to false in production @@ -130,10 +132,14 @@ def edit_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, return get_vcs(db_connection, project_id, vcs_id) -def delete_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> bool: +def delete_vcs(db_connection: PooledMySQLConnection, user_id: int, project_id: int, vcs_id: int) -> bool: logger.debug(f'Deleting VCS with id={vcs_id}.') - get_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project + try: + dsm_file_id = 
life_cycle_storage.get_dsm_file_id(db_connection, project_id, vcs_id) + file_storage.db_delete_file(db_connection, dsm_file_id, user_id) + except life_cycle_exceptions.FileNotFoundException: # DSM file does not exist + pass delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement.delete(CVS_VCS_TABLE) \ diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index ed37c45f..3715a02c 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -47,7 +47,7 @@ def test_create_formulas(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -87,7 +87,7 @@ def test_create_formulas_no_optional(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -112,7 +112,7 @@ def test_get_all_formulas(client, std_headers, std_user): # Cleanup tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -229,7 +229,7 @@ def test_edit_formulas(client, std_headers, std_user): # Cleanup tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -269,7 +269,7 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): # Cleanup tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -309,7 +309,7 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -348,7 +348,7 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -388,7 +388,7 @@ def test_edit_formulas_invalid_project(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - 
tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -413,7 +413,7 @@ def test_delete_formulas(client, std_headers, std_user): # Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -513,7 +513,7 @@ def test_get_vcs_dg_pairs(client, std_headers, std_user): # Cleanup tu.delete_formulas(project.id, [(formula[0].vcs_row_id, formula[0].design_group_id) for formula in formulas]) [tu.delete_design_group(project.id, design_group.id) for design_group in dgs] - tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 65873471..85eba76e 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -48,7 +48,7 @@ def test_upload_dsm_file(client, std_headers, std_user): #Cleanup tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) @@ -109,7 +109,7 @@ def test_upload_invalid_file_extension(client, std_headers, std_user): #Cleanup - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -154,7 +154,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): assert res.status_code == 400 #Bad request, should throw ProcessesVcsMatchException #Cleanup - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) @@ -202,5 +202,5 @@ def test_get_dsm_file_id(client, std_headers, std_user): #Cleanup tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) \ No newline at end of file diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index ea019fbb..5c8a090c 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -28,7 +28,7 @@ def test_run_single_monte_carlo_sim(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -72,7 +72,7 @@ def test_run_mc_sim_invalid_designs(client, std_headers, std_user): assert res.status_code == 400 #Cleanup - tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) 
tu.delete_vd_from_user(current_user.id) @@ -122,7 +122,7 @@ def test_run_mc_sim_invalid_vcss(client, std_headers, std_user): for dg in dgs: tu.delete_design_group(project.id, dg.id) - tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -150,7 +150,7 @@ def test_run_mc_sim_end_time_before_start_time(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -179,7 +179,7 @@ def test_run_mc_sim_no_flows(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -208,7 +208,7 @@ def test_run_mc_sim_both_flows(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -238,6 +238,6 @@ def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 839d44da..aa8ec6bc 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -28,7 +28,7 @@ def test_run_single_simulation(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) @@ -72,7 +72,7 @@ def test_run_sim_invalid_designs(client, std_headers, std_user): assert res.status_code == 400 #Cleanup - tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -122,7 +122,7 @@ def test_run_sim_invalid_vcss(client, std_headers, std_user): for dg in dgs: tu.delete_design_group(project.id, dg.id) - tu.delete_VCS_with_ids(project.id, [vcs.id for vcs in vcss]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -151,7 +151,7 @@ def test_run_sim_end_time_before_start_time(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -179,7 +179,7 @@ def test_run_sim_flow_time_above_total_time(client, 
std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -208,7 +208,7 @@ def test_run_sim_no_flows(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id,[vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id,[vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -237,7 +237,7 @@ def test_run_sim_both_flows(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -267,7 +267,7 @@ def test_run_sim_rate_invalid_order(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -299,7 +299,7 @@ def test_run_sim_invalid_proj(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -384,7 +384,7 @@ def test_run_single_xlsx_sim(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -562,7 +562,7 @@ def test_run_single_csv_sim(client, std_headers, std_user): #Cleanup tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, [vcs.id]) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index e132fe72..4d498e0d 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -89,17 +89,9 @@ def seed_random_vcs(project_id): return new_vcs -def delete_VCSs(project_id: int, vcs_list: List[sedbackend.apps.cvs.vcs.models.VCS]): - id_list = [] - for vcs in vcs_list: - id_list.append(vcs.id) - - delete_VCS_with_ids(project_id, id_list) - - -def delete_VCS_with_ids(project_id: int, vcs_id_list: List[int]): +def delete_VCS_with_ids(user_id: int, project_id: int, vcs_id_list: List[int]): for vcsid in vcs_id_list: - vcs_impl.delete_vcs(project_id, vcsid) + vcs_impl.delete_vcs(user_id, project_id, vcsid) def random_value_driver(name: str = None, unit: str = None): From 8e8f0fc6c5e7ef59af57630ec5cec3068dc64a7c Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 16:19:03 +0200 Subject: [PATCH 037/210] added api call for download dsm --- sedbackend/apps/cvs/life_cycle/exceptions.py | 6 ---- .../apps/cvs/life_cycle/implementation.py | 23 ++++++++++-- sedbackend/apps/cvs/life_cycle/router.py | 23 +++++++++--- sedbackend/apps/cvs/life_cycle/storage.py | 36 
++++++++++++++----- sedbackend/apps/cvs/simulation/storage.py | 9 +++-- tests/apps/cvs/life_cycle/test_dsm_files.py | 10 +++--- 6 files changed, 79 insertions(+), 28 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index 482a415a..322cf9cc 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -28,9 +28,3 @@ class FileSizeException(Exception): class ProcessesVcsMatchException(Exception): pass - - -class FileNotFoundException(Exception): - def __init__(self, vcs_id: int = None): - self.vcs_id = vcs_id - diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index d93e9f2b..706376d1 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -7,7 +7,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.core.files import models as file_models +from sedbackend.apps.core.files import models as file_models, exceptions as file_ex import sedbackend.apps.core.projects.exceptions as core_project_exceptions def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: @@ -199,9 +199,26 @@ def get_dsm_file_id(project_id: int, vcs_id: int) -> int: res = storage.get_dsm_file_id(con, project_id, vcs_id) con.commit() return res - except exceptions.FileNotFoundException as e: + except file_ex.FileNotFoundException as e: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f"File for vcs with id {e.vcs_id} could not be found" + detail=f"File could not be found" ) + +def get_dsm_file_path(project_id: int, vcs_id: int, user_id) -> file_models.StoredFilePath: + try: + with get_connection() as con: + res = storage.get_dsm_file_path(con, project_id, vcs_id, user_id) + con.commit() + return res + except file_ex.FileNotFoundException as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"File could not be found" + ) + except auth_ex.UnauthorizedOperationException: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"User does not have access to the file" + ) diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index e8e9bc25..244f9e7c 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -5,10 +5,10 @@ from sedbackend.apps.core.users.models import User from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel -from sedbackend.apps.core.projects.implementation import impl_get_subproject_native from sedbackend.apps.cvs.life_cycle import models, implementation from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.core.files import models as file_models +from fastapi.responses import FileResponse router = APIRouter() @@ -65,7 +65,7 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) # TODO only call one implementation function @router.post( - '/project/{native_project_id}/vcs/{vcs_id}/upload-dsm', + '/project/{native_project_id}/vcs/{vcs_id}/dsm', summary="Upload DSM file", response_model=bool, 
dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] @@ -74,9 +74,9 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: return implementation.save_dsm_file(CVS_APP_SID, native_project_id, vcs_id, file, user.id) - + @router.get( - '/project/{native_project_id}/vcs/{vcs_id}/get-dsm-id', + '/project/{native_project_id}/vcs/{vcs_id}/dsm/id', summary="Fetch DSM file id", response_model=int, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] @@ -84,3 +84,18 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: return implementation.get_dsm_file_id(native_project_id, vcs_id) + +@router.get( + '/project/{native_project_id}/vcs/{vcs_id}/dsm/download', + summary="Fetch DSM file", + response_class=FileResponse, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def download_dsm_file(native_project_id: int, vcs_id: int, + user: User = Depends(get_current_active_user)) -> FileResponse: + stored_file_path = implementation.get_dsm_file_path(native_project_id, vcs_id, user.id) + resp = FileResponse( + path=stored_file_path.path, + filename=stored_file_path.filename + ) + return resp diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index f50801ce..64102444 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,11 +1,15 @@ +from typing import List + from fastapi import UploadFile from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection from mysqlsb import MySQLStatementBuilder, FetchType, Sort + +from sedbackend.apps.core.files.models import StoredFilePath from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions -from sedbackend.apps.core.files import models as file_models, storage as file_storage +from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex from sedbackend.apps.core.projects import storage as core_project_storage from mysql.connector import Error import magic @@ -23,7 +27,7 @@ CVS_DSM_FILES_TABLE = 'cvs_dsm_files' CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] -MAX_FILE_SIZE = 100*10**6 # 100MB +MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB def populate_process_node(db_connection, project_id, result) -> models.ProcessNodeGet: @@ -276,7 +280,6 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project_id: int, vcs_id: int, file: UploadFile, user_id) -> bool: - subproject = core_project_storage.db_get_subproject_native(db_connection, application_sid, project_id) model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) @@ -288,7 +291,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project file_id = get_dsm_file_id(db_connection, project_id, vcs_id) if file_id is not None: file_storage.db_delete_file(db_connection, file_id, user_id) - except exceptions.FileNotFoundException: + except file_ex.FileNotFoundException: pass # File doesn't exist, so we don't need to delete it except Exception: try: @@ -329,14 +332,13 @@ def save_dsm_file(db_connection: 
PooledMySQLConnection, application_sid, project insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .set_values([vcs_id, stored_file.id])\ + .set_values([vcs_id, stored_file.id]) \ .execute(fetch_type=FetchType.FETCH_NONE) return True def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> int: - vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id select_statement = MySQLStatementBuilder(db_connection) @@ -344,7 +346,25 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i .where('vcs_id = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - if file_res == None: - raise exceptions.FileNotFoundException + if file_res is None: + raise file_ex.FileNotFoundException return file_res['file_id'] + + +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> int: + select_statement = MySQLStatementBuilder(db_connection) + file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ + .where(",".join(["%s" for _ in range(len(vcs_ids))]), vcs_ids) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + if file_res is None: + raise file_ex.FileNotFoundException + + return file_res['file_id'] + + +def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> StoredFilePath: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + return file_storage.db_get_file_path(db_connection, file_id, user_id) + diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index dfe4f4d9..e742e702 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -23,6 +23,9 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage +from sedbackend.apps.cvs.design import storage as design_storage +from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage +from sedbackend.apps.core.files import storage as file_storage SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', @@ -62,7 +65,7 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project if not check_entity_rate(res, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_impl.get_all_designs(project_id, design_group_id)] + design_ids = [design.id for design in design_storage.get_all_designs(db_connection, project_id, design_group_id)] if design_ids is None or []: raise e.DesignIdsNotFoundException @@ -208,7 +211,9 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - for vcs_id in vcs_ids: + # all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) + + for i, vcs_id in enumerate(vcs_ids): market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 65873471..533971f4 100644 --- 
a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -39,7 +39,7 @@ def test_upload_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -100,7 +100,7 @@ def test_upload_invalid_file_extension(client, std_headers, std_user): _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -146,7 +146,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -190,11 +190,11 @@ def test_get_dsm_file_id(client, std_headers, std_user): #Act - client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/get-dsm-id', + res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/id', headers=std_headers) #Assert From 7f8fa124063e479ff77e583e4096d3973e238b05 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 16:55:41 +0200 Subject: [PATCH 038/210] changed db column names for dsm and fixed get_multiple_dsm_file_id --- sedbackend/apps/cvs/life_cycle/storage.py | 18 +++++++++--------- .../apps/cvs/simulation/implementation.py | 7 +++++++ sedbackend/apps/cvs/simulation/storage.py | 2 +- sql/V230529_cvs_dsm_files.sql | 10 +++++----- 4 files changed, 22 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 64102444..76ad7438 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -25,7 +25,7 @@ CVS_START_STOP_NODES_COLUMNS = CVS_NODES_COLUMNS + ['type'] CVS_DSM_FILES_TABLE = 'cvs_dsm_files' -CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] +CVS_DSM_FILES_COLUMNS = ['vcs', 'file'] MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB @@ -349,19 +349,19 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i if file_res is None: raise file_ex.FileNotFoundException - return file_res['file_id'] + return file_res['file'] -def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> int: +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[int]: + where_statement = "vcs IN ("+",".join(["%s" for _ in range(len(vcs_ids))])+")" + logger.debug(f'where_statement: {where_statement}') + select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where(",".join(["%s" for _ in range(len(vcs_ids))]), vcs_ids) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - if file_res is None: - raise file_ex.FileNotFoundException + .where(where_statement, vcs_ids) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return file_res['file_id'] + return [file['file'] for file in file_res] def get_dsm_file_path(db_connection: PooledMySQLConnection, 
project_id: int, vcs_id: int, user_id) -> StoredFilePath: diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 3eaf785f..e4990be8 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -19,6 +19,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions +from sedbackend.apps.core.files import exceptions as file_ex def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], @@ -102,6 +103,12 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], status_code=status.HTTP_400_BAD_REQUEST, detail=f'No technical processes found' ) + except file_ex.FileNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f'Could not find DSM file' + ) + def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index e742e702..f5463157 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -211,7 +211,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - # all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) + all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) for i, vcs_id in enumerate(vcs_ids): market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] diff --git a/sql/V230529_cvs_dsm_files.sql b/sql/V230529_cvs_dsm_files.sql index 46f6a674..bbd07694 100644 --- a/sql/V230529_cvs_dsm_files.sql +++ b/sql/V230529_cvs_dsm_files.sql @@ -1,12 +1,12 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` ( - `vcs_id` INT UNSIGNED NOT NULL, - `file_id` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_id`), - FOREIGN KEY (`vcs_id`) + `vcs` INT UNSIGNED NOT NULL, + `file` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs`), + FOREIGN KEY (`vcs`) REFERENCES `seddb`.`cvs_vcss`(`id`) ON DELETE CASCADE, - FOREIGN KEY(`file_id`) + FOREIGN KEY(`file`) REFERENCES `seddb`.`files`(`id`) ON DELETE CASCADE ); \ No newline at end of file From 476fd1835859ebf0d702abb106d7974119c6ef4f Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 1 Jul 2023 10:52:48 +0200 Subject: [PATCH 039/210] changed format of dsm --- sedbackend/apps/core/db.py | 6 ++-- sedbackend/apps/cvs/life_cycle/exceptions.py | 4 +++ .../apps/cvs/life_cycle/implementation.py | 5 +++ sedbackend/apps/cvs/life_cycle/router.py | 2 +- sedbackend/apps/cvs/life_cycle/storage.py | 29 ++++++++++++---- .../apps/cvs/simulation/implementation.py | 4 +-- sedbackend/apps/cvs/simulation/router.py | 6 ++-- sedbackend/apps/cvs/simulation/storage.py | 34 ++++++++++++++----- 8 files changed, 67 insertions(+), 23 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..350b2d37 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +# host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +# port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py 
b/sedbackend/apps/cvs/life_cycle/exceptions.py index 322cf9cc..a04968f3 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -28,3 +28,7 @@ class FileSizeException(Exception): class ProcessesVcsMatchException(Exception): pass + + +class DSMFileFailedDeletionException(Exception): + pass diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 706376d1..616c9b61 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -191,6 +191,11 @@ def save_dsm_file(application_sid: str, project_id: int, vcs_id: int, status_code=status.HTTP_404_NOT_FOUND, detail="No such application." ) + except exceptions.DSMFileFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to replace old DSM file." + ) def get_dsm_file_id(project_id: int, vcs_id: int) -> int: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 244f9e7c..99377637 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -87,7 +87,7 @@ async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: @router.get( '/project/{native_project_id}/vcs/{vcs_id}/dsm/download', - summary="Fetch DSM file", + summary="Download DSM file", response_class=FileResponse, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 76ad7438..c50dd2df 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Tuple, Optional from fastapi import UploadFile from fastapi.logger import logger @@ -290,7 +290,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project try: file_id = get_dsm_file_id(db_connection, project_id, vcs_id) if file_id is not None: - file_storage.db_delete_file(db_connection, file_id, user_id) + delete_dsm_file(db_connection, project_id, vcs_id, file_id, user_id) except file_ex.FileNotFoundException: pass # File doesn't exist, so we don't need to delete it except Exception: @@ -298,7 +298,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project # File does not exist in persistent storage but exists in database delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ - .where('vcs_id = %s', [vcs_id]) \ + .where('vcs = %s', [vcs_id]) \ .execute(return_affected_rows=True) except: pass @@ -343,7 +343,7 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where('vcs_id = %s', [vcs_id]) \ + .where('vcs = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) if file_res is None: @@ -352,7 +352,7 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i return file_res['file'] -def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[int]: +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[Tuple[int, int]]: where_statement = "vcs IN ("+",".join(["%s" for _ in 
range(len(vcs_ids))])+")" logger.debug(f'where_statement: {where_statement}') @@ -361,10 +361,27 @@ def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List .where(where_statement, vcs_ids) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return [file['file'] for file in file_res] + return [(file['vcs'], file['file']) for file in file_res] def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> StoredFilePath: file_id = get_dsm_file_id(db_connection, project_id, vcs_id) return file_storage.db_get_file_path(db_connection, file_id, user_id) + +def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, + file_id: Optional[int], user_id: int) -> bool: + + if file_id is None: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + file_storage.db_delete_file(db_connection, file_id, user_id) + + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ + .where('vcs = %s', [vcs_id]) \ + .execute(return_affected_rows=True) + + if rows == 0: + raise exceptions.DSMFileFailedDeletionException + + return True diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index e4990be8..2e18d486 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -23,10 +23,10 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids) + result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 74dbf9d1..27bb4d76 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -16,9 +16,9 @@ response_model=List[models.Simulation], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids) +async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], + user: User = Depends(get_current_active_user)) -> List[models.Simulation]: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id) # Temporary disabled ''' diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index f5463157..f9bbba5a 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -190,7 +190,7 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: + design_group_ids: List[int], user_id) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -213,8 +213,12 @@ def run_simulation(db_connection: 
PooledMySQLConnection, sim_settings: models.Ed all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) - for i, vcs_id in enumerate(vcs_ids): + for vcs_id in vcs_ids: market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] + dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] + dsm = None + if len(dsm_id) > 0: + dsm = get_dsm_from_file_id(db_connection, dsm_id[0][1], user_id) for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] if sim_data is None or sim_data == []: @@ -233,7 +237,10 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, vd_values) - dsm = create_simple_dsm(processes) # TODO Change to using BPMN + if dsm is None: + dsm = create_simple_dsm(processes) + + logger.debug(f'DSM: {dsm}') sim = des.Des() @@ -646,15 +653,19 @@ def check_sim_settings(settings: models.EditSimSettings) -> bool: # TODO Change dsm creation to follow BPMN and the nodes in the BPMN. -# Currently the DSM only goes from one process to the other following the order of the index in the VCS +# Create DSM that only goes from one process to the other following the order of the index in the VCS def create_simple_dsm(processes: List[Process]) -> dict: - l = len(processes) - - index_list = list(range(0, l)) + n = len(processes) + 2 # +2 for start and end dsm = dict() - for i, p in enumerate(processes): - dsm.update({p.name: [1 if i + 1 == j else 0 for j in index_list]}) + for i in range(n): + if i == 0: + name = "Start" + elif i == n - 1: + name = "End" + else: + name = processes[i-1].name + dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) return dsm @@ -671,6 +682,11 @@ def get_dsm_from_csv(path): return dsm +def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, user_id: int) -> dict: + path = file_storage.db_get_file_path(db_connection, file_id, user_id) + return get_dsm_from_csv(path.path) + + def get_dsm_from_excel(path): pf = pd.read_excel(path) From fbe5c261ddb481687de0cff7f044e94c8b02cd8b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 10:05:26 +0200 Subject: [PATCH 040/210] refactored monte carlo sim --- requirements.txt | 2 +- .../apps/cvs/simulation/implementation.py | 86 ++--------- sedbackend/apps/cvs/simulation/router.py | 5 +- sedbackend/apps/cvs/simulation/storage.py | 138 +++++------------- 4 files changed, 51 insertions(+), 180 deletions(-) diff --git a/requirements.txt b/requirements.txt index 98517043..803c6d3c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.3.3 +desim-tool==0.4.0 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 2e18d486..2fb167ab 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -1,8 +1,7 @@ -from fastapi import HTTPException, UploadFile, Depends, Form +from fastapi import HTTPException, UploadFile from starlette import status -import tempfile -from typing import List, Optional +from typing import List from fastapi.logger import logger from sedbackend.apps.cvs.simulation import models, storage @@ -10,7 +9,6 @@ from sedbackend.apps.core.authentication import exceptions as auth_ex from 
sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.cvs.design import exceptions as design_exc from sedbackend.apps.cvs.simulation.exceptions import BadlyFormattedSettingsException, DSMFileNotFoundException, \ DesignIdsNotFoundException, FormulaEvalException, NegativeTimeException, ProcessNotFoundException, \ RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ @@ -23,34 +21,31 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id: int) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int, is_monte_carlo: bool = False, + normalized_npv: bool = False) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id) + result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id, is_monte_carlo, + normalized_npv) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail='Unauthorized user.', ) - except vcs_exceptions.VCSNotFoundException: # This exception will probably never fire - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find vcs.', - ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find project.', ) except market_input_exceptions.MarketInputNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find market input', ) except ProcessNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find process', ) except FormulaEvalException as e: @@ -169,69 +164,6 @@ def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.Fi ) -def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: bool) -> List[models.Simulation]: - try: - with get_connection() as con: - result = storage.run_sim_monte_carlo(con, sim_settings, vcs_ids, design_group_ids, normalized_npv) - return result - except vcs_exceptions.GenericDatabaseException: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f'Fel' - ) - except FormulaEvalException as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not evaluate formulas of process with id: {e.process_id}' - ) - except RateWrongOrderException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Wrong order of rate of entities. 
Per project assigned after per product' - ) - except NegativeTimeException as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Formula at process with id: {e.process_id} evaluated to negative time' - ) - except DesignIdsNotFoundException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No design ids or empty array supplied' - ) - except VcsFailedException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Invalid vcs ids' - ) - except BadlyFormattedSettingsException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Settings are not correct' - ) - except CouldNotFetchSimulationDataException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch simulation data' - ) - except CouldNotFetchMarketInputValuesException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch market input values' - ) - except CouldNotFetchValueDriverDesignValuesException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch value driver design values' - ) - except NoTechnicalProcessException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No technical processes found' - ) - - def get_sim_settings(project_id: int) -> models.SimSettings: try: with get_connection() as con: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 27bb4d76..ac63c46b 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -47,8 +47,9 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil ) async def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: Optional[bool] = False) -> List[models.Simulation]: - return implementation.run_sim_monte_carlo(sim_settings, vcs_ids, design_group_ids, normalized_npv) + normalized_npv: Optional[bool] = False, + user: User = Depends(get_current_active_user)) -> List[models.Simulation]: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, True, normalized_npv) @router.get( diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index f9bbba5a..746298a7 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -41,9 +41,11 @@ 'minutes': TimeFormat.MINUTES }) -#TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. -def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_settings: models.EditSimSettings, - vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: + +# TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. 
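# A sketch for readability (not a line from the diffs): the monte-carlo route above now delegates
# to the shared run_simulation implementation, passing a bare positional True. With keyword
# arguments the same call reads as follows; the parameter names come from the new run_simulation
# signature introduced in this patch, the rest is assumed context.
return implementation.run_simulation(
    sim_settings,                   # models.EditSimSettings, later validated by check_sim_settings
    vcs_ids,                        # VCSs to simulate
    design_group_ids,               # design groups paired with each VCS
    user.id,                        # lets storage fetch the caller's stored DSM file
    is_monte_carlo=True,            # branch into des.Des().run_parallell_simulations
    normalized_npv=normalized_npv,  # report normalize_npv() instead of mean_npv()
)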
+def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, + sim_settings: models.EditSimSettings, + vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -65,7 +67,8 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project if not check_entity_rate(res, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_storage.get_all_designs(db_connection, project_id, design_group_id)] + design_ids = [design.id for design in + design_storage.get_all_designs(db_connection, project_id, design_group_id)] if design_ids is None or []: raise e.DesignIdsNotFoundException @@ -75,7 +78,7 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, design_id) # BUG probably. Populate processes changes the order of the processes. - dsm = {} #TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id + dsm = {} # TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id sim = des.Des() @@ -111,9 +114,10 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr if file_extension == '.xlsx': try: - tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support - tmp_xlsx.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_xlsx.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_xlsx.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_xlsx.seek( + 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 dsm = get_dsm_from_excel(tmp_xlsx) if dsm is None: @@ -124,9 +128,10 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr tmp_xlsx.close() elif file_extension == '.csv': try: - tmp_csv = tempfile.TemporaryFile() # Workaround because current python version doesn't support - tmp_csv.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_csv.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_csv = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_csv.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_csv.seek( + 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 # This should hopefully open up the file for the processor. 
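# The "Fetch DSM from file" TODO above is what the neighbouring commits wire up. A condensed
# sketch of that wiring, using the helpers introduced in patches 038-039 (get_multiple_dsm_file_id
# returns (vcs, file) pairs, get_dsm_from_file_id parses the stored CSV); this is a simplified
# summary for orientation, not a literal line from the diffs.
pairs = life_cycle_storage.get_multiple_dsm_file_id(db_connection, [vcs_id])
if pairs:
    dsm = get_dsm_from_file_id(db_connection, pairs[0][1], user_id)  # DSM file uploaded for this VCS
else:
    dsm = create_simple_dsm(processes)  # fall back to the sequential DSM when nothing was uploaded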
dsm = get_dsm_from_csv(tmp_csv) @@ -190,7 +195,8 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id) -> List[models.Simulation]: + design_group_ids: List[int], user_id, is_monte_carlo: bool = False, normalized_npv: bool = False + ) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -202,6 +208,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed discount_rate = sim_settings.discount_rate process = sim_settings.flow_process time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) + runs = sim_settings.runs all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) @@ -245,9 +252,15 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed sim = des.Des() try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) + if is_monte_carlo: + results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, + non_tech_processes, + non_tech_add, dsm, time_unit, discount_rate, runtime, + runs) + else: + results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, + non_tech_add, dsm, time_unit, + discount_rate, runtime) except Exception as exc: tb = sys.exc_info()[2] @@ -256,91 +269,16 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed print(f'{exc.__class__}, {exc}') raise e.SimulationFailedException - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - logger.debug('Returning the results') - return design_results - - -def run_sim_monte_carlo(db_connection: PooledMySQLConnection, simSettings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], normalized_npv: bool = False) -> List[ - models.Simulation]: - design_results = [] - - if not check_sim_settings(simSettings): - raise e.BadlyFormattedSettingsException - - interarrival = simSettings.interarrival_time - flow_time = simSettings.flow_time - runtime = simSettings.end_time - non_tech_add = simSettings.non_tech_add - discount_rate = simSettings.discount_rate - process = simSettings.flow_process - time_unit = TIME_FORMAT_DICT.get(simSettings.time_unit) - runs = simSettings.runs - - all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) - - all_market_values = get_all_market_values(db_connection, vcs_ids) - - all_designs = get_all_designs(db_connection, design_group_ids) - - all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - - for vcs_id in vcs_ids: - market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] - for design_group_id in design_group_ids: - sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] - if sim_data is None or sim_data == []: - raise e.VcsFailedException - - if not check_entity_rate(sim_data, process): - raise e.RateWrongOrderException - - designs = [design.id for design in all_designs if design.design_group_id == design_group_id] - - if designs is None or []: - raise e.DesignIdsNotFoundException - - for 
design in designs: - vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, - vd_values) - - dsm = create_simple_dsm(processes) - - sim = des.Des() - - try: - results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, runs) - - except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.SimulationFailedException - - if normalized_npv: - m_npv = results.normalize_npv() - else: - m_npv = results.mean_npv() - sim_res = models.Simulation( time=results.timesteps[-1], - mean_NPV=m_npv, + mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs ) - design_results.append(sim_res) + design_results.append(sim_res) + logger.debug('Returning the results') return design_results @@ -358,10 +296,11 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, vd_values_row = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] if row['category'] != 'Technical processes': try: - non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), - revenue=nsp.eval( - parse_formula(row['revenue'], vd_values_row, mi_values)), - name=row['iso_name']) + non_tech = models.NonTechnicalProcess( + cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), + revenue=nsp.eval( + parse_formula(row['revenue'], vd_values_row, mi_values)), + name=row['iso_name']) except Exception as exc: logger.debug(f'{exc.__class__}, {exc}') raise e.FormulaEvalException(row['id']) @@ -446,7 +385,7 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d AND cvs_design_mi_formulas.design_group \ IN ({",".join(["%s" for _ in range(len(design_group_ids))])}) ORDER BY `index`' with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, vcs_ids+design_group_ids) + cursor.execute(query, vcs_ids + design_group_ids) res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: @@ -652,7 +591,6 @@ def check_sim_settings(settings: models.EditSimSettings) -> bool: return settings_check -# TODO Change dsm creation to follow BPMN and the nodes in the BPMN. 
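# A minimal sketch of the placeholder-per-value pattern used by get_all_sim_data above and by
# get_multiple_dsm_file_id in the life-cycle storage: one %s per id joined into an IN (...) clause,
# with the ids passed separately so the MySQL driver handles the quoting. build_in_clause is an
# illustrative helper name, not part of the codebase.
def build_in_clause(column: str, values: list) -> tuple:
    placeholders = ",".join(["%s" for _ in values])       # e.g. "%s,%s,%s" for three ids
    return f"{column} IN ({placeholders})", list(values)  # clause text plus parameter list

# Usage mirroring the where-statement built above (commented, since the surrounding code is a diff):
# clause, params = build_in_clause("vcs", vcs_ids)
# select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS).where(clause, params) \
#     .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True)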
# Create DSM that only goes from one process to the other following the order of the index in the VCS def create_simple_dsm(processes: List[Process]) -> dict: n = len(processes) + 2 # +2 for start and end @@ -663,7 +601,7 @@ def create_simple_dsm(processes: List[Process]) -> dict: elif i == n - 1: name = "End" else: - name = processes[i-1].name + name = processes[i - 1].name dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) return dsm From 0fd3adbbefa1aa07d97b10e3e6a24771f025d372 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 10:19:57 +0200 Subject: [PATCH 041/210] clean-up and db update --- sedbackend/apps/core/db.py | 6 +- sedbackend/apps/cvs/simulation/models.py | 1 - sedbackend/apps/cvs/simulation/storage.py | 121 ++-------------------- 3 files changed, 8 insertions(+), 120 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 350b2d37..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -# host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -# port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index 0efbfcd8..3da9d356 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -2,7 +2,6 @@ from enum import Enum from pydantic import BaseModel from typing import Optional -import json from fastapi import Form from sedbackend.apps.cvs.link_design_lifecycle import models as link_model from dataclasses import dataclass diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 746298a7..82394271 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -23,7 +23,6 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage -from sedbackend.apps.cvs.design import storage as design_storage from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage from sedbackend.apps.core.files import storage as file_storage @@ -42,76 +41,14 @@ }) -# TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. 
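# A worked example of what the create_simple_dsm helper shown above produces. Assuming two
# technical processes named "Manufacture" and "Operate" (names are illustrative), n = 4 and each
# row points only at the next column, with "X" on the diagonal:
#
#     {
#         "Start":       ["X", 1,   0,   0  ],
#         "Manufacture": [0,   "X", 1,   0  ],
#         "Operate":     [0,   0,   "X", 1  ],
#         "End":         [0,   0,   0,   "X"],
#     }
#
# i.e. a strictly sequential flow Start -> Manufacture -> Operate -> End, which run_simulation
# falls back to whenever no DSM file has been uploaded for the VCS.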
-def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, - sim_settings: models.EditSimSettings, - vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: - design_results = [] - - if not check_sim_settings(sim_settings): - raise e.BadlyFormattedSettingsException - interarrival = sim_settings.interarrival_time - flow_time = sim_settings.flow_time - runtime = sim_settings.end_time - sim_settings.start_time - non_tech_add = sim_settings.non_tech_add - discount_rate = sim_settings.discount_rate - process = sim_settings.flow_process - time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) - - for vcs_id in vcs_ids: - for design_group_id in design_group_ids: - res = get_sim_data(db_connection, vcs_id, design_group_id) - if res is None or res == []: - raise e.VcsFailedException - - if not check_entity_rate(res, process): - raise e.RateWrongOrderException - - design_ids = [design.id for design in - design_storage.get_all_designs(db_connection, project_id, design_group_id)] - - if design_ids is None or []: - raise e.DesignIdsNotFoundException - - for design_id in design_ids: - # get_design(design_id) - processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, - design_id) # BUG probably. Populate processes changes the order of the processes. - - dsm = {} # TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id - - sim = des.Des() - - try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) - - except Exception as exc: - tb = sys.exc_info()[2] - logger.debug( - f'{exc.__class__}, {exc}, {exc.with_traceback(tb)}') - print(f'{exc.__class__}, {exc}') - raise e.SimulationFailedException - - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - logger.debug('Returning the results') - return design_results - - -def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, +# TODO: Run simulation on DSM file +def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_params: models.FileParams, - dsm_file: UploadFile) -> List[models.Simulation]: + dsm_file: UploadFile) -> dict: _, file_extension = os.path.splitext(dsm_file.filename) + dsm = {} + if file_extension == '.xlsx': try: tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support @@ -144,53 +81,7 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr else: raise e.DSMFileNotFoundException - vcs_ids = [int(id) for id in sim_params.vcs_ids.split(',')] - design_ids = [int(id) for id in sim_params.design_ids.split(',')] - - interarrival = sim_params.interarrival_time - flow_time = sim_params.flow_time - runtime = sim_params.end_time - non_tech_add = sim_params.non_tech_add - discount_rate = sim_params.discount_rate - process = sim_params.flow_process - time_unit = TIME_FORMAT_DICT.get(sim_params.time_unit) - process = sim_params.flow_process - - design_results = [] - for vcs_id in vcs_ids: - res = get_sim_data(db_connection, vcs_id) - if not check_entity_rate(res, process): - raise e.RateWrongOrderException - - if sim_params.design_ids is None or []: - raise e.DesignIdsNotFoundException - 
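# A self-contained sketch of the TemporaryFile workaround that get_dsm_from_file keeps using
# above, assuming a FastAPI UploadFile-like object with a .file attribute; the function name and
# the CSV choice are illustrative.
import tempfile

import pandas as pd

def read_spooled_upload(upload_file):
    # Copy the upload into a plain TemporaryFile first: pandas expects readable()/seekable(),
    # which the SpooledTemporaryFile behind UploadFile only exposes from Python 3.12 on.
    tmp = tempfile.TemporaryFile()
    try:
        tmp.write(upload_file.file.read())
        tmp.seek(0)
        return pd.read_csv(tmp)
    finally:
        tmp.close()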
- for design_id in design_ids: - processes, non_tech_processes = populate_processes( - non_tech_add, res, db_connection, design_id) - sim = des.Des() - - try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) - - except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - print(f'Sim failed {exc.__class__}, {exc}') - raise e.SimulationFailedException - - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - - return design_results + return dsm def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, From 73183c5bc0bcb2f73a569bdc4a4ba5cebff3f1d2 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 16:19:03 +0200 Subject: [PATCH 042/210] added api call for download dsm --- sedbackend/apps/cvs/life_cycle/exceptions.py | 6 ---- .../apps/cvs/life_cycle/implementation.py | 23 ++++++++++-- sedbackend/apps/cvs/life_cycle/router.py | 23 +++++++++--- sedbackend/apps/cvs/life_cycle/storage.py | 36 ++++++++++++++----- sedbackend/apps/cvs/simulation/storage.py | 9 +++-- tests/apps/cvs/life_cycle/test_dsm_files.py | 10 +++--- 6 files changed, 79 insertions(+), 28 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index 482a415a..322cf9cc 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -28,9 +28,3 @@ class FileSizeException(Exception): class ProcessesVcsMatchException(Exception): pass - - -class FileNotFoundException(Exception): - def __init__(self, vcs_id: int = None): - self.vcs_id = vcs_id - diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index d93e9f2b..706376d1 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -7,7 +7,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.life_cycle import exceptions, storage, models from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.core.files import models as file_models +from sedbackend.apps.core.files import models as file_models, exceptions as file_ex import sedbackend.apps.core.projects.exceptions as core_project_exceptions def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: @@ -199,9 +199,26 @@ def get_dsm_file_id(project_id: int, vcs_id: int) -> int: res = storage.get_dsm_file_id(con, project_id, vcs_id) con.commit() return res - except exceptions.FileNotFoundException as e: + except file_ex.FileNotFoundException as e: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f"File for vcs with id {e.vcs_id} could not be found" + detail=f"File could not be found" ) + +def get_dsm_file_path(project_id: int, vcs_id: int, user_id) -> file_models.StoredFilePath: + try: + with get_connection() as con: + res = storage.get_dsm_file_path(con, project_id, vcs_id, user_id) + con.commit() + return res + except file_ex.FileNotFoundException as e: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"File could not be found" + ) + except auth_ex.UnauthorizedOperationException: + raise 
HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"User does not have access to the file" + ) diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index e8e9bc25..244f9e7c 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -5,10 +5,10 @@ from sedbackend.apps.core.users.models import User from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel -from sedbackend.apps.core.projects.implementation import impl_get_subproject_native from sedbackend.apps.cvs.life_cycle import models, implementation from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.core.files import models as file_models +from fastapi.responses import FileResponse router = APIRouter() @@ -65,7 +65,7 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) # TODO only call one implementation function @router.post( - '/project/{native_project_id}/vcs/{vcs_id}/upload-dsm', + '/project/{native_project_id}/vcs/{vcs_id}/dsm', summary="Upload DSM file", response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] @@ -74,9 +74,9 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: return implementation.save_dsm_file(CVS_APP_SID, native_project_id, vcs_id, file, user.id) - + @router.get( - '/project/{native_project_id}/vcs/{vcs_id}/get-dsm-id', + '/project/{native_project_id}/vcs/{vcs_id}/dsm/id', summary="Fetch DSM file id", response_model=int, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] @@ -84,3 +84,18 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: return implementation.get_dsm_file_id(native_project_id, vcs_id) + +@router.get( + '/project/{native_project_id}/vcs/{vcs_id}/dsm/download', + summary="Fetch DSM file", + response_class=FileResponse, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def download_dsm_file(native_project_id: int, vcs_id: int, + user: User = Depends(get_current_active_user)) -> FileResponse: + stored_file_path = implementation.get_dsm_file_path(native_project_id, vcs_id, user.id) + resp = FileResponse( + path=stored_file_path.path, + filename=stored_file_path.filename + ) + return resp diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index f50801ce..64102444 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,11 +1,15 @@ +from typing import List + from fastapi import UploadFile from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection from mysqlsb import MySQLStatementBuilder, FetchType, Sort + +from sedbackend.apps.core.files.models import StoredFilePath from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions -from sedbackend.apps.core.files import models as file_models, storage as file_storage +from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex from sedbackend.apps.core.projects import storage as core_project_storage from mysql.connector 
import Error import magic @@ -23,7 +27,7 @@ CVS_DSM_FILES_TABLE = 'cvs_dsm_files' CVS_DSM_FILES_COLUMNS = ['vcs_id', 'file_id'] -MAX_FILE_SIZE = 100*10**6 # 100MB +MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB def populate_process_node(db_connection, project_id, result) -> models.ProcessNodeGet: @@ -276,7 +280,6 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project_id: int, vcs_id: int, file: UploadFile, user_id) -> bool: - subproject = core_project_storage.db_get_subproject_native(db_connection, application_sid, project_id) model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) @@ -288,7 +291,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project file_id = get_dsm_file_id(db_connection, project_id, vcs_id) if file_id is not None: file_storage.db_delete_file(db_connection, file_id, user_id) - except exceptions.FileNotFoundException: + except file_ex.FileNotFoundException: pass # File doesn't exist, so we don't need to delete it except Exception: try: @@ -329,14 +332,13 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .set_values([vcs_id, stored_file.id])\ + .set_values([vcs_id, stored_file.id]) \ .execute(fetch_type=FetchType.FETCH_NONE) return True def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> int: - vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id select_statement = MySQLStatementBuilder(db_connection) @@ -344,7 +346,25 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i .where('vcs_id = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - if file_res == None: - raise exceptions.FileNotFoundException + if file_res is None: + raise file_ex.FileNotFoundException return file_res['file_id'] + + +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> int: + select_statement = MySQLStatementBuilder(db_connection) + file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ + .where(",".join(["%s" for _ in range(len(vcs_ids))]), vcs_ids) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + if file_res is None: + raise file_ex.FileNotFoundException + + return file_res['file_id'] + + +def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> StoredFilePath: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + return file_storage.db_get_file_path(db_connection, file_id, user_id) + diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index dfe4f4d9..e742e702 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -23,6 +23,9 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage +from sedbackend.apps.cvs.design import storage as design_storage +from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage +from sedbackend.apps.core.files import storage as file_storage SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 
'flow_start_time', 'flow_time', @@ -62,7 +65,7 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project if not check_entity_rate(res, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_impl.get_all_designs(project_id, design_group_id)] + design_ids = [design.id for design in design_storage.get_all_designs(db_connection, project_id, design_group_id)] if design_ids is None or []: raise e.DesignIdsNotFoundException @@ -208,7 +211,9 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - for vcs_id in vcs_ids: + # all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) + + for i, vcs_id in enumerate(vcs_ids): market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 85eba76e..11639329 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -39,7 +39,7 @@ def test_upload_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -100,7 +100,7 @@ def test_upload_invalid_file_extension(client, std_headers, std_user): _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -146,7 +146,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) @@ -190,11 +190,11 @@ def test_get_dsm_file_id(client, std_headers, std_user): #Act - client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/upload-dsm', + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', headers=std_headers, files=_file) - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/get-dsm-id', + res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/id', headers=std_headers) #Assert From cf33ffb4d4d538c0eec6adfb579c8b69869dc372 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 30 Jun 2023 16:55:41 +0200 Subject: [PATCH 043/210] changed db column names for dsm and fixed get_multiple_dsm_file_id --- sedbackend/apps/cvs/life_cycle/storage.py | 18 +++++++++--------- .../apps/cvs/simulation/implementation.py | 7 +++++++ sedbackend/apps/cvs/simulation/storage.py | 2 +- sql/V230529_cvs_dsm_files.sql | 10 +++++----- 4 files changed, 22 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 64102444..76ad7438 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -25,7 +25,7 @@ CVS_START_STOP_NODES_COLUMNS = CVS_NODES_COLUMNS + ['type'] CVS_DSM_FILES_TABLE = 'cvs_dsm_files' -CVS_DSM_FILES_COLUMNS = ['vcs_id', 
'file_id'] +CVS_DSM_FILES_COLUMNS = ['vcs', 'file'] MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB @@ -349,19 +349,19 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i if file_res is None: raise file_ex.FileNotFoundException - return file_res['file_id'] + return file_res['file'] -def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> int: +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[int]: + where_statement = "vcs IN ("+",".join(["%s" for _ in range(len(vcs_ids))])+")" + logger.debug(f'where_statement: {where_statement}') + select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where(",".join(["%s" for _ in range(len(vcs_ids))]), vcs_ids) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - if file_res is None: - raise file_ex.FileNotFoundException + .where(where_statement, vcs_ids) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return file_res['file_id'] + return [file['file'] for file in file_res] def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> StoredFilePath: diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 3eaf785f..e4990be8 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -19,6 +19,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions +from sedbackend.apps.core.files import exceptions as file_ex def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], @@ -102,6 +103,12 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], status_code=status.HTTP_400_BAD_REQUEST, detail=f'No technical processes found' ) + except file_ex.FileNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f'Could not find DSM file' + ) + def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index e742e702..f5463157 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -211,7 +211,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - # all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) + all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) for i, vcs_id in enumerate(vcs_ids): market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] diff --git a/sql/V230529_cvs_dsm_files.sql b/sql/V230529_cvs_dsm_files.sql index 46f6a674..bbd07694 100644 --- a/sql/V230529_cvs_dsm_files.sql +++ b/sql/V230529_cvs_dsm_files.sql @@ -1,12 +1,12 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` ( - `vcs_id` INT UNSIGNED NOT NULL, - `file_id` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_id`), - FOREIGN KEY (`vcs_id`) + `vcs` INT UNSIGNED NOT NULL, + `file` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs`), + FOREIGN KEY (`vcs`) REFERENCES `seddb`.`cvs_vcss`(`id`) ON DELETE CASCADE, - FOREIGN KEY(`file_id`) + FOREIGN KEY(`file`) REFERENCES `seddb`.`files`(`id`) 
ON DELETE CASCADE ); \ No newline at end of file From f9b679ee70414f2795628b2c15d944a7e4c3bc79 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 1 Jul 2023 10:52:48 +0200 Subject: [PATCH 044/210] changed format of dsm --- sedbackend/apps/core/db.py | 6 ++-- sedbackend/apps/cvs/life_cycle/exceptions.py | 4 +++ .../apps/cvs/life_cycle/implementation.py | 5 +++ sedbackend/apps/cvs/life_cycle/router.py | 2 +- sedbackend/apps/cvs/life_cycle/storage.py | 29 ++++++++++++---- .../apps/cvs/simulation/implementation.py | 4 +-- sedbackend/apps/cvs/simulation/router.py | 6 ++-- sedbackend/apps/cvs/simulation/storage.py | 34 ++++++++++++++----- 8 files changed, 67 insertions(+), 23 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..350b2d37 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +# host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +# port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/life_cycle/exceptions.py b/sedbackend/apps/cvs/life_cycle/exceptions.py index 322cf9cc..a04968f3 100644 --- a/sedbackend/apps/cvs/life_cycle/exceptions.py +++ b/sedbackend/apps/cvs/life_cycle/exceptions.py @@ -28,3 +28,7 @@ class FileSizeException(Exception): class ProcessesVcsMatchException(Exception): pass + + +class DSMFileFailedDeletionException(Exception): + pass diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 706376d1..616c9b61 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -191,6 +191,11 @@ def save_dsm_file(application_sid: str, project_id: int, vcs_id: int, status_code=status.HTTP_404_NOT_FOUND, detail="No such application." ) + except exceptions.DSMFileFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to replace old DSM file." 
+ ) def get_dsm_file_id(project_id: int, vcs_id: int) -> int: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 244f9e7c..99377637 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -87,7 +87,7 @@ async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: @router.get( '/project/{native_project_id}/vcs/{vcs_id}/dsm/download', - summary="Fetch DSM file", + summary="Download DSM file", response_class=FileResponse, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 76ad7438..c50dd2df 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Tuple, Optional from fastapi import UploadFile from fastapi.logger import logger @@ -290,7 +290,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project try: file_id = get_dsm_file_id(db_connection, project_id, vcs_id) if file_id is not None: - file_storage.db_delete_file(db_connection, file_id, user_id) + delete_dsm_file(db_connection, project_id, vcs_id, file_id, user_id) except file_ex.FileNotFoundException: pass # File doesn't exist, so we don't need to delete it except Exception: @@ -298,7 +298,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project # File does not exist in persistent storage but exists in database delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ - .where('vcs_id = %s', [vcs_id]) \ + .where('vcs = %s', [vcs_id]) \ .execute(return_affected_rows=True) except: pass @@ -343,7 +343,7 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ - .where('vcs_id = %s', [vcs_id]) \ + .where('vcs = %s', [vcs_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) if file_res is None: @@ -352,7 +352,7 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i return file_res['file'] -def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[int]: +def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[Tuple[int, int]]: where_statement = "vcs IN ("+",".join(["%s" for _ in range(len(vcs_ids))])+")" logger.debug(f'where_statement: {where_statement}') @@ -361,10 +361,27 @@ def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List .where(where_statement, vcs_ids) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return [file['file'] for file in file_res] + return [(file['vcs'], file['file']) for file in file_res] def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> StoredFilePath: file_id = get_dsm_file_id(db_connection, project_id, vcs_id) return file_storage.db_get_file_path(db_connection, file_id, user_id) + +def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, + file_id: Optional[int], user_id: int) -> bool: + + if file_id is None: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + file_storage.db_delete_file(db_connection, file_id, user_id) + + delete_statement = 
MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ + .where('vcs = %s', [vcs_id]) \ + .execute(return_affected_rows=True) + + if rows == 0: + raise exceptions.DSMFileFailedDeletionException + + return True diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index e4990be8..2e18d486 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -23,10 +23,10 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids) + result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 74dbf9d1..27bb4d76 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -16,9 +16,9 @@ response_model=List[models.Simulation], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids) +async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], + user: User = Depends(get_current_active_user)) -> List[models.Simulation]: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id) # Temporary disabled ''' diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index f5463157..f9bbba5a 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -190,7 +190,7 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int]) -> List[models.Simulation]: + design_group_ids: List[int], user_id) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -213,8 +213,12 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) - for i, vcs_id in enumerate(vcs_ids): + for vcs_id in vcs_ids: market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] + dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] + dsm = None + if len(dsm_id) > 0: + dsm = get_dsm_from_file_id(db_connection, dsm_id[0][1], user_id) for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] if sim_data is None or sim_data == []: @@ -233,7 +237,10 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, vd_values) - dsm = create_simple_dsm(processes) # TODO Change to using BPMN + if dsm is None: + dsm = create_simple_dsm(processes) + + 
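# A minimal, self-contained sketch of the fallback DSM shape built by
# create_simple_dsm when no DSM file is stored for the VCS. The process names
# "A" and "B" below are illustrative assumptions, not names from these patches;
# "X" sits on the diagonal and 1 in the column of the following step, matching
# the Start/End row layout of the test fixture tests/apps/cvs/life_cycle/files/input.csv.
from typing import Dict, List, Union


def _fallback_dsm(names: List[str]) -> Dict[str, List[Union[str, int]]]:
    # Mirrors create_simple_dsm: prepend "Start", append "End", mark the
    # diagonal with "X" and the next step in the flow with 1.
    rows = ["Start"] + names + ["End"]
    n = len(rows)
    return {row: [1 if j == i + 1 else ("X" if j == i else 0) for j in range(n)]
            for i, row in enumerate(rows)}


assert _fallback_dsm(["A", "B"]) == {
    "Start": ["X", 1, 0, 0],
    "A": [0, "X", 1, 0],
    "B": [0, 0, "X", 1],
    "End": [0, 0, 0, "X"],
}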
logger.debug(f'DSM: {dsm}') sim = des.Des() @@ -646,15 +653,19 @@ def check_sim_settings(settings: models.EditSimSettings) -> bool: # TODO Change dsm creation to follow BPMN and the nodes in the BPMN. -# Currently the DSM only goes from one process to the other following the order of the index in the VCS +# Create DSM that only goes from one process to the other following the order of the index in the VCS def create_simple_dsm(processes: List[Process]) -> dict: - l = len(processes) - - index_list = list(range(0, l)) + n = len(processes) + 2 # +2 for start and end dsm = dict() - for i, p in enumerate(processes): - dsm.update({p.name: [1 if i + 1 == j else 0 for j in index_list]}) + for i in range(n): + if i == 0: + name = "Start" + elif i == n - 1: + name = "End" + else: + name = processes[i-1].name + dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) return dsm @@ -671,6 +682,11 @@ def get_dsm_from_csv(path): return dsm +def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, user_id: int) -> dict: + path = file_storage.db_get_file_path(db_connection, file_id, user_id) + return get_dsm_from_csv(path.path) + + def get_dsm_from_excel(path): pf = pd.read_excel(path) From c17c3fbf4ecd8e13bde9e34fa1b252e101c2d517 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 10:05:26 +0200 Subject: [PATCH 045/210] refactored monte carlo sim --- requirements.txt | 2 +- .../apps/cvs/simulation/implementation.py | 86 ++--------- sedbackend/apps/cvs/simulation/router.py | 5 +- sedbackend/apps/cvs/simulation/storage.py | 138 +++++------------- 4 files changed, 51 insertions(+), 180 deletions(-) diff --git a/requirements.txt b/requirements.txt index 98517043..803c6d3c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.3.3 +desim-tool==0.4.0 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 2e18d486..2fb167ab 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -1,8 +1,7 @@ -from fastapi import HTTPException, UploadFile, Depends, Form +from fastapi import HTTPException, UploadFile from starlette import status -import tempfile -from typing import List, Optional +from typing import List from fastapi.logger import logger from sedbackend.apps.cvs.simulation import models, storage @@ -10,7 +9,6 @@ from sedbackend.apps.core.authentication import exceptions as auth_ex from sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.cvs.design import exceptions as design_exc from sedbackend.apps.cvs.simulation.exceptions import BadlyFormattedSettingsException, DSMFileNotFoundException, \ DesignIdsNotFoundException, FormulaEvalException, NegativeTimeException, ProcessNotFoundException, \ RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ @@ -23,34 +21,31 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id: int) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int, is_monte_carlo: bool = False, + normalized_npv: bool = False) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id) + result = 
storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id, is_monte_carlo, + normalized_npv) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, detail='Unauthorized user.', ) - except vcs_exceptions.VCSNotFoundException: # This exception will probably never fire - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find vcs.', - ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find project.', ) except market_input_exceptions.MarketInputNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find market input', ) except ProcessNotFoundException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, + status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find process', ) except FormulaEvalException as e: @@ -169,69 +164,6 @@ def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.Fi ) -def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: bool) -> List[models.Simulation]: - try: - with get_connection() as con: - result = storage.run_sim_monte_carlo(con, sim_settings, vcs_ids, design_group_ids, normalized_npv) - return result - except vcs_exceptions.GenericDatabaseException: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f'Fel' - ) - except FormulaEvalException as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not evaluate formulas of process with id: {e.process_id}' - ) - except RateWrongOrderException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Wrong order of rate of entities. 
Per project assigned after per product' - ) - except NegativeTimeException as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Formula at process with id: {e.process_id} evaluated to negative time' - ) - except DesignIdsNotFoundException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No design ids or empty array supplied' - ) - except VcsFailedException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Invalid vcs ids' - ) - except BadlyFormattedSettingsException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Settings are not correct' - ) - except CouldNotFetchSimulationDataException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch simulation data' - ) - except CouldNotFetchMarketInputValuesException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch market input values' - ) - except CouldNotFetchValueDriverDesignValuesException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch value driver design values' - ) - except NoTechnicalProcessException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No technical processes found' - ) - - def get_sim_settings(project_id: int) -> models.SimSettings: try: with get_connection() as con: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 27bb4d76..ac63c46b 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -47,8 +47,9 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil ) async def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: Optional[bool] = False) -> List[models.Simulation]: - return implementation.run_sim_monte_carlo(sim_settings, vcs_ids, design_group_ids, normalized_npv) + normalized_npv: Optional[bool] = False, + user: User = Depends(get_current_active_user)) -> List[models.Simulation]: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, True, normalized_npv) @router.get( diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index f9bbba5a..746298a7 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -41,9 +41,11 @@ 'minutes': TimeFormat.MINUTES }) -#TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. -def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_settings: models.EditSimSettings, - vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: + +# TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. 
+def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, + sim_settings: models.EditSimSettings, + vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -65,7 +67,8 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project if not check_entity_rate(res, process): raise e.RateWrongOrderException - design_ids = [design.id for design in design_storage.get_all_designs(db_connection, project_id, design_group_id)] + design_ids = [design.id for design in + design_storage.get_all_designs(db_connection, project_id, design_group_id)] if design_ids is None or []: raise e.DesignIdsNotFoundException @@ -75,7 +78,7 @@ def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, design_id) # BUG probably. Populate processes changes the order of the processes. - dsm = {} #TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id + dsm = {} # TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id sim = des.Des() @@ -111,9 +114,10 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr if file_extension == '.xlsx': try: - tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support - tmp_xlsx.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_xlsx.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_xlsx.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_xlsx.seek( + 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 dsm = get_dsm_from_excel(tmp_xlsx) if dsm is None: @@ -124,9 +128,10 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr tmp_xlsx.close() elif file_extension == '.csv': try: - tmp_csv = tempfile.TemporaryFile() # Workaround because current python version doesn't support - tmp_csv.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile - tmp_csv.seek(0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 + tmp_csv = tempfile.TemporaryFile() # Workaround because current python version doesn't support + tmp_csv.write(dsm_file.file.read()) # readable() attribute on SpooledTemporaryFile which UploadFile + tmp_csv.seek( + 0) # is an alias for. PR is accepted for python v3.12, see https://github.com/python/cpython/pull/29560 # This should hopefully open up the file for the processor. 
dsm = get_dsm_from_csv(tmp_csv) @@ -190,7 +195,8 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id) -> List[models.Simulation]: + design_group_ids: List[int], user_id, is_monte_carlo: bool = False, normalized_npv: bool = False + ) -> List[models.Simulation]: design_results = [] if not check_sim_settings(sim_settings): @@ -202,6 +208,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed discount_rate = sim_settings.discount_rate process = sim_settings.flow_process time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) + runs = sim_settings.runs all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) @@ -245,9 +252,15 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed sim = des.Des() try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) + if is_monte_carlo: + results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, + non_tech_processes, + non_tech_add, dsm, time_unit, discount_rate, runtime, + runs) + else: + results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, + non_tech_add, dsm, time_unit, + discount_rate, runtime) except Exception as exc: tb = sys.exc_info()[2] @@ -256,91 +269,16 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed print(f'{exc.__class__}, {exc}') raise e.SimulationFailedException - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - logger.debug('Returning the results') - return design_results - - -def run_sim_monte_carlo(db_connection: PooledMySQLConnection, simSettings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], normalized_npv: bool = False) -> List[ - models.Simulation]: - design_results = [] - - if not check_sim_settings(simSettings): - raise e.BadlyFormattedSettingsException - - interarrival = simSettings.interarrival_time - flow_time = simSettings.flow_time - runtime = simSettings.end_time - non_tech_add = simSettings.non_tech_add - discount_rate = simSettings.discount_rate - process = simSettings.flow_process - time_unit = TIME_FORMAT_DICT.get(simSettings.time_unit) - runs = simSettings.runs - - all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) - - all_market_values = get_all_market_values(db_connection, vcs_ids) - - all_designs = get_all_designs(db_connection, design_group_ids) - - all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - - for vcs_id in vcs_ids: - market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] - for design_group_id in design_group_ids: - sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] - if sim_data is None or sim_data == []: - raise e.VcsFailedException - - if not check_entity_rate(sim_data, process): - raise e.RateWrongOrderException - - designs = [design.id for design in all_designs if design.design_group_id == design_group_id] - - if designs is None or []: - raise e.DesignIdsNotFoundException - - for 
design in designs: - vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, - vd_values) - - dsm = create_simple_dsm(processes) - - sim = des.Des() - - try: - results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, runs) - - except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.SimulationFailedException - - if normalized_npv: - m_npv = results.normalize_npv() - else: - m_npv = results.mean_npv() - sim_res = models.Simulation( time=results.timesteps[-1], - mean_NPV=m_npv, + mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs ) - design_results.append(sim_res) + design_results.append(sim_res) + logger.debug('Returning the results') return design_results @@ -358,10 +296,11 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, vd_values_row = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] if row['category'] != 'Technical processes': try: - non_tech = models.NonTechnicalProcess(cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), - revenue=nsp.eval( - parse_formula(row['revenue'], vd_values_row, mi_values)), - name=row['iso_name']) + non_tech = models.NonTechnicalProcess( + cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), + revenue=nsp.eval( + parse_formula(row['revenue'], vd_values_row, mi_values)), + name=row['iso_name']) except Exception as exc: logger.debug(f'{exc.__class__}, {exc}') raise e.FormulaEvalException(row['id']) @@ -446,7 +385,7 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d AND cvs_design_mi_formulas.design_group \ IN ({",".join(["%s" for _ in range(len(design_group_ids))])}) ORDER BY `index`' with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, vcs_ids+design_group_ids) + cursor.execute(query, vcs_ids + design_group_ids) res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: @@ -652,7 +591,6 @@ def check_sim_settings(settings: models.EditSimSettings) -> bool: return settings_check -# TODO Change dsm creation to follow BPMN and the nodes in the BPMN. 
# Create DSM that only goes from one process to the other following the order of the index in the VCS def create_simple_dsm(processes: List[Process]) -> dict: n = len(processes) + 2 # +2 for start and end @@ -663,7 +601,7 @@ def create_simple_dsm(processes: List[Process]) -> dict: elif i == n - 1: name = "End" else: - name = processes[i-1].name + name = processes[i - 1].name dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) return dsm From c49c605f433346764dc3b26e50cdcf1e024f1da7 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 10:19:57 +0200 Subject: [PATCH 046/210] clean-up and db update --- sedbackend/apps/core/db.py | 6 +- sedbackend/apps/cvs/simulation/models.py | 1 - sedbackend/apps/cvs/simulation/storage.py | 121 ++-------------------- 3 files changed, 8 insertions(+), 120 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 350b2d37..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -# host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -# port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index 0efbfcd8..3da9d356 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -2,7 +2,6 @@ from enum import Enum from pydantic import BaseModel from typing import Optional -import json from fastapi import Form from sedbackend.apps.cvs.link_design_lifecycle import models as link_model from dataclasses import dataclass diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 746298a7..82394271 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -23,7 +23,6 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage -from sedbackend.apps.cvs.design import storage as design_storage from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage from sedbackend.apps.core.files import storage as file_storage @@ -42,76 +41,14 @@ }) -# TODO: Finish method. No checks on file this time since we won't be getting an uploaded file here, it will already be on the server. 
-def run_sim_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, - sim_settings: models.EditSimSettings, - vcs_ids: List[int], design_group_ids: List[int], normalized_npv: bool) -> List[models.Simulation]: - design_results = [] - - if not check_sim_settings(sim_settings): - raise e.BadlyFormattedSettingsException - interarrival = sim_settings.interarrival_time - flow_time = sim_settings.flow_time - runtime = sim_settings.end_time - sim_settings.start_time - non_tech_add = sim_settings.non_tech_add - discount_rate = sim_settings.discount_rate - process = sim_settings.flow_process - time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) - - for vcs_id in vcs_ids: - for design_group_id in design_group_ids: - res = get_sim_data(db_connection, vcs_id, design_group_id) - if res is None or res == []: - raise e.VcsFailedException - - if not check_entity_rate(res, process): - raise e.RateWrongOrderException - - design_ids = [design.id for design in - design_storage.get_all_designs(db_connection, project_id, design_group_id)] - - if design_ids is None or []: - raise e.DesignIdsNotFoundException - - for design_id in design_ids: - # get_design(design_id) - processes, non_tech_processes = populate_processes(non_tech_add, res, db_connection, vcs_id, - design_id) # BUG probably. Populate processes changes the order of the processes. - - dsm = {} # TODO: Fetch DSM from file. Should be able to guess file based on vcs_id and proj_id - - sim = des.Des() - - try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) - - except Exception as exc: - tb = sys.exc_info()[2] - logger.debug( - f'{exc.__class__}, {exc}, {exc.with_traceback(tb)}') - print(f'{exc.__class__}, {exc}') - raise e.SimulationFailedException - - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - logger.debug('Returning the results') - return design_results - - -def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, +# TODO: Run simulation on DSM file +def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, sim_params: models.FileParams, - dsm_file: UploadFile) -> List[models.Simulation]: + dsm_file: UploadFile) -> dict: _, file_extension = os.path.splitext(dsm_file.filename) + dsm = {} + if file_extension == '.xlsx': try: tmp_xlsx = tempfile.TemporaryFile() # Workaround because current python version doesn't support @@ -144,53 +81,7 @@ def run_sim_with_dsm_file(db_connection: PooledMySQLConnection, user_id: int, pr else: raise e.DSMFileNotFoundException - vcs_ids = [int(id) for id in sim_params.vcs_ids.split(',')] - design_ids = [int(id) for id in sim_params.design_ids.split(',')] - - interarrival = sim_params.interarrival_time - flow_time = sim_params.flow_time - runtime = sim_params.end_time - non_tech_add = sim_params.non_tech_add - discount_rate = sim_params.discount_rate - process = sim_params.flow_process - time_unit = TIME_FORMAT_DICT.get(sim_params.time_unit) - process = sim_params.flow_process - - design_results = [] - for vcs_id in vcs_ids: - res = get_sim_data(db_connection, vcs_id) - if not check_entity_rate(res, process): - raise e.RateWrongOrderException - - if sim_params.design_ids is None or []: - raise e.DesignIdsNotFoundException - 
- for design_id in design_ids: - processes, non_tech_processes = populate_processes( - non_tech_add, res, db_connection, design_id) - sim = des.Des() - - try: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) - - except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - print(f'Sim failed {exc.__class__}, {exc}') - raise e.SimulationFailedException - - design_res = models.Simulation( - time=results.timesteps[-1], - mean_NPV=results.mean_npv(), - max_NPVs=results.all_max_npv(), - mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs - ) - - design_results.append(design_res) - - return design_results + return dsm def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, From 4f7be1dad91c2e7bf775e506bef74b8635cb268b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 10:36:01 +0200 Subject: [PATCH 047/210] fixed missing exception error --- sedbackend/apps/cvs/vcs/storage.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index d66fc174..ee06bfae 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -5,10 +5,9 @@ from sedbackend.apps.cvs.project import exceptions as project_exceptions from sedbackend.apps.cvs.project.storage import get_cvs_project from sedbackend.apps.cvs.vcs import models, exceptions -from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models, \ - exceptions as life_cycle_exceptions +from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models from sedbackend.libs.datastructures.pagination import ListChunk -from sedbackend.apps.core.files import storage as file_storage +from sedbackend.apps.core.files import storage as file_storage, exceptions as file_exceptions from mysqlsb import MySQLStatementBuilder, Sort, FetchType DEBUG_ERROR_HANDLING = True # Set to false in production @@ -138,7 +137,7 @@ def delete_vcs(db_connection: PooledMySQLConnection, user_id: int, project_id: i try: dsm_file_id = life_cycle_storage.get_dsm_file_id(db_connection, project_id, vcs_id) file_storage.db_delete_file(db_connection, dsm_file_id, user_id) - except life_cycle_exceptions.FileNotFoundException: # DSM file does not exist + except file_exceptions.FileNotFoundException: # DSM file does not exist pass delete_statement = MySQLStatementBuilder(db_connection) From bdbcf9bcadef457f424ccc5b50324750e53e3718 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 17:37:06 +0200 Subject: [PATCH 048/210] functions for handling dsm matrix --- .../apps/cvs/life_cycle/implementation.py | 68 +++++++++++++++++-- sedbackend/apps/cvs/life_cycle/router.py | 43 +++++++----- sedbackend/apps/cvs/life_cycle/storage.py | 54 ++++++++++++++- sedbackend/apps/cvs/simulation/storage.py | 20 +----- 4 files changed, 141 insertions(+), 44 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 616c9b61..db3698a5 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -1,3 +1,5 @@ +from typing import List + from fastapi import HTTPException, UploadFile from starlette import status @@ -10,6 +12,7 @@ from sedbackend.apps.core.files import models as file_models, exceptions as file_ex 
import sedbackend.apps.core.projects.exceptions as core_project_exceptions + def create_process_node(project_id: int, vcs_id: int, node: models.ProcessNodePost) -> models.ProcessNodeGet: try: with get_connection() as con: @@ -158,12 +161,51 @@ def update_bpmn(project_id: int, vcs_id: int, bpmn: models.BPMNGet) -> bool: detail=f'Project with id={project_id} is not a part of vcs with id={vcs_id}.', ) - -def save_dsm_file(application_sid: str, project_id: int, vcs_id: int, + +def save_dsm_file(project_id: int, vcs_id: int, file: UploadFile, user_id: int) -> bool: try: with get_connection() as con: - result = storage.save_dsm_file(con, application_sid, project_id, vcs_id, file, user_id) + result = storage.save_dsm_file(con, project_id, vcs_id, file, user_id) + con.commit() + return result + except exceptions.InvalidFileTypeException: + raise HTTPException( + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + detail='Wrong filetype' + ) + except exceptions.FileSizeException: + raise HTTPException( + status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, + detail='File too large' + ) + except exceptions.ProcessesVcsMatchException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Processes in DSM does not match processes in VCS' + ) + except core_project_exceptions.SubProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sub-project not found." + ) + except ApplicationNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such application." + ) + except exceptions.DSMFileFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to replace old DSM file." + ) + + +def save_dsm(project_id: int, vcs_id: int, + dsm: List[List[str or float]], user_id: int) -> bool: + try: + with get_connection() as con: + result = storage.save_dsm(con, project_id, vcs_id, dsm, user_id) con.commit() return result except exceptions.InvalidFileTypeException: @@ -217,7 +259,25 @@ def get_dsm_file_path(project_id: int, vcs_id: int, user_id) -> file_models.Stor res = storage.get_dsm_file_path(con, project_id, vcs_id, user_id) con.commit() return res - except file_ex.FileNotFoundException as e: + except file_ex.FileNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"File could not be found" + ) + except auth_ex.UnauthorizedOperationException: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"User does not have access to the file" + ) + + +def get_dsm(project_id: int, vcs_id: int, user_id: int) -> List[List[str or float]]: + try: + with get_connection() as con: + res = storage.get_dsm(con, project_id, vcs_id, user_id) + con.commit() + return res + except file_ex.FileNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"File could not be found" diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 99377637..6abbd48d 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -1,3 +1,5 @@ +from typing import List + from fastapi import APIRouter, Depends from fastapi import UploadFile @@ -63,16 +65,37 @@ async def update_bpmn(native_project_id: int, vcs_id: int, bpmn: models.BPMNGet) return implementation.update_bpmn(native_project_id, vcs_id, bpmn) -# TODO only call one implementation function +@router.get( + '/project/{native_project_id}/vcs/{vcs_id}/dsm', + summary="Get 
DSM", + response_model=List[List[str or float]], + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_dsm(native_project_id: int, vcs_id: int, + user: User = Depends(get_current_active_user)) -> List[List[str or float]]: + return implementation.get_dsm(native_project_id, vcs_id, user.id) + + @router.post( '/project/{native_project_id}/vcs/{vcs_id}/dsm', + summary="Save DSM", + response_model=bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] +) +async def save_dsm(native_project_id: int, vcs_id: int, dsm: List[List[str or float]], + user: User = Depends(get_current_active_user)) -> bool: + return implementation.save_dsm(native_project_id, vcs_id, dsm, user.id) + + +@router.post( + '/project/{native_project_id}/vcs/{vcs_id}/dsm/file', summary="Upload DSM file", response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, user: User = Depends(get_current_active_user)) -> bool: - return implementation.save_dsm_file(CVS_APP_SID, native_project_id, vcs_id, file, user.id) + return implementation.save_dsm_file(native_project_id, vcs_id, file, user.id) @router.get( @@ -83,19 +106,3 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, ) async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: return implementation.get_dsm_file_id(native_project_id, vcs_id) - - -@router.get( - '/project/{native_project_id}/vcs/{vcs_id}/dsm/download', - summary="Download DSM file", - response_class=FileResponse, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] -) -async def download_dsm_file(native_project_id: int, vcs_id: int, - user: User = Depends(get_current_active_user)) -> FileResponse: - stored_file_path = implementation.get_dsm_file_path(native_project_id, vcs_id, user.id) - resp = FileResponse( - path=stored_file_path.path, - filename=stored_file_path.filename - ) - return resp diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index c50dd2df..960adcf6 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,5 @@ -from typing import List, Tuple, Optional +import csv +from typing import List, Tuple, Optional, TextIO from fastapi import UploadFile from fastapi.logger import logger @@ -8,6 +9,7 @@ from sedbackend.apps.core.files.models import StoredFilePath from sedbackend.apps.cvs.life_cycle import exceptions, models +from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex from sedbackend.apps.core.projects import storage as core_project_storage @@ -278,9 +280,9 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i return True -def save_dsm_file(db_connection: PooledMySQLConnection, application_sid, project_id: int, +def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file: UploadFile, user_id) -> bool: - subproject = core_project_storage.db_get_subproject_native(db_connection, application_sid, project_id) + subproject = core_project_storage.db_get_subproject_native(db_connection, CVS_APP_SID, project_id) model_file = 
file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) @@ -369,6 +371,14 @@ def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs return file_storage.db_get_file_path(db_connection, file_id, user_id) +def get_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id) -> List[List[str or float]]: + path = get_dsm_file_path(db_connection, project_id, vcs_id, user_id).path + with open(path, newline='') as f: + reader = csv.reader(f) + data = list(reader) + return data + + def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file_id: Optional[int], user_id: int) -> bool: @@ -385,3 +395,41 @@ def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_i raise exceptions.DSMFileFailedDeletionException return True + + +def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, user_id: int) -> dict: + try: + path = file_storage.db_get_file_path(db_connection, file_id, user_id) + except Exception: + raise file_ex.FileNotFoundException + return get_dsm_from_csv(path.path) + + +def get_dsm_from_csv(path): + try: + df = pd.read_csv(path) + except Exception as e: + logger.debug(f'{e.__class__}, {e}') + + dsm = dict() + + for v in df.values: + dsm.update({v[0]: v[1::].tolist()}) + + return dsm + + +def csv_from_matrix(matrix: List[List[str or float]]) -> TextIO: + with open("dsm.csv", "w+") as dsm_file: + csv_writer = csv.writer(dsm_file, delimiter=',') + csv_writer.writerows(matrix) + + return dsm_file + + +def save_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, dsm: List[List[str or float]], + user_id: int) -> bool: + csv_file = csv_from_matrix(dsm) + dsm_file = pd.read_csv(csv_file) + + return save_dsm_file(db_connection, project_id, vcs_id, dsm_file, user_id) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 82394271..56087332 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -18,13 +18,13 @@ from mysqlsb import FetchType, MySQLStatementBuilder +from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id, get_dsm_from_csv from sedbackend.libs.formula_parser.parser import NumericStringParser from sedbackend.libs.formula_parser import expressions as expr from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage -from sedbackend.apps.core.files import storage as file_storage SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', @@ -498,24 +498,6 @@ def create_simple_dsm(processes: List[Process]) -> dict: return dsm -def get_dsm_from_csv(path): - try: - pf = pd.read_csv(path) - except Exception as e: - logger.debug(f'{e.__class__}, {e}') - - dsm = dict() - for v in pf.values: - dsm.update({v[0]: v[1::].tolist()}) - - return dsm - - -def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, user_id: int) -> dict: - path = file_storage.db_get_file_path(db_connection, file_id, user_id) - return get_dsm_from_csv(path.path) - - def get_dsm_from_excel(path): pf = pd.read_excel(path) From 65a0b2f35878b65389a390362f4ea4fcd13bc109 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 2 Jul 2023 17:49:36 +0200 Subject: [PATCH 049/210] fixed 
failing tests --- tests/apps/cvs/life_cycle/test_dsm_files.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 11639329..d71d2170 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -39,7 +39,7 @@ def test_upload_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', headers=std_headers, files=_file) @@ -100,7 +100,7 @@ def test_upload_invalid_file_extension(client, std_headers, std_user): _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', headers=std_headers, files=_file) @@ -146,7 +146,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', headers=std_headers, files=_file) @@ -190,7 +190,7 @@ def test_get_dsm_file_id(client, std_headers, std_user): #Act - client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', headers=std_headers, files=_file) @@ -203,4 +203,4 @@ def test_get_dsm_file_id(client, std_headers, std_user): #Cleanup tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) \ No newline at end of file + tu.delete_project_by_id(project.id, current_user.id) From 64d63d8a3ac74d79051bd6cb63e525354f8259ed Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 3 Jul 2023 10:24:30 +0200 Subject: [PATCH 050/210] fixed save dsm and created tests --- .../apps/cvs/life_cycle/implementation.py | 2 +- sedbackend/apps/cvs/life_cycle/storage.py | 33 +- tests/apps/cvs/life_cycle/files/input.csv | 10 +- tests/apps/cvs/life_cycle/test_dsm_files.py | 359 +++++++++--------- 4 files changed, 211 insertions(+), 193 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index db3698a5..7795c5bf 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -205,7 +205,7 @@ def save_dsm(project_id: int, vcs_id: int, dsm: List[List[str or float]], user_id: int) -> bool: try: with get_connection() as con: - result = storage.save_dsm(con, project_id, vcs_id, dsm, user_id) + result = storage.save_dsm_matrix(con, project_id, vcs_id, dsm, user_id) con.commit() return result except exceptions.InvalidFileTypeException: diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 960adcf6..9b2b28f7 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,4 +1,6 @@ import csv +import io +import tempfile from typing import List, Tuple, Optional, TextIO from fastapi import UploadFile @@ -280,12 +282,19 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i return True +def save_dsm_matrix(db_connection: PooledMySQLConnection, project_id: int, vcs_id: 
int, dsm: List[List[str or float]], + user_id: int) -> bool: + upload_file = csv_from_matrix(dsm) + return save_dsm_file(db_connection, project_id, vcs_id, upload_file, user_id) + + def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file: UploadFile, user_id) -> bool: subproject = core_project_storage.db_get_subproject_native(db_connection, CVS_APP_SID, project_id) - model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) + logger.debug(f'model file: {model_file}') + if model_file.extension != ".csv": raise exceptions.InvalidFileTypeException @@ -309,9 +318,9 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, f.seek(0) tmp_file = f.read() mime = magic.from_buffer(tmp_file) - logger.debug(mime) + logger.debug(f'File mime: {mime}') # TODO doesn't work with windows if we create the file in excel. - if mime != "CSV text" and mime != "ASCII text": + if mime != "CSV text" and "ASCII text" not in mime: raise exceptions.InvalidFileTypeException if f.tell() > MAX_FILE_SIZE: @@ -355,7 +364,7 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[Tuple[int, int]]: - where_statement = "vcs IN ("+",".join(["%s" for _ in range(len(vcs_ids))])+")" + where_statement = "vcs IN (" + ",".join(["%s" for _ in range(len(vcs_ids))]) + ")" logger.debug(f'where_statement: {where_statement}') select_statement = MySQLStatementBuilder(db_connection) @@ -381,7 +390,6 @@ def get_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, file_id: Optional[int], user_id: int) -> bool: - if file_id is None: file_id = get_dsm_file_id(db_connection, project_id, vcs_id) file_storage.db_delete_file(db_connection, file_id, user_id) @@ -419,17 +427,12 @@ def get_dsm_from_csv(path): return dsm -def csv_from_matrix(matrix: List[List[str or float]]) -> TextIO: - with open("dsm.csv", "w+") as dsm_file: +def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: + temp_name = "dsm.csv" + with open(temp_name, "w+") as dsm_file: csv_writer = csv.writer(dsm_file, delimiter=',') csv_writer.writerows(matrix) - return dsm_file - - -def save_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, dsm: List[List[str or float]], - user_id: int) -> bool: - csv_file = csv_from_matrix(dsm) - dsm_file = pd.read_csv(csv_file) + dsm_file = open(temp_name, "r+b") - return save_dsm_file(db_connection, project_id, vcs_id, dsm_file, user_id) + return UploadFile(filename=dsm_file.name, file=dsm_file) diff --git a/tests/apps/cvs/life_cycle/files/input.csv b/tests/apps/cvs/life_cycle/files/input.csv index 56b973fd..183d0ecc 100644 --- a/tests/apps/cvs/life_cycle/files/input.csv +++ b/tests/apps/cvs/life_cycle/files/input.csv @@ -1,5 +1,5 @@ -Processes, "Start", "Architectural design","Verification","End" -"Start", "X", 1, 0, 0 -"Architectural design", 0, "X", 1, 0 -"Verification", 0, 0, "X", 1 -"End", 0, 0, 0, "X" \ No newline at end of file +Processes,Start,Architectural design,Verification,End +Start,X,1,0,0 +Architectural design,0,X,1,0 +Verification,0,0,X,1 +End,0,0,0,X \ No newline at end of file diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index d71d2170..1b438699 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ 
b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -6,201 +6,216 @@ import sedbackend.apps.cvs.life_cycle.implementation as impl_life_cycle import sedbackend.apps.core.files.implementation as impl_files - -def test_upload_dsm_file(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - - rows = [row1, row2] - table = tu.create_vcs_table(project.id, vcs.id, rows) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') - _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} - - - #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', - headers=std_headers, - files=_file) - - #Assert - assert res.status_code == 200 - - #Cleanup - tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - - -def test_upload_invalid_file_extension(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - - row1 = tu.vcs_model.VcsRowPost( +std_rows = [tu.vcs_model.VcsRowPost( index=0, - stakeholder=tu.tu.random_str(5,50), + stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), + stakeholder_expectations=tu.tu.random_str(5, 50), iso_process=17, subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( +), tu.vcs_model.VcsRowPost( index=1, - stakeholder=tu.tu.random_str(5,50), + stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), + stakeholder_expectations=tu.tu.random_str(5, 50), iso_process=20, subprocess=None - ) - row3 = tu.vcs_model.VcsRowPost( +), tu.vcs_model.VcsRowPost( index=2, - stakeholder=tu.tu.random_str(5,50), + stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), + stakeholder_expectations=tu.tu.random_str(5, 50), iso_process=22, subprocess=None - ) - row4 = tu.vcs_model.VcsRowPost( +), tu.vcs_model.VcsRowPost( index=3, - stakeholder=tu.tu.random_str(5,50), + stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), + stakeholder_expectations=tu.tu.random_str(5, 50), iso_process=24, subprocess=None - ) - - rows = [row1, row2, row3, row4] +)] + + +def test_upload_dsm_file(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + rows = [std_rows[0], std_rows[1]] + table = tu.create_vcs_table(project.id, vcs.id, rows) - table = tu.create_vcs_table(project.id, vcs.id, rows) + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', 
_test_upload_file.open('rb'), 'text/csv')} - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input-example.xlsx') - _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} + # Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', + headers=std_headers, + files=_file) - #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', - headers=std_headers, - files=_file) - - #Assert - assert res.status_code == 415 #InvalidFileTypeException - + # Assert + assert res.status_code == 200 - #Cleanup - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + + +def test_upload_invalid_file_extension(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + rows = std_rows + + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input-example.xlsx') + _file = {'file': ('input-example.xlsx', _test_upload_file.open('rb'), + 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} + + # Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', + headers=std_headers, + files=_file) + + # Assert + assert res.status_code == 415 # InvalidFileTypeException + + # Cleanup + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_upload_invalid_dsm_file(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=15, - subprocess=None - ) - - rows = [row1, row2] - table = tu.create_vcs_table(project.id, vcs.id, rows) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') - _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} - - - #Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', - headers=std_headers, - files=_file) - - #Assert - assert res.status_code == 400 #Bad request, should throw ProcessesVcsMatchException - - #Cleanup - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + row1 = tu.vcs_model.VcsRowPost( + index=0, + stakeholder=tu.tu.random_str(5, 50), + stakeholder_needs=None, + 
stakeholder_expectations=tu.tu.random_str(5, 50), + iso_process=17, + subprocess=None + ) + row2 = tu.vcs_model.VcsRowPost( + index=1, + stakeholder=tu.tu.random_str(5, 50), + stakeholder_needs=None, + stakeholder_expectations=tu.tu.random_str(5, 50), + iso_process=15, + subprocess=None + ) + + rows = [row1, row2] + tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + # Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', + headers=std_headers, + files=_file) + + # Assert + assert res.status_code == 400 # Bad request, should throw ProcessesVcsMatchException + + # Cleanup + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_get_dsm_file_id(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - - rows = [row1, row2] - table = tu.create_vcs_table(project.id, vcs.id, rows) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') - _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} - - - #Act - client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', - headers=std_headers, - files=_file) - - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/id', - headers=std_headers) - - #Assert - assert res.status_code == 200 - - #Cleanup - tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + rows = [std_rows[0], std_rows[1]] + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + # Act + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', + headers=std_headers, + files=_file) + + res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/id', + headers=std_headers) + + # Assert + assert res.status_code == 200 + + # Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + + +def test_get_dsm_matrix(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + rows = [std_rows[0], std_rows[1]] + table = tu.create_vcs_table(project.id, vcs.id, rows) + + cwd = os.getcwd() + _test_upload_file = Path(cwd + 
'/tests/apps/cvs/life_cycle/files/input.csv') + _file = {'file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} + + # Act + client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm/file', + headers=std_headers, + files=_file) + + res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + headers=std_headers) + + matrix = res.json() + + # Assert + assert res.status_code == 200 + assert len(matrix) == 5 + + +def test_save_dsm(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id) + + rows = [std_rows[0], std_rows[1]] + tu.create_vcs_table(project.id, vcs.id, rows) + + dsm = [["Processes", "Start", "Architectural design", "Verification", "End"], + ["Start", "X", "1", "0", "0"], + ["Architectural design", "0", "X", "1", "0"], + ["Verification", "0", "0", "X", "1"], + ["End", "0", "0", "0", "X"]] + + # Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/dsm', + headers=std_headers, + json=dsm) + + saved_dsm = impl_life_cycle.get_dsm(project.id, vcs.id, current_user.id) + + # Assert + assert res.status_code == 200 + assert dsm == saved_dsm From b73257f35aaed474b60eaa4fba99631f365857ab Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 3 Jul 2023 10:30:23 +0200 Subject: [PATCH 051/210] added test cleanup --- tests/apps/cvs/life_cycle/test_dsm_files.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 1b438699..d483751d 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -173,7 +173,7 @@ def test_get_dsm_matrix(client, std_headers, std_user): vcs = tu.seed_random_vcs(project.id) rows = [std_rows[0], std_rows[1]] - table = tu.create_vcs_table(project.id, vcs.id, rows) + tu.create_vcs_table(project.id, vcs.id, rows) cwd = os.getcwd() _test_upload_file = Path(cwd + '/tests/apps/cvs/life_cycle/files/input.csv') @@ -193,6 +193,11 @@ def test_get_dsm_matrix(client, std_headers, std_user): assert res.status_code == 200 assert len(matrix) == 5 + # Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + def test_save_dsm(client, std_headers, std_user): # Setup @@ -219,3 +224,8 @@ def test_save_dsm(client, std_headers, std_user): # Assert assert res.status_code == 200 assert dsm == saved_dsm + + # Cleanup + tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) From b2dc7148b6fe6fc0ac928ed668c4effeb932afa9 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 3 Jul 2023 14:59:53 +0200 Subject: [PATCH 052/210] if dsm dont exist return empty --- sedbackend/apps/cvs/life_cycle/storage.py | 32 ++++++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 9b2b28f7..f880b9ed 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -381,10 +381,14 @@ def get_dsm_file_path(db_connection: PooledMySQLConnection, project_id: int, vcs def get_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: 
int, user_id) -> List[List[str or float]]: - path = get_dsm_file_path(db_connection, project_id, vcs_id, user_id).path - with open(path, newline='') as f: - reader = csv.reader(f) - data = list(reader) + try: + path = get_dsm_file_path(db_connection, project_id, vcs_id, user_id).path + with open(path, newline='') as f: + reader = csv.reader(f) + data = list(reader) + except Exception: + return empty_dsm(db_connection, project_id, vcs_id) + return data @@ -436,3 +440,23 @@ def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: dsm_file = open(temp_name, "r+b") return UploadFile(filename=dsm_file.name, file=dsm_file) + + +def empty_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: + vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) + + processes = ["Start"]+[row.iso_process.name if row.iso_process is not None else + row.subprocess.name for row in vcs_table]+["End"] + + dsm = [["Processes"]+processes] + for i in range(1, len(processes)+1): + row = [] + for j in range(len(processes)+1): + if j == 0: + row.append(processes[i-1]) + elif i == j: + row.append("X") + else: + row.append("") + dsm.append(row) + return dsm From 22595e11358f243b4faa9bb5b75ba87e3b9ea6c2 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 4 Jul 2023 14:54:26 +0200 Subject: [PATCH 053/210] fixed save dsm saving file in root --- .../apps/cvs/life_cycle/implementation.py | 30 +++++++++++++++++++ sedbackend/apps/cvs/life_cycle/storage.py | 21 ++++++++----- 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 7795c5bf..ffeff354 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -251,6 +251,36 @@ def get_dsm_file_id(project_id: int, vcs_id: int) -> int: status_code=status.HTTP_404_NOT_FOUND, detail=f"File could not be found" ) + except exceptions.InvalidFileTypeException: + raise HTTPException( + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + detail='Wrong filetype' + ) + except exceptions.FileSizeException: + raise HTTPException( + status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, + detail='File too large' + ) + except exceptions.ProcessesVcsMatchException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Processes in DSM does not match processes in VCS' + ) + except core_project_exceptions.SubProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sub-project not found." + ) + except ApplicationNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such application." + ) + except exceptions.DSMFileFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to replace old DSM file." 
+ ) def get_dsm_file_path(project_id: int, vcs_id: int, user_id) -> file_models.StoredFilePath: diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index f880b9ed..4243beec 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -1,5 +1,6 @@ import csv import io +import os import tempfile from typing import List, Tuple, Optional, TextIO @@ -432,14 +433,18 @@ def get_dsm_from_csv(path): def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: - temp_name = "dsm.csv" - with open(temp_name, "w+") as dsm_file: - csv_writer = csv.writer(dsm_file, delimiter=',') - csv_writer.writerows(matrix) - - dsm_file = open(temp_name, "r+b") - - return UploadFile(filename=dsm_file.name, file=dsm_file) + fd, path = tempfile.mkstemp() + try: + with open(path, "w+") as dsm_file: + csv_writer = csv.writer(dsm_file, delimiter=',') + csv_writer.writerows(matrix) + finally: + dsm_file = open(path, "r+b") + upload_file = UploadFile(filename=dsm_file.name+".csv", file=dsm_file) + os.close(fd) + os.remove(path) + + return upload_file def empty_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: From 10e873a6a8b6f4e8c2e19971e8d412c98b15f667 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 4 Jul 2023 16:46:48 +0200 Subject: [PATCH 054/210] dsm fixes --- sedbackend/apps/cvs/life_cycle/storage.py | 32 +++++++++++------------ sedbackend/apps/cvs/simulation/storage.py | 13 ++++++--- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 4243beec..4573f1c9 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -333,7 +333,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) vcs_processes = [row.iso_process.name if row.iso_process is not None else - row.subprocess.name for row in vcs_table] + f'{row.subprocess.name} ({row.subprocess.parent_process.name})' for row in vcs_table] for process in dsm_file['Processes'].values[1:-1]: if process not in vcs_processes: @@ -418,18 +418,17 @@ def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, use return get_dsm_from_csv(path.path) -def get_dsm_from_csv(path): +def get_dsm_from_csv(path) -> dict: try: df = pd.read_csv(path) - except Exception as e: - logger.debug(f'{e.__class__}, {e}') - - dsm = dict() + dsm = dict() - for v in df.values: - dsm.update({v[0]: v[1::].tolist()}) + for v in df.values: + dsm.update({v[0]: v[1::].tolist()}) - return dsm + return dsm + except Exception as e: + raise file_ex.FileNotFoundException def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: @@ -440,7 +439,7 @@ def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: csv_writer.writerows(matrix) finally: dsm_file = open(path, "r+b") - upload_file = UploadFile(filename=dsm_file.name+".csv", file=dsm_file) + upload_file = UploadFile(filename=dsm_file.name + ".csv", file=dsm_file) os.close(fd) os.remove(path) @@ -450,15 +449,16 @@ def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: def empty_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) - processes = ["Start"]+[row.iso_process.name if row.iso_process is not 
None else - row.subprocess.name for row in vcs_table]+["End"] + processes = ["Start"] + [row.iso_process.name if row.iso_process is not None else + f'{row.subprocess.name} ({row.subprocess.parent_process.name})' for row in vcs_table] + [ + "End"] - dsm = [["Processes"]+processes] - for i in range(1, len(processes)+1): + dsm = [["Processes"] + processes] + for i in range(1, len(processes) + 1): row = [] - for j in range(len(processes)+1): + for j in range(len(processes) + 1): if j == 0: - row.append(processes[i-1]) + row.append(processes[i - 1]) elif i == j: row.append("X") else: diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 56087332..6c9b8829 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -25,6 +25,7 @@ import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage +from sedbackend.apps.core.files import exceptions as file_exceptions SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', @@ -116,7 +117,10 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] dsm = None if len(dsm_id) > 0: - dsm = get_dsm_from_file_id(db_connection, dsm_id[0][1], user_id) + try: + dsm = get_dsm_from_file_id(db_connection, dsm_id[0][1], user_id) + except file_exceptions.FileNotFoundException: + pass for design_group_id in design_group_ids: sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] if sim_data is None or sim_data == []: @@ -139,6 +143,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed dsm = create_simple_dsm(processes) logger.debug(f'DSM: {dsm}') + logger.debug(f'Processes: {[process.name for process in processes]}') sim = des.Des() @@ -220,6 +225,7 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, raise e.FormulaEvalException(row['id']) technical_processes.append(p) elif row['sub_name'] is not None: + sub_name = f'{row["sub_name"]} ({row["iso_name"]})' try: time = nsp.eval(parse_formula( row['time'], vd_values, mi_values)) @@ -232,7 +238,7 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, 'time', time, cost_formula)), nsp.eval(expr.replace_all( 'time', time, revenue_formula)), - row['sub_name'], non_tech_add, TIME_FORMAT_DICT.get( + sub_name, non_tech_add, TIME_FORMAT_DICT.get( row['time_unit'].lower()) ) @@ -355,7 +361,8 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs.id) for row in rows: if (row.iso_process is not None and row.iso_process.name == sim_settings.flow_process) or \ - (row.subprocess is not None and row.subprocess.name == sim_settings.flow_process): + (row.subprocess is not None and f'{row.subprocess.name} ({row.subprocess.parent_process.name})' + == sim_settings.flow_process): flow_process_exists = True break From 7dbf1af990f087b3fa06bce19a0da8c158f134be Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 4 Jul 2023 16:57:15 +0200 Subject: [PATCH 055/210] fixed failing test --- tests/apps/cvs/testutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 
4d498e0d..1d0c4fa1 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -607,7 +607,7 @@ def seed_random_sim_settings(user_id: int, project_id: int) -> sim_model.SimSett rows = seed_vcs_table_rows(user_id, project_id, vcs.id, 3) for row in rows: if row.subprocess is not None: - flow_process = row.subprocess.name + flow_process = f'{row.subprocess.name} ({row.subprocess.parent_process.name})' break elif row.iso_process is not None: flow_process = row.iso_process.name From fcda1f234b491ebe5e49be7d01dbfc00a5b155c8 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 5 Jul 2023 08:06:03 +0200 Subject: [PATCH 056/210] dont delete current dsm if new dsm fail --- sedbackend/apps/cvs/life_cycle/storage.py | 32 +++++++++++------------ 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 4573f1c9..be794870 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -299,22 +299,6 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, if model_file.extension != ".csv": raise exceptions.InvalidFileTypeException - try: - file_id = get_dsm_file_id(db_connection, project_id, vcs_id) - if file_id is not None: - delete_dsm_file(db_connection, project_id, vcs_id, file_id, user_id) - except file_ex.FileNotFoundException: - pass # File doesn't exist, so we don't need to delete it - except Exception: - try: - # File does not exist in persistent storage but exists in database - delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ - .where('vcs = %s', [vcs_id]) \ - .execute(return_affected_rows=True) - except: - pass - with model_file.file_object as f: f.seek(0) tmp_file = f.read() @@ -339,6 +323,22 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, if process not in vcs_processes: raise exceptions.ProcessesVcsMatchException + try: + file_id = get_dsm_file_id(db_connection, project_id, vcs_id) + if file_id is not None: + delete_dsm_file(db_connection, project_id, vcs_id, file_id, user_id) + except file_ex.FileNotFoundException: + pass # File doesn't exist, so we don't need to delete it + except Exception: + try: + # File does not exist in persistent storage but exists in database + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_DSM_FILES_TABLE) \ + .where('vcs = %s', [vcs_id]) \ + .execute(return_affected_rows=True) + except: + pass + f.seek(0) stored_file = file_storage.db_save_file(db_connection, model_file) From eb83321393408b93862043eaaeb7ec34e8a9f975 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 5 Jul 2023 16:26:43 +0200 Subject: [PATCH 057/210] fill empty dsm value with 0 --- sedbackend/apps/cvs/life_cycle/storage.py | 6 ++++-- sedbackend/apps/cvs/simulation/storage.py | 14 +++++++++++--- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index be794870..a2bd5b88 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -388,7 +388,7 @@ def get_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, reader = csv.reader(f) data = list(reader) except Exception: - return empty_dsm(db_connection, project_id, vcs_id) + return initial_dsm(db_connection, project_id, vcs_id) return data @@ -446,7 +446,7 @@ def 
csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: return upload_file -def empty_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: +def initial_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) processes = ["Start"] + [row.iso_process.name if row.iso_process is not None else @@ -461,6 +461,8 @@ def empty_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int row.append(processes[i - 1]) elif i == j: row.append("X") + elif i == j - 1: + row.append("1") else: row.append("") dsm.append(row) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 6c9b8829..4e47e43d 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -1,5 +1,7 @@ import sys import tempfile +from math import isnan + from fastapi import UploadFile from mysql.connector.pooling import PooledMySQLConnection import pandas as pd @@ -119,6 +121,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed if len(dsm_id) > 0: try: dsm = get_dsm_from_file_id(db_connection, dsm_id[0][1], user_id) + dsm = fill_dsm_with_zeros(dsm) except file_exceptions.FileNotFoundException: pass for design_group_id in design_group_ids: @@ -142,9 +145,6 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed if dsm is None: dsm = create_simple_dsm(processes) - logger.debug(f'DSM: {dsm}') - logger.debug(f'Processes: {[process.name for process in processes]}') - sim = des.Des() try: @@ -514,6 +514,14 @@ def get_dsm_from_excel(path): return dsm +def fill_dsm_with_zeros(dsm: dict) -> dict: + for value in dsm.values(): + for i in range(len(value)): + if value[i] == "" or (isinstance(value[i], float) and isnan(value[i])): + value[i] = 0 + return dsm + + def populate_sim_settings(db_result) -> models.SimSettings: logger.debug(f'Populating simulation settings') return models.SimSettings( From e5d697eb2e73df52835363b46b0543b1c3d6a782 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 6 Jul 2023 09:21:37 +0200 Subject: [PATCH 058/210] apply dsm to all --- .../apps/cvs/life_cycle/implementation.py | 39 +++++++++++++++++++ sedbackend/apps/cvs/life_cycle/models.py | 5 +++ sedbackend/apps/cvs/life_cycle/router.py | 13 ++++++- sedbackend/apps/cvs/life_cycle/storage.py | 25 ++++++++++++ tests/apps/cvs/life_cycle/test_dsm_files.py | 35 +++++++++++++++++ 5 files changed, 115 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index ffeff354..3faf5bf8 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -317,3 +317,42 @@ def get_dsm(project_id: int, vcs_id: int, user_id: int) -> List[List[str or floa status_code=status.HTTP_403_FORBIDDEN, detail=f"User does not have access to the file" ) + + +def apply_dsm_to_all(project_id: int, vcs_id: int, dsm: List[List[str or float]], + user_id: int) -> models.DSMApplyAllResponse: + try: + with get_connection() as con: + res = storage.apply_dsm_to_all(con, project_id, vcs_id, dsm, user_id) + con.commit() + return res + except exceptions.InvalidFileTypeException: + raise HTTPException( + status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + detail='Wrong filetype' + ) + except exceptions.FileSizeException: + raise 
HTTPException( + status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE, + detail='File too large' + ) + except exceptions.ProcessesVcsMatchException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='Processes in DSM does not match processes in VCS.' + ) + except core_project_exceptions.SubProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Sub-project not found." + ) + except ApplicationNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="No such application." + ) + except exceptions.DSMFileFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Failed to replace old DSM file." + ) diff --git a/sedbackend/apps/cvs/life_cycle/models.py b/sedbackend/apps/cvs/life_cycle/models.py index c057e324..9fe25a21 100644 --- a/sedbackend/apps/cvs/life_cycle/models.py +++ b/sedbackend/apps/cvs/life_cycle/models.py @@ -35,3 +35,8 @@ class StartStopNodePost(NodePost): class BPMNGet(BaseModel): nodes: List[ProcessNodeGet] + + +class DSMApplyAllResponse(BaseModel): + success_vcs: List[vcs_models.VCS] + failed_vcs: List[vcs_models.VCS] diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 6abbd48d..6fc53026 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -9,8 +9,6 @@ from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.cvs.life_cycle import models, implementation from sedbackend.apps.cvs.project.router import CVS_APP_SID -from sedbackend.apps.core.files import models as file_models -from fastapi.responses import FileResponse router = APIRouter() @@ -106,3 +104,14 @@ async def upload_dsm_file(native_project_id: int, vcs_id: int, file: UploadFile, ) async def get_dsm_file(native_project_id: int, vcs_id: int) -> int: return implementation.get_dsm_file_id(native_project_id, vcs_id) + + +@router.post( + '/project/{native_project_id}/vcs/{vcs_id}/dsm/all', + summary="Apply DSM to all VCS", + response_model=models.DSMApplyAllResponse, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] +) +async def apply_dsm_to_all(native_project_id: int, vcs_id: int, dsm: List[List[str or float]], + user: User = Depends(get_current_active_user)) -> models.DSMApplyAllResponse: + return implementation.apply_dsm_to_all(native_project_id, vcs_id, dsm, user.id) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index a2bd5b88..dd7a0ff3 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -319,6 +319,9 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_processes = [row.iso_process.name if row.iso_process is not None else f'{row.subprocess.name} ({row.subprocess.parent_process.name})' for row in vcs_table] + if len(dsm_file['Processes'].values[1:-1]) != len(vcs_processes): + raise exceptions.ProcessesVcsMatchException + for process in dsm_file['Processes'].values[1:-1]: if process not in vcs_processes: raise exceptions.ProcessesVcsMatchException @@ -467,3 +470,25 @@ def initial_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i row.append("") dsm.append(row) return dsm + + +def apply_dsm_to_all(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, dsm: List[List[str or float]], + user_id: int) -> models.DSMApplyAllResponse: + vcss = 
vcs_storage.get_all_vcs(db_connection, project_id).chunk + + save_dsm_matrix(db_connection, project_id, vcs_id, dsm, user_id) + + success_vcs = [[vcs for vcs in vcss if vcs.id == vcs_id][0]] + failed_vcs = [] + + vcss = [vcs for vcs in vcs_storage.get_all_vcs(db_connection, project_id).chunk if vcs.id != vcs_id] + + # Try to apply to other vcs. Will only pass if they have the same processes + for vcs in vcss: + try: + save_dsm_matrix(db_connection, project_id, vcs.id, dsm, user_id) + success_vcs.append(vcs) + except Exception: + failed_vcs.append(vcs) + + return models.DSMApplyAllResponse(success_vcs=success_vcs, failed_vcs=failed_vcs) diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index d483751d..53a9f05f 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -229,3 +229,38 @@ def test_save_dsm(client, std_headers, std_user): tu.delete_dsm_file_from_vcs_id(project.id, vcs.id, current_user.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) + + +def test_apply_dsm_to_all(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcss = [tu.seed_random_vcs(project.id) for _ in range(3)] + + rows = [std_rows[0], std_rows[1]] + rows_alt = [std_rows[0], std_rows[1], std_rows[2]] + + tu.create_vcs_table(project.id, vcss[0].id, rows) + tu.create_vcs_table(project.id, vcss[1].id, rows) + tu.create_vcs_table(project.id, vcss[2].id, rows_alt) + + dsm = [["Processes", "Start", "Architectural design", "Verification", "End"], + ["Start", "X", "1", "0", "0"], + ["Architectural design", "0", "X", "1", "0"], + ["Verification", "0", "0", "X", "1"], + ["End", "0", "0", "0", "X"]] + + # Act + res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcss[0].id}/dsm/all', + headers=std_headers, + json=dsm) + + # Assert + assert res.status_code == 200 + assert [vcs["id"] for vcs in res.json()["success_vcs"]] == [vcss[0].id, vcss[1].id] + assert [vcs["id"] for vcs in res.json()["failed_vcs"]] == [vcss[2].id] + + # Cleanup + [tu.delete_dsm_file_from_vcs_id(project.id, vcs["id"], current_user.id) for vcs in res.json()["success_vcs"]] + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) + tu.delete_project_by_id(project.id, current_user.id) From bb978625f022557e410e947414fb0a8d4a7e6867 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 7 Jul 2023 09:24:59 +0200 Subject: [PATCH 059/210] raise exception if process name is not unique --- sedbackend/apps/cvs/vcs/exceptions.py | 4 ++ sedbackend/apps/cvs/vcs/implementation.py | 5 +++ sedbackend/apps/cvs/vcs/storage.py | 54 +++++++++++++++++++++-- 3 files changed, 59 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/exceptions.py b/sedbackend/apps/cvs/vcs/exceptions.py index 19d1011a..d2879f7f 100644 --- a/sedbackend/apps/cvs/vcs/exceptions.py +++ b/sedbackend/apps/cvs/vcs/exceptions.py @@ -123,6 +123,10 @@ class VCSandVCSRowIDMismatchException(Exception): pass +class VCSTableProcessNotUniqueException(Exception): + pass + + # ====================================================================================================================== # VCS Stakeholder needs # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/implementation.py 
b/sedbackend/apps/cvs/vcs/implementation.py index a0d22e7c..714d8307 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -565,6 +565,11 @@ def edit_vcs_table(project_id: int, vcs_id: int, updated_vcs_rows: List[models.V status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not create subprocess' ) + except exceptions.VCSTableProcessNotUniqueException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Process name must be unique' + ) # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index ee06bfae..6efc9ac7 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -333,9 +333,10 @@ def add_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, valu return True + def add_vcs_multiple_needs_drivers(db_connection: PooledMySQLConnection, need_driver_ids: List[Tuple[int, int]]): logger.debug(f'Add value drivers to stakeholder needs') - + prepared_list = [] try: insert_statement = f'INSERT INTO {CVS_VCS_NEED_DRIVERS_TABLE} (stakeholder_need, value_driver) VALUES' @@ -343,15 +344,16 @@ def add_vcs_multiple_needs_drivers(db_connection: PooledMySQLConnection, need_dr insert_statement += f'(%s, %s),' prepared_list.append(need) prepared_list.append(driver) - + with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement[:-1], prepared_list) except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.GenericDatabaseException - + return True + def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, value_drivers: List[int]) -> bool: logger.debug(f'Update value drivers in stakeholder need with id={need_id}.') @@ -872,6 +874,16 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id new_table_ids = [] + process_names = get_process_names_from_table(db_connection, updated_vcs_rows) + + set_process_names = set(process_names) + + logger.debug(f'Process names: {process_names}') + logger.debug(f'Set process names: {set_process_names}') + + if len(set_process_names) < len(process_names): + raise exceptions.VCSTableProcessNotUniqueException + for row in updated_vcs_rows: if row.iso_process is None and row.subprocess is None: @@ -949,7 +961,6 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id def remove_duplicate_names(db_connection: PooledMySQLConnection, vcs_id: int, rows: List[models.VcsRowPost]): - new_subprocesses: List[Tuple[int, models.VCSSubprocessPost]] = [] done = [] for i in range(0, len(rows)): @@ -1060,3 +1071,38 @@ def duplicate_whole_vcs(db_connection: PooledMySQLConnection, project_id: int, v [duplicate_vcs_table(db_connection, project_id, vcs.id, table) for vcs in vcs_list] return vcs_list + + +def get_process_names_from_table(db_connection: PooledMySQLConnection, + table_rows: List[models.VcsRowPost]) -> List[str]: + subprocess_ids = [row.subprocess for row in table_rows if row.subprocess is not None] + iso_process_ids = [row.iso_process for row in table_rows if row.iso_process is not None] + + if len(set(subprocess_ids)) < len(subprocess_ids) or len(set(iso_process_ids)) < len(iso_process_ids): + raise exceptions.VCSTableProcessNotUniqueException + + sub_where_statement = "id IN (" + ",".join(["%s" for _ in range(len(subprocess_ids))]) + ")" + iso_where_statement = "id IN (" + ",".join(["%s" for _ 
in range(len(iso_process_ids))]) + ")" + + sub_process_result = [] + iso_process_result = [] + + if len(subprocess_ids) > 0: + try: + select_statement = MySQLStatementBuilder(db_connection) + sub_process_result = select_statement.select(CVS_VCS_SUBPROCESS_TABLE, ['name']) \ + .where(sub_where_statement, subprocess_ids) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + except Error: + raise exceptions.SubprocessNotFoundException + + if len(iso_process_ids) > 0: + try: + select_statement = MySQLStatementBuilder(db_connection) + iso_process_result = select_statement.select(CVS_ISO_PROCESS_TABLE, ['name']) \ + .where(iso_where_statement, iso_process_ids) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + except Error: + raise exceptions.ISOProcessNotFoundException + + return [process["name"] for process in sub_process_result + iso_process_result] From 2a194e451c0f999914f9ee6d72cb33cc4a7e52a9 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 7 Jul 2023 10:03:50 +0200 Subject: [PATCH 060/210] raise exception if subprocess not unique on creation --- sedbackend/apps/cvs/vcs/exceptions.py | 4 ++++ sedbackend/apps/cvs/vcs/implementation.py | 5 +++++ sedbackend/apps/cvs/vcs/router.py | 1 - sedbackend/apps/cvs/vcs/storage.py | 9 +++++++++ 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/vcs/exceptions.py b/sedbackend/apps/cvs/vcs/exceptions.py index d2879f7f..d8d56a3d 100644 --- a/sedbackend/apps/cvs/vcs/exceptions.py +++ b/sedbackend/apps/cvs/vcs/exceptions.py @@ -92,6 +92,10 @@ class SubprocessFailedCreationException(Exception): pass +class SubprocessNotUniqueException(Exception): + pass + + # ====================================================================================================================== # VCS Table # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 714d8307..b5ce8e8f 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -429,6 +429,11 @@ def create_subprocess(project_id: int, vcs_id: int, subprocess_post: models.VCSS status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=err.msg ) + except exceptions.SubprocessNotUniqueException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Subprocess name must be unique.', + ) def edit_subprocess(project_id: int, subprocess_id: int, diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 8449a93b..91bb37a1 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -4,7 +4,6 @@ from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.core.users.models import User -from sedbackend.apps.cvs.design.router import router from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.cvs.vcs.models import ValueDriver from sedbackend.libs.datastructures.pagination import ListChunk diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 6efc9ac7..bd94ea35 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -557,6 +557,15 @@ def create_subprocess(db_connection: PooledMySQLConnection, project_id: int, vcs columns = ['vcs', 'name', 'iso_process'] values = [vcs_id, 
subprocess_post.name, subprocess_post.parent_process_id] + count_statement = MySQLStatementBuilder(db_connection) + count_result = count_statement.count(CVS_VCS_SUBPROCESS_TABLE) \ + .where(f'name = %s and vcs = %s', [subprocess_post.name, vcs_id]) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + count = count_result['count'] + + if count > 0: + raise exceptions.SubprocessNotUniqueException + insert_statement = MySQLStatementBuilder(db_connection) try: insert_statement \ From 95373e5ba32db070bc644ceef2d677a778425907 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 7 Jul 2023 11:05:51 +0200 Subject: [PATCH 061/210] rewrote failing tests --- .../simulation/test_sim_multiprocessing.py | 405 ++++++++---------- tests/apps/cvs/simulation/test_simulation.py | 90 ++-- tests/apps/cvs/testutils.py | 57 --- 3 files changed, 208 insertions(+), 344 deletions(-) diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index 5c8a090c..7b129dd5 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -5,239 +5,202 @@ def test_run_single_monte_carlo_sim(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = True - settings.runs = 5 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 200 - - #Should probably assert some other stuff about the output to ensure that it is correct. 
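(Aside, not part of the patches: the two uniqueness guards added just above in "raise exception if process name is not unique" and "raise exception if subprocess not unique on creation" both reduce to small duplicate checks. A minimal sketch of that intent is below; the SQL side is shown only as a comment and the helper name is illustrative, not the project's API.)

from typing import List

def has_duplicate_names(names: List[str]) -> bool:
    # edit_vcs_table path: duplicates in the posted table -> VCSTableProcessNotUniqueException
    return len(set(names)) < len(names)

# create_subprocess path does the same per (name, vcs) pair in SQL before inserting:
#   SELECT COUNT(*) FROM <subprocess table> WHERE name = %s AND vcs = %s
#   count > 0 -> raise exceptions.SubprocessNotUniqueException (surfaced as HTTP 400 "Subprocess name must be unique.")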
- - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) - - -def test_run_mc_sim_invalid_designs(client, std_headers, std_user): - #Setup - amount = 2 - - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcss = [] - dgs = [] - - design_ids = [] - - #TODO Find a way to get a row that is the same across all vcs's - so that there is an interarrival process - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) - design_group = tu.seed_random_design_group(project.id) - vcss.append(vcs) - dgs.append(design_group) - tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 10) #Also creates the vcs rows - design = tu.seed_random_designs(project.id, design_group.id, 1) - design_ids.append(design[0].id + 7000) - - tu.seed_formulas_for_multiple_vcs(project.id, [vcs.id for vcs in vcss], [dg.id for dg in dgs], current_user.id) - - settings = tu.seed_simulation_settings(project.id, [vcs.id for vcs in vcss], design_ids) - settings.monte_carlo = False - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id for vcs in vcss], - "design_group_ids": design_ids - }) - - #Assert - assert res.status_code == 400 - - #Cleanup - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = True + settings.runs = 5 + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 200 + + # Should probably assert some other stuff about the output to ensure that it is correct. + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) + + +def test_run_mc_sim_invalid_design_group(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = True + settings.runs = 5 + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id + 9999] + }) + + # Assert + assert res.status_code == 400 + + # Should probably assert some other stuff about the output to ensure that it is correct. 
+ + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_mc_sim_invalid_vcss(client, std_headers, std_user): - #Setup - amount = 2 - - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcss = [] - dgs = [] - - design_ids = [] - - #TODO Find a way to get a row that is the same across all vcs's - so that there is an interarrival process - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) - design_group = tu.seed_random_design_group(project.id) - vcss.append(vcs) - dgs.append(design_group) - tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 15) #Also creates the vcs rows - design = tu.seed_random_designs(project.id, design_group.id, 1) - design_ids.append(design[0].id) - - tu.seed_formulas_for_multiple_vcs(project.id, [vcs.id for vcs in vcss], [dg.id for dg in dgs], current_user.id) - - settings = tu.seed_simulation_settings(project.id, [vcs.id for vcs in vcss], design_ids) - settings.monte_carlo = False - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [(vcs.id + 4000) for vcs in vcss], - "design_group_ids": [design_group.id] - }) - - #Assert - #print(res.json()) - assert res.status_code == 400 - #Should probably assert some other stuff about the output to ensure that it is correct. - - - #Cleanup - for dg in dgs: - tu.delete_design_group(project.id, dg.id) - - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = True + settings.runs = 5 + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id + 9999], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 400 + + # Should probably assert some other stuff about the output to ensure that it is correct. 
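(Aside, not part of the patches: the three settings tests further down — end time before start time, no flow anchor at all, and both flow anchors set — all expect 400 with 'Settings are not correct'. Taken together they imply a validation rule roughly like the sketch below; this is inferred from the tests, not the actual check in the simulation package.)

def settings_look_valid(settings) -> bool:
    # exactly one of flow_process / flow_start_time may be set, and the time window must be positive
    exactly_one_flow_anchor = (settings.flow_process is None) != (settings.flow_start_time is None)
    return exactly_one_flow_anchor and settings.end_time > settings.start_time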
+ + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_mc_sim_end_time_before_start_time(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.end_time = settings.start_time - 1 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = False + settings.end_time = settings.start_time - 1 + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 400 + assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_mc_sim_no_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = None - settings.flow_process = None - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = False + settings.flow_start_time = None + settings.flow_process = None + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 
400 + assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_mc_sim_both_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = 5 - settings.flow_process = 10 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + settings.monte_carlo = False + settings.flow_start_time = 5 + settings.flow_process = 10 + + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 400 + assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) - if first_tech_process is None: - raise sim_exceptions.NoTechnicalProcessException - settings.monte_carlo = False - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) + first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) + if first_tech_process is None: + raise sim_exceptions.NoTechnicalProcessException + settings.monte_carlo = False + + # Act + res = 
client.post(f'/api/cvs/project/{project.id}/simulation/run-multiprocessing', + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id] + }) + + # Assert + assert res.status_code == 400 + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index aa8ec6bc..b4ff497a 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -33,98 +33,56 @@ def test_run_single_simulation(client, std_headers, std_user): -def test_run_sim_invalid_designs(client, std_headers, std_user): - #Setup - amount = 2 +def test_run_sim_invalid_design_group(client, std_headers, std_user): + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcss = [] - dgs = [] - - design_group_ids = [] - - #TODO Find a way to get a row that is the same across all vcs's - so that there is an interarrival process - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) - design_group = tu.seed_random_design_group(project.id) - vcss.append(vcs) - dgs.append(design_group) - tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 20) #Also creates the vcs rows - design = tu.seed_random_designs(project.id, design_group.id, 1) - design_group_ids.append(design_group.id + 7000) - - tu.seed_formulas_for_multiple_vcs(project.id, [vcs.id for vcs in vcss], [dg.id for dg in dgs], current_user.id) - settings = tu.seed_simulation_settings(project.id, [vcs.id for vcs in vcss], design_group_ids) + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) settings.monte_carlo = False - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run', headers=std_headers, - json = { + json={ "sim_settings": settings.dict(), - "vcs_ids": [vcs.id for vcs in vcss], - "design_group_ids": design_group_ids + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id + 9999] }) - - #Assert + + # Assert assert res.status_code == 400 - #Cleanup - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) def test_run_sim_invalid_vcss(client, std_headers, std_user): - #Setup - amount = 2 + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcss = [] - dgs = [] - - design_ids = [] - - #TODO Find a way to get a row that is the same across all vcs's - so that there is an interarrival process - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) - design_group = tu.seed_random_design_group(project.id) - vcss.append(vcs) - dgs.append(design_group) - tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 15) #Also creates the vcs rows - design = tu.seed_random_designs(project.id, design_group.id, 1) - design_ids.append(design[0].id) - - tu.seed_formulas_for_multiple_vcs(project.id, [vcs.id 
for vcs in vcss], [dg.id for dg in dgs], current_user.id) - settings = tu.seed_simulation_settings(project.id, [vcs.id for vcs in vcss], design_ids) + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) settings.monte_carlo = False - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', + # Act + res = client.post(f'/api/cvs/project/{project.id}/simulation/run', headers=std_headers, - json = { + json={ "sim_settings": settings.dict(), - "vcs_ids": [(vcs.id + 4000) for vcs in vcss], + "vcs_ids": [vcs.id + 9999], "design_group_ids": [design_group.id] }) - - #Assert - #print(res.json()) - assert res.status_code == 400 - #Should probably assert some other stuff about the output to ensure that it is correct. - - #Cleanup - for dg in dgs: - tu.delete_design_group(project.id, dg.id) + # Assert + assert res.status_code == 400 - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) def test_run_sim_end_time_before_start_time(client, std_headers, std_user): diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 1d0c4fa1..9657c1c7 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -455,63 +455,6 @@ def delete_formulas(project_id: int, vcsRow_Dg_ids: List[Tuple[int, int]]): connect_impl.delete_formulas(project_id, vcs_row, dg) -def seed_formulas_for_multiple_vcs(project_id: int, vcss: List[int], dgs: List[int], user_id: int): - tr = random_table_row(user_id, project_id, vcss[0]) - while tr.subprocess != None: - tr = random_table_row(user_id, project_id, vcss[0]) - - row_ids = [] - for vcs_id in vcss: - orig_table = vcs_impl.get_vcs_table(project_id, vcs_id) - table = [ - vcs_model.VcsRowPost( - id=tr.id, - index=tr.index, - stakeholder=tr.stakeholder, - stakeholder_needs=[ - vcs_model.StakeholderNeedPost( - id=need.id, - need=need.need, - value_dimension=need.value_dimension, - rank_weight=need.rank_weight, - value_drivers=[vd.id for vd in need.value_drivers]) - for need in tr.stakeholder_needs], - stakeholder_expectations=tr.stakeholder_expectations, - iso_process=None if tr.iso_process is None else tr.iso_process.id, - subprocess=None if tr.subprocess is None else tr.subprocess.id) - for tr in orig_table] - table.append(tr) - vcs_impl.edit_vcs_table(project_id, vcs_id, table) - row = list(filter(lambda row: row not in orig_table, - vcs_impl.get_vcs_table(project_id, vcs_id)))[0] - row_ids.append(row.id) - - time = str(tu.random.randint(1, 200)) - time_unit = random_time_unit() - cost = str(tu.random.randint(1, 2000)) - revenue = str(tu.random.randint(1, 10000)) - rate = Rate.PRODUCT.value - - # TODO when value drivers and market inputs are connected to the - # formulas, add them here. 
- value_driver_ids = [] - market_input_ids = [] - - formulaPost = connect_model.FormulaPost( - time=time, - time_unit=time_unit, - cost=cost, - revenue=revenue, - rate=rate, - value_driver_ids=value_driver_ids, - market_input_ids=market_input_ids - ) - - for row_id in row_ids: - for dg_id in dgs: - connect_impl.edit_formulas(project_id, row_id, dg_id, formulaPost) - - def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int) -> vcs_model.VcsRow: rows = list(sorted(vcs_impl.get_vcs_table( project_id, vcs_id), key=lambda row: row.index)) From f58bca5ef1bc5afd85c56ad9db5da11129de77a0 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 7 Jul 2023 14:14:08 +0200 Subject: [PATCH 062/210] subprocesses connected to project --- sedbackend/apps/core/db.py | 6 ++- sedbackend/apps/cvs/vcs/implementation.py | 10 ++--- sedbackend/apps/cvs/vcs/models.py | 2 +- sedbackend/apps/cvs/vcs/router.py | 12 +++--- sedbackend/apps/cvs/vcs/storage.py | 50 ++++++++++------------- sql/V230707_cvs.sql | 7 ++++ tests/apps/cvs/testutils.py | 10 ++--- tests/apps/cvs/vcs/test_subprocesses.py | 25 +++++------- 8 files changed, 60 insertions(+), 62 deletions(-) create mode 100644 sql/V230707_cvs.sql diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..350b2d37 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +# host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +# port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index b5ce8e8f..b8f46dc6 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -361,10 +361,10 @@ def get_iso_process(iso_process_id: int) -> models.VCSISOProcess: # ====================================================================================================================== -def get_all_subprocess(project_id: int, vcs_id: int) -> List[models.VCSSubprocess]: +def get_all_subprocess(project_id: int) -> List[models.VCSSubprocess]: try: with get_connection() as con: - return storage.get_all_subprocess(con, project_id, vcs_id) + return storage.get_all_subprocess(con, project_id) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -385,7 +385,7 @@ def get_all_subprocess(project_id: int, vcs_id: int) -> List[models.VCSSubproces def get_subprocess(project_id: int, subprocess_id: int) -> models.VCSSubprocess: try: with get_connection() as con: - return storage.get_subprocess(con, project_id, subprocess_id) + return storage.get_subprocess(con, subprocess_id) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -403,10 +403,10 @@ def get_subprocess(project_id: int, subprocess_id: int) -> models.VCSSubprocess: ) -def create_subprocess(project_id: int, vcs_id: int, subprocess_post: models.VCSSubprocessPost) -> models.VCSSubprocess: +def create_subprocess(project_id: int, subprocess_post: models.VCSSubprocessPost) -> models.VCSSubprocess: try: with get_connection() as con: - result = storage.create_subprocess(con, project_id, vcs_id, subprocess_post) + result = storage.create_subprocess(con, project_id, subprocess_post) con.commit() return result except project_exceptions.CVSProjectNotFoundException: 
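This patch re-scopes subprocesses from a single VCS to their owning project: the vcs_id parameter is dropped through the storage, implementation and router layers, and the cvs_subprocesses table gets a project column in place of vcs (see the V230707_cvs.sql migration further down). A minimal usage sketch of the updated endpoints, assuming the pytest fixtures (client, std_headers, std_user) and the tu/impl_users helpers already used in tests/apps/cvs; the names and ids below are illustrative, not part of the patch:

    # Sketch only: subprocesses are now created and listed per project, with no vcs_id in the path.
    current_user = impl_users.impl_get_user_with_username(std_user.username)
    project = tu.seed_random_project(current_user.id)
    iso_process_id = 1  # assumed: id of any existing ISO process to use as parent

    res = client.post(f'/api/cvs/project/{project.id}/subprocess',
                      headers=std_headers,
                      json={'name': 'Assembly check', 'parent_process_id': iso_process_id})
    assert res.status_code == 200

    res = client.get(f'/api/cvs/project/{project.id}/subprocess/all', headers=std_headers)
    assert res.status_code == 200
    assert len(res.json()) == 1
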
diff --git a/sedbackend/apps/cvs/vcs/models.py b/sedbackend/apps/cvs/vcs/models.py index be5f60d3..3bea5711 100644 --- a/sedbackend/apps/cvs/vcs/models.py +++ b/sedbackend/apps/cvs/vcs/models.py @@ -43,7 +43,7 @@ class VCSISOProcess(BaseModel): class VCSSubprocess(BaseModel): id: int - vcs_id: int + project_id: int name: str parent_process: VCSISOProcess diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 91bb37a1..262369a2 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -192,13 +192,13 @@ async def get_all_iso_process() -> List[models.VCSISOProcess]: @router.get( - '/project/{native_project_id}/vcs/{vcs_id}/subprocess/all', + '/project/{native_project_id}/subprocess/all', summary='Returns all subprocesses of a project', response_model=List[models.VCSSubprocess], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_subprocess(native_project_id: int, vcs_id: int) -> List[models.VCSSubprocess]: - return implementation.get_all_subprocess(native_project_id, vcs_id) +async def get_all_subprocess(native_project_id: int) -> List[models.VCSSubprocess]: + return implementation.get_all_subprocess(native_project_id) @router.get( @@ -212,14 +212,14 @@ async def get_subprocess(native_project_id: int, subprocess_id: int) -> models.V @router.post( - '/project/{native_project_id}/vcs/{vcs_id}/subprocess', + '/project/{native_project_id}/subprocess', summary='Creates a new subprocess', response_model=models.VCSSubprocess, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def create_subprocess(native_project_id: int, vcs_id: int, +async def create_subprocess(native_project_id: int, subprocess_post: models.VCSSubprocessPost) -> models.VCSSubprocess: - return implementation.create_subprocess(native_project_id, vcs_id, subprocess_post) + return implementation.create_subprocess(native_project_id, subprocess_post) @router.put( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index bd94ea35..46a9ef81 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -505,20 +505,16 @@ def populate_iso_process(db_result): # ====================================================================================================================== -def get_all_subprocess(db_connection: PooledMySQLConnection, project_id: int, - vcs_id: int) -> List[models.VCSSubprocess]: - logger.debug(f'Fetching all subprocesses for vcs with id={vcs_id}.') +def get_all_subprocess(db_connection: PooledMySQLConnection, project_id: int) -> List[models.VCSSubprocess]: + logger.debug(f'Fetching all subprocesses for project with id={project_id}.') - get_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project - - query = f'SELECT cvs_subprocesses.id, cvs_subprocesses.vcs, cvs_subprocesses.name, \ + query = f'SELECT cvs_subprocesses.id, cvs_subprocesses.project, cvs_subprocesses.name, \ cvs_subprocesses.iso_process, cvs_iso_processes.name as iso_process_name, category \ FROM cvs_subprocesses INNER JOIN cvs_iso_processes ON cvs_subprocesses.iso_process = cvs_iso_processes.id \ - WHERE cvs_subprocesses.vcs = %s' + WHERE cvs_subprocesses.project = %s' - # INNER JOIN cvs_vcs_rows ON subprocess = cvs_subprocesses.id with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, [vcs_id]) + cursor.execute(query, [project_id]) res = cursor.fetchall() if res is None: @@ 
-531,10 +527,10 @@ def get_all_subprocess(db_connection: PooledMySQLConnection, project_id: int, return subprocess_list -def get_subprocess(db_connection: PooledMySQLConnection, project_id: int, subprocess_id: int) -> models.VCSSubprocess: +def get_subprocess(db_connection: PooledMySQLConnection, subprocess_id: int) -> models.VCSSubprocess: logger.debug(f'Fetching subprocess with id={subprocess_id}.') - query = f'SELECT cvs_subprocesses.id, cvs_subprocesses.vcs, cvs_subprocesses.name, \ + query = f'SELECT cvs_subprocesses.id, cvs_subprocesses.project, cvs_subprocesses.name, \ cvs_subprocesses.iso_process, cvs_iso_processes.name as iso_process_name, category \ FROM cvs_subprocesses INNER JOIN cvs_iso_processes ON iso_process = cvs_iso_processes.id\ WHERE cvs_subprocesses.id = %s' @@ -545,21 +541,19 @@ def get_subprocess(db_connection: PooledMySQLConnection, project_id: int, subpro raise exceptions.SubprocessNotFoundException(subprocess_id) res = dict(zip(cursor.column_names, res)) - get_vcs(db_connection, project_id, res['vcs']) # Check if VCS exists and belongs to project - return populate_subprocess(res) -def create_subprocess(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, +def create_subprocess(db_connection: PooledMySQLConnection, project_id: int, subprocess_post: models.VCSSubprocessPost) -> models.VCSSubprocess: logger.debug(f'Creating a subprocesses.') - columns = ['vcs', 'name', 'iso_process'] - values = [vcs_id, subprocess_post.name, subprocess_post.parent_process_id] + columns = ['project', 'name', 'iso_process'] + values = [project_id, subprocess_post.name, subprocess_post.parent_process_id] count_statement = MySQLStatementBuilder(db_connection) count_result = count_statement.count(CVS_VCS_SUBPROCESS_TABLE) \ - .where(f'name = %s and vcs = %s', [subprocess_post.name, vcs_id]) \ + .where(f'name = %s and project = %s', [subprocess_post.name, project_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) count = count_result['count'] @@ -584,10 +578,10 @@ def create_subprocess(db_connection: PooledMySQLConnection, project_id: int, vcs subprocess_id = insert_statement.last_insert_id - return get_subprocess(db_connection, project_id, subprocess_id) + return get_subprocess(db_connection, subprocess_id) -def create_multiple_subprocesses(db_connection: PooledMySQLConnection, vcs_id: int, +def create_multiple_subprocesses(db_connection: PooledMySQLConnection, project_id: int, subprocesses: List[Tuple[int, models.VCSSubprocessPost]]) -> List[Tuple[int, int]]: logger.debug(f'Creating {len(subprocesses)} subprocesses.') @@ -597,10 +591,10 @@ def create_multiple_subprocesses(db_connection: PooledMySQLConnection, vcs_id: i prepared_list = [] try: - insert_statement = f'INSERT INTO {CVS_VCS_SUBPROCESS_TABLE} (vcs, name, iso_process) VALUES ' + insert_statement = f'INSERT INTO {CVS_VCS_SUBPROCESS_TABLE} (project, name, iso_process) VALUES ' for subprocess in subprocesses: insert_statement += f'(%s,%s,%s),' - prepared_list.append(vcs_id) + prepared_list.append(project_id) prepared_list.append(subprocess[1].name) prepared_list.append(subprocess[1].parent_process_id) with db_connection.cursor(prepared=True) as cursor: @@ -618,7 +612,7 @@ def edit_subprocess(db_connection: PooledMySQLConnection, project_id: int, subpr new_subprocess: models.VCSSubprocessPut) -> bool: logger.debug(f'Editing subprocesses with id={subprocess_id}.') - get_subprocess(db_connection, project_id, subprocess_id) # Check if subprocess exists and belongs to project + get_subprocess(db_connection, 
subprocess_id) # Check if subprocess exists and belongs to project # Updating update_statement = MySQLStatementBuilder(db_connection) @@ -636,7 +630,7 @@ def edit_subprocess(db_connection: PooledMySQLConnection, project_id: int, subpr def delete_subprocess(db_connection: PooledMySQLConnection, project_id: int, subprocess_id: int) -> bool: logger.debug(f'Deleting subprocesses with id={subprocess_id}.') - subprocess = get_subprocess(db_connection, project_id, subprocess_id) + subprocess = get_subprocess(db_connection, subprocess_id) select_statement = MySQLStatementBuilder(db_connection) result = select_statement \ @@ -662,7 +656,7 @@ def populate_subprocess(db_result) -> models.VCSSubprocess: logger.debug(f'Populating model for subprocess with id={db_result["id"]}.') return models.VCSSubprocess( id=db_result['id'], - vcs_id=db_result['vcs'], + project_id=db_result['project'], name=db_result['name'], parent_process=models.VCSISOProcess( id=db_result['iso_process'], @@ -831,7 +825,7 @@ def populate_vcs_row(db_connection: PooledMySQLConnection, project_id: int, db_r if db_result['iso_process'] is not None: iso_process = get_iso_process(int(db_result['iso_process']), db_connection) elif db_result['subprocess'] is not None: - subprocess = get_subprocess(db_connection, project_id, db_result['subprocess']) + subprocess = get_subprocess(db_connection, db_result['subprocess']) return models.VcsRow( id=db_result['id'], @@ -879,7 +873,7 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id get_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project - updated_vcs_rows = remove_duplicate_names(db_connection, vcs_id, updated_vcs_rows) + updated_vcs_rows = remove_duplicate_names(db_connection, project_id, updated_vcs_rows) new_table_ids = [] @@ -968,7 +962,7 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id return True -def remove_duplicate_names(db_connection: PooledMySQLConnection, vcs_id: int, +def remove_duplicate_names(db_connection: PooledMySQLConnection, project_id: int, rows: List[models.VcsRowPost]): new_subprocesses: List[Tuple[int, models.VCSSubprocessPost]] = [] done = [] @@ -987,7 +981,7 @@ def remove_duplicate_names(db_connection: PooledMySQLConnection, vcs_id: int, new_subprocesses.append((j, sub)) done.append(j) - subprocesses = create_multiple_subprocesses(db_connection, vcs_id, new_subprocesses) + subprocesses = create_multiple_subprocesses(db_connection, project_id, new_subprocesses) for index, subprocess_id in subprocesses: row_post = models.VcsRowPost( diff --git a/sql/V230707_cvs.sql b/sql/V230707_cvs.sql new file mode 100644 index 00000000..0ad143ec --- /dev/null +++ b/sql/V230707_cvs.sql @@ -0,0 +1,7 @@ +SET FOREIGN_KEY_CHECKS=0; +ALTER TABLE `seddb`.`cvs_subprocesses` + ADD COLUMN `project` INT UNSIGNED NOT NULL AFTER `id`, + MODIFY COLUMN `name` VARCHAR(64), + DROP FOREIGN KEY `cvs_subprocesses_ibfk_2`, + DROP COLUMN `vcs`; +SET FOREIGN_KEY_CHECKS=1; diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 9657c1c7..f4b9c65d 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -142,7 +142,7 @@ def random_table_row( index = random.randint(1, 15) if random.randint(1, 8) == 2: - subprocess = random_subprocess(project_id, vcs_id) + subprocess = random_subprocess(project_id) subprocess_id = subprocess.id else: if random.randint(1, 5) == 1: #Give 1/5 chance to produce non-tech process @@ -171,7 +171,7 @@ def random_table_row( return table_row -def 
random_subprocess(project_id: int, vcs_id: int, name: str = None, parent_process_id: int = None): +def random_subprocess(project_id: int, name: str = None, parent_process_id: int = None): if name is None: name = tu.random_str(5, 50) if parent_process_id is None: @@ -181,14 +181,14 @@ def random_subprocess(project_id: int, vcs_id: int, name: str = None, parent_pro name=name, parent_process_id=parent_process_id ) - subp = vcs_impl.create_subprocess(project_id, vcs_id, subprocess) + subp = vcs_impl.create_subprocess(project_id, subprocess) return subp -def seed_random_subprocesses(project_id: int, vcs_id: int, amount=15): +def seed_random_subprocesses(project_id: int, amount=15): subprocess_list = [] for _ in range(amount): - subprocess_list.append(random_subprocess(project_id, vcs_id)) + subprocess_list.append(random_subprocess(project_id)) return subprocess_list diff --git a/tests/apps/cvs/vcs/test_subprocesses.py b/tests/apps/cvs/vcs/test_subprocesses.py index 08939a83..8eb564a9 100644 --- a/tests/apps/cvs/vcs/test_subprocesses.py +++ b/tests/apps/cvs/vcs/test_subprocesses.py @@ -7,10 +7,9 @@ def test_get_all_subprocesses(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - tu.seed_random_subprocesses(project.id, vcs.id, 5) + tu.seed_random_subprocesses(project.id, 5) # Act - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/subprocess/all', headers=std_headers) + res = client.get(f'/api/cvs/project/{project.id}/subprocess/all', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK assert len(res.json()) == 5 @@ -23,9 +22,8 @@ def test_get_all_subprocesses_no_subprocesses(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) # Act - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/subprocess/all', headers=std_headers) + res = client.get(f'/api/cvs/project/{project.id}/subprocess/all', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK assert len(res.json()) == 0 @@ -38,8 +36,7 @@ def test_get_subprocess(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - subprocess = tu.seed_random_subprocesses(project.id, vcs.id, 1)[0] + subprocess = tu.seed_random_subprocesses(project.id, 1)[0] # Act res = client.get(f'/api/cvs/project/{project.id}/subprocess/{subprocess.id}', headers=std_headers) # Assert @@ -55,8 +52,7 @@ def test_get_subprocess_not_found(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - tu.seed_random_subprocesses(project.id, vcs.id, 1) + tu.seed_random_subprocesses(project.id, 1) # Act res = client.get(f'/api/cvs/project/{project.id}/subprocess/999', headers=std_headers) # Assert @@ -70,10 +66,9 @@ def test_create_subprocess(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) - subprocess = tu.seed_random_subprocesses(project.id, vcs.id, 1)[0] + subprocess = 
tu.seed_random_subprocesses(project.id, 1)[0] # Act - res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/subprocess', headers=std_headers, json={ + res = client.post(f'/api/cvs/project/{project.id}/subprocess', headers=std_headers, json={ 'name': 'New subprocess', 'parent_process_id': subprocess.parent_process.id }) @@ -91,7 +86,7 @@ def test_edit_subprocess(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id) - subprocess = tu.seed_random_subprocesses(project.id, vcs.id, 1)[0] + subprocess = tu.seed_random_subprocesses(project.id, 1)[0] # Act res = client.put(f'/api/cvs/project/{project.id}/subprocess/{subprocess.id}', headers=std_headers, json={ 'name': 'New name', @@ -113,12 +108,12 @@ def test_delete_subprocess(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id) - subprocess = tu.seed_random_subprocesses(project.id, vcs.id, 1)[0] + subprocess = tu.seed_random_subprocesses(project.id, 1)[0] # Act res = client.delete(f'/api/cvs/project/{project.id}/subprocess/{subprocess.id}', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK - assert len(impl_vcs.get_all_subprocess(project.id, vcs.id)) == 0 + assert len(impl_vcs.get_all_subprocess(project.id)) == 0 # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) From 53bdbe412b961c9d84a33181a0764b7cbd82bc19 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 7 Jul 2023 14:16:58 +0200 Subject: [PATCH 063/210] db update --- sedbackend/apps/core/db.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 350b2d37..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -# host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -# port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From cf8fd1f8e385ef549bf3d72c6fa77066728cea29 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 10 Jul 2023 16:44:50 +0200 Subject: [PATCH 064/210] name saved on subproject --- sedbackend/apps/cvs/project/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 0f4ff9f1..f56ef6c8 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -67,7 +67,7 @@ def create_cvs_project(db_connection: PooledMySQLConnection, project: models.CVS cvs_project_id = insert_statement.last_insert_id # Insert corresponding subproject row - subproject = proj_models.SubProjectPost(application_sid=CVS_APPLICATION_SID, native_project_id=cvs_project_id) + subproject = proj_models.SubProjectPost(name=project.name, application_sid=CVS_APPLICATION_SID, native_project_id=cvs_project_id) proj_storage.db_post_subproject(db_connection, subproject, user_id) return get_cvs_project(db_connection, cvs_project_id) From 8d5dc398fcca9218040ae669ab72a4f314783743 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 11 Jul 2023 15:43:52 +0200 Subject: [PATCH 065/210] value driver add relation to project when created --- sedbackend/apps/core/db.py | 4 ++-- 
sedbackend/apps/cvs/vcs/models.py | 1 + sedbackend/apps/cvs/vcs/storage.py | 29 +++++++++++++++++++++++++---- sql/V220608_cvs.sql | 13 +++++++++++++ 4 files changed, 41 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..daa1d89a 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/models.py b/sedbackend/apps/cvs/vcs/models.py index be5f60d3..9d7f4754 100644 --- a/sedbackend/apps/cvs/vcs/models.py +++ b/sedbackend/apps/cvs/vcs/models.py @@ -89,6 +89,7 @@ class ValueDriver(BaseModel): class ValueDriverPost(BaseModel): name: str unit: Optional[str] = None + project_id: int # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 07a8043d..e16a8e86 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -20,6 +20,9 @@ CVS_VALUE_DRIVER_TABLE = 'cvs_value_drivers' CVS_VALUE_DRIVER_COLUMNS = ['id', 'user', 'name', 'unit'] +CVS_PROJECT_VALUE_DRIVER_TABLE = 'cvs_project_value_drivers' +CVS_PROJECT_VALUE_DRIVER_COLUMNS = ['project', 'value_driver'] + CVS_VCS_ROW_DRIVERS_TABLE = 'cvs_rowDrivers' CVS_VCS_ROW_DRIVERS_COLUMNS = ['vcs_row', 'value_driver'] @@ -328,9 +331,10 @@ def add_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, valu return True + def add_vcs_multiple_needs_drivers(db_connection: PooledMySQLConnection, need_driver_ids: List[Tuple[int, int]]): logger.debug(f'Add value drivers to stakeholder needs') - + prepared_list = [] try: insert_statement = f'INSERT INTO {CVS_VCS_NEED_DRIVERS_TABLE} (stakeholder_need, value_driver) VALUES' @@ -338,15 +342,16 @@ def add_vcs_multiple_needs_drivers(db_connection: PooledMySQLConnection, need_dr insert_statement += f'(%s, %s),' prepared_list.append(need) prepared_list.append(driver) - + with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement[:-1], prepared_list) except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.GenericDatabaseException - + return True + def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, value_drivers: List[int]) -> bool: logger.debug(f'Update value drivers in stakeholder need with id={need_id}.') @@ -361,6 +366,22 @@ def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, v return True +def add_project_value_driver(db_connection: PooledMySQLConnection, project_id: int, value_driver_id: int) -> bool: + logger.debug(f'Adding relation between project_id={project_id} and value_driver_id={value_driver_id}') + + try: + insert_statement = MySQLStatementBuilder(db_connection) + insert_statement \ + .insert(table=CVS_PROJECT_VALUE_DRIVER_TABLE, columns=CVS_PROJECT_VALUE_DRIVER_COLUMNS) \ + .set_values([project_id, value_driver_id]) \ + .execute(fetch_type=FetchType.FETCH_NONE) + except Error as e: + logger.debug(f'Error msg: {e.msg}') + raise exceptions.GenericDatabaseException + + return True + + def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> models.ValueDriver: logger.debug(f'Fetching value driver with id={value_driver_id}.') @@ -390,6 +411,7 @@ def 
create_value_driver(db_connection: PooledMySQLConnection, user_id: int, .set_values([user_id, value_driver_post.name, value_driver_post.unit]) \ .execute(fetch_type=FetchType.FETCH_NONE) value_driver_id = insert_statement.last_insert_id + add_project_value_driver(db_connection, value_driver_post.project_id, value_driver_id) except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverFailedToCreateException @@ -944,7 +966,6 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id def remove_duplicate_names(db_connection: PooledMySQLConnection, vcs_id: int, rows: List[models.VcsRowPost]): - new_subprocesses: List[Tuple[int, models.VCSSubprocessPost]] = [] done = [] for i in range(0, len(rows)): diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index e107290d..35718712 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -150,6 +150,19 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` REFERENCES `seddb`.`users`(`id`) ON DELETE CASCADE ); +# Value driver to project relation +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` +( + `project` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, + PRIMARY KEY (`project`, `value_driver`), + FOREIGN KEY (`project`) + REFERENCES `seddb`.`cvs_projects`(`id`) + ON DELETE CASCADE, + FOREIGN KEY (`value_driver`) + REFERENCES `seddb`.`cvs_value_drivers`(`id`) + ON DELETE CASCADE +); #Vcs row and value driver connection CREATE TABLE IF NOT EXISTS `seddb`.`cvs_vcs_need_drivers` From 10c488b6e9319946eff4107bbcdb3308d39f6eea Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 12 Jul 2023 13:09:26 +0200 Subject: [PATCH 066/210] removed non tech from dsm --- sedbackend/apps/cvs/life_cycle/storage.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index dd7a0ff3..26583b3c 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -13,7 +13,7 @@ from sedbackend.apps.core.files.models import StoredFilePath from sedbackend.apps.cvs.life_cycle import exceptions, models from sedbackend.apps.cvs.project.router import CVS_APP_SID -from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions +from sedbackend.apps.cvs.vcs import storage as vcs_storage, exceptions as vcs_exceptions, models as vcs_models from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex from sedbackend.apps.core.projects import storage as core_project_storage from mysql.connector import Error @@ -316,8 +316,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, logger.debug(f'File content: {dsm_file}') vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) - vcs_processes = [row.iso_process.name if row.iso_process is not None else - f'{row.subprocess.name} ({row.subprocess.parent_process.name})' for row in vcs_table] + vcs_processes = get_process_names_from_rows(vcs_table) if len(dsm_file['Processes'].values[1:-1]) != len(vcs_processes): raise exceptions.ProcessesVcsMatchException @@ -449,12 +448,21 @@ def csv_from_matrix(matrix: List[List[str or float]]) -> UploadFile: return upload_file +def get_process_names_from_rows(rows: List[vcs_models.VcsRow]) -> List[str]: + processes = [] + for row in rows: + if row.iso_process is not None and row.iso_process.category == "Technical processes": + 
processes.append(row.iso_process.name) + elif row.subprocess is not None and row.subprocess.parent_process.category == "Technical processes": + processes.append(f'{row.subprocess.name} ({row.subprocess.parent_process.name})') + + return processes + + def initial_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[List[str or float]]: vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) - processes = ["Start"] + [row.iso_process.name if row.iso_process is not None else - f'{row.subprocess.name} ({row.subprocess.parent_process.name})' for row in vcs_table] + [ - "End"] + processes = ["Start"] + get_process_names_from_rows(vcs_table) + ["End"] dsm = [["Processes"] + processes] for i in range(1, len(processes) + 1): From 010536edeaab61fc8215043dbf2cd1625b60908a Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 13 Jul 2023 09:26:41 +0200 Subject: [PATCH 067/210] run monte carlo single process --- sedbackend/apps/cvs/simulation/implementation.py | 8 ++++---- sedbackend/apps/cvs/simulation/router.py | 8 +++++--- sedbackend/apps/cvs/simulation/storage.py | 15 +++++++++++---- 3 files changed, 20 insertions(+), 11 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 2fb167ab..7f8567cd 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -21,12 +21,12 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id: int, is_monte_carlo: bool = False, - normalized_npv: bool = False) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int, + normalized_npv: bool = False, is_multiprocessing: bool = False) -> List[models.Simulation]: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id, is_monte_carlo, - normalized_npv) + result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id, + normalized_npv, is_multiprocessing) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index ac63c46b..6fde360d 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -17,8 +17,9 @@ dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], + normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> List[models.Simulation]: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id) + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, normalized_npv) # Temporary disabled ''' @@ -45,11 +46,12 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil response_model=List[models.Simulation], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_sim_monte_carlo(sim_settings: models.EditSimSettings, vcs_ids: List[int], +async def run_multiprocessing(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> List[models.Simulation]: - return 
implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, True, normalized_npv) + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, normalized_npv, + True) @router.get( diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 4e47e43d..17f9fc37 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -46,8 +46,8 @@ # TODO: Run simulation on DSM file def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, project_id: int, - sim_params: models.FileParams, - dsm_file: UploadFile) -> dict: + sim_params: models.FileParams, + dsm_file: UploadFile) -> dict: _, file_extension = os.path.splitext(dsm_file.filename) dsm = {} @@ -89,7 +89,8 @@ def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, projec def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id, is_monte_carlo: bool = False, normalized_npv: bool = False + design_group_ids: List[int], user_id, normalized_npv: bool = False, + is_multiprocessing: bool = False ) -> List[models.Simulation]: design_results = [] @@ -102,6 +103,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed discount_rate = sim_settings.discount_rate process = sim_settings.flow_process time_unit = TIME_FORMAT_DICT.get(sim_settings.time_unit) + is_monte_carlo = sim_settings.monte_carlo runs = sim_settings.runs all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) @@ -148,7 +150,12 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed sim = des.Des() try: - if is_monte_carlo: + if is_monte_carlo and not is_multiprocessing: + results = sim.run_monte_carlo_simulation(flow_time, interarrival, process, processes, + non_tech_processes, + non_tech_add, dsm, time_unit, discount_rate, runtime, + runs) + elif is_monte_carlo and is_multiprocessing: results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, non_tech_processes, non_tech_add, dsm, time_unit, discount_rate, runtime, From 177b6818c319ae193f470b9cbf9d748112ef0cea Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 13 Jul 2023 13:03:27 +0200 Subject: [PATCH 068/210] upgrade desigm-tool to 0.4.2 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 803c6d3c..c9e7a649 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.0 +desim-tool==0.4.2 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From e3e9f2aea93345256824d7e321b4aba916953446 Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 14 Jul 2023 22:45:24 +0200 Subject: [PATCH 069/210] get all started --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/vcs/storage.py | 23 +++++++++++------------ 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index daa1d89a..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 132bea17..73c1f9e2 
100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -271,19 +271,18 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> List[models.ValueDriver]: logger.debug(f'Fetching all value drivers for user with id={user_id}.') - where_statement = f'user = %s' - where_values = [user_id] - try: - select_statement = MySQLStatementBuilder(db_connection) - results = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, CVS_VALUE_DRIVER_COLUMNS) \ - .where(where_statement, where_values) \ - .order_by(['id'], Sort.ASCENDING) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + query = f'SELECT cvd.* \ + FROM cvs_value_drivers cvd \ + LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ + LEFT JOIN projects_participants pp ON cpvd.project = pp.project_id \ + WHERE (pp.user_id = 1 OR (cpvd.project IS NULL AND cvd.user = 1))' - except Error as e: - logger.debug(f'Error msg: {e.msg}') - raise exceptions.ValueDriverNotFoundException + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(query, []) + res = cursor.fetchone() + if res is None: + raise exceptions.SubprocessNotFoundException(subprocess_id) + res = dict(zip(cursor.column_names, res)) return [populate_value_driver(result) for result in results] From 8e4c725e59dedb412fc64dc5684bfcc4104c7624 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 17 Jul 2023 17:05:01 +0200 Subject: [PATCH 070/210] get all value drivers working --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/vcs/models.py | 2 +- sedbackend/apps/cvs/vcs/storage.py | 35 +++++++++++++++++------------- 3 files changed, 23 insertions(+), 18 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/models.py b/sedbackend/apps/cvs/vcs/models.py index 372da16f..b52159f4 100644 --- a/sedbackend/apps/cvs/vcs/models.py +++ b/sedbackend/apps/cvs/vcs/models.py @@ -84,7 +84,7 @@ class ValueDriver(BaseModel): id: int name: str unit: Optional[str] = None - + projects: Optional[List[int]] = None class ValueDriverPost(BaseModel): name: str diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 73c1f9e2..2c576cdd 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -271,20 +271,21 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> List[models.ValueDriver]: logger.debug(f'Fetching all value drivers for user with id={user_id}.') - query = f'SELECT cvd.* \ - FROM cvs_value_drivers cvd \ - LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ - LEFT JOIN projects_participants pp ON cpvd.project = pp.project_id \ - WHERE (pp.user_id = 1 OR (cpvd.project IS NULL AND cvd.user = 1))' - - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, []) - res = cursor.fetchone() - if res is None: - raise exceptions.SubprocessNotFoundException(subprocess_id) - res = dict(zip(cursor.column_names, 
res)) - - return [populate_value_driver(result) for result in results] + try: + query = f'SELECT cvd.*, cpvd.project \ + FROM cvs_value_drivers cvd \ + LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ + LEFT JOIN projects_participants pp ON cpvd.project = pp.project_id \ + WHERE (pp.user_id = %s OR (cpvd.project IS NULL AND cvd.user = %s))' + + with db_connection.cursor(prepared=True, dictionary=True) as cursor: + cursor.execute(query, [user_id, user_id]) + res = cursor.fetchall() + except Error as e: + logger.debug(f'Error msg: {e.msg}') + raise exceptions.ValueDriverNotFoundException + logger.debug(res) + return [populate_value_driver(result) for result in res] def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, @@ -468,10 +469,14 @@ def delete_all_value_drivers(db_connection: PooledMySQLConnection, user_id: int) def populate_value_driver(db_result) -> models.ValueDriver: + logger.debug(f'Populating value driver with: {db_result}') + project = None + if 'project' in db_result and db_result['project']: project = [db_result['project']] return models.ValueDriver( id=db_result['id'], name=db_result['name'], - unit=db_result['unit'] + unit=db_result['unit'], + projects=project ) From 8a0e8d3a1810e7b9de59ec5e80c0018a6ab8ddfa Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 18 Jul 2023 11:59:52 +0200 Subject: [PATCH 071/210] adding relation when used in project vcs --- sedbackend/apps/cvs/vcs/implementation.py | 14 ++++++++++++++ sedbackend/apps/cvs/vcs/router.py | 8 ++++++++ sedbackend/apps/cvs/vcs/storage.py | 19 ++++++++++--------- sql/V220608_cvs.sql | 1 + 4 files changed, 33 insertions(+), 9 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index b8f46dc6..8bff149d 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -325,6 +325,20 @@ def add_vcs_multiple_needs_drivers(need_driver_ids: List[Tuple[int, int]]): detail=f'Badly formatted request' ) + +def add_project_multiple_value_drivers(project_id: int, value_driver_ids: List[int]): + try: + with get_connection() as con: + res = storage.add_project_value_drivers(con, project_id, value_driver_ids) + con.commit() + return res + except exceptions.GenericDatabaseException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Badly formatted request' + ) + + # ====================================================================================================================== # VCS ISO Processes # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 262369a2..268751b6 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -144,6 +144,14 @@ async def create_value_driver(value_driver_post: models.ValueDriverPost, user: User = Depends(get_current_active_user)) -> models.ValueDriver: return implementation.create_value_driver(user.id, value_driver_post) +@router.post( + '/project/{native_project_id}/value-driver', + summary=f'Add value drivers to project', + response_model=bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] +) +async def add_drivers_to_project(native_project_id: int, value_driver_ids: List[int]): + return implementation.add_project_multiple_value_drivers(native_project_id, value_driver_ids) 
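# Usage sketch (annotation, not part of the diff): the endpoint added above takes the
# project id from the path and a plain JSON array of value driver ids as the request
# body, and returns a bool. Assuming the TestClient fixtures used by the test suite:
#
#     res = client.post(f'/api/cvs/project/{project.id}/value-driver',
#                       headers=std_headers,
#                       json=[value_driver.id])
#     assert res.status_code == 200 and res.json() is True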
@router.post( '/project/{native_project_id}/value-driver/need', diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 2c576cdd..d48e3b6a 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -371,17 +371,18 @@ def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, v return True -def add_project_value_driver(db_connection: PooledMySQLConnection, project_id: int, value_driver_id: int) -> bool: - logger.debug(f'Adding relation between project_id={project_id} and value_driver_id={value_driver_id}') +def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: int, value_driver_ids: List[int]) -> bool: + logger.debug(f'Adding relation between project_id={project_id} and value_driver_ids={value_driver_ids}') try: - insert_statement = MySQLStatementBuilder(db_connection) - insert_statement \ - .insert(table=CVS_PROJECT_VALUE_DRIVER_TABLE, columns=CVS_PROJECT_VALUE_DRIVER_COLUMNS) \ - .set_values([project_id, value_driver_id]) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement = f'INSERT INTO {CVS_PROJECT_VALUE_DRIVER_TABLE} (project, value_driver) VALUES (%s, %s) ON DUPLICATE KEY UPDATE `project`=`project`' + prepared_list = [] + for index, value_driver_id in enumerate(value_driver_ids): + prepared_list.append((project_id, value_driver_id)) + with db_connection.cursor(prepared=True) as cursor: + cursor.executemany(insert_statement, prepared_list) except Error as e: - logger.debug(f'Error msg: {e.msg}') + logger.debug(f'Error {e.errno} {e.msg}') raise exceptions.GenericDatabaseException return True @@ -416,7 +417,7 @@ def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, .set_values([user_id, value_driver_post.name, value_driver_post.unit]) \ .execute(fetch_type=FetchType.FETCH_NONE) value_driver_id = insert_statement.last_insert_id - add_project_value_driver(db_connection, value_driver_post.project_id, value_driver_id) + add_project_value_drivers(db_connection, value_driver_post.project_id, [value_driver_id]) except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverFailedToCreateException diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index 35718712..40d6baf7 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -163,6 +163,7 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` REFERENCES `seddb`.`cvs_value_drivers`(`id`) ON DELETE CASCADE ); +CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); #Vcs row and value driver connection CREATE TABLE IF NOT EXISTS `seddb`.`cvs_vcs_need_drivers` From e8565903ccd91b73501cf49016f0132aa4b1516e Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 18 Jul 2023 18:24:43 +0200 Subject: [PATCH 072/210] project id fix --- sedbackend/apps/cvs/vcs/storage.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index d48e3b6a..c8efd6eb 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -272,11 +272,12 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> logger.debug(f'Fetching all value drivers for user with id={user_id}.') try: - query = f'SELECT cvd.*, cpvd.project \ + query = f'SELECT DISTINCT cvd.*, cpvd.project \ FROM cvs_value_drivers cvd \ LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ LEFT JOIN 
projects_participants pp ON cpvd.project = pp.project_id \ - WHERE (pp.user_id = %s OR (cpvd.project IS NULL AND cvd.user = %s))' + LEFT JOIN projects_subprojects ps ON cpvd.project = ps.native_project_id \ + WHERE (pp.user_id = %s OR (cvd.user = %s))' with db_connection.cursor(prepared=True, dictionary=True) as cursor: cursor.execute(query, [user_id, user_id]) From 74cf60280370f5c2e71d57a83fa23de083660395 Mon Sep 17 00:00:00 2001 From: jyborn Date: Thu, 20 Jul 2023 14:13:08 +0200 Subject: [PATCH 073/210] delete value driver fix --- sedbackend/apps/core/db.py | 2 +- sedbackend/apps/cvs/vcs/implementation.py | 4 ++-- sedbackend/apps/cvs/vcs/router.py | 6 ++--- sedbackend/apps/cvs/vcs/storage.py | 27 +++++++++++++++++++++-- 4 files changed, 31 insertions(+), 8 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..9a0b9d0e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -12,7 +12,7 @@ password = Environment.get_variable('MYSQL_PWD_RW') host = 'localhost' #'core-db' database = 'seddb' -port = 3001 #3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 8bff149d..8322bb57 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -267,10 +267,10 @@ def edit_value_driver(value_driver_id: int, ) -def delete_value_driver(value_driver_id: int) -> bool: +def delete_value_driver(project_id: int, value_driver_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_value_driver(con, value_driver_id) + res = storage.delete_project_value_driver(con, project_id, value_driver_id) con.commit() return res except exceptions.ValueDriverNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 268751b6..9196b509 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -172,12 +172,12 @@ async def edit_value_driver(value_driver_id: int, value_driver_post: models.Valu @router.delete( - '/value-driver/{value_driver_id}', + '/project/{native_project_id}/value-driver/{value_driver_id}', summary='Deletes a value driver', response_model=bool, ) -async def delete_value_driver(value_driver_id: int) -> bool: - return implementation.delete_value_driver(value_driver_id) +async def delete_value_driver(native_project_id: int, value_driver_id: int) -> bool: + return implementation.delete_value_driver(native_project_id, value_driver_id) # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index c8efd6eb..ace62980 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -372,7 +372,8 @@ def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, v return True -def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: int, value_driver_ids: List[int]) -> bool: +def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: int, + value_driver_ids: List[int]) -> bool: logger.debug(f'Adding relation between project_id={project_id} and value_driver_ids={value_driver_ids}') try: @@ -445,7 +446,7 @@ def edit_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int return get_value_driver(db_connection, value_driver_id) -def 
delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> bool: +def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id) -> bool: logger.debug(f'Deleting value driver with id={value_driver_id}.') delete_statement = MySQLStatementBuilder(db_connection) @@ -459,6 +460,28 @@ def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id: i return True +def delete_project_value_driver(db_connection: PooledMySQLConnection, project_id: int, value_driver_id: int) -> bool: + logger.debug(f'Deleting relation with project={project_id} AND value_driver={value_driver_id}.') + + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_PROJECT_VALUE_DRIVER_TABLE) \ + .where('project = %s AND value_driver = %s', [project_id, value_driver_id]) \ + .execute(return_affected_rows=True) + + if rows == 0: + raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) + + count_statement = MySQLStatementBuilder(db_connection) + result = count_statement.count(CVS_PROJECT_VALUE_DRIVER_TABLE) \ + .where('value_driver = %s', [value_driver_id]) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + if result['count'] == 0: + return delete_value_driver(db_connection, value_driver_id) + + return True + + def delete_all_value_drivers(db_connection: PooledMySQLConnection, user_id: int) -> bool: logger.debug(f'Deleting all value drivers for user with id={user_id}.') From a53a9db3fae9c3d9aec4cbdacc987b71b787848a Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 21 Jul 2023 11:30:02 +0200 Subject: [PATCH 074/210] back to correct host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 9a0b9d0e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From c4b7b4c6d0bd484c70df107c94c6e490d681c4df Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 21 Jul 2023 11:33:47 +0200 Subject: [PATCH 075/210] new file for sql --- sql/V220608_cvs.sql | 14 -------------- sql/V230721_cvs.sql | 14 ++++++++++++++ 2 files changed, 14 insertions(+), 14 deletions(-) create mode 100644 sql/V230721_cvs.sql diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index 40d6baf7..e107290d 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -150,20 +150,6 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` REFERENCES `seddb`.`users`(`id`) ON DELETE CASCADE ); -# Value driver to project relation -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` -( - `project` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, - PRIMARY KEY (`project`, `value_driver`), - FOREIGN KEY (`project`) - REFERENCES `seddb`.`cvs_projects`(`id`) - ON DELETE CASCADE, - FOREIGN KEY (`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) - ON DELETE CASCADE -); -CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); #Vcs row and value driver connection CREATE TABLE IF NOT EXISTS `seddb`.`cvs_vcs_need_drivers` diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql new file mode 100644 index 00000000..79a08553 --- /dev/null +++ b/sql/V230721_cvs.sql @@ -0,0 +1,14 @@ +# 
Value driver to project relation +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` +( + `project` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, + PRIMARY KEY (`project`, `value_driver`), + FOREIGN KEY (`project`) + REFERENCES `seddb`.`cvs_projects`(`id`) + ON DELETE CASCADE, + FOREIGN KEY (`value_driver`) + REFERENCES `seddb`.`cvs_value_drivers`(`id`) + ON DELETE CASCADE +); +CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); \ No newline at end of file From a62ca7012d62e0233e9cef5daff817786e8c85ba Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 21 Jul 2023 12:54:11 +0200 Subject: [PATCH 076/210] exception added --- sedbackend/apps/cvs/vcs/exceptions.py | 8 ++++++++ sedbackend/apps/cvs/vcs/implementation.py | 10 ++++++++++ sedbackend/apps/cvs/vcs/storage.py | 8 ++++---- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/exceptions.py b/sedbackend/apps/cvs/vcs/exceptions.py index d8d56a3d..2c663537 100644 --- a/sedbackend/apps/cvs/vcs/exceptions.py +++ b/sedbackend/apps/cvs/vcs/exceptions.py @@ -59,6 +59,14 @@ class ValueDriverFailedToCreateException(Exception): pass +class ProjectValueDriverNotFoundException(Exception): + pass + + +class ProjectValueDriverFailedToCreateException(Exception): + pass + + # ====================================================================================================================== # VCS ISO Processes # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 8322bb57..ab98ca84 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -288,6 +288,11 @@ def delete_value_driver(project_id: int, value_driver_id: int) -> bool: status_code=status.HTTP_403_FORBIDDEN, detail='Unauthorized user.', ) + except exceptions.ProjectValueDriverNotFoundException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Could not find project={project_id} <-> value driver={value_driver_id} relation.' 
+ ) def delete_all_value_drivers(user_id: int) -> bool: @@ -337,6 +342,11 @@ def add_project_multiple_value_drivers(project_id: int, value_driver_ids: List[i status_code=status.HTTP_400_BAD_REQUEST, detail=f'Badly formatted request' ) + except exceptions.ProjectValueDriverFailedToCreateException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'Failed to create project={project_id} and value driver={value_driver_ids} relation' + ) # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index ace62980..dc5cbe67 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -285,7 +285,7 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverNotFoundException - logger.debug(res) + return [populate_value_driver(result) for result in res] @@ -385,7 +385,7 @@ def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: cursor.executemany(insert_statement, prepared_list) except Error as e: logger.debug(f'Error {e.errno} {e.msg}') - raise exceptions.GenericDatabaseException + raise exceptions.ProjectValueDriverFailedToCreateException return True @@ -446,7 +446,7 @@ def edit_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int return get_value_driver(db_connection, value_driver_id) -def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id) -> bool: +def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> bool: logger.debug(f'Deleting value driver with id={value_driver_id}.') delete_statement = MySQLStatementBuilder(db_connection) @@ -469,7 +469,7 @@ def delete_project_value_driver(db_connection: PooledMySQLConnection, project_id .execute(return_affected_rows=True) if rows == 0: - raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) + raise exceptions.ProjectValueDriverNotFoundException(project_id=project_id, value_driver_id=value_driver_id) count_statement = MySQLStatementBuilder(db_connection) result = count_statement.count(CVS_PROJECT_VALUE_DRIVER_TABLE) \ From 692083fb0f2ffbedd3e2edaf2d4c24c7ef3498c6 Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 21 Jul 2023 15:34:16 +0200 Subject: [PATCH 077/210] value driver tests fixed + edit value driver fix --- sedbackend/apps/cvs/vcs/implementation.py | 4 ++-- sedbackend/apps/cvs/vcs/models.py | 6 ++++++ sedbackend/apps/cvs/vcs/router.py | 4 ++-- sedbackend/apps/cvs/vcs/storage.py | 2 +- tests/apps/cvs/testutils.py | 14 +++++++----- tests/apps/cvs/vcs/test_value_drivers.py | 26 +++++++++++++++-------- 6 files changed, 37 insertions(+), 19 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index ab98ca84..b63547a0 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -244,10 +244,10 @@ def create_value_driver(user_id: int, value_driver_post: models.ValueDriverPost) def edit_value_driver(value_driver_id: int, - value_driver_post: models.ValueDriverPost) -> models.ValueDriver: + value_driver: models.ValueDriverPut) -> models.ValueDriver: try: with get_connection() as con: - result = storage.edit_value_driver(con, value_driver_id, value_driver_post) + result = storage.edit_value_driver(con, value_driver_id, 
value_driver) con.commit() return result except exceptions.ValueDriverNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/models.py b/sedbackend/apps/cvs/vcs/models.py index b52159f4..e51c069c 100644 --- a/sedbackend/apps/cvs/vcs/models.py +++ b/sedbackend/apps/cvs/vcs/models.py @@ -86,6 +86,12 @@ class ValueDriver(BaseModel): unit: Optional[str] = None projects: Optional[List[int]] = None + +class ValueDriverPut(BaseModel): + name: str + unit: Optional[str] = None + + class ValueDriverPost(BaseModel): name: str unit: Optional[str] = None diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 9196b509..21f6f18d 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -167,8 +167,8 @@ async def add_drivers_to_needs(native_project_id: int, need_driver_ids: List[Tup summary='Edits a value driver', response_model=models.ValueDriver, ) -async def edit_value_driver(value_driver_id: int, value_driver_post: models.ValueDriverPost) -> models.ValueDriver: - return implementation.edit_value_driver(value_driver_id, value_driver_post) +async def edit_value_driver(value_driver_id: int, value_driver: models.ValueDriverPut) -> models.ValueDriver: + return implementation.edit_value_driver(value_driver_id, value_driver) @router.delete( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index dc5cbe67..2e326143 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -428,7 +428,7 @@ def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, def edit_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int, - new_value_driver: models.ValueDriverPost) -> models.ValueDriver: + new_value_driver: models.ValueDriverPut) -> models.ValueDriver: logger.debug(f'Editing value driver with id={value_driver_id}.') update_statement = MySQLStatementBuilder(db_connection) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index f4b9c65d..b8c0743b 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -94,7 +94,7 @@ def delete_VCS_with_ids(user_id: int, project_id: int, vcs_id_list: List[int]): vcs_impl.delete_vcs(user_id, project_id, vcsid) -def random_value_driver(name: str = None, unit: str = None): +def random_value_driver_post(user_id: int, project_id: int, name: str = None, unit: str = None): if name is None: name = tu.random_str(5, 50) if unit is None: @@ -102,15 +102,19 @@ def random_value_driver(name: str = None, unit: str = None): return sedbackend.apps.cvs.vcs.models.ValueDriverPost( name=name, - unit=unit + unit=unit, + project_id=project_id ) -def seed_random_value_driver(user_id) -> sedbackend.apps.cvs.vcs.models.ValueDriver: - value_driver = random_value_driver() +def seed_random_value_driver(user_id: int, project_id: int = None) -> sedbackend.apps.cvs.vcs.models.ValueDriver: + if project_id is None: + project = seed_random_project(user_id) + project_id = project.id + value_driver_post = random_value_driver_post(user_id=user_id, project_id=project_id) new_value_driver = sedbackend.apps.cvs.vcs.implementation.create_value_driver( - user_id, value_driver) + user_id, value_driver_post) return new_value_driver diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index 5a37ef3b..b0c02b90 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -6,6 +6,7 @@ def test_get_all_value_drivers(client, std_headers, 
std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) for _ in range(5): tu.seed_random_value_driver(current_user.id) # Act @@ -59,11 +60,13 @@ def test_get_value_driver_not_found(client, std_headers, std_user): def test_create_value_driver(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.random_value_driver_post(current_user.id, project.id) # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'name': vd.name, - 'unit': vd.unit + 'unit': vd.unit, + 'project_id': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK @@ -76,10 +79,12 @@ def test_create_value_driver(client, std_headers, std_user): def test_create_value_driver_missing_name(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vdPost = tu.random_value_driver_post(current_user.id, project.id) # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ - 'unit': vd.unit + 'unit': vdPost.unit, + 'project_id': vdPost.project_id }) # Assert assert res.status_code == 422 # 422 Unprocessable Entity @@ -90,10 +95,12 @@ def test_create_value_driver_missing_name(client, std_headers, std_user): def test_create_value_driver_missing_unit(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.random_value_driver_post(current_user.id, project.id) # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ - 'name': vd.name + 'name': vd.name, + 'project_id': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK @@ -108,7 +115,7 @@ def test_edit_value_driver(client, std_headers, std_user): # Act res = client.put(f'/api/cvs/value-driver/{vd.id}', headers=std_headers, json={ 'name': "new name", - 'unit': "new unit" + 'unit': "new unit", }) # Assert assert res.status_code == 200 # 200 OK @@ -121,9 +128,10 @@ def test_edit_value_driver(client, std_headers, std_user): def test_delete_value_driver(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) # Act - res = client.delete(f'/api/cvs/value-driver/{vd.id}', headers=std_headers) + res = client.delete(f'/api/cvs/project/{project.id}/value-driver/{vd.id}', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK assert len(impl_vcs.get_all_value_driver(current_user.id)) == 0 From 3c84639260bab3015c456650f94b3425f9b2fcc4 Mon Sep 17 00:00:00 2001 From: = <=> Date: Sun, 23 Jul 2023 13:53:38 +0200 Subject: [PATCH 078/210] project id set on tests --- tests/apps/cvs/design/test_design_group.py | 4 ++-- tests/apps/cvs/testutils.py | 14 ++++++------- tests/apps/cvs/vcs/test_value_drivers.py | 24 ++++++++++++---------- tests/apps/cvs/vcs/test_vcs_table.py | 4 ++-- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/tests/apps/cvs/design/test_design_group.py 
b/tests/apps/cvs/design/test_design_group.py index 150366cc..bfed55f6 100644 --- a/tests/apps/cvs/design/test_design_group.py +++ b/tests/apps/cvs/design/test_design_group.py @@ -133,7 +133,7 @@ def test_add_value_driver_to_design_group(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) dg = tu.seed_random_design_group(project.id) - vd = tu.seed_random_value_driver(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/design-group/{dg.id}', headers=std_headers, json={ 'name': dg.name, @@ -153,7 +153,7 @@ def test_remove_value_driver_from_design_group(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) dg = tu.seed_random_design_group(project.id) - vd = tu.seed_random_value_driver(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) dg.vds.append(vd) # Act res = client.put(f'/api/cvs/project/{project.id}/design-group/{dg.id}', headers=std_headers, json={ diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index b8c0743b..c73f7d7f 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -107,10 +107,7 @@ def random_value_driver_post(user_id: int, project_id: int, name: str = None, un ) -def seed_random_value_driver(user_id: int, project_id: int = None) -> sedbackend.apps.cvs.vcs.models.ValueDriver: - if project_id is None: - project = seed_random_project(user_id) - project_id = project.id +def seed_random_value_driver(user_id: int, project_id: int) -> sedbackend.apps.cvs.vcs.models.ValueDriver: value_driver_post = random_value_driver_post(user_id=user_id, project_id=project_id) new_value_driver = sedbackend.apps.cvs.vcs.implementation.create_value_driver( @@ -161,7 +158,7 @@ def random_table_row( stakeholder_expectations = tu.random_str(5, 50) if stakeholder_needs is None: - stakeholder_needs = seed_stakeholder_needs(user_id) + stakeholder_needs = seed_stakeholder_needs(user_id, project_id) table_row = sedbackend.apps.cvs.vcs.models.VcsRowPost( index=index, @@ -208,6 +205,7 @@ def delete_subprocesses(subprocesses, project_id): def random_stakeholder_need(user_id, + project_id: int, need: str = None, rank_weight: float = None, value_driver_ids: List[int] = None) -> sedbackend.apps.cvs.vcs.models.StakeholderNeedPost: @@ -218,7 +216,7 @@ def random_stakeholder_need(user_id, rank_weight = round(random.random(), ndigits=4) if value_driver_ids is None: - vd = seed_random_value_driver(user_id) + vd = seed_random_value_driver(user_id, project_id) value_driver_ids = [vd.id] stakeholder_need = sedbackend.apps.cvs.vcs.models.StakeholderNeedPost( @@ -229,10 +227,10 @@ def random_stakeholder_need(user_id, return stakeholder_need -def seed_stakeholder_needs(user_id, amount=10) -> List[sedbackend.apps.cvs.vcs.models.StakeholderNeedPost]: +def seed_stakeholder_needs(user_id, project_id, amount=10) -> List[sedbackend.apps.cvs.vcs.models.StakeholderNeedPost]: stakeholder_needs = [] for _ in range(amount): - stakeholder_need = random_stakeholder_need(user_id) + stakeholder_need = random_stakeholder_need(user_id, project_id) stakeholder_needs.append(stakeholder_need) return stakeholder_needs diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index b0c02b90..2cff4a1a 100644 --- 
a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -8,7 +8,7 @@ def test_get_all_value_drivers(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) for _ in range(5): - tu.seed_random_value_driver(current_user.id) + tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.get(f'/api/cvs/value-driver/all', headers=std_headers) # Assert @@ -16,7 +16,7 @@ def test_get_all_value_drivers(client, std_headers, std_user): assert len(res.json()) == 5 # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id, current_user.id) def test_get_all_value_drivers_no_vds(client, std_headers, std_user): @@ -34,7 +34,8 @@ def test_get_all_value_drivers_no_vds(client, std_headers, std_user): def test_get_value_driver(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.get(f'/api/cvs/value-driver/{vd.id}', headers=std_headers) # Assert @@ -43,7 +44,7 @@ def test_get_value_driver(client, std_headers, std_user): assert res.json()['unit'] == vd.unit # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id, current_user.id) def test_get_value_driver_not_found(client, std_headers, std_user): # Setup @@ -90,7 +91,7 @@ def test_create_value_driver_missing_name(client, std_headers, std_user): assert res.status_code == 422 # 422 Unprocessable Entity # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id, current_user.id) def test_create_value_driver_missing_unit(client, std_headers, std_user): # Setup @@ -106,12 +107,13 @@ def test_create_value_driver_missing_unit(client, std_headers, std_user): assert res.status_code == 200 # 200 OK # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id) def test_edit_value_driver(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.put(f'/api/cvs/value-driver/{vd.id}', headers=std_headers, json={ 'name': "new name", @@ -123,7 +125,7 @@ def test_edit_value_driver(client, std_headers, std_user): assert res.json()['unit'] == "new unit" # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id, current_user.id) def test_delete_value_driver(client, std_headers, std_user): # Setup @@ -137,7 +139,7 @@ def test_delete_value_driver(client, std_headers, std_user): assert len(impl_vcs.get_all_value_driver(current_user.id)) == 0 # Cleanup tu.delete_vd_from_user(current_user.id) - + tu.delete_project_by_id(project.id, current_user.id) def test_get_all_value_drivers_from_vcs(client, std_headers, std_user): # Setup @@ -166,7 +168,7 @@ def test_add_value_drivers_to_needs(client, std_headers, std_user): vds = [] for _ in range(5): - new_vd = tu.seed_random_value_driver(current_user.id) + new_vd = tu.seed_random_value_driver(current_user.id, project.id) vds.append(new_vd) needs = [] @@ -201,7 +203,7 @@ def test_add_driver_needs_invalid_needs(client, std_headers, std_user): vds = [] for _ in 
range(5): - new_vd = tu.seed_random_value_driver(current_user.id) + new_vd = tu.seed_random_value_driver(current_user.id, project.id) vds.append(new_vd) need_driver_ids = [] diff --git a/tests/apps/cvs/vcs/test_vcs_table.py b/tests/apps/cvs/vcs/test_vcs_table.py index f5ee6f45..202cc518 100644 --- a/tests/apps/cvs/vcs/test_vcs_table.py +++ b/tests/apps/cvs/vcs/test_vcs_table.py @@ -43,7 +43,7 @@ def test_create_vcs_table(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id) - value_driver = tu.seed_random_value_driver(current_user.id) + value_driver = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/table', headers=std_headers, json=[ @@ -77,7 +77,7 @@ def test_edit_vcs_table(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id) - value_driver = tu.seed_random_value_driver(current_user.id) + value_driver = tu.seed_random_value_driver(current_user.id, project.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) # Act From 1144fb3a26770cf905ab16f47315170c5a1f8183 Mon Sep 17 00:00:00 2001 From: = <=> Date: Sun, 23 Jul 2023 14:05:57 +0200 Subject: [PATCH 079/210] create project test fix --- tests/apps/cvs/projects/test_projects.py | 2 +- tests/apps/cvs/vcs/test_value_drivers.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/apps/cvs/projects/test_projects.py b/tests/apps/cvs/projects/test_projects.py index 6e0aa67e..61e86de5 100644 --- a/tests/apps/cvs/projects/test_projects.py +++ b/tests/apps/cvs/projects/test_projects.py @@ -13,7 +13,7 @@ def test_create_cvs_project(client, admin_headers): #Setup - name = testutils.random_str(3, 30) + name = testutils.random_str(5, 30) description = testutils.random_str(20, 200) currency = testutils.random_str(0, 10) diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index 2cff4a1a..50cc67e0 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -107,7 +107,7 @@ def test_create_value_driver_missing_unit(client, std_headers, std_user): assert res.status_code == 200 # 200 OK # Cleanup tu.delete_vd_from_user(current_user.id) - tu.delete_project_by_id(project.id) + tu.delete_project_by_id(project.id, current_user.id) def test_edit_value_driver(client, std_headers, std_user): # Setup From 63732f298b12e5ddec687f16c9f2851dbe41f9bb Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 11:55:12 +0200 Subject: [PATCH 080/210] combine value drivers moved to backend --- sedbackend/apps/core/db.py | 4 +-- sedbackend/apps/cvs/vcs/storage.py | 41 ++++++++++++++++++++++-------- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..9a0b9d0e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 2e326143..6d4a8ac8 100644 --- 
a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -1,4 +1,5 @@ -from typing import List, Tuple +from collections import defaultdict +from typing import List, Tuple, Type from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection from mysql.connector import Error @@ -6,6 +7,7 @@ from sedbackend.apps.cvs.project.storage import get_cvs_project from sedbackend.apps.cvs.vcs import models, exceptions from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models +from sedbackend.apps.cvs.vcs.models import ValueDriver from sedbackend.libs.datastructures.pagination import ListChunk from sedbackend.apps.core.files import storage as file_storage, exceptions as file_exceptions from mysqlsb import MySQLStatementBuilder, Sort, FetchType @@ -286,7 +288,7 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverNotFoundException - return [populate_value_driver(result) for result in res] + return combine_value_drivers([populate_value_driver(result) for result in res]) def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, @@ -390,22 +392,41 @@ def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: return True -def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> models.ValueDriver: +def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> ValueDriver: logger.debug(f'Fetching value driver with id={value_driver_id}.') - where_statement = f'id = %s' + where_statement = f'cvs_value_drivers.id = %s' where_values = [value_driver_id] + join_statement = f'cvs_project_value_drivers.value_driver = cvs_value_drivers.id' select_statement = MySQLStatementBuilder(db_connection) - result = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, CVS_VALUE_DRIVER_COLUMNS) \ + results = select_statement \ + .select(CVS_VALUE_DRIVER_TABLE, CVS_VALUE_DRIVER_COLUMNS + ['project']) \ + .inner_join(CVS_PROJECT_VALUE_DRIVER_TABLE, join_statement) \ .where(where_statement, where_values) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - if result is None: + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + logger.debug(results) + if results is None: raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) - return populate_value_driver(result) + vds = combine_value_drivers([populate_value_driver(result) for result in results]) + + return vds[0] + + +def combine_value_drivers(data: list[ValueDriver]) -> list[ValueDriver]: + combined_dict = {} + + for entry in data: + key = (entry.id, entry.name) + if key not in combined_dict: + combined_dict[key] = ValueDriver(id=entry.id, name=entry.name, unit=None, projects=[]) + combined_dict[key].unit = entry.unit if entry.unit is not None else combined_dict[key].unit + if entry.projects is not None: + combined_dict[key].projects.extend(entry.projects) + + combined_data = list(combined_dict.values()) + return combined_data def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, From 7dbaa7871c53e035c9c01af6f429e8874b59e373 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 12:11:32 +0200 Subject: [PATCH 081/210] change back to correct host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py 
index 9a0b9d0e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From c50d61e981f5f850b3be937ab262e6ccb21ff7f9 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 12:37:22 +0200 Subject: [PATCH 082/210] get value driver error handling fix --- sedbackend/apps/cvs/vcs/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 6d4a8ac8..69fefe2a 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -406,7 +406,7 @@ def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) .where(where_statement, where_values) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) logger.debug(results) - if results is None: + if results == 0: raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) vds = combine_value_drivers([populate_value_driver(result) for result in results]) From a25c02c789747400d15571628cd830b004c6671f Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 12:47:54 +0200 Subject: [PATCH 083/210] change test --- sedbackend/apps/core/db.py | 4 ++-- tests/apps/cvs/vcs/test_value_drivers.py | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index b0c02b90..a56999c8 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -34,7 +34,8 @@ def test_get_all_value_drivers_no_vds(client, std_headers, std_user): def test_get_value_driver(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) - vd = tu.seed_random_value_driver(current_user.id) + project = tu.seed_random_project(current_user.id) + vd = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.get(f'/api/cvs/value-driver/{vd.id}', headers=std_headers) # Assert @@ -43,6 +44,7 @@ def test_get_value_driver(client, std_headers, std_user): assert res.json()['unit'] == vd.unit # Cleanup tu.delete_vd_from_user(current_user.id) + tu.delete_project_by_id(project.id) def test_get_value_driver_not_found(client, std_headers, std_user): @@ -166,7 +168,7 @@ def test_add_value_drivers_to_needs(client, std_headers, std_user): vds = [] for _ in range(5): - new_vd = tu.seed_random_value_driver(current_user.id) + new_vd = tu.seed_random_value_driver(current_user.id, project.id) vds.append(new_vd) needs = [] @@ -201,7 +203,7 @@ def test_add_driver_needs_invalid_needs(client, std_headers, std_user): vds = [] for _ in range(5): - new_vd = tu.seed_random_value_driver(current_user.id) + new_vd = tu.seed_random_value_driver(current_user.id, project.id) vds.append(new_vd) need_driver_ids = [] From fb958b4e715bef644e128a3abf1f1fe215ce30d2 Mon Sep 17 00:00:00 
2001 From: jyborn Date: Mon, 24 Jul 2023 12:58:05 +0200 Subject: [PATCH 084/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From b2e3effaf39e4f43fbeed156130430ad70ef7fd3 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 15:46:57 +0200 Subject: [PATCH 085/210] projects fixed on getValueDriver --- sedbackend/apps/cvs/vcs/implementation.py | 10 +++--- sedbackend/apps/cvs/vcs/router.py | 14 +++++---- sedbackend/apps/cvs/vcs/storage.py | 37 +++++++++++------------ 3 files changed, 31 insertions(+), 30 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index b63547a0..abcc5534 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -194,7 +194,7 @@ def get_all_value_driver_vcs(project_id: int, vcs_id: int) -> List[models.ValueD def get_all_value_drivers_vcs_row(project_id: int, vcs_id: int, row_id: int) -> List[models.ValueDriver]: try: with get_connection() as con: - res = storage.get_all_value_drivers_vcs_row(con, project_id, vcs_id, row_id) + res = storage.get_all_value_drivers_vcs_row(con, project_id, vcs_id, row_id, user_id) con.commit() return res except exceptions.VCSNotFoundException: @@ -214,10 +214,10 @@ def get_all_value_drivers_vcs_row(project_id: int, vcs_id: int, row_id: int) -> ) -def get_value_driver(value_driver_id: int) -> models.ValueDriver: +def get_value_driver(value_driver_id: int, user_id: int) -> models.ValueDriver: try: with get_connection() as con: - return storage.get_value_driver(con, value_driver_id) + return storage.get_value_driver(con, value_driver_id, user_id) except exceptions.ValueDriverNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -244,10 +244,10 @@ def create_value_driver(user_id: int, value_driver_post: models.ValueDriverPost) def edit_value_driver(value_driver_id: int, - value_driver: models.ValueDriverPut) -> models.ValueDriver: + value_driver: models.ValueDriverPut, user_id: int) -> models.ValueDriver: try: with get_connection() as con: - result = storage.edit_value_driver(con, value_driver_id, value_driver) + result = storage.edit_value_driver(con, value_driver_id, value_driver, user_id) con.commit() return result except exceptions.ValueDriverNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 21f6f18d..6162f151 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -122,8 +122,9 @@ async def get_all_value_driver_vcs(native_project_id: int, vcs_id: int) -> List[ response_model=List[ValueDriver], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_value_drivers_vcs_row(native_project_id: int, vcs_id: int, vcs_row_id: int) -> List[ValueDriver]: - return vcs_impl.get_all_value_drivers_vcs_row(native_project_id, vcs_id, vcs_row_id) +async def get_value_drivers_vcs_row(native_project_id: int, vcs_id: int, vcs_row_id: int, + user: User = Depends(get_current_active_user)) -> 
List[ValueDriver]: + return vcs_impl.get_all_value_drivers_vcs_row(native_project_id, vcs_id, vcs_row_id, user.id) @router.get( @@ -131,8 +132,8 @@ async def get_value_drivers_vcs_row(native_project_id: int, vcs_id: int, vcs_row summary='Returns a value driver', response_model=models.ValueDriver, ) -async def get_value_driver(value_driver_id: int) -> models.ValueDriver: - return implementation.get_value_driver(value_driver_id) +async def get_value_driver(value_driver_id: int, user: User = Depends(get_current_active_user)) -> models.ValueDriver: + return implementation.get_value_driver(value_driver_id, user.id) @router.post( @@ -167,8 +168,9 @@ async def add_drivers_to_needs(native_project_id: int, need_driver_ids: List[Tup summary='Edits a value driver', response_model=models.ValueDriver, ) -async def edit_value_driver(value_driver_id: int, value_driver: models.ValueDriverPut) -> models.ValueDriver: - return implementation.edit_value_driver(value_driver_id, value_driver) +async def edit_value_driver(value_driver_id: int, value_driver: models.ValueDriverPut, + user: User = Depends(get_current_active_user)) -> models.ValueDriver: + return implementation.edit_value_driver(value_driver_id, value_driver, user.id) @router.delete( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 0445affb..1504f2f8 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -292,7 +292,7 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, - vcs_row: int) -> List[models.ValueDriver]: + vcs_row: int, user_id: int) -> List[models.ValueDriver]: logger.debug(f'Fetching all value drivers for vcs with id={vcs_id} and vcs row with id={vcs_row}') get_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project @@ -304,7 +304,7 @@ def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_ value_drivers += [vd.id for vd in need.value_drivers] value_drivers = list(dict.fromkeys(value_drivers)) - return [get_value_driver(db_connection, vd_id) for vd_id in value_drivers] + return [get_value_driver(db_connection, vd_id, user_id) for vd_id in value_drivers] def get_vcs_need_drivers(db_connection: PooledMySQLConnection, need_id: int) -> List[models.ValueDriver]: @@ -392,24 +392,23 @@ def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: return True -def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> ValueDriver: - logger.debug(f'Fetching value driver with id={value_driver_id}.') +def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int, user_id: int) -> ValueDriver: + logger.debug(f'User={user_id} fetching value driver with id={value_driver_id}.') - where_statement = f'cvs_value_drivers.id = %s' - where_values = [value_driver_id] - join_statement = f'cvs_project_value_drivers.value_driver = cvs_value_drivers.id' + query = f'SELECT cvd.*, cpvd.project \ + FROM cvs_value_drivers cvd \ + INNER JOIN cvs_project_value_drivers cpvd ON cpvd.value_driver = cvd.id \ + INNER JOIN projects_subprojects ps ON cpvd.project = ps.native_project_id \ + WHERE cvd.id = %s AND (ps.owner_id = %s OR ps.id IN (SELECT project_id FROM projects_participants WHERE user_id = %s));' - select_statement = MySQLStatementBuilder(db_connection) - results = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, 
CVS_VALUE_DRIVER_COLUMNS + ['project']) \ - .inner_join(CVS_PROJECT_VALUE_DRIVER_TABLE, join_statement) \ - .where(where_statement, where_values) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - logger.debug(results) - if len(results) == 0: + with db_connection.cursor(prepared=True, dictionary=True) as cursor: + cursor.execute(query, [value_driver_id, user_id, user_id]) + res = cursor.fetchall() + + if len(res) == 0: raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) - vds = combine_value_drivers([populate_value_driver(result) for result in results]) + vds = combine_value_drivers([populate_value_driver(result) for result in res]) return vds[0] @@ -445,11 +444,11 @@ def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverFailedToCreateException - return get_value_driver(db_connection, value_driver_id) + return get_value_driver(db_connection, value_driver_id, user_id) def edit_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int, - new_value_driver: models.ValueDriverPut) -> models.ValueDriver: + new_value_driver: models.ValueDriverPut, user_id: int) -> models.ValueDriver: logger.debug(f'Editing value driver with id={value_driver_id}.') update_statement = MySQLStatementBuilder(db_connection) @@ -464,7 +463,7 @@ def edit_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int if rows == 0: raise exceptions.ValueDriverFailedToUpdateException - return get_value_driver(db_connection, value_driver_id) + return get_value_driver(db_connection, value_driver_id, user_id) def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int) -> bool: From d5fa8c7aca19d16f233ea5dde8c2a1e38cd56432 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 15:56:59 +0200 Subject: [PATCH 086/210] missed param --- sedbackend/apps/cvs/vcs/implementation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index abcc5534..4389b59b 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -191,7 +191,7 @@ def get_all_value_driver_vcs(project_id: int, vcs_id: int) -> List[models.ValueD ) -def get_all_value_drivers_vcs_row(project_id: int, vcs_id: int, row_id: int) -> List[models.ValueDriver]: +def get_all_value_drivers_vcs_row(project_id: int, vcs_id: int, row_id: int, user_id: int) -> List[models.ValueDriver]: try: with get_connection() as con: res = storage.get_all_value_drivers_vcs_row(con, project_id, vcs_id, row_id, user_id) From 992e23783b60a125fc03aec689db2367cb433dce Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 18:42:30 +0200 Subject: [PATCH 087/210] get projects updated with rights --- sedbackend/apps/core/db.py | 4 +- .../apps/cvs/life_cycle/implementation.py | 4 +- sedbackend/apps/cvs/life_cycle/router.py | 5 +- sedbackend/apps/cvs/life_cycle/storage.py | 12 ++-- .../link_design_lifecycle/implementation.py | 4 +- .../apps/cvs/link_design_lifecycle/router.py | 7 ++- .../apps/cvs/link_design_lifecycle/storage.py | 4 +- sedbackend/apps/cvs/market_input/storage.py | 2 +- sedbackend/apps/cvs/project/implementation.py | 4 +- sedbackend/apps/cvs/project/models.py | 1 + sedbackend/apps/cvs/project/router.py | 4 +- sedbackend/apps/cvs/project/storage.py | 55 ++++++++++--------- .../apps/cvs/simulation/implementation.py | 4 +- sedbackend/apps/cvs/simulation/router.py | 5 
+- sedbackend/apps/cvs/simulation/storage.py | 4 +- sedbackend/apps/cvs/vcs/implementation.py | 12 ++-- sedbackend/apps/cvs/vcs/router.py | 12 ++-- sedbackend/apps/cvs/vcs/storage.py | 53 ++++++++++++------ tests/apps/cvs/testutils.py | 2 +- tests/apps/cvs/vcs/test_vcs.py | 14 ++--- 20 files changed, 119 insertions(+), 93 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/life_cycle/implementation.py b/sedbackend/apps/cvs/life_cycle/implementation.py index 3faf5bf8..705577bb 100644 --- a/sedbackend/apps/cvs/life_cycle/implementation.py +++ b/sedbackend/apps/cvs/life_cycle/implementation.py @@ -106,10 +106,10 @@ def update_node(project_id: int, node_id: int, node: models.NodePost) -> bool: ) -def get_bpmn(project_id: int, vcs_id: int) -> models.BPMNGet: +def get_bpmn(project_id: int, vcs_id: int, user_id: int) -> models.BPMNGet: try: with get_connection() as con: - result = storage.get_bpmn(con, project_id, vcs_id) + result = storage.get_bpmn(con, project_id, vcs_id, user_id) con.commit() return result except vcs_exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/life_cycle/router.py b/sedbackend/apps/cvs/life_cycle/router.py index 6fc53026..a05439f9 100644 --- a/sedbackend/apps/cvs/life_cycle/router.py +++ b/sedbackend/apps/cvs/life_cycle/router.py @@ -49,8 +49,9 @@ async def update_bpmn_node(native_project_id: int, node_id: int, node: models.No response_model=models.BPMNGet, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_bpmn(native_project_id: int, vcs_id: int) -> models.BPMNGet: - return implementation.get_bpmn(native_project_id, vcs_id) +async def get_bpmn(native_project_id: int, vcs_id: int, + user: User = Depends(get_current_active_user)) -> models.BPMNGet: + return implementation.get_bpmn(native_project_id, vcs_id, user.id) @router.put( diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 26583b3c..ca63c1e2 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -80,7 +80,7 @@ def get_node(db_connection: PooledMySQLConnection, project_id: int, node_id: int raise exceptions.NodeNotFoundException # Check if vcs exists and matches project id - vcs_storage.get_vcs(db_connection, result['vcs'], project_id) + vcs_storage.check_vcs(db_connection, result['vcs'], project_id) return result @@ -227,11 +227,11 @@ def update_node(db_connection: PooledMySQLConnection, project_id: int, node_id: return True -def get_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> models.BPMNGet: +def get_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> models.BPMNGet: logger.debug(f'Get BPMN for vcs with id={vcs_id}.') # Check if vcs exists and matches project id - vcs_storage.get_vcs(db_connection, project_id, vcs_id) + vcs_storage.get_vcs(db_connection, project_id, vcs_id, user_id) where_statement = f'vcs = %s' where_values = [vcs_id] @@ -353,7 +353,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, def get_dsm_file_id(db_connection: 
PooledMySQLConnection, project_id: int, vcs_id: int) -> int: - vcs_storage.get_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id + vcs_storage.check_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project id select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ @@ -482,14 +482,14 @@ def initial_dsm(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i def apply_dsm_to_all(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, dsm: List[List[str or float]], user_id: int) -> models.DSMApplyAllResponse: - vcss = vcs_storage.get_all_vcs(db_connection, project_id).chunk + vcss = vcs_storage.get_all_vcs(db_connection, project_id, user_id).chunk save_dsm_matrix(db_connection, project_id, vcs_id, dsm, user_id) success_vcs = [[vcs for vcs in vcss if vcs.id == vcs_id][0]] failed_vcs = [] - vcss = [vcs for vcs in vcs_storage.get_all_vcs(db_connection, project_id).chunk if vcs.id != vcs_id] + vcss = [vcs for vcs in vcs_storage.get_all_vcs(db_connection, project_id, user_id).chunk if vcs.id != vcs_id] # Try to apply to other vcs. Will only pass if they have the same processes for vcs in vcss: diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index 58881952..4c869b3b 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -54,10 +54,10 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo ) -def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int) -> List[models.FormulaGet]: +def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> List[models.FormulaGet]: with get_connection() as con: try: - res = storage.get_all_formulas(con, project_id, vcs_id, design_group_id) + res = storage.get_all_formulas(con, project_id, vcs_id, design_group_id, user_id) con.commit() return res except vcs_exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/link_design_lifecycle/router.py b/sedbackend/apps/cvs/link_design_lifecycle/router.py index e0ecd60e..f4ec9c0d 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/router.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/router.py @@ -2,8 +2,10 @@ from fastapi import Depends, APIRouter +from sedbackend.apps.core.authentication.utils import get_current_active_user from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.core.users.models import User from sedbackend.apps.cvs.link_design_lifecycle import models, implementation from sedbackend.apps.cvs.project.router import CVS_APP_SID @@ -16,8 +18,9 @@ response_model=List[models.FormulaGet], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int) -> List[models.FormulaGet]: - return implementation.get_all_formulas(native_project_id, vcs_id, dg_id) +async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, + user: User = Depends(get_current_active_user)) -> List[models.FormulaGet]: + return implementation.get_all_formulas(native_project_id, vcs_id, dg_id, user.id) @router.put( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py 
b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 890d6e4a..e36ebdc1 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -77,11 +77,11 @@ def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, - design_group_id: int) -> List[models.FormulaGet]: + design_group_id: int, user_id: int) -> List[models.FormulaGet]: logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - get_vcs(db_connection, project_id, vcs_id) + get_vcs(db_connection, project_id, vcs_id, user_id) select_statement = MySQLStatementBuilder(db_connection) res = select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) \ diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 1cab3c1d..6aad8130 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -138,7 +138,7 @@ def populate_market_input_values(db_result) -> models.MarketInputValue: def update_market_input_value(db_connection: PooledMySQLConnection, project_id: int, mi_value: models.MarketInputValue) -> bool: logger.debug(f'Update market input value') - vcs_storage.get_vcs(db_connection, project_id, mi_value.vcs_id) # check if vcs exists + vcs_storage.check_vcs(db_connection, project_id, mi_value.vcs_id) # check if vcs exists get_market_input(db_connection, project_id, mi_value.market_input_id) # check if market input exists count_statement = MySQLStatementBuilder(db_connection) diff --git a/sedbackend/apps/cvs/project/implementation.py b/sedbackend/apps/cvs/project/implementation.py index cd95acb4..3a899fb0 100644 --- a/sedbackend/apps/cvs/project/implementation.py +++ b/sedbackend/apps/cvs/project/implementation.py @@ -13,10 +13,10 @@ def get_all_cvs_project(user_id: int) -> ListChunk[models.CVSProject]: return storage.get_all_cvs_project(con, user_id) -def get_cvs_project(project_id: int) -> models.CVSProject: +def get_cvs_project(project_id: int, user_id: int) -> models.CVSProject: try: with get_connection() as con: - return storage.get_cvs_project(con, project_id) + return storage.get_cvs_project(con, project_id, user_id) except exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, diff --git a/sedbackend/apps/cvs/project/models.py b/sedbackend/apps/cvs/project/models.py index 2c872a6f..52e5ac51 100644 --- a/sedbackend/apps/cvs/project/models.py +++ b/sedbackend/apps/cvs/project/models.py @@ -11,6 +11,7 @@ class CVSProject(BaseModel): currency: Optional[str] = Field(None, max_length=10) owner: User datetime_created: datetime + my_access_right: int class CVSProjectPost(BaseModel): diff --git a/sedbackend/apps/cvs/project/router.py b/sedbackend/apps/cvs/project/router.py index 9e7aa2dd..e546c2a6 100644 --- a/sedbackend/apps/cvs/project/router.py +++ b/sedbackend/apps/cvs/project/router.py @@ -28,8 +28,8 @@ async def get_all_cvs_project(user: User = Depends(get_current_active_user)) \ response_model=models.CVSProject, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_csv_project(native_project_id: int) -> models.CVSProject: - return implementation.get_cvs_project(native_project_id) +async def get_csv_project(native_project_id: int, user: User = 
Depends(get_current_active_user)) -> models.CVSProject: + return implementation.get_cvs_project(native_project_id, user.id) @router.post( diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index f56ef6c8..4b208077 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -1,7 +1,10 @@ +from fastapi import Depends from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection from sedbackend.apps.core.authentication import exceptions as auth_exceptions +from sedbackend.apps.core.authentication.utils import get_current_active_user +from sedbackend.apps.core.users.models import User from sedbackend.apps.core.users.storage import db_get_user_safe_with_id from sedbackend.apps.cvs.project import models as models, exceptions as exceptions from sedbackend.libs.datastructures.pagination import ListChunk @@ -17,37 +20,33 @@ def get_all_cvs_project(db_connection: PooledMySQLConnection, user_id: int) -> ListChunk[models.CVSProject]: logger.debug(f'Fetching all CVS projects for user with id={user_id}.') - where_statement = f'owner_id = %s' - where_values = [user_id] + query = f'SELECT DISTINCT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ + FROM cvs_projects p \ + LEFT JOIN projects_subprojects ps ON p.id = ps.project_id AND ps.owner_id = %s \ + LEFT JOIN projects_participants pp ON p.id = pp.project_id AND pp.user_id = %s \ + WHERE p.owner_id = %s OR ps.owner_id = %s OR pp.user_id = %s;' - select_statement = MySQLStatementBuilder(db_connection) - results = select_statement.select(CVS_PROJECT_TABLE, CVS_PROJECT_COLUMNS) \ - .where(where_statement, where_values) \ - .order_by(['id'], Sort.ASCENDING) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + with db_connection.cursor(prepared=True, dictionary=True) as cursor: + cursor.execute(query, [user_id, user_id, user_id, user_id, user_id]) + result = cursor.fetchall() - project_list = [] - for result in results: - project_list.append(populate_cvs_project(db_connection, result)) + cvs_project_list = [populate_cvs_project(db_connection, res) for res in result] - count_statement = MySQLStatementBuilder(db_connection) - result = count_statement.count(CVS_PROJECT_TABLE) \ - .where(where_statement, where_values) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - chunk = ListChunk[models.CVSProject](chunk=project_list, length_total=result['count']) + return ListChunk[models.CVSProject](chunk=cvs_project_list, length_total=len(cvs_project_list)) - return chunk +def get_cvs_project(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> models.CVSProject: + logger.debug(f'Fetching CVS project with id={project_id} user={user_id}.') -def get_cvs_project(db_connection: PooledMySQLConnection, project_id: int) -> models.CVSProject: - logger.debug(f'Fetching CVS project with id={project_id}.') - - select_statement = MySQLStatementBuilder(db_connection) - result = select_statement \ - .select(CVS_PROJECT_TABLE, CVS_PROJECT_COLUMNS) \ - .where('id = %s', [project_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + query = f'SELECT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ + FROM cvs_projects p \ + LEFT JOIN projects_participants pp ON pp.project_id = %s AND pp.user_id = %s \ + WHERE p.id = %s;' + with db_connection.cursor(prepared=True, dictionary=True) as cursor: + cursor.execute(query, [project_id, user_id, project_id]) + result = cursor.fetchone() + logger.debug(result) if result 
is None: raise exceptions.CVSProjectNotFoundException @@ -70,11 +69,11 @@ def create_cvs_project(db_connection: PooledMySQLConnection, project: models.CVS subproject = proj_models.SubProjectPost(name=project.name, application_sid=CVS_APPLICATION_SID, native_project_id=cvs_project_id) proj_storage.db_post_subproject(db_connection, subproject, user_id) - return get_cvs_project(db_connection, cvs_project_id) + return get_cvs_project(db_connection, cvs_project_id, user_id) def edit_cvs_project(db_connection: PooledMySQLConnection, project_id: int, - new_project: models.CVSProjectPost) -> models.CVSProject: + new_project: models.CVSProjectPost, user_id: int) -> models.CVSProject: logger.debug(f'Editing CVS project with id={project_id}.') # Updating @@ -87,7 +86,7 @@ def edit_cvs_project(db_connection: PooledMySQLConnection, project_id: int, update_statement.where('id = %s', [project_id]) update_statement.execute(return_affected_rows=True) - return get_cvs_project(db_connection, project_id) + return get_cvs_project(db_connection, project_id, user_id) def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> bool: @@ -106,6 +105,7 @@ def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, us def populate_cvs_project(db_connection: PooledMySQLConnection, db_result) -> models.CVSProject: + logger.debug(f'Populating cvs project with {db_result}') return models.CVSProject( id=db_result['id'], name=db_result['name'], @@ -113,4 +113,5 @@ def populate_cvs_project(db_connection: PooledMySQLConnection, currency=db_result['currency'], owner=db_get_user_safe_with_id(db_connection, db_result['owner_id']), datetime_created=db_result['datetime_created'], + my_access_right=db_result['my_access_right'] ) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 7f8567cd..2b31b346 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -188,10 +188,10 @@ def get_sim_settings(project_id: int) -> models.SimSettings: ) -def edit_sim_settings(project_id: int, sim_settings: models.EditSimSettings) -> bool: +def edit_sim_settings(project_id: int, sim_settings: models.EditSimSettings, user_id: int) -> bool: try: with get_connection() as con: - res = storage.edit_simulation_settings(con, project_id, sim_settings) + res = storage.edit_simulation_settings(con, project_id, sim_settings, user_id) con.commit() return res except InvalidFlowSettingsException: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 6fde360d..64be845a 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -70,5 +70,6 @@ async def get_sim_settings(native_project_id: int) -> models.SimSettings: response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit, CVS_APP_SID))] ) -async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings) -> bool: - return implementation.edit_sim_settings(native_project_id, sim_settings) +async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings, + user: User = Depends(get_current_active_user)) -> bool: + return implementation.edit_sim_settings(native_project_id, sim_settings, user.id) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 17f9fc37..3ee828c0 100644 --- 
a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -346,7 +346,7 @@ def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: in def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: int, - sim_settings: models.EditSimSettings): + sim_settings: models.EditSimSettings, user_id: int): logger.debug(f'Editing simulation settings for project {project_id}') if (sim_settings.flow_process is None and sim_settings.flow_start_time is None) \ @@ -363,7 +363,7 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i if sim_settings.flow_process is not None: flow_process_exists = False - vcss = vcs_storage.get_all_vcs(db_connection, project_id).chunk + vcss = vcs_storage.get_all_vcs(db_connection, project_id, user_id).chunk for vcs in vcss: rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs.id) for row in rows: diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 4389b59b..c837d2ca 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -16,10 +16,10 @@ # VCS # ====================================================================================================================== -def get_all_vcs(project_id: int) -> ListChunk[models.VCS]: +def get_all_vcs(project_id: int, user_id: int) -> ListChunk[models.VCS]: try: with get_connection() as con: - return storage.get_all_vcs(con, project_id) + return storage.get_all_vcs(con, project_id, user_id) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -27,10 +27,10 @@ def get_all_vcs(project_id: int) -> ListChunk[models.VCS]: ) -def get_vcs(project_id: int, vcs_id: int) -> models.VCS: +def get_vcs(project_id: int, vcs_id: int, user_id: int) -> models.VCS: try: with get_connection() as con: - return storage.get_vcs(con, project_id, vcs_id) + return storage.get_vcs(con, project_id, vcs_id, user_id) except exceptions.VCSNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, @@ -605,10 +605,10 @@ def edit_vcs_table(project_id: int, vcs_id: int, updated_vcs_rows: List[models.V # VCS Duplicate # ====================================================================================================================== -def duplicate_vcs(project_id: int, vcs_id: int, n: int) -> List[models.VCS]: +def duplicate_vcs(project_id: int, vcs_id: int, n: int, user_id: int) -> List[models.VCS]: try: with get_connection() as con: - res = storage.duplicate_whole_vcs(con, project_id, vcs_id, n) + res = storage.duplicate_whole_vcs(con, project_id, vcs_id, n, user_id) con.commit() return res except exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 6162f151..01d2e404 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -23,8 +23,8 @@ response_model=ListChunk[models.VCS], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_vcs(native_project_id: int) -> ListChunk[models.VCS]: - return implementation.get_all_vcs(native_project_id) +async def get_all_vcs(native_project_id: int, user: User = Depends(get_current_active_user)) -> ListChunk[models.VCS]: + return implementation.get_all_vcs(native_project_id, user.id) @router.get( @@ -33,8 +33,8 @@ async def get_all_vcs(native_project_id: int) -> 
ListChunk[models.VCS]: response_model=models.VCS, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_vcs(native_project_id: int, vcs_id: int) -> models.VCS: - return implementation.get_vcs(native_project_id, vcs_id) +async def get_vcs(native_project_id: int, vcs_id: int, user: User = Depends(get_current_active_user)) -> models.VCS: + return implementation.get_vcs(native_project_id, vcs_id, user.id) @router.post( @@ -263,5 +263,5 @@ async def delete_subprocess(native_project_id: int, subprocess_id: int) -> bool: response_model=List[models.VCS], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def duplicate_vcs(native_project_id: int, vcs_id: int, n: int) -> List[models.VCS]: - return implementation.duplicate_vcs(native_project_id, vcs_id, n) +async def duplicate_vcs(native_project_id: int, vcs_id: int, n: int, user: User = Depends(get_current_active_user)) -> List[models.VCS]: + return implementation.duplicate_vcs(native_project_id, vcs_id, n, user.id) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 1504f2f8..fd2525c8 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -51,10 +51,10 @@ # ====================================================================================================================== -def get_all_vcs(db_connection: PooledMySQLConnection, project_id: int) -> ListChunk[models.VCS]: +def get_all_vcs(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> ListChunk[models.VCS]: logger.debug(f'Fetching all VCSs for project with id={project_id}.') - get_cvs_project(db_connection, project_id) # perform checks: project and user + get_cvs_project(db_connection, project_id, user_id) # perform checks: project and user where_statement = f'project = %s' where_values = [project_id] @@ -67,7 +67,7 @@ def get_all_vcs(db_connection: PooledMySQLConnection, project_id: int) -> ListCh vcs_list = [] for result in results: - vcs_list.append(populate_vcs(db_connection, result)) + vcs_list.append(populate_vcs(db_connection, result, user_id)) count_statement = MySQLStatementBuilder(db_connection) result = count_statement.count(CVS_VCS_TABLE) \ @@ -78,7 +78,7 @@ def get_all_vcs(db_connection: PooledMySQLConnection, project_id: int) -> ListCh return chunk -def get_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> models.VCS: +def get_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> models.VCS: logger.debug(f'Fetching VCS with id={vcs_id}.') select_statement = MySQLStatementBuilder(db_connection) @@ -93,13 +93,30 @@ def get_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) elif result['project'] != project_id: raise project_exceptions.CVSProjectNoMatchException - return populate_vcs(db_connection, result) + return populate_vcs(db_connection, result, user_id) + +def check_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> models.VCS: + logger.debug(f'CHECKING VCS with id={vcs_id}.') + + select_statement = MySQLStatementBuilder(db_connection) + result = select_statement \ + .select(CVS_VCS_TABLE, CVS_VCS_COLUMNS) \ + .where('id = %s', [vcs_id]) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + if result is None: + raise exceptions.VCSNotFoundException + + elif result['project'] != project_id: + raise project_exceptions.CVSProjectNoMatchException + + return result def 
create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: models.VCSPost) -> models.VCS: logger.debug(f'Creating a VCS in project with id={project_id}.') - get_cvs_project(db_connection, project_id) # Perform checks for existing project and correct user + #get_cvs_project(db_connection, project_id) # Perform checks for existing project and correct user if vcs_post.year_to < vcs_post.year_from: raise exceptions.VCSYearFromGreaterThanYearToException @@ -118,7 +135,7 @@ def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: def edit_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, new_vcs: models.VCSPost) -> models.VCS: logger.debug(f'Editing VCS with id={vcs_id}.') - get_vcs(db_connection, project_id, vcs_id) # Perform checks for existing project and correct user + check_vcs(db_connection, project_id, vcs_id) # Perform checks for existing project and correct user if new_vcs.year_to < new_vcs.year_from: raise exceptions.VCSYearFromGreaterThanYearToException @@ -156,12 +173,12 @@ def delete_vcs(db_connection: PooledMySQLConnection, user_id: int, project_id: i return True -def populate_vcs(db_connection: PooledMySQLConnection, db_result) -> models.VCS: +def populate_vcs(db_connection: PooledMySQLConnection, db_result, user_id: int) -> models.VCS: return models.VCS( id=db_result['id'], name=db_result['name'], description=db_result['description'], - project=get_cvs_project(db_connection, project_id=db_result['project']), + project=get_cvs_project(db_connection, project_id=db_result['project'], user_id=user_id), datetime_created=db_result['datetime_created'], year_from=db_result['year_from'], year_to=db_result['year_to'], @@ -248,7 +265,7 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i vcs_id: int) -> List[models.ValueDriver]: logger.debug(f'Fetching all value drivers for vcs with id={vcs_id}') - get_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project + check_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project try: select_statement = MySQLStatementBuilder(db_connection) @@ -295,7 +312,7 @@ def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_ vcs_row: int, user_id: int) -> List[models.ValueDriver]: logger.debug(f'Fetching all value drivers for vcs with id={vcs_id} and vcs row with id={vcs_row}') - get_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project + check_vcs(db_connection, project_id, vcs_id) # Perform checks for existing VCS and matching project vcs_row = get_vcs_row(db_connection, project_id, vcs_row) value_drivers = [] @@ -860,7 +877,7 @@ def delete_stakeholder_need(db_connection: PooledMySQLConnection, need_id: int) def get_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> List[models.VcsRow]: logger.debug(f'Fetching all table for VCS with id={vcs_id}.') - get_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project + check_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project select_statement = MySQLStatementBuilder(db_connection) results = select_statement \ @@ -940,7 +957,7 @@ def edit_vcs_table(db_connection: PooledMySQLConnection, project_id: int, vcs_id updated_vcs_rows: List[models.VcsRowPost]) -> bool: logger.debug(f'Editing vcs table') - get_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project + 
check_vcs(db_connection, project_id, vcs_id) # Check if VCS exists and belongs to project updated_vcs_rows = remove_duplicate_names(db_connection, project_id, updated_vcs_rows) @@ -1068,8 +1085,9 @@ def remove_duplicate_names(db_connection: PooledMySQLConnection, project_id: int # Duplicate a vcs n times -def duplicate_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, n: int) -> List[models.VCS]: - vcs = get_vcs(db_connection, project_id, vcs_id) +def duplicate_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, n: int, + user_id: int) -> List[models.VCS]: + vcs = get_vcs(db_connection, project_id, vcs_id, user_id) vcs_list = [] for i in range(n): vcs_post = models.VCSPost( @@ -1133,12 +1151,13 @@ def duplicate_vcs_table(db_connection: PooledMySQLConnection, project_id: int, v return True -def duplicate_whole_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, n: int) -> List[models.VCS]: +def duplicate_whole_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, n: int, + user_id: int) -> List[models.VCS]: logger.debug(f'Duplicate vcs with id = {vcs_id}, {n} times') table = get_vcs_table(db_connection, project_id, vcs_id) - vcs_list = duplicate_vcs(db_connection, project_id, vcs_id, n) + vcs_list = duplicate_vcs(db_connection, project_id, vcs_id, n, user_id) [duplicate_vcs_table(db_connection, project_id, vcs.id, table) for vcs in vcs_list] diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index c73f7d7f..6a5f4f4e 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -586,7 +586,7 @@ def seed_random_sim_settings(user_id: int, project_id: int) -> sim_model.SimSett runs=runs ) - sim_impl.edit_sim_settings(project_id, sim_settings) + sim_impl.edit_sim_settings(project_id, sim_settings, user_id) return sim_impl.get_sim_settings(project_id) diff --git a/tests/apps/cvs/vcs/test_vcs.py b/tests/apps/cvs/vcs/test_vcs.py index a9fec112..b5e9a1e1 100644 --- a/tests/apps/cvs/vcs/test_vcs.py +++ b/tests/apps/cvs/vcs/test_vcs.py @@ -82,7 +82,7 @@ def test_create_vcs(client, std_headers, std_user): res = client.post(f'/api/cvs/project/{project.id}/vcs', headers=std_headers, json=vcs.dict()) # Assert assert res.status_code == 200 # 200 OK - assert len(impl_vcs.get_all_vcs(project.id).chunk) == 1 + assert len(impl_vcs.get_all_vcs(project.id, current_user.id).chunk) == 1 assert res.json()["name"] == vcs.name assert res.json()["description"] == vcs.description assert res.json()["year_from"] == vcs.year_from @@ -142,10 +142,10 @@ def test_edit_vcs(client, std_headers, std_user): }) # Assert assert res.status_code == 200 # 200 OK - assert impl_vcs.get_vcs(project.id, vcs.id).name == "new name" - assert impl_vcs.get_vcs(project.id, vcs.id).description == vcs.description - assert impl_vcs.get_vcs(project.id, vcs.id).year_from == vcs.year_from - assert impl_vcs.get_vcs(project.id, vcs.id).year_to == vcs.year_to + assert impl_vcs.get_vcs(project.id, vcs.id, current_user.id).name == "new name" + assert impl_vcs.get_vcs(project.id, vcs.id, current_user.id).description == vcs.description + assert impl_vcs.get_vcs(project.id, vcs.id, current_user.id).year_from == vcs.year_from + assert impl_vcs.get_vcs(project.id, vcs.id, current_user.id).year_to == vcs.year_to # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -185,7 +185,7 @@ def test_delete_vcs(client, std_headers, std_user): res = 
client.delete(f'/api/cvs/project/{project.id}/vcs/{vcs.id}', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK - assert len(impl_vcs.get_all_vcs(project.id).chunk) == 0 + assert len(impl_vcs.get_all_vcs(project.id, current_user.id).chunk) == 0 # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -204,7 +204,7 @@ def test_duplicate_vcs(client, std_headers, std_user): res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/duplicate/{2}', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK - assert len(impl_vcs.get_all_vcs(project.id).chunk) == 3 + assert len(impl_vcs.get_all_vcs(project.id, current_user.id).chunk) == 3 # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) From 0141ee5978bf1f5c0d91e6b17eb7e498c82b0578 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 24 Jul 2023 18:46:02 +0200 Subject: [PATCH 088/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 4552ebce61bc77305d7eb3ccd54c29eb875d6138 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 11:19:06 +0200 Subject: [PATCH 089/210] tests get_vcs fix --- sedbackend/apps/cvs/vcs/implementation.py | 4 +-- sedbackend/apps/cvs/vcs/router.py | 5 +-- sedbackend/apps/cvs/vcs/storage.py | 4 +-- .../test_connect_vcs_design.py | 34 +++++++++---------- tests/apps/cvs/design/test_design.py | 10 +++--- tests/apps/cvs/design/test_design_group.py | 2 +- tests/apps/cvs/life_cycle/test_dsm_files.py | 14 ++++---- .../market_input/test_market_input_values.py | 10 +++--- tests/apps/cvs/simulation/test_simulation.py | 8 ++--- tests/apps/cvs/simulation/testutils.py | 2 +- tests/apps/cvs/testutils.py | 4 +-- tests/apps/cvs/vcs/test_subprocesses.py | 4 +-- tests/apps/cvs/vcs/test_value_drivers.py | 10 +++--- tests/apps/cvs/vcs/test_vcs.py | 14 ++++---- tests/apps/cvs/vcs/test_vcs_table.py | 8 ++--- 15 files changed, 67 insertions(+), 66 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index c837d2ca..42447b83 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -53,10 +53,10 @@ def get_vcs(project_id: int, vcs_id: int, user_id: int) -> models.VCS: ) -def create_vcs(project_id: int, vcs_post: models.VCSPost) -> models.VCS: +def create_vcs(project_id: int, vcs_post: models.VCSPost, user_id: int) -> models.VCS: try: with get_connection() as con: - result = storage.create_vcs(con, project_id, vcs_post) + result = storage.create_vcs(con, project_id, vcs_post, user_id) con.commit() return result except project_exceptions.CVSProjectNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 01d2e404..8bf455aa 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -43,8 +43,9 @@ async def get_vcs(native_project_id: int, vcs_id: int, user: User = Depends(get_ response_model=models.VCS, 
dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def create_vcs(native_project_id: int, vcs_post: models.VCSPost) -> models.VCS: - return implementation.create_vcs(native_project_id, vcs_post) +async def create_vcs(native_project_id: int, vcs_post: models.VCSPost, + user: User = Depends(get_current_active_user)) -> models.VCS: + return implementation.create_vcs(native_project_id, vcs_post, user.id) @router.put( diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index fd2525c8..7310d872 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -113,10 +113,10 @@ def check_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int return result -def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: models.VCSPost) -> models.VCS: +def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: models.VCSPost, user_id: int) -> models.VCS: logger.debug(f'Creating a VCS in project with id={project_id}.') - #get_cvs_project(db_connection, project_id) # Perform checks for existing project and correct user + get_cvs_project(db_connection, project_id, user_id) # Perform checks for existing project and correct user if vcs_post.year_to < vcs_post.year_from: raise exceptions.VCSYearFromGreaterThanYearToException diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 3715a02c..1b066f61 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -11,7 +11,7 @@ def test_create_formulas(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows is None: raise Exception @@ -57,7 +57,7 @@ def test_create_formulas_no_optional(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows is None: raise Exception @@ -96,7 +96,7 @@ def test_get_all_formulas(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -123,7 +123,7 @@ def test_get_all_formulas_invalid_project(client, std_headers, std_user): project = tu.seed_random_project(current_user.id) invalid_proj_id = project.id + 1 - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -145,7 +145,7 @@ def test_get_all_formulas_invalid_vcs(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) invalid_vcs_id = vcs.id + 1 design_group = 
tu.seed_random_design_group(project.id) @@ -170,7 +170,7 @@ def get_all_formulas_invalid_design_group(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) invalid_dg_id = design_group.id + 1 @@ -193,7 +193,7 @@ def test_edit_formulas(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -239,7 +239,7 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -279,7 +279,7 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows == None: raise Exception @@ -319,7 +319,7 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows == None: raise Exception @@ -359,7 +359,7 @@ def test_edit_formulas_invalid_project(client, std_headers, std_user): project = tu.seed_random_project(current_user.id) invalid_proj_id = project.id + 1 - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows is None: raise Exception @@ -398,7 +398,7 @@ def test_delete_formulas(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -424,7 +424,7 @@ def test_delete_formulas_invalid_project(client, std_headers, std_user): project = tu.seed_random_project(current_user.id) invalid_proj_id = project.id + 1 - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -447,7 +447,7 @@ def test_delete_formulas_invalid_vcs_row(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) # Act @@ -471,7 +471,7 @@ def test_delete_formulas_invalid_design_group(client, std_headers, std_user): current_user = 
impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) invalid_dg_id = design_group.id + 1 @@ -495,7 +495,7 @@ def test_get_vcs_dg_pairs(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcss = [tu.seed_random_vcs(project.id) for _ in range(4)] + vcss = [tu.seed_random_vcs(project.id, current_user.id) for _ in range(4)] dgs = [tu.seed_random_design_group(project.id) for _ in range(4)] formulas = [] @@ -524,7 +524,7 @@ def test_get_vcs_dg_pairs_invalid_project(client, std_headers, std_user): project = tu.seed_random_project(current_user.id) invalid_proj_id = project.id + 1 - vcss = [tu.seed_random_vcs(project.id) for _ in range(4)] + vcss = [tu.seed_random_vcs(project.id, current_user.id) for _ in range(4)] dgs = [tu.seed_random_design_group(project.id) for _ in range(4)] formulas = [] diff --git a/tests/apps/cvs/design/test_design.py b/tests/apps/cvs/design/test_design.py index d7c90c40..12c0fd0c 100644 --- a/tests/apps/cvs/design/test_design.py +++ b/tests/apps/cvs/design/test_design.py @@ -9,7 +9,7 @@ def test_create_design(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 10) # To get value drivers to vcs design_group = tu.seed_random_design_group(project.id, None, vcs.id) # Act @@ -41,7 +41,7 @@ def test_create_design_no_values(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 10) # To get value drivers to vcs design_group = tu.seed_random_design_group(project.id, None, vcs.id) # Act @@ -68,7 +68,7 @@ def test_edit_designs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 10) # To get value drivers to vcs design_group = tu.seed_random_design_group(project.id, None, vcs.id) designs = tu.seed_random_designs(project.id, design_group.id, 1) @@ -102,7 +102,7 @@ def test_delete_designs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id, None, vcs.id) tu.seed_random_designs(project.id, design_group.id, 1) # Act @@ -123,7 +123,7 @@ def test_get_all_designs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = 
tu.seed_random_design_group(project.id, None, vcs.id) tu.seed_random_designs(project.id, design_group.id, 10) # Act diff --git a/tests/apps/cvs/design/test_design_group.py b/tests/apps/cvs/design/test_design_group.py index bfed55f6..44bdb253 100644 --- a/tests/apps/cvs/design/test_design_group.py +++ b/tests/apps/cvs/design/test_design_group.py @@ -40,7 +40,7 @@ def test_create_design_group_from_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 10) # Act res = client.post(f'/api/cvs/project/{project.id}/design-group', headers=std_headers, json={ diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 53a9f05f..139d5ebf 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -41,7 +41,7 @@ def test_upload_dsm_file(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) rows = [std_rows[0], std_rows[1]] table = tu.create_vcs_table(project.id, vcs.id, rows) @@ -68,7 +68,7 @@ def test_upload_invalid_file_extension(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) rows = std_rows @@ -97,7 +97,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) row1 = tu.vcs_model.VcsRowPost( index=0, @@ -140,7 +140,7 @@ def test_get_dsm_file_id(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) rows = [std_rows[0], std_rows[1]] table = tu.create_vcs_table(project.id, vcs.id, rows) @@ -170,7 +170,7 @@ def test_get_dsm_matrix(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) rows = [std_rows[0], std_rows[1]] tu.create_vcs_table(project.id, vcs.id, rows) @@ -203,7 +203,7 @@ def test_save_dsm(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) rows = [std_rows[0], std_rows[1]] tu.create_vcs_table(project.id, vcs.id, rows) @@ -235,7 +235,7 @@ def test_apply_dsm_to_all(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcss = [tu.seed_random_vcs(project.id) for _ in range(3)] + vcss 
= [tu.seed_random_vcs(project.id, current_user.id) for _ in range(3)] rows = [std_rows[0], std_rows[1]] rows_alt = [std_rows[0], std_rows[1], std_rows[2]] diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index cb679218..278687e7 100644 --- a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -9,7 +9,7 @@ def test_create_market_input(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) market_input = tu.seed_random_market_input(project.id) value = random.random() * 100 # Act @@ -37,7 +37,7 @@ def test_create_market_input_invalid_vcs_id(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) market_input = tu.seed_random_market_input(project.id) value = random.random() * 100 # Act @@ -60,7 +60,7 @@ def test_edit_market_input_value(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) market_input = tu.seed_random_market_input(project.id) market_input_value = tu.seed_random_market_input_values(project.id, vcs.id, market_input.id)[0] new_value = random.random() * 100 @@ -89,7 +89,7 @@ def test_delete_market_input_value(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) market_input = tu.seed_random_market_input(project.id) tu.seed_random_market_input_values(project.id, vcs.id, market_input.id) # Act @@ -108,7 +108,7 @@ def test_get_market_input_values(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) market_input = tu.seed_random_market_input(project.id) market_input_value = tu.seed_random_market_input_values(project.id, vcs.id, market_input.id)[0] # Act diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index b4ff497a..7865acaa 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -267,7 +267,7 @@ def test_run_single_xlsx_sim(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) row1 = tu.vcs_model.VcsRowPost( index=0, @@ -393,7 +393,7 @@ def test_run_xlsx_sim(client, std_headers, std_user): vcss = [] designs = [] for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcss.append(vcs.id) table = tu.create_vcs_table(project.id, vcs.id, rows) design_group = 
tu.seed_random_design_group(project.id) @@ -444,7 +444,7 @@ def test_run_single_csv_sim(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) row1 = tu.vcs_model.VcsRowPost( index=0, @@ -570,7 +570,7 @@ def test_run_csv_sim(client, std_headers, std_user): designs = tu.seed_random_designs(project.id, design_group.id, 3) vcss = [] for _ in range(amount): - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcss.append(vcs.id) table = tu.create_vcs_table(project.id, vcs.id, rows) formulas = tu.create_formulas(project.id, table, design_group.id) diff --git a/tests/apps/cvs/simulation/testutils.py b/tests/apps/cvs/simulation/testutils.py index e9a24c00..c8d6632a 100644 --- a/tests/apps/cvs/simulation/testutils.py +++ b/tests/apps/cvs/simulation/testutils.py @@ -8,7 +8,7 @@ def setup_single_simulation(user_id) -> Tuple[CVSProject, VCS, DesignGroup, List[Design], SimSettings]: project = tu.seed_random_project(user_id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) tu.seed_random_formulas(project.id, vcs.id, design_group.id, user_id, 15) #Also creates the vcs rows design = tu.seed_random_designs(project.id, design_group.id, 1) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 6a5f4f4e..3d5407ce 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -81,10 +81,10 @@ def random_VCS(name: str = None, description: str = None, year_from: int = None, return vcs -def seed_random_vcs(project_id): +def seed_random_vcs(project_id: int, user_id: int): vcs = random_VCS() - new_vcs = vcs_impl.create_vcs(project_id, vcs) + new_vcs = vcs_impl.create_vcs(project_id, vcs, user_id) return new_vcs diff --git a/tests/apps/cvs/vcs/test_subprocesses.py b/tests/apps/cvs/vcs/test_subprocesses.py index 8eb564a9..d95c6ce5 100644 --- a/tests/apps/cvs/vcs/test_subprocesses.py +++ b/tests/apps/cvs/vcs/test_subprocesses.py @@ -85,7 +85,7 @@ def test_edit_subprocess(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) subprocess = tu.seed_random_subprocesses(project.id, 1)[0] # Act res = client.put(f'/api/cvs/project/{project.id}/subprocess/{subprocess.id}', headers=std_headers, json={ @@ -107,7 +107,7 @@ def test_delete_subprocess(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) subprocess = tu.seed_random_subprocesses(project.id, 1)[0] # Act res = client.delete(f'/api/cvs/project/{project.id}/subprocess/{subprocess.id}', headers=std_headers) diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index 50cc67e0..54df00e2 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -145,7 +145,7 @@ def test_get_all_value_drivers_from_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) 
project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id) # Act @@ -163,7 +163,7 @@ def test_add_value_drivers_to_needs(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id) vds = [] @@ -198,7 +198,7 @@ def test_add_driver_needs_invalid_needs(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id) vds = [] @@ -229,7 +229,7 @@ def test_add_driver_needs_invalid_drivers(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id) needs = [] @@ -259,7 +259,7 @@ def test_get_all_value_drivers_vcs_row(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs_row = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1)[0] needs = vcs_row.stakeholder_needs value_drivers = [] diff --git a/tests/apps/cvs/vcs/test_vcs.py b/tests/apps/cvs/vcs/test_vcs.py index b5e9a1e1..2a3de73b 100644 --- a/tests/apps/cvs/vcs/test_vcs.py +++ b/tests/apps/cvs/vcs/test_vcs.py @@ -11,7 +11,7 @@ def test_get_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) # Act res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}', headers=std_headers) # Assert @@ -40,7 +40,7 @@ def test_get_vcs_project_no_match(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) project2 = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) # Act res = client.get(f'/api/cvs/project/{project2.id}/vcs/{vcs.id}', headers=std_headers) # Assert @@ -58,7 +58,7 @@ def test_get_vcss(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - tu.seed_random_vcs(project.id) + tu.seed_random_vcs(project.id, current_user.id) # Act res = client.get(f'/api/cvs/project/{project.id}/vcs/all', headers=std_headers) # Assert @@ -131,7 +131,7 @@ def test_edit_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs.name = 
"new name" # Act res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}', headers=std_headers, @@ -155,7 +155,7 @@ def test_edit_vcs_year_from_greater_than_year_to(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) vcs.year_from = 2020 vcs.year_to = 2019 # Act @@ -180,7 +180,7 @@ def test_delete_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) # Act res = client.delete(f'/api/cvs/project/{project.id}/vcs/{vcs.id}', headers=std_headers) # Assert @@ -199,7 +199,7 @@ def test_duplicate_vcs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) # Act res = client.post(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/duplicate/{2}', headers=std_headers) # Assert diff --git a/tests/apps/cvs/vcs/test_vcs_table.py b/tests/apps/cvs/vcs/test_vcs_table.py index 202cc518..ae096fde 100644 --- a/tests/apps/cvs/vcs/test_vcs_table.py +++ b/tests/apps/cvs/vcs/test_vcs_table.py @@ -12,7 +12,7 @@ def test_get_vcs_table(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 2) # Act res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/table', headers=std_headers) @@ -42,7 +42,7 @@ def test_create_vcs_table(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) value_driver = tu.seed_random_value_driver(current_user.id, project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/table', headers=std_headers, @@ -76,7 +76,7 @@ def test_edit_vcs_table(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) value_driver = tu.seed_random_value_driver(current_user.id, project.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) @@ -114,7 +114,7 @@ def test_delete_vcs_table(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) table = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 2) # Act From 326de45eac8be3331c6f9408d39e8d8eb0c30562 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 12:44:11 +0200 Subject: [PATCH 090/210] testing --- sedbackend/apps/cvs/life_cycle/storage.py | 2 +- sedbackend/apps/cvs/project/implementation.py | 4 ++-- sedbackend/apps/cvs/project/router.py 
| 5 +++-- sedbackend/apps/cvs/project/storage.py | 1 + sedbackend/apps/cvs/vcs/storage.py | 8 +++++--- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index ca63c1e2..a3c60674 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -271,7 +271,7 @@ def update_bpmn(db_connection: PooledMySQLConnection, project_id: int, vcs_id: i logger.debug(f'Updating bpmn with vcs id={vcs_id}.') # Check if vcs exists and matches project id - vcs_storage.get_vcs(db_connection, project_id, vcs_id) + vcs_storage.check_vcs(db_connection, project_id, vcs_id) for node in bpmn.nodes: updated_node = models.NodePost( diff --git a/sedbackend/apps/cvs/project/implementation.py b/sedbackend/apps/cvs/project/implementation.py index 3a899fb0..6f25ed86 100644 --- a/sedbackend/apps/cvs/project/implementation.py +++ b/sedbackend/apps/cvs/project/implementation.py @@ -37,10 +37,10 @@ def create_cvs_project(project_post: models.CVSProjectPost, user_id: int) -> mod return result -def edit_cvs_project(project_id: int, project_post: models.CVSProjectPost) -> models.CVSProject: +def edit_cvs_project(project_id: int, project_post: models.CVSProjectPost, user_id) -> models.CVSProject: try: with get_connection() as con: - result = storage.edit_cvs_project(con, project_id, project_post) + result = storage.edit_cvs_project(con, project_id, project_post, user_id) con.commit() return result except exceptions.CVSProjectNotFoundException: diff --git a/sedbackend/apps/cvs/project/router.py b/sedbackend/apps/cvs/project/router.py index e546c2a6..e7bf1301 100644 --- a/sedbackend/apps/cvs/project/router.py +++ b/sedbackend/apps/cvs/project/router.py @@ -49,8 +49,9 @@ async def create_csv_project(project_post: models.CVSProjectPost, response_model=models.CVSProject, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def edit_csv_project(native_project_id: int, project_post: models.CVSProjectPost) -> models.CVSProject: - return implementation.edit_cvs_project(project_id=native_project_id, project_post=project_post) +async def edit_csv_project(native_project_id: int, project_post: models.CVSProjectPost, + user: User = Depends(get_current_active_user)) -> models.CVSProject: + return implementation.edit_cvs_project(project_id=native_project_id, project_post=project_post, user_id=user.id) @router.delete( diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 4b208077..27ff5458 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -53,6 +53,7 @@ def get_cvs_project(db_connection: PooledMySQLConnection, project_id: int, user_ return populate_cvs_project(db_connection, result) + def create_cvs_project(db_connection: PooledMySQLConnection, project: models.CVSProjectPost, user_id: int) -> models.CVSProject: logger.debug(f'Creating a CVS project for user with id={user_id}.') diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 7310d872..0de6f5b3 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -95,6 +95,7 @@ def get_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, return populate_vcs(db_connection, result, user_id) + def check_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int) -> models.VCS: logger.debug(f'CHECKING VCS with id={vcs_id}.') @@ 
-113,10 +114,11 @@ def check_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int return result -def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: models.VCSPost, user_id: int) -> models.VCS: +def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: models.VCSPost, + user_id: int) -> models.VCS: logger.debug(f'Creating a VCS in project with id={project_id}.') - get_cvs_project(db_connection, project_id, user_id) # Perform checks for existing project and correct user + check_cvs_project(db_connection, project_id, user_id) # Perform checks for existing project and correct user if vcs_post.year_to < vcs_post.year_from: raise exceptions.VCSYearFromGreaterThanYearToException @@ -128,7 +130,7 @@ def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: .execute(fetch_type=FetchType.FETCH_NONE) vcs_id = insert_statement.last_insert_id - vcs = get_vcs(db_connection, project_id, vcs_id) + vcs = get_vcs(db_connection, project_id, vcs_id, user_id) return vcs From b9fdaee40644346dead6a77cee7ca039e804b4a8 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 12:50:52 +0200 Subject: [PATCH 091/210] testing --- sedbackend/apps/cvs/vcs/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 0de6f5b3..cae6ec9f 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -118,7 +118,7 @@ def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: user_id: int) -> models.VCS: logger.debug(f'Creating a VCS in project with id={project_id}.') - check_cvs_project(db_connection, project_id, user_id) # Perform checks for existing project and correct user + get_cvs_project(db_connection, project_id, user_id) # Perform checks for existing project and correct user if vcs_post.year_to < vcs_post.year_from: raise exceptions.VCSYearFromGreaterThanYearToException From ba2b57ca2724b49e4369b0ecc93b87f04b27b8fd Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 13:14:36 +0200 Subject: [PATCH 092/210] testing --- sedbackend/apps/cvs/vcs/implementation.py | 4 ++-- sedbackend/apps/cvs/vcs/router.py | 8 +++++--- sedbackend/apps/cvs/vcs/storage.py | 5 +++-- tests/apps/cvs/simulation/testutils.py | 2 +- tests/apps/cvs/testutils.py | 4 ++-- 5 files changed, 13 insertions(+), 10 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index 42447b83..aecf936a 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -76,10 +76,10 @@ def create_vcs(project_id: int, vcs_post: models.VCSPost, user_id: int) -> model ) -def edit_vcs(project_id: int, vcs_id: int, vcs_post: models.VCSPost) -> models.VCS: +def edit_vcs(project_id: int, vcs_id: int, vcs_post: models.VCSPost, user_id: int) -> models.VCS: try: with get_connection() as con: - result = storage.edit_vcs(con, project_id, vcs_id, vcs_post) + result = storage.edit_vcs(con, project_id, vcs_id, vcs_post, user_id) con.commit() return result except exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index 8bf455aa..c62b719d 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -54,8 +54,9 @@ async def create_vcs(native_project_id: int, vcs_post: models.VCSPost, response_model=models.VCS, 
dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def edit_vcs(native_project_id: int, vcs_id: int, vcs_post: models.VCSPost) -> models.VCS: - return implementation.edit_vcs(native_project_id, vcs_id, vcs_post) +async def edit_vcs(native_project_id: int, vcs_id: int, vcs_post: models.VCSPost, + user: User = Depends(get_current_active_user)) -> models.VCS: + return implementation.edit_vcs(native_project_id, vcs_id, vcs_post, user.id) @router.delete( @@ -264,5 +265,6 @@ async def delete_subprocess(native_project_id: int, subprocess_id: int) -> bool: response_model=List[models.VCS], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def duplicate_vcs(native_project_id: int, vcs_id: int, n: int, user: User = Depends(get_current_active_user)) -> List[models.VCS]: +async def duplicate_vcs(native_project_id: int, vcs_id: int, n: int, + user: User = Depends(get_current_active_user)) -> List[models.VCS]: return implementation.duplicate_vcs(native_project_id, vcs_id, n, user.id) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index cae6ec9f..d5086095 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -134,7 +134,8 @@ def create_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_post: return vcs -def edit_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, new_vcs: models.VCSPost) -> models.VCS: +def edit_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, + new_vcs: models.VCSPost, user_id: int) -> models.VCS: logger.debug(f'Editing VCS with id={vcs_id}.') check_vcs(db_connection, project_id, vcs_id) # Perform checks for existing project and correct user @@ -152,7 +153,7 @@ def edit_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, update_statement.where('id = %s', [vcs_id]) _, rows = update_statement.execute(return_affected_rows=True) - return get_vcs(db_connection, project_id, vcs_id) + return get_vcs(db_connection, project_id, vcs_id, user_id) def delete_vcs(db_connection: PooledMySQLConnection, user_id: int, project_id: int, vcs_id: int) -> bool: diff --git a/tests/apps/cvs/simulation/testutils.py b/tests/apps/cvs/simulation/testutils.py index c8d6632a..feedce8f 100644 --- a/tests/apps/cvs/simulation/testutils.py +++ b/tests/apps/cvs/simulation/testutils.py @@ -8,7 +8,7 @@ def setup_single_simulation(user_id) -> Tuple[CVSProject, VCS, DesignGroup, List[Design], SimSettings]: project = tu.seed_random_project(user_id) - vcs = tu.seed_random_vcs(project.id, current_user.id) + vcs = tu.seed_random_vcs(project.id, user_id) design_group = tu.seed_random_design_group(project.id) tu.seed_random_formulas(project.id, vcs.id, design_group.id, user_id, 15) #Also creates the vcs rows design = tu.seed_random_designs(project.id, design_group.id, 1) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 3d5407ce..b9433bdb 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -429,7 +429,7 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use connect_impl.edit_formulas( project_id, vcs_row.id, design_group_id, formula_post) - return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id) + return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id, user_id) def create_formulas(project_id: int, vcs_rows: List[vcs_model.VcsRow], dg_id: int) -> List[FormulaGet]: @@ -548,7 
+548,7 @@ def seed_random_sim_settings(user_id: int, project_id: int) -> sim_model.SimSett start_time = round(tu.random.uniform(1, 300), ndigits=5) end_time = round(tu.random.uniform(300, 1000), ndigits=5) if tu.random.getrandbits(1): - vcs = seed_random_vcs(project_id) + vcs = seed_random_vcs(project_id, user_id) rows = seed_vcs_table_rows(user_id, project_id, vcs.id, 3) for row in rows: if row.subprocess is not None: From faf342c09896357de0f57f8bd9f80fc187de464a Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 13:47:42 +0200 Subject: [PATCH 093/210] tests hopefully fixed --- sedbackend/apps/cvs/vcs/storage.py | 2 +- tests/apps/cvs/simulation/test_sim_multiprocessing.py | 2 +- tests/apps/cvs/simulation/test_simulation.py | 2 +- tests/apps/cvs/testutils.py | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index d5086095..ceb51af6 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -1099,7 +1099,7 @@ def duplicate_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: year_from=vcs.year_from, year_to=vcs.year_to ) - new_vcs = create_vcs(db_connection, vcs.project.id, vcs_post) + new_vcs = create_vcs(db_connection, vcs.project.id, vcs_post, user_id) vcs_list.append(new_vcs) return vcs_list diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index 7b129dd5..da5436e4 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -182,7 +182,7 @@ def test_run_mc_sim_rate_invalid_order(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) + first_tech_process = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id, current_user.id) if first_tech_process is None: raise sim_exceptions.NoTechnicalProcessException settings.monte_carlo = False diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 7865acaa..46e489f1 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -204,7 +204,7 @@ def test_run_sim_rate_invalid_order(client, std_headers, std_user): #Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - flow_proc = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id) + flow_proc = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id, current_user.id) settings.monte_carlo = False settings.flow_process = flow_proc.iso_process.name if flow_proc.iso_process is not None else flow_proc.subprocess.name diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index b9433bdb..a7743bc7 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -457,11 +457,11 @@ def delete_formulas(project_id: int, vcsRow_Dg_ids: List[Tuple[int, int]]): connect_impl.delete_formulas(project_id, vcs_row, dg) -def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int) -> vcs_model.VcsRow: +def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> 
vcs_model.VcsRow: rows = list(sorted(vcs_impl.get_vcs_table( project_id, vcs_id), key=lambda row: row.index)) formulas = connect_impl.get_all_formulas( - project_id, vcs_id, design_group_id) + project_id, vcs_id, design_group_id, user_id) rows.reverse() # Reverse to find last technical process for row in rows: From 0c10eb11ae315ddf73556eda9d7a2e2ace17bfde Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 14:21:56 +0200 Subject: [PATCH 094/210] subprojects deleted on delete cvs project --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/project/exceptions.py | 4 ++++ sedbackend/apps/cvs/project/storage.py | 11 +++++++++++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..9a0b9d0e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/project/exceptions.py b/sedbackend/apps/cvs/project/exceptions.py index b44c6ed8..a8f268c9 100644 --- a/sedbackend/apps/cvs/project/exceptions.py +++ b/sedbackend/apps/cvs/project/exceptions.py @@ -12,3 +12,7 @@ class CVSProjectFailedDeletionException(Exception): class CVSProjectNoMatchException(Exception): pass + + +class SubProjectFailedDeletionException(Exception): + pass \ No newline at end of file diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 27ff5458..0a98437b 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -16,6 +16,9 @@ CVS_PROJECT_TABLE = 'cvs_projects' CVS_PROJECT_COLUMNS = ['id', 'name', 'description', 'currency', 'owner_id', 'datetime_created'] +PROJECTS_SUBPROJECTS_TABLE = 'projects_subprojects' +PROJECTS_SUBPROJECTS_COLUMNS = ['id', 'name', 'application_sid', 'project_id', 'native_project_id', + 'owner_id', 'datetime_created'] def get_all_cvs_project(db_connection: PooledMySQLConnection, user_id: int) -> ListChunk[models.CVSProject]: logger.debug(f'Fetching all CVS projects for user with id={user_id}.') @@ -101,6 +104,14 @@ def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, us if rows == 0: raise exceptions.CVSProjectFailedDeletionException + delete_subproject_statement = MySQLStatementBuilder(db_connection) + _, subproject_rows = delete_subproject_statement.delete(PROJECTS_SUBPROJECTS_TABLE) \ + .where('application_sid = %s AND native_project_id = %s', ['MOD.CVS', project_id]) \ + .execute(return_affected_rows=True) + + if subproject_rows == 0: + raise exceptions.SubProjectFailedDeletionException + return True From 1add77629448562a8e0c4d773582d5169d6b36d6 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 14:23:23 +0200 Subject: [PATCH 095/210] exception added --- sedbackend/apps/cvs/project/implementation.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/sedbackend/apps/cvs/project/implementation.py b/sedbackend/apps/cvs/project/implementation.py index 6f25ed86..0d7763f4 100644 --- a/sedbackend/apps/cvs/project/implementation.py +++ b/sedbackend/apps/cvs/project/implementation.py @@ -71,6 +71,11 @@ def delete_cvs_project(project_id: int, user_id: int) -> bool: status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f'Failed to remove project with id={project_id}.', ) + except 
exceptions.SubProjectFailedDeletionException: + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f'Failed to remove sub-project with CVS native_project_id={project_id}' + ) except auth_ex.UnauthorizedOperationException: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, From 21ea620d2733e6aae548a4fa8c51dfbce65ae909 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 25 Jul 2023 14:55:57 +0200 Subject: [PATCH 096/210] include value drivers and external factors in formulas --- .../apps/cvs/link_design_lifecycle/models.py | 9 ++ .../apps/cvs/link_design_lifecycle/storage.py | 125 +++++++++++++++--- sql/V230707_cvs.sql | 7 + sql/V230721_cvs.sql | 28 ++-- 4 files changed, 139 insertions(+), 30 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 52d2b294..5b07e1e5 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -1,6 +1,11 @@ +from typing import List + from pydantic import BaseModel from enum import Enum +from sedbackend.apps.cvs.market_input.models import MarketInputGet +from sedbackend.apps.cvs.vcs.models import ValueDriver + class TimeFormat(str, Enum): """ @@ -28,6 +33,8 @@ class FormulaGet(BaseModel): cost: str revenue: str rate: Rate + value_drivers: List[ValueDriver] = [] + external_factors: List[MarketInputGet] = [] class FormulaPost(BaseModel): @@ -36,6 +43,8 @@ class FormulaPost(BaseModel): cost: str revenue: str rate: Rate + value_drivers: List[int] = [] + external_factors: List[int] = [] class VcsDgPairs(BaseModel): diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 890d6e4a..3f21b638 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -11,13 +11,13 @@ from mysqlsb import FetchType, MySQLStatementBuilder CVS_FORMULAS_TABLE = 'cvs_design_mi_formulas' -CVS_FORMULAS_COLUMNS = ['vcs_row', 'design_group', 'time', 'time_unit', 'cost', 'revenue', 'rate'] +CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_unit', 'cost', 'revenue', 'rate'] CVS_FORMULAS_VALUE_DRIVERS_TABLE = 'cvs_formulas_value_drivers' -CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['formulas', 'value_driver'] +CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['vcs_row', 'design_group', 'value_driver'] -CVS_FORMULAS_MARKET_INPUTS_TABLE = 'cvs_formulas_market_inputs' -CVS_FORMULAS_MARKET_INPUTS_COLUMNS = ['formulas', 'market_input'] +CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' +CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] def create_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, @@ -27,16 +27,74 @@ def create_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, desig values = [vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] - insert_statement = MySQLStatementBuilder(db_connection) - insert_statement \ - .insert(table=CVS_FORMULAS_TABLE, columns=CVS_FORMULAS_COLUMNS) \ - .set_values(values=values) \ - .execute(fetch_type=FetchType.FETCH_ONE) + try: + insert_statement = MySQLStatementBuilder(db_connection) + insert_statement \ + .insert(table=CVS_FORMULAS_TABLE, columns=CVS_FORMULAS_COLUMNS) \ + .set_values(values=values) \ + .execute(fetch_type=FetchType.FETCH_NONE) + except 
Exception as e: + logger.error(f'Error while inserting formulas: {e}') + raise exceptions.FormulasFailedUpdateException + + add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, formulas.value_drivers) + add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, formulas.external_factors) + + return True + + +def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + value_drivers: List[int]): + try: + prepared_list = [] + insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver) VALUES' + for value_driver_id in value_drivers: + insert_statement += f'(%s, %s, %s),' + prepared_list.append(vcs_row_id) + prepared_list.append(design_group_id) + prepared_list.append(value_driver_id) + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(insert_statement[:-1], prepared_list) + except Exception as e: + logger.error(f'Error while inserting value drivers: {e}') + raise exceptions.FormulasFailedUpdateException + + +def delete_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + value_drivers: List[int]): + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement \ + .delete(CVS_FORMULAS_VALUE_DRIVERS_TABLE) \ + .where('vcs_row = %s and design_group = %s and value_driver in %s', + [vcs_row_id, design_group_id, value_drivers]) \ + .execute(return_affected_rows=True) + + +def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + external_factors: List[int]): + try: + prepared_list = [] + insert_statement = f'INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, market_input) VALUES' + for external_factor_id in external_factors: + insert_statement += f'(%s, %s, %s),' + prepared_list.append(vcs_row_id) + prepared_list.append(design_group_id) + prepared_list.append(external_factor_id) + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(insert_statement[:-1], prepared_list) + except Exception as e: + logger.error(f'Error while inserting external factors: {e}') + raise exceptions.FormulasFailedUpdateException - if insert_statement is not None: # TODO actually check for potential problems - return True - return False +def delete_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + external_factors: List[int]): + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement \ + .delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) \ + .where('vcs_row = %s and design_group = %s and external_factors in %s', + [vcs_row_id, design_group_id, external_factors]) \ + .execute(return_affected_rows=True) def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, @@ -56,20 +114,55 @@ def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row create_formulas(db_connection, vcs_row_id, design_group_id, formulas) elif count == 1: logger.debug(f'Editing formulas') - columns = CVS_FORMULAS_COLUMNS[2:] + columns = CVS_FORMULAS_COLUMNS[3:] set_statement = ', '.join([col + ' = %s' for col in columns]) values = [formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] - # TODO update the connection with value drivers and mi also update_statement = MySQLStatementBuilder(db_connection) _, rows = update_statement \ .update(table=CVS_FORMULAS_TABLE, 
set_statement=set_statement, values=values) \ .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ .execute(return_affected_rows=True) - if rows > 1: - raise exceptions.TooManyFormulasUpdatedException + where_statement = "vcs_row = %s and design_group = %s and " \ + "value_driver IN (" + ",".join(["%s" for _ in range(len(formulas.value_drivers))]) + ")" + select_statement = MySQLStatementBuilder(db_connection) + value_driver_res = select_statement.select(CVS_FORMULAS_VALUE_DRIVERS_TABLE, CVS_FORMULAS_VALUE_DRIVERS_COLUMNS) \ + .where(where_statement, [vcs_row_id, design_group_id] + formulas.value_drivers) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + delete_value_drivers = [value_driver_id['id'] for value_driver_id in value_driver_res if value_driver_id not in + formulas.value_drivers] + add_value_drivers = [value_driver_id for value_driver_id in formulas.value_drivers if value_driver_id not in + [value_driver['id'] for value_driver in value_driver_res]] + + if add_value_drivers: + add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, add_value_drivers) + if delete_value_drivers: + delete_value_driver_formulas(db_connection, vcs_row_id, design_group_id, delete_value_drivers) + + where_statement = "vcs_row = %s and design_group = %s and " \ + "external_factor IN (" + ",".join(["%s" for _ in range(len(formulas.external_factors))]) + ")" + select_statement = MySQLStatementBuilder(db_connection) + external_factor_res = select_statement.select(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE, + CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS) \ + .where(where_statement, [vcs_row_id, design_group_id] + formulas.external_factors) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + delete_external_factors = [external_factor_id['id'] for external_factor_id in external_factor_res if + external_factor_id not in + formulas.external_factors] + add_external_factors = [external_factor_id for external_factor_id in formulas.value_drivers if + external_factor_id not in + [external_factor['id'] for external_factor in external_factor_res]] + + if add_external_factors: + add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, add_external_factors) + if delete_external_factors: + delete_external_factor_formulas(db_connection, vcs_row_id, design_group_id, delete_external_factors) + + else: raise exceptions.FormulasFailedUpdateException diff --git a/sql/V230707_cvs.sql b/sql/V230707_cvs.sql index 0ad143ec..28190929 100644 --- a/sql/V230707_cvs.sql +++ b/sql/V230707_cvs.sql @@ -5,3 +5,10 @@ ALTER TABLE `seddb`.`cvs_subprocesses` DROP FOREIGN KEY `cvs_subprocesses_ibfk_2`, DROP COLUMN `vcs`; SET FOREIGN_KEY_CHECKS=1; + +# Add project column to formulas +ALTER TABLE `seddb`.`cvs_design_mi_formulas` + ADD COLUMN `project` INT UNSIGNED NOT NULL FIRST, + ADD FOREIGN KEY(`project`) + REFERENCES `seddb`.`cvs_projects`(`id`) + ON DELETE CASCADE \ No newline at end of file diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 79a08553..c606a00a 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,14 +1,14 @@ -# Value driver to project relation -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` -( - `project` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, - PRIMARY KEY (`project`, `value_driver`), - FOREIGN KEY (`project`) - REFERENCES `seddb`.`cvs_projects`(`id`) - ON DELETE CASCADE, - FOREIGN KEY (`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) - ON DELETE CASCADE -); -CREATE UNIQUE INDEX 
`project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); \ No newline at end of file +# Value driver to project relation +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` +( + `project` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, + PRIMARY KEY (`project`, `value_driver`), + FOREIGN KEY (`project`) + REFERENCES `seddb`.`cvs_projects`(`id`) + ON DELETE CASCADE, + FOREIGN KEY (`value_driver`) + REFERENCES `seddb`.`cvs_value_drivers`(`id`) + ON DELETE CASCADE +); +CREATE UNIQUE INDEX IF NOT EXISTS `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); From cd7aaf4d20a4e0402be7326975091655b7707443 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 15:46:24 +0200 Subject: [PATCH 097/210] delete value driver when no project relations added --- sedbackend/apps/cvs/project/storage.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 0a98437b..95687fa8 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -1,10 +1,6 @@ -from fastapi import Depends from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection -from sedbackend.apps.core.authentication import exceptions as auth_exceptions -from sedbackend.apps.core.authentication.utils import get_current_active_user -from sedbackend.apps.core.users.models import User from sedbackend.apps.core.users.storage import db_get_user_safe_with_id from sedbackend.apps.cvs.project import models as models, exceptions as exceptions from sedbackend.libs.datastructures.pagination import ListChunk @@ -112,6 +108,24 @@ def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, us if subproject_rows == 0: raise exceptions.SubProjectFailedDeletionException + delete_value_drivers_without_project(db_connection) + + return True + + +# This function could be a mysql trigger instead +def delete_value_drivers_without_project(db_connection: PooledMySQLConnection) -> bool: + logger.debug(f'Checking and deleting if there are any value drivers without project relations') + + query = f'DELETE cvd FROM cvs_value_drivers cvd \ + LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ + WHERE cpvd.value_driver IS NULL;' + + with db_connection.cursor() as cursor: + cursor.execute(query) + rows = cursor.rowcount + logger.debug(f'Removed {rows} value drivers') + return True From cc0f2767893674b1ee2d747c0d9a7d97b48488a7 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 25 Jul 2023 15:50:06 +0200 Subject: [PATCH 098/210] reverted db.py --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 9a0b9d0e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From e3c49abb7c4f74c3ed70842145a48a9b782af3a0 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 27 Jul 2023 12:31:23 +0200 Subject: [PATCH 099/210] changed structure of formulas --- .../link_design_lifecycle/implementation.py | 2 +- .../apps/cvs/link_design_lifecycle/models.py | 2 - 
.../apps/cvs/link_design_lifecycle/storage.py | 157 ++++++++++-------- sql/V230707_cvs.sql | 46 ++++- .../test_connect_vcs_design.py | 38 ++--- 5 files changed, 147 insertions(+), 98 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index 58881952..0b0c79e5 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -14,7 +14,7 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_formulas: models.FormulaPost) -> bool: with get_connection() as con: try: - res = storage.edit_formulas(con, project_id, vcs_row_id, design_group_id, new_formulas) + res = storage.update_formulas(con, project_id, vcs_row_id, design_group_id, new_formulas) con.commit() return res except vcs_exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 5b07e1e5..3c239810 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -43,8 +43,6 @@ class FormulaPost(BaseModel): cost: str revenue: str rate: Rate - value_drivers: List[int] = [] - external_factors: List[int] = [] class VcsDgPairs(BaseModel): diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 3f21b638..e284f226 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -14,18 +14,22 @@ CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_unit', 'cost', 'revenue', 'rate'] CVS_FORMULAS_VALUE_DRIVERS_TABLE = 'cvs_formulas_value_drivers' -CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['vcs_row', 'design_group', 'value_driver'] +CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['vcs_row', 'design_group', 'value_driver', 'project'] CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] -def create_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - formulas: models.FormulaPost) -> bool: +def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, + formulas: models.FormulaPost): logger.debug(f'Creating formulas') - values = [vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, - formulas.rate.value] + # TODO extract from formula. 
For example with regex + value_drivers = [] + external_factors = [] + + values = [project_id, vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, + formulas.revenue, formulas.rate.value] try: insert_statement = MySQLStatementBuilder(db_connection) @@ -37,22 +41,46 @@ def create_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, desig logger.error(f'Error while inserting formulas: {e}') raise exceptions.FormulasFailedUpdateException - add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, formulas.value_drivers) - add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, formulas.external_factors) + if value_drivers: + add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_drivers, project_id) + if external_factors: + add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factors) - return True + +def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, project_id: int, + formulas: models.FormulaPost): + logger.debug(f'Editing formulas') + + # TODO extract from formula. For example with regex + value_drivers = [] + external_factors = [] + + columns = CVS_FORMULAS_COLUMNS[3:] + set_statement = ', '.join([col + ' = %s' for col in columns]) + + values = [formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] + + # Update formula row + update_statement = MySQLStatementBuilder(db_connection) + _, rows = update_statement \ + .update(table=CVS_FORMULAS_TABLE, set_statement=set_statement, values=values) \ + .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ + .execute(return_affected_rows=True) + + update_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_drivers, project_id) + + update_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factors) def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - value_drivers: List[int]): + value_drivers: List[int], project_id: int): try: prepared_list = [] - insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver) VALUES' + insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver, project) VALUES' for value_driver_id in value_drivers: - insert_statement += f'(%s, %s, %s),' - prepared_list.append(vcs_row_id) - prepared_list.append(design_group_id) - prepared_list.append(value_driver_id) + insert_statement += f'(%s, %s, %s, %s),' + prepared_list.append(...[vcs_row_id, design_group_id, value_driver_id, project_id]) + logger.debug(f'Insert_statement: {insert_statement}') with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement[:-1], prepared_list) except Exception as e: @@ -70,6 +98,25 @@ def delete_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_i .execute(return_affected_rows=True) +def update_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + value_drivers: List[int], project_id: int): + where_statement = "vcs_row = %s and design_group = %s" + select_statement = MySQLStatementBuilder(db_connection) + value_driver_res = select_statement.select(CVS_FORMULAS_VALUE_DRIVERS_TABLE, CVS_FORMULAS_VALUE_DRIVERS_COLUMNS) \ + .where(where_statement, [vcs_row_id, design_group_id]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + 
delete_value_drivers = [value_driver['id'] for value_driver in value_driver_res if value_driver['id'] not in + value_drivers] + add_value_drivers = [value_driver_id for value_driver_id in value_drivers if value_driver_id not in + [value_driver['id'] for value_driver in value_driver_res]] + + if len(add_value_drivers): + add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, add_value_drivers, project_id) + if len(delete_value_drivers): + delete_value_driver_formulas(db_connection, vcs_row_id, design_group_id, delete_value_drivers) + + def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, external_factors: List[int]): try: @@ -97,8 +144,30 @@ def delete_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro .execute(return_affected_rows=True) -def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formulas: models.FormulaPost) -> bool: +def update_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, + external_factors: List[int]): + where_statement = "vcs_row = %s and design_group = %s" + select_statement = MySQLStatementBuilder(db_connection) + external_factor_res = select_statement.select(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE, + CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS) \ + .where(where_statement, [vcs_row_id, design_group_id]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + delete_external_factors = [external_factor['id'] for external_factor in external_factor_res if + external_factor['id'] not in + external_factors] + add_external_factors = [external_factor_id for external_factor_id in external_factors if + external_factor_id not in + [external_factor['id'] for external_factor in external_factor_res]] + + if len(add_external_factors): + add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, add_external_factors) + if len(delete_external_factors): + delete_external_factor_formulas(db_connection, vcs_row_id, design_group_id, delete_external_factors) + + +def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, + formulas: models.FormulaPost) -> bool: get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project get_vcs_row(db_connection, project_id, vcs_row_id) @@ -108,61 +177,10 @@ def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) count = count['count'] - logger.debug(f'count: {count}') - if count == 0: - create_formulas(db_connection, vcs_row_id, design_group_id, formulas) + create_formulas(db_connection, project_id, vcs_row_id, design_group_id, formulas) elif count == 1: - logger.debug(f'Editing formulas') - columns = CVS_FORMULAS_COLUMNS[3:] - set_statement = ', '.join([col + ' = %s' for col in columns]) - - values = [formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] - - update_statement = MySQLStatementBuilder(db_connection) - _, rows = update_statement \ - .update(table=CVS_FORMULAS_TABLE, set_statement=set_statement, values=values) \ - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ - .execute(return_affected_rows=True) - - where_statement = "vcs_row = %s and design_group = %s and " \ - "value_driver IN (" + ",".join(["%s" for _ in range(len(formulas.value_drivers))]) + ")" - select_statement = 
MySQLStatementBuilder(db_connection) - value_driver_res = select_statement.select(CVS_FORMULAS_VALUE_DRIVERS_TABLE, CVS_FORMULAS_VALUE_DRIVERS_COLUMNS) \ - .where(where_statement, [vcs_row_id, design_group_id] + formulas.value_drivers) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - - delete_value_drivers = [value_driver_id['id'] for value_driver_id in value_driver_res if value_driver_id not in - formulas.value_drivers] - add_value_drivers = [value_driver_id for value_driver_id in formulas.value_drivers if value_driver_id not in - [value_driver['id'] for value_driver in value_driver_res]] - - if add_value_drivers: - add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, add_value_drivers) - if delete_value_drivers: - delete_value_driver_formulas(db_connection, vcs_row_id, design_group_id, delete_value_drivers) - - where_statement = "vcs_row = %s and design_group = %s and " \ - "external_factor IN (" + ",".join(["%s" for _ in range(len(formulas.external_factors))]) + ")" - select_statement = MySQLStatementBuilder(db_connection) - external_factor_res = select_statement.select(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE, - CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS) \ - .where(where_statement, [vcs_row_id, design_group_id] + formulas.external_factors) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - - delete_external_factors = [external_factor_id['id'] for external_factor_id in external_factor_res if - external_factor_id not in - formulas.external_factors] - add_external_factors = [external_factor_id for external_factor_id in formulas.value_drivers if - external_factor_id not in - [external_factor['id'] for external_factor in external_factor_res]] - - if add_external_factors: - add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, add_external_factors) - if delete_external_factors: - delete_external_factor_formulas(db_connection, vcs_row_id, design_group_id, delete_external_factors) - - + edit_formulas(db_connection, vcs_row_id, design_group_id, project_id, formulas) else: raise exceptions.FormulasFailedUpdateException @@ -205,7 +223,6 @@ def delete_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r logger.debug(f'Deleting formulas with vcs_row_id: {vcs_row_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - # get_cvs_project(project_id) get_vcs_row(db_connection, project_id, vcs_row_id) delete_statement = MySQLStatementBuilder(db_connection) diff --git a/sql/V230707_cvs.sql b/sql/V230707_cvs.sql index 28190929..6de2b0a3 100644 --- a/sql/V230707_cvs.sql +++ b/sql/V230707_cvs.sql @@ -11,4 +11,48 @@ ALTER TABLE `seddb`.`cvs_design_mi_formulas` ADD COLUMN `project` INT UNSIGNED NOT NULL FIRST, ADD FOREIGN KEY(`project`) REFERENCES `seddb`.`cvs_projects`(`id`) - ON DELETE CASCADE \ No newline at end of file + ON DELETE CASCADE; + +DROP TABLE IF EXISTS `seddb`.`cvs_formulas_market_inputs`; +DROP TABLE IF EXISTS `seddb`.`cvs_formulas_value_drivers`; + +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_external_factors` +( + `vcs_row` INT UNSIGNED NOT NULL, + `design_group` INT UNSIGNED NOT NULL, + `external_factor` INT UNSIGNED NOT NULL, + PRIMARY KEY(`vcs_row`, `design_group`, `external_factor`), + FOREIGN KEY (`vcs_row`) + REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) + ON DELETE CASCADE, + FOREIGN KEY (`design_group`) + REFERENCES `seddb`.`cvs_design_mi_formulas`(`design_group`) + ON DELETE CASCADE, + FOREIGN KEY(`external_factor`) + REFERENCES 
`seddb`.`cvs_market_inputs`(`id`) + ON DELETE CASCADE +); + + +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` +( + `vcs_row` INT UNSIGNED NOT NULL, + `design_group` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, + `project` INT UNSIGNED NOT NULL, + PRIMARY KEY(`vcs_row`, `design_group`, `value_driver`), + FOREIGN KEY (`vcs_row`) + REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) + ON DELETE CASCADE, + FOREIGN KEY (`design_group`) + REFERENCES `seddb`.`cvs_design_mi_formulas`(`design_group`) + ON DELETE CASCADE, + FOREIGN KEY(`value_driver`) + REFERENCES `seddb`.`cvs_value_drivers`(`id`) + ON DELETE CASCADE, + FOREIGN KEY (`project`, `value_driver`) + REFERENCES `seddb`.`cvs_project_value_drivers`(`project`, `value_driver`) + ON DELETE CASCADE +); + + diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 3715a02c..83667d3c 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -25,21 +25,15 @@ def test_create_formulas(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - # TODO when value drivers and market inputs are connected to the - # formulas, add them here. - value_driver_ids = [] - market_input_ids = [] - res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', headers=std_headers, json={ + "project": project.id, "time": time, "time_unit": time_unit, "cost": cost, "revenue": revenue, - "rate": rate, - "value_driver_ids": value_driver_ids, - "market_input_ids": market_input_ids + "rate": rate }) # Assert @@ -74,6 +68,7 @@ def test_create_formulas_no_optional(client, std_headers, std_user): res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', headers=std_headers, json={ + "project": project.id, "time": time, "time_unit": time_unit, "cost": cost, @@ -91,6 +86,7 @@ def test_create_formulas_no_optional(client, std_headers, std_user): tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) + def test_get_all_formulas(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) @@ -135,7 +131,7 @@ def test_get_all_formulas_invalid_project(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -159,12 +155,11 @@ def test_get_all_formulas_invalid_vcs(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) - def get_all_formulas_invalid_design_group(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) @@ -184,10 +179,11 @@ def get_all_formulas_invalid_design_group(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) + def test_edit_formulas(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) @@ -205,11 +201,6 @@ def test_edit_formulas(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = 
tu.random_rate_choice() - # TODO when value drivers and market inputs are connected to the - # formulas, add them here. - value_driver_ids = [] - market_input_ids = [] - res = client.put( f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, @@ -218,9 +209,7 @@ def test_edit_formulas(client, std_headers, std_user): "time_unit": time_unit, "cost": cost, "revenue": revenue, - "rate": rate, - "value_driver_ids": value_driver_ids, - "market_input_ids": market_input_ids + "rate": rate }) # Assert @@ -438,10 +427,11 @@ def test_delete_formulas_invalid_project(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) + def test_delete_formulas_invalid_vcs_row(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) @@ -461,7 +451,7 @@ def test_delete_formulas_invalid_vcs_row(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -485,7 +475,7 @@ def test_delete_formulas_invalid_design_group(client, std_headers, std_user): # Assert assert res.status_code == 400 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) @@ -538,6 +528,6 @@ def test_get_vcs_dg_pairs_invalid_project(client, std_headers, std_user): # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) From 650d7842bf46c71a0eba3a24d8bd67590267a55b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 28 Jul 2023 11:58:13 +0200 Subject: [PATCH 100/210] handle used value drivers and external factors --- .../apps/cvs/link_design_lifecycle/models.py | 11 ++- .../apps/cvs/link_design_lifecycle/storage.py | 84 ++++++++++++++----- .../test_connect_vcs_design.py | 19 ++++- 3 files changed, 88 insertions(+), 26 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 3c239810..51660864 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -33,8 +33,8 @@ class FormulaGet(BaseModel): cost: str revenue: str rate: Rate - value_drivers: List[ValueDriver] = [] - external_factors: List[MarketInputGet] = [] + used_value_drivers: List[int] = [] + used_external_factors: List[int] = [] class FormulaPost(BaseModel): @@ -51,3 +51,10 @@ class VcsDgPairs(BaseModel): design_group: str design_group_id: int has_formulas: int + + +class ValueDriverFormula(BaseModel): + vcs_row_id: int + design_group_id: int + value_driver_id: int + project_id: int \ No newline at end of file diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index e284f226..9563c25f 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -2,11 +2,10 @@ from fastapi.logger import logger from mysql.connector.pooling import PooledMySQLConnection - +import re from sedbackend.apps.cvs.design.storage import get_design_group from sedbackend.apps.cvs.vcs.storage import get_vcs_row from sedbackend.apps.cvs.vcs.storage import get_vcs -from sedbackend.apps.cvs.vcs 
import exceptions as vcs_exceptions from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions from mysqlsb import FetchType, MySQLStatementBuilder @@ -24,9 +23,10 @@ def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r formulas: models.FormulaPost): logger.debug(f'Creating formulas') - # TODO extract from formula. For example with regex - value_drivers = [] - external_factors = [] + value_driver_ids, external_factor_ids = find_vd_and_ef([formulas.time, formulas.cost, formulas.revenue]) + + logger.debug(f'Value driver ids: {value_driver_ids}') + logger.debug(f'External factor ids: {external_factor_ids}') values = [project_id, vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] @@ -41,10 +41,27 @@ def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r logger.error(f'Error while inserting formulas: {e}') raise exceptions.FormulasFailedUpdateException - if value_drivers: - add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_drivers, project_id) - if external_factors: - add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factors) + if value_driver_ids: + add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id) + if external_factor_ids: + add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factor_ids) + + +def find_vd_and_ef(texts: List[str]) -> (List[str], List[int]): + value_driver_ids = [] + external_factor_ids = [] + + pattern = r'\{(?Pvd|ef):(?P\d+),"([^"]+)"\}' + + for text in texts: + matches = re.findall(pattern, text) + for tag, id_number, _ in matches: + if tag == "vd": + value_driver_ids.append(int(id_number)) + elif tag == "ef": + external_factor_ids.append(int(id_number)) + + return value_driver_ids, external_factor_ids def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, project_id: int, @@ -79,10 +96,11 @@ def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver, project) VALUES' for value_driver_id in value_drivers: insert_statement += f'(%s, %s, %s, %s),' - prepared_list.append(...[vcs_row_id, design_group_id, value_driver_id, project_id]) - logger.debug(f'Insert_statement: {insert_statement}') + prepared_list += [vcs_row_id, design_group_id, value_driver_id, project_id] + insert_statement = insert_statement[:-1] + insert_statement += ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing with db_connection.cursor(prepared=True) as cursor: - cursor.execute(insert_statement[:-1], prepared_list) + cursor.execute(insert_statement, prepared_list) except Exception as e: logger.error(f'Error while inserting value drivers: {e}') raise exceptions.FormulasFailedUpdateException @@ -121,14 +139,14 @@ def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_i external_factors: List[int]): try: prepared_list = [] - insert_statement = f'INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, market_input) VALUES' + insert_statement = f'INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, external_factor) VALUES' for external_factor_id in external_factors: insert_statement += f'(%s, %s, %s),' - prepared_list.append(vcs_row_id) - prepared_list.append(design_group_id) - 
prepared_list.append(external_factor_id) + prepared_list += [vcs_row_id, design_group_id, external_factor_id] + insert_statement = insert_statement[:-1] + insert_statement += ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing with db_connection.cursor(prepared=True) as cursor: - cursor.execute(insert_statement[:-1], prepared_list) + cursor.execute(insert_statement, prepared_list) except Exception as e: logger.error(f'Error while inserting external factors: {e}') raise exceptions.FormulasFailedUpdateException @@ -201,9 +219,33 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) if res is None: - raise vcs_exceptions.VCSNotFoundException + raise exceptions.FormulasNotFoundException + + if len(res): + where_statement = "(vcs_row, design_group) IN (" + ",".join(["(%s, %s)" for _ in range(len(res))]) + ")" + prepared_list = [] + for r in res: + prepared_list += [r['vcs_row'], r['design_group']] + + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(f"SELECT * FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} WHERE {where_statement}", + prepared_list) + all_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(f"SELECT * FROM {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} WHERE {where_statement}", + prepared_list) + all_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + + formulas = [] + for r in res: + r['value_drivers'] = [vd['value_driver'] for vd in all_vds if vd['vcs_row'] == r['vcs_row'] and + vd['design_group'] == r['design_group']] + r['external_factors'] = [ef['external_factor'] for ef in all_efs if ef['vcs_row'] == r['vcs_row'] and + ef['design_group'] == r['design_group']] + formulas.append(populate_formula(r)) - return [populate_formula(r) for r in res] + return formulas def populate_formula(db_result) -> models.FormulaGet: @@ -214,7 +256,9 @@ def populate_formula(db_result) -> models.FormulaGet: time_unit=db_result['time_unit'], cost=db_result['cost'], revenue=db_result['revenue'], - rate=db_result['rate'] + rate=db_result['rate'], + used_value_drivers=db_result['value_drivers'] if db_result['value_drivers'] is not None else [], + used_external_factors=db_result['external_factors'] if db_result['external_factors'] is not None else [] ) diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 83667d3c..67cd9eca 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -17,14 +17,20 @@ def test_create_formulas(client, std_headers, std_user): raise Exception row_id = vcs_rows[0].id design_group = tu.seed_random_design_group(project.id) + value_driver = tu.seed_random_value_driver(current_user.id, project.id) + external_factor = tu.seed_random_market_input(project.id) # Act - time = testutils.random_str(10, 200) - time_unit = tu.random_time_unit() - cost = testutils.random_str(10, 200) - revenue = testutils.random_str(10, 200) + + time = '2+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{vd:' + str( + value_driver.id) + ',"' + str(value_driver.name) + '"}' + cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' + revenue = '20+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{ef:' + str( + external_factor.id) + ',"' + 
str(external_factor.name) + '"}' + rate = tu.random_rate_choice() + time_unit = tu.random_time_unit() res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', headers=std_headers, json={ @@ -36,8 +42,13 @@ def test_create_formulas(client, std_headers, std_user): "rate": rate }) + res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers) + # Assert assert res.status_code == 200 + assert res_get.json()[0]['used_value_drivers'][0] == value_driver.id + assert res_get.json()[0]['used_external_factors'][0] == external_factor.id # Cleanup tu.delete_design_group(project.id, design_group.id) From 3ea61055fe8ee02543c85a8dc979ffcc9a6d1df8 Mon Sep 17 00:00:00 2001 From: jyborn Date: Fri, 28 Jul 2023 17:16:20 +0200 Subject: [PATCH 101/210] external factor refactor started --- sedbackend/apps/core/db.py | 4 +- .../apps/cvs/market_input/exceptions.py | 8 +- .../apps/cvs/market_input/implementation.py | 52 +++++------ sedbackend/apps/cvs/market_input/models.py | 15 ++-- sedbackend/apps/cvs/market_input/router.py | 19 ++-- sedbackend/apps/cvs/market_input/storage.py | 90 ++++++++++--------- .../apps/cvs/simulation/implementation.py | 4 +- tests/apps/cvs/testutils.py | 4 +- 8 files changed, 101 insertions(+), 95 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/market_input/exceptions.py b/sedbackend/apps/cvs/market_input/exceptions.py index 581e497c..c78410c7 100644 --- a/sedbackend/apps/cvs/market_input/exceptions.py +++ b/sedbackend/apps/cvs/market_input/exceptions.py @@ -1,8 +1,8 @@ -class MarketInputNotFoundException(Exception): +class ExternalFactorNotFoundException(Exception): pass -class MarketInputAlreadyExistException(Exception): +class ExternalFactorAlreadyExistException(Exception): pass @@ -11,9 +11,9 @@ def __init__(self, time_unit: str = None) -> None: self.time_unit = time_unit -class MarketInputFailedDeletionException(Exception): +class ExternalFactorFailedDeletionException(Exception): pass -class MarketInputFormulasNotFoundException(Exception): +class ExternalFactorFormulasNotFoundException(Exception): pass diff --git a/sedbackend/apps/cvs/market_input/implementation.py b/sedbackend/apps/cvs/market_input/implementation.py index ae73d3fe..b8e5a12c 100644 --- a/sedbackend/apps/cvs/market_input/implementation.py +++ b/sedbackend/apps/cvs/market_input/implementation.py @@ -15,10 +15,10 @@ ######################################################################################################################## -def get_all_market_inputs(project_id: int) -> List[models.MarketInputGet]: +def get_all_market_inputs(project_id: int) -> List[models.ExternalFactor]: try: with get_connection() as con: - db_result = storage.get_all_market_input(con, project_id) + db_result = storage.get_all_external_factors(con, project_id) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -31,17 +31,17 @@ def get_all_market_inputs(project_id: int) -> List[models.MarketInputGet]: status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find project with 
id={project_id}.', ) - except exceptions.MarketInputNotFoundException: + except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find market input', ) -def get_market_input(project_id: int, market_input_id: int) -> models.MarketInputGet: +def get_market_input(project_id: int, market_input_id: int) -> models.ExternalFactor: try: with get_connection() as con: - db_result = storage.get_market_input(con, project_id, market_input_id) + db_result = storage.get_external_factor(con, project_id, market_input_id) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -54,17 +54,17 @@ def get_market_input(project_id: int, market_input_id: int) -> models.MarketInpu status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find project with id={project_id}.', ) - except exceptions.MarketInputNotFoundException: + except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find market input', ) -def create_market_input(project_id: int, market_input: models.MarketInputPost) -> models.MarketInputGet: +def create_market_input(project_id: int, market_input: models.ExternalFactorPost) -> models.ExternalFactor: try: with get_connection() as con: - db_result = storage.create_market_input(con, project_id, market_input) + db_result = storage.create_external_factor(con, project_id, market_input) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -74,10 +74,10 @@ def create_market_input(project_id: int, market_input: models.MarketInputPost) - ) -def update_market_input(project_id: int, market_input_id: int, market_input: models.MarketInputPost) -> bool: +def update_external_factor(project_id: int, external_factor: models.ExternalFactor) -> bool: try: with get_connection() as con: - db_result = storage.update_market_input(con, project_id, market_input_id, market_input) + db_result = storage.update_external_factor(con, project_id, external_factor) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -85,10 +85,10 @@ def update_market_input(project_id: int, market_input_id: int, market_input: mod status_code=status.HTTP_403_FORBIDDEN, detail='Unauthorized user.', ) - except exceptions.MarketInputNotFoundException: + except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find market input with id={market_input_id}', + detail=f'Could not find external factor with id={external_factor.id}', ) except proj_exceptions.CVSProjectNotFoundException: raise HTTPException( @@ -98,17 +98,17 @@ def update_market_input(project_id: int, market_input_id: int, market_input: mod except proj_exceptions.CVSProjectNoMatchException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Market input with id={market_input_id} is not a part from project with id={project_id}.', + detail=f'External factor with id={external_factor.id} is not a part from project with id={project_id}.', ) def delete_market_input(project_id: int, mi_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_market_input(con, project_id, mi_id) + res = storage.delete_external_factor(con, project_id, mi_id) con.commit() return res - except exceptions.MarketInputFailedDeletionException: + except exceptions.ExternalFactorFailedDeletionException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not delete 
market input with id: {mi_id}' @@ -125,13 +125,13 @@ def delete_market_input(project_id: int, mi_id: int) -> bool: ) -def get_all_formula_market_inputs(formulas_id: int) -> List[models.MarketInputGet]: +def get_all_formula_market_inputs(formulas_id: int) -> List[models.ExternalFactor]: try: with get_connection() as con: - res = storage.get_all_formula_market_inputs(con, formulas_id) + res = storage.get_all_formula_external_factors(con, formulas_id) con.commit() return res - except exceptions.MarketInputAlreadyExistException: + except exceptions.ExternalFactorAlreadyExistException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find market inputs for formula with vcs_row id: {formulas_id}' @@ -143,13 +143,13 @@ def get_all_formula_market_inputs(formulas_id: int) -> List[models.MarketInputGe ######################################################################################################################## -def update_market_input_value(project_id: int, mi_value: models.MarketInputValue) -> bool: +def update_market_input_value(project_id: int, mi_value: models.ExternalFactorValue) -> bool: try: with get_connection() as con: res = storage.update_market_input_value(con, project_id, mi_value) con.commit() return res - except exceptions.MarketInputNotFoundException: + except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find market input with id={mi_value.market_input_id}.', @@ -171,13 +171,13 @@ def update_market_input_value(project_id: int, mi_value: models.MarketInputValue ) -def update_market_input_values(project_id: int, mi_values: List[models.MarketInputValue]) -> bool: +def update_market_input_values(project_id: int, mi_values: List[models.ExternalFactorValue]) -> bool: try: with get_connection() as con: - res = storage.update_market_input_values(con, project_id, mi_values) + res = storage.update_external_factor_values(con, project_id, mi_values) con.commit() return res - except exceptions.MarketInputNotFoundException: + except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find market input', @@ -194,10 +194,10 @@ def update_market_input_values(project_id: int, mi_values: List[models.MarketInp ) -def get_all_market_values(project_id: int) -> List[models.MarketInputValue]: +def get_all_market_values(project_id: int) -> List[models.ExternalFactorValue]: try: with get_connection() as con: - res = storage.get_all_market_input_values(con, project_id) + res = storage.get_all_external_factor_values(con, project_id) con.commit() return res except proj_exceptions.CVSProjectNotFoundException: @@ -213,7 +213,7 @@ def delete_market_value(project_id: int, vcs_id: int, mi_id: int) -> bool: res = storage.delete_market_value(con, project_id, vcs_id, mi_id) con.commit() return res - except exceptions.MarketInputFailedDeletionException: + except exceptions.ExternalFactorFailedDeletionException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not delete market input value with market input id: {mi_id} and vcs id: {vcs_id}' diff --git a/sedbackend/apps/cvs/market_input/models.py b/sedbackend/apps/cvs/market_input/models.py index 866edb1c..49543166 100644 --- a/sedbackend/apps/cvs/market_input/models.py +++ b/sedbackend/apps/cvs/market_input/models.py @@ -1,13 +1,15 @@ +from typing import Optional, List + from pydantic import BaseModel -class MarketInputGet(BaseModel): +class 
ExternalFactor(BaseModel): id: int name: str unit: str -class MarketInputPost(BaseModel): +class ExternalFactorPost(BaseModel): name: str unit: str @@ -16,8 +18,11 @@ class MarketInputPost(BaseModel): # Market Values ###################################################################################################################### - -class MarketInputValue(BaseModel): +class VcsEFValuePair(BaseModel): vcs_id: int - market_input_id: int value: float + + +class ExternalFactorValue(BaseModel): + external_factor: ExternalFactor + external_factor_value: Optional[List[VcsEFValuePair]] diff --git a/sedbackend/apps/cvs/market_input/router.py b/sedbackend/apps/cvs/market_input/router.py index 05d07444..840a40dd 100644 --- a/sedbackend/apps/cvs/market_input/router.py +++ b/sedbackend/apps/cvs/market_input/router.py @@ -17,20 +17,20 @@ @router.get( '/project/{native_project_id}/market-input/all', summary='Get all market inputs', - response_model=List[models.MarketInputGet], + response_model=List[models.ExternalFactor], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_market_input(native_project_id: int) -> List[models.MarketInputGet]: +async def get_all_market_input(native_project_id: int) -> List[models.ExternalFactor]: return implementation.get_all_market_inputs(native_project_id) @router.post( '/project/{native_project_id}/market-input', summary='Creates a market input', - response_model=models.MarketInputGet, + response_model=models.ExternalFactor, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def create_market_input(native_project_id: int, market_input: models.MarketInputPost) -> models.MarketInputGet: +async def create_market_input(native_project_id: int, market_input: models.ExternalFactorPost) -> models.ExternalFactor: return implementation.create_market_input(native_project_id, market_input) @@ -40,9 +40,8 @@ async def create_market_input(native_project_id: int, market_input: models.Marke response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def update_market_input(native_project_id: int, market_input_id: int, - market_input: models.MarketInputPost) -> bool: - return implementation.update_market_input(native_project_id, market_input_id, market_input) +async def update_market_input(native_project_id: int, external_factor: models.ExternalFactor) -> bool: + return implementation.update_external_factor(native_project_id, external_factor) @router.delete( @@ -63,14 +62,14 @@ async def delete_market_input(native_project_id: int, market_input_id: int) -> b summary='Create or update values for market inputs', response_model=bool ) -async def update_market_values(native_project_id: int, mi_values: List[models.MarketInputValue]) -> bool: +async def update_market_values(native_project_id: int, mi_values: List[models.ExternalFactorValue]) -> bool: return implementation.update_market_input_values(native_project_id, mi_values) @router.get( '/project/{native_project_id}/market-input-values', summary='Fetch all market input values for a project', - response_model=List[models.MarketInputValue] + response_model=List[models.ExternalFactorValue] ) -async def get_all_market_values(native_project_id: int) -> List[models.MarketInputValue]: +async def get_all_market_values(native_project_id: int) -> List[models.ExternalFactorValue]: return implementation.get_all_market_values(native_project_id) diff --git 
a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 6aad8130..8e10d491 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -19,34 +19,34 @@ # Market Input ######################################################################################################################## -def populate_market_input(db_result) -> models.MarketInputGet: - return models.MarketInputGet( +def populate_external_factor(db_result) -> models.ExternalFactor: + return models.ExternalFactor( id=db_result['id'], name=db_result['name'], unit=db_result['unit'] ) -def get_market_input(db_connection: PooledMySQLConnection, project_id: int, - market_input_id: int) -> models.MarketInputGet: - logger.debug(f'Fetching market input with id={market_input_id}.') +def get_external_factor(db_connection: PooledMySQLConnection, project_id: int, + external_factor_id: int) -> models.ExternalFactor: + logger.debug(f'Fetching external factor with id={external_factor_id}.') select_statement = MySQLStatementBuilder(db_connection) db_result = select_statement \ .select(CVS_MARKET_INPUT_TABLE, CVS_MARKET_INPUT_COLUMN) \ - .where('id = %s', [market_input_id]) \ + .where('id = %s', [external_factor_id]) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) if db_result is None: - raise exceptions.MarketInputNotFoundException + raise exceptions.ExternalFactorNotFoundException if db_result['project'] != project_id: raise project_exceptions.CVSProjectNoMatchException - return populate_market_input(db_result) + return populate_external_factor(db_result) -def get_all_market_input(db_connection: PooledMySQLConnection, project_id: int) -> List[models.MarketInputGet]: - logger.debug(f'Fetching all market inputs for project with id={project_id}.') +def get_all_external_factors(db_connection: PooledMySQLConnection, project_id: int) -> List[models.ExternalFactor]: + logger.debug(f'Fetching all external factors for project with id={project_id}.') select_statement = MySQLStatementBuilder(db_connection) results = select_statement \ @@ -55,60 +55,60 @@ def get_all_market_input(db_connection: PooledMySQLConnection, project_id: int) .order_by(['id'], Sort.ASCENDING) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return [populate_market_input(db_result) for db_result in results] + return [populate_external_factor(db_result) for db_result in results] -def create_market_input(db_connection: PooledMySQLConnection, project_id: int, - market_input: models.MarketInputPost) -> models.MarketInputGet: - logger.debug(f'Create market input') +def create_external_factor(db_connection: PooledMySQLConnection, project_id: int, + external_factor: models.ExternalFactorPost) -> models.ExternalFactor: + logger.debug(f'Create external factor') insert_statement = MySQLStatementBuilder(db_connection) insert_statement \ .insert(table=CVS_MARKET_INPUT_TABLE, columns=CVS_MARKET_INPUT_COLUMN[1:]) \ - .set_values([project_id, market_input.name, market_input.unit]) \ + .set_values([project_id, external_factor.name, external_factor.unit]) \ .execute(fetch_type=FetchType.FETCH_NONE) - return get_market_input(db_connection, project_id, insert_statement.last_insert_id) + return get_external_factor(db_connection, project_id, insert_statement.last_insert_id) -def update_market_input(db_connection: PooledMySQLConnection, project_id: int, market_input_id: int, - market_input: models.MarketInputPost) -> bool: - logger.debug(f'Update market input with vcs row 
id={market_input_id}') +def update_external_factor(db_connection: PooledMySQLConnection, project_id: int, + external_factor: models.ExternalFactor) -> bool: + logger.debug(f'Update external factor with id={external_factor.id}') - get_market_input(db_connection, project_id, market_input_id) # check if market input exists and belongs to project + get_external_factor(db_connection, project_id, external_factor.id) # check if external factor exists and belongs to project update_statement = MySQLStatementBuilder(db_connection) update_statement.update( table=CVS_MARKET_INPUT_TABLE, set_statement='name = %s, unit = %s', - values=[market_input.name, market_input.unit], + values=[external_factor.name, external_factor.unit], ) - update_statement.where('id = %s', [market_input_id]) + update_statement.where('id = %s', [external_factor.id]) _, rows = update_statement.execute(return_affected_rows=True) return True -def delete_market_input(db_connection: PooledMySQLConnection, project_id: int, mi_id: int) -> bool: - logger.debug(f'Deleting market input with id: {mi_id}') +def delete_external_factor(db_connection: PooledMySQLConnection, project_id: int, external_factor_id: int) -> bool: + logger.debug(f'Deleting external factor with id: {external_factor_id}') - get_market_input(db_connection, project_id, mi_id) # check if market input exists and belongs to project + get_external_factor(db_connection, project_id, external_factor_id) # check if external factor exists and belongs to project delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ .delete(CVS_MARKET_INPUT_TABLE) \ - .where('id = %s', [mi_id]) \ + .where('id = %s', [external_factor_id]) \ .execute(return_affected_rows=True) if rows != 1: - raise exceptions.MarketInputFailedDeletionException + raise exceptions.ExternalFactorFailedDeletionException return True -def get_all_formula_market_inputs(db_connection: PooledMySQLConnection, - formulas_id: int) -> List[models.MarketInputValue]: - logger.debug(f'Fetching all market inputs for formulas with vcs_row id: {formulas_id}') +def get_all_formula_external_factors(db_connection: PooledMySQLConnection, + formulas_id: int) -> List[models.ExternalFactorValue]: + logger.debug(f'Fetching all external factors for formulas with vcs_row id: {formulas_id}') select_statement = MySQLStatementBuilder(db_connection) res = select_statement \ @@ -118,17 +118,17 @@ def get_all_formula_market_inputs(db_connection: PooledMySQLConnection, .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) if res is None: - raise exceptions.MarketInputFormulasNotFoundException + raise exceptions.ExternalFactorFormulasNotFoundException - return [populate_market_input(r) for r in res] + return [populate_external_factor(r) for r in res] ######################################################################################################################## -# Market Values +# External Factor values ######################################################################################################################## -def populate_market_input_values(db_result) -> models.MarketInputValue: - return models.MarketInputValue( +def populate_external_factor_values(db_result) -> models.ExternalFactorValue: + return models.ExternalFactorValue( vcs_id=db_result['vcs'], market_input_id=db_result['market_input'], value=db_result['value'] @@ -136,10 +136,10 @@ def populate_market_input_values(db_result) -> models.MarketInputValue: def update_market_input_value(db_connection: PooledMySQLConnection, project_id: int, - 
mi_value: models.MarketInputValue) -> bool: + external_factor_value: models.ExternalFactorValue) -> bool: logger.debug(f'Update market input value') vcs_storage.check_vcs(db_connection, project_id, mi_value.vcs_id) # check if vcs exists - get_market_input(db_connection, project_id, mi_value.market_input_id) # check if market input exists + get_external_factor(db_connection, project_id, mi_value.market_input_id) # check if market input exists count_statement = MySQLStatementBuilder(db_connection) count_result = count_statement \ @@ -164,11 +164,12 @@ def update_market_input_value(db_connection: PooledMySQLConnection, project_id: return True -def update_market_input_values(db_connection: PooledMySQLConnection, project_id: int, - mi_values: List[models.MarketInputValue]) -> bool: +def update_external_factor_values(db_connection: PooledMySQLConnection, project_id: int, + mi_values: List[models.ExternalFactorValue]) -> bool: logger.debug(f'Update market input values') - curr_mi_values = get_all_market_input_values(db_connection, project_id) + curr_mi_values = get_all_external_factor_values(db_connection, project_id) + # delete if no longer exists for value in curr_mi_values: if [value.vcs_id, value.market_input_id] not in [[v.vcs_id, v.market_input_id] for v in mi_values]: @@ -180,7 +181,8 @@ def update_market_input_values(db_connection: PooledMySQLConnection, project_id: return True -def get_all_market_input_values(db_connection: PooledMySQLConnection, project_id: int) -> List[models.MarketInputValue]: +def get_all_external_factor_values(db_connection: PooledMySQLConnection, + project_id: int) -> List[models.ExternalFactorValue]: logger.debug(f'Fetching all market values for project with id: {project_id}') columns = CVS_MARKET_VALUES_COLUMN @@ -192,13 +194,13 @@ def get_all_market_input_values(db_connection: PooledMySQLConnection, project_id .where('project = %s', [project_id]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return [populate_market_input_values(r) for r in res] + return [populate_external_factor_values(r) for r in res] def delete_market_value(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, mi_id: int) -> bool: logger.debug(f'Deleting market input value with vcs id: {vcs_id} and market input id: {mi_id}') - get_market_input(db_connection, project_id, mi_id) # check if market input exists and belongs to project + get_external_factor(db_connection, project_id, mi_id) # check if market input exists and belongs to project delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ @@ -207,6 +209,6 @@ def delete_market_value(db_connection: PooledMySQLConnection, project_id: int, v .execute(return_affected_rows=True) if rows != 1: - raise exceptions.MarketInputFailedDeletionException + raise exceptions.ExternalFactorFailedDeletionException return True diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 2b31b346..12a12fd9 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -38,7 +38,7 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find project.', ) - except market_input_exceptions.MarketInputNotFoundException: + except market_input_exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find market input', @@ -127,7 +127,7 @@ def 
run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.Fi status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find project.', ) - except market_input_exceptions.MarketInputNotFoundException: + except market_input_exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not find market input', diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index a7743bc7..1e79ff0e 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -597,7 +597,7 @@ def seed_random_sim_settings(user_id: int, project_id: int) -> sim_model.SimSett def seed_random_market_input(project_id: int): name = tu.random_str(5, 50) unit = tu.random_str(5, 50) - market_input_post = market_input_model.MarketInputPost( + market_input_post = market_input_model.ExternalFactorPost( name=name, unit=unit ) @@ -605,7 +605,7 @@ def seed_random_market_input(project_id: int): def seed_random_market_input_values(project_id: int, vcs_id: int, market_input_id: int): - market_input_impl.update_market_input_values(project_id, [market_input_model.MarketInputValue( + market_input_impl.update_market_input_values(project_id, [market_input_model.ExternalFactorValue( vcs_id=vcs_id, market_input_id=market_input_id, value=random.random() * 100)]) From 94666a4212c10209c2c5f42e5f5b8cfb0435b0b1 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 08:22:40 +0200 Subject: [PATCH 102/210] handle vd and ef on edit formulas --- sedbackend/apps/core/db.py | 6 +++-- .../apps/cvs/link_design_lifecycle/storage.py | 11 +++------ .../test_connect_vcs_design.py | 23 +++++++++++++------ 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 9563c25f..32be999a 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -25,9 +25,6 @@ def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r value_driver_ids, external_factor_ids = find_vd_and_ef([formulas.time, formulas.cost, formulas.revenue]) - logger.debug(f'Value driver ids: {value_driver_ids}') - logger.debug(f'External factor ids: {external_factor_ids}') - values = [project_id, vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] @@ -68,9 +65,7 @@ def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_ formulas: models.FormulaPost): logger.debug(f'Editing formulas') - # TODO extract from formula. 
For example with regex - value_drivers = [] - external_factors = [] + value_driver_ids, external_factor_ids = find_vd_and_ef([formulas.time, formulas.cost, formulas.revenue]) columns = CVS_FORMULAS_COLUMNS[3:] set_statement = ', '.join([col + ' = %s' for col in columns]) @@ -84,9 +79,9 @@ def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_ .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ .execute(return_affected_rows=True) - update_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_drivers, project_id) + update_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id) - update_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factors) + update_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factor_ids) def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 67cd9eca..60973fa7 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -203,13 +203,20 @@ def test_edit_formulas(client, std_headers, std_user): vcs = tu.seed_random_vcs(project.id) design_group = tu.seed_random_design_group(project.id) - # Act formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) - time = testutils.random_str(10, 200) + value_driver = tu.seed_random_value_driver(current_user.id, project.id) + external_factor = tu.seed_random_market_input(project.id) + + # Act + + time = '2+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{vd:' + str( + value_driver.id) + ',"' + str(value_driver.name) + '"}' + cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' + revenue = '20+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{ef:' + str( + external_factor.id) + ',"' + str(external_factor.name) + '"}' + time_unit = tu.random_time_unit() - cost = testutils.random_str(10, 200) - revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() res = client.put( @@ -223,13 +230,15 @@ def test_edit_formulas(client, std_headers, std_user): "rate": rate }) + res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers) + # Assert assert res.status_code == 200 + assert res_get.json()[0]['used_value_drivers'][0] == value_driver.id + assert res_get.json()[0]['used_external_factors'][0] == external_factor.id # Cleanup - tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) From c4969a9f116e4a521818307716226e83cc0a5900 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 08:57:10 +0200 Subject: [PATCH 103/210] updated formula parser --- sedbackend/apps/cvs/simulation/storage.py | 36 +++++++++++++-------- tests/apps/cvs/simulation/test_sim_utils.py | 12 +++---- 2 files changed, 27 insertions(+), 21 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 17f9fc37..42ea9638 100644 --- 
a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -1,3 +1,4 @@ +import re import sys import tempfile from math import isnan @@ -440,25 +441,32 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in def parse_formula(formula: str, vd_values, mi_values) -> str: - new_formula = formula - vd_names = expr.get_prefix_variables('VD', new_formula) - mi_names = expr.get_prefix_variables('EF', new_formula) - - for vd in vd_values: - for name in vd_names: - unit = vd["unit"] if vd["unit"] is not None and vd["unit"] != "" else "N/A" - if name == f'{vd["name"]} [{unit}]': - new_formula = expr.replace_prefix_variables("VD", name, str(vd["value"]), new_formula) - for mi in mi_values: - for name in mi_names: - unit = mi["unit"] if mi["unit"] is not None and mi["unit"] != "" else "N/A" - if name == f'{mi["name"]} [{unit}]': - new_formula = expr.replace_prefix_variables("EF", name, str(mi["value"]), new_formula) + new_formula = replace_vd_ef(formula, vd_values, mi_values) new_formula = expr.remove_strings_replace_zero(new_formula) return new_formula +def replace_vd_ef(formula, vd_values, ef_values): + pattern = r'\{(?Pvd|ef):(?P\d+),"([^"]+)"\}' + + def replace(match): + tag, id_number, _ = match.groups() + id_number = int(id_number) + if tag == "vd": + for vd in vd_values: + if vd["id"] == id_number: + return str(vd["value"]) + elif tag == "ef": + for ef in ef_values: + if ef["id"] == id_number: + return str(ef["value"]) + return match.group() + + replaced_text = re.sub(pattern, replace, formula) + return replaced_text + + def check_entity_rate(db_results, flow_process_name: str): rate_check = True # Set the flow_process_index to be highest possible. diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index ca42a260..acc3941a 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -21,19 +21,17 @@ def test_parse_formula_simple(): def test_parse_formula_values(): # Setup - vd_values = [{"id": 1, "name": "Speed", "unit": "km/h", "value": 3}, - {"id": 2, "name": "Weight", "unit": "kg", "value": 4}] - mi_values = [{"id": 1, "name": "Test", "unit": "T", "value": 5}, - {"id": 2, "name": "Test 2", "unit": "T2", "value": 6}] - formula = f'2*"VD(Speed [km/h])"+"EF(Test 2 [T2])"' + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"id": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert - assert new_formula == "2*3+6" - assert nsp.eval(new_formula) == 12 + assert new_formula == "2+10/5" + assert nsp.eval(new_formula) == 4 def test_parse_formula_vd_no_exist(): From e39798ca167250f7c76c103ba4cd8b533a3b09e8 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:24:21 +0200 Subject: [PATCH 104/210] if vd or ef not found replace with zero --- sedbackend/apps/cvs/simulation/storage.py | 37 +++------------------ tests/apps/cvs/simulation/test_sim_utils.py | 27 +++------------ 2 files changed, 9 insertions(+), 55 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 42ea9638..98996059 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -16,7 +16,6 @@ import os from typing import List 
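# Illustration (standalone sketch, not repository code): the {vd:...}/{ef:...} tag
# substitution that parse_formula / replace_vd_ef perform in the two commits around
# this hunk. The named groups "tag" and "id" are assumptions -- the (?P<...>) group
# names are not readable in the pattern shown above -- and the dict lookup below is a
# simplification of the list scans used in the patch.
import re

TAG_PATTERN = re.compile(r'\{(?P<tag>vd|ef):(?P<id>\d+),"[^"]+"\}')

def substitute_tags(formula: str, vd_values: dict, ef_values: dict) -> str:
    def replace(match):
        lookup = vd_values if match.group("tag") == "vd" else ef_values
        # Ids that cannot be resolved fall back to 0, matching the
        # "if vd or ef not found replace with zero" behaviour of this commit.
        return str(lookup.get(int(match.group("id")), 0))
    return TAG_PATTERN.sub(replace, formula)

# Values are made up; the formula string mirrors the one used in the tests below.
assert substitute_tags('2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}',
                       {47241: 10}, {114: 5}) == '2+10/5'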
-from sedbackend.apps.cvs.design.models import ValueDriverDesignValue from sedbackend.apps.cvs.design.storage import get_all_designs from mysqlsb import FetchType, MySQLStatementBuilder @@ -299,28 +298,9 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d return res -def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, - design: int) -> List[ValueDriverDesignValue]: - select_statement = MySQLStatementBuilder(db_connection) - res = select_statement \ - .select('cvs_vd_design_values', ['cvs_value_drivers.id', 'design', 'name', 'value', 'unit']) \ - .inner_join('cvs_value_drivers', 'cvs_vd_design_values.value_driver = cvs_value_drivers.id') \ - .inner_join('cvs_vcs_need_drivers', 'cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id') \ - .inner_join('cvs_stakeholder_needs', 'cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need') \ - .where('vcs_row = %s and design = %s', [vcs_row_id, design]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - - logger.debug(f'Fetched {len(res)} value driver design values') - return res - - def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): try: - query = f'SELECT cvs_value_drivers.id, design, name, value, unit, vcs_row \ - FROM cvs_vd_design_values \ - INNER JOIN cvs_value_drivers ON cvs_vd_design_values.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ + query = f'SELECT * FROM cvs_vd_design_values \ WHERE design IN ({",".join(["%s" for _ in range(len(designs))])})' with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, designs) @@ -360,7 +340,6 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) count = count['count'] - logger.debug(count) if sim_settings.flow_process is not None: flow_process_exists = False @@ -426,9 +405,7 @@ def get_market_values(db_connection: PooledMySQLConnection, vcs: int): def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[int]): try: - query = f'SELECT id, name, value, unit, vcs \ - FROM cvs_market_input_values \ - INNER JOIN cvs_market_inputs ON cvs_market_input_values.market_input = cvs_market_inputs.id \ + query = f'SELECT * FROM cvs_market_input_values \ WHERE cvs_market_input_values.vcs IN ({",".join(["%s" for _ in range(len(vcs_ids))])})' with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, vcs_ids) @@ -440,14 +417,7 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in return res -def parse_formula(formula: str, vd_values, mi_values) -> str: - new_formula = replace_vd_ef(formula, vd_values, mi_values) - new_formula = expr.remove_strings_replace_zero(new_formula) - - return new_formula - - -def replace_vd_ef(formula, vd_values, ef_values): +def parse_formula(formula: str, vd_values, ef_values): pattern = r'\{(?Pvd|ef):(?P\d+),"([^"]+)"\}' def replace(match): @@ -464,6 +434,7 @@ def replace(match): return match.group() replaced_text = re.sub(pattern, replace, formula) + replaced_text = re.sub(pattern, '0', replaced_text) # If there are any tags left, replace them with 0 return replaced_text diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index acc3941a..eafd2a47 100644 --- 
a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -36,34 +36,17 @@ def test_parse_formula_values(): def test_parse_formula_vd_no_exist(): # Setup - vd_values = [{"id": 1, "name": "Speed", "unit": "km/h", "value": 3}, - {"id": 2, "name": "Weight", "unit": "kg", "value": 4}] - mi_values = [{"id": 1, "name": "Test", "unit": "T", "value": 5}, - {"id": 2, "name": "Test 2", "unit": "T2", "value": 6}] - formula = f'2*"VD(DontExist [km/h])"+"EF(Dont Exist [T2])"' - nsp = NumericStringParser() - - # Act - new_formula = parse_formula(formula, vd_values, mi_values) - - # Assert - assert new_formula == "2*0+0" - assert nsp.eval(new_formula) == 0 - - -def test_parse_formula_unit_no_exist(): - # Setup - vd_values = [{"id": 1, "name": "Speed", "unit": None, "value": 3}] - mi_values = [{"id": 2, "name": "Test 2", "unit": None, "value": 6}] - formula = f'2*"VD(Speed [N/A])"+"EF(Test 2 [N/A])"' + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"id": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + formula = '2+{vd:1,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert - assert new_formula == "2*3+6" - assert nsp.eval(new_formula) == 12 + assert new_formula == "2+0/5" + assert nsp.eval(new_formula) == 2 def test_get_prefix_variables(): From c437a3a7e6d608f4e02775daca2abbc1cf8d1e79 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:24:45 +0200 Subject: [PATCH 105/210] update db --- sedbackend/apps/core/db.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 5b624d709176f28e6c8ad76b412e4ad36c81e8b8 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:35:09 +0200 Subject: [PATCH 106/210] fixed sql syntax error --- sql/V230721_cvs.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index c606a00a..1bd30081 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -11,4 +11,4 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` REFERENCES `seddb`.`cvs_value_drivers`(`id`) ON DELETE CASCADE ); -CREATE UNIQUE INDEX IF NOT EXISTS `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); +CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); \ No newline at end of file From f4f3877b3b785af034bee0562ab6e71bfae971c3 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:42:04 +0200 Subject: [PATCH 107/210] moved sql changes to one file --- sql/V230707_cvs.sql | 58 ------------------------------------- sql/V230721_cvs.sql | 69 +++++++++++++++++++++++++++++++++++++++++---- 2 files changed, 64 insertions(+), 63 deletions(-) delete mode 100644 sql/V230707_cvs.sql diff --git a/sql/V230707_cvs.sql b/sql/V230707_cvs.sql deleted file mode 100644 index 6de2b0a3..00000000 --- a/sql/V230707_cvs.sql +++ /dev/null @@ -1,58 +0,0 @@ -SET FOREIGN_KEY_CHECKS=0; -ALTER 
TABLE `seddb`.`cvs_subprocesses` - ADD COLUMN `project` INT UNSIGNED NOT NULL AFTER `id`, - MODIFY COLUMN `name` VARCHAR(64), - DROP FOREIGN KEY `cvs_subprocesses_ibfk_2`, - DROP COLUMN `vcs`; -SET FOREIGN_KEY_CHECKS=1; - -# Add project column to formulas -ALTER TABLE `seddb`.`cvs_design_mi_formulas` - ADD COLUMN `project` INT UNSIGNED NOT NULL FIRST, - ADD FOREIGN KEY(`project`) - REFERENCES `seddb`.`cvs_projects`(`id`) - ON DELETE CASCADE; - -DROP TABLE IF EXISTS `seddb`.`cvs_formulas_market_inputs`; -DROP TABLE IF EXISTS `seddb`.`cvs_formulas_value_drivers`; - -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_external_factors` -( - `vcs_row` INT UNSIGNED NOT NULL, - `design_group` INT UNSIGNED NOT NULL, - `external_factor` INT UNSIGNED NOT NULL, - PRIMARY KEY(`vcs_row`, `design_group`, `external_factor`), - FOREIGN KEY (`vcs_row`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY (`design_group`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`design_group`) - ON DELETE CASCADE, - FOREIGN KEY(`external_factor`) - REFERENCES `seddb`.`cvs_market_inputs`(`id`) - ON DELETE CASCADE -); - - -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` -( - `vcs_row` INT UNSIGNED NOT NULL, - `design_group` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, - `project` INT UNSIGNED NOT NULL, - PRIMARY KEY(`vcs_row`, `design_group`, `value_driver`), - FOREIGN KEY (`vcs_row`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY (`design_group`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`design_group`) - ON DELETE CASCADE, - FOREIGN KEY(`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) - ON DELETE CASCADE, - FOREIGN KEY (`project`, `value_driver`) - REFERENCES `seddb`.`cvs_project_value_drivers`(`project`, `value_driver`) - ON DELETE CASCADE -); - - diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 1bd30081..b2bfbc56 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,14 +1,73 @@ # Value driver to project relation CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` ( - `project` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, + `project` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, PRIMARY KEY (`project`, `value_driver`), FOREIGN KEY (`project`) - REFERENCES `seddb`.`cvs_projects`(`id`) + REFERENCES `seddb`.`cvs_projects` (`id`) ON DELETE CASCADE, FOREIGN KEY (`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) + REFERENCES `seddb`.`cvs_value_drivers` (`id`) + ON DELETE CASCADE +); +CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); + + +SET FOREIGN_KEY_CHECKS = 0; +ALTER TABLE `seddb`.`cvs_subprocesses` + ADD COLUMN `project` INT UNSIGNED NOT NULL AFTER `id`, + MODIFY COLUMN `name` VARCHAR(64), + DROP FOREIGN KEY `cvs_subprocesses_ibfk_2`, + DROP COLUMN `vcs`; +SET FOREIGN_KEY_CHECKS = 1; + +# Add project column to formulas +ALTER TABLE `seddb`.`cvs_design_mi_formulas` + ADD COLUMN `project` INT UNSIGNED NOT NULL FIRST, + ADD FOREIGN KEY (`project`) + REFERENCES `seddb`.`cvs_projects` (`id`) + ON DELETE CASCADE; + +DROP TABLE IF EXISTS `seddb`.`cvs_formulas_market_inputs`; +DROP TABLE IF EXISTS `seddb`.`cvs_formulas_value_drivers`; + + +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_external_factors` +( + `vcs_row` INT UNSIGNED NOT NULL, + `design_group` INT UNSIGNED NOT NULL, + `external_factor` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs_row`, 
`design_group`, `external_factor`), + FOREIGN KEY (`vcs_row`) + REFERENCES `seddb`.`cvs_design_mi_formulas` (`vcs_row`) + ON DELETE CASCADE, + FOREIGN KEY (`design_group`) + REFERENCES `seddb`.`cvs_design_mi_formulas` (`design_group`) + ON DELETE CASCADE, + FOREIGN KEY (`external_factor`) + REFERENCES `seddb`.`cvs_market_inputs` (`id`) + ON DELETE CASCADE +); + + +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` +( + `vcs_row` INT UNSIGNED NOT NULL, + `design_group` INT UNSIGNED NOT NULL, + `value_driver` INT UNSIGNED NOT NULL, + `project` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs_row`, `design_group`, `value_driver`), + FOREIGN KEY (`vcs_row`) + REFERENCES `seddb`.`cvs_design_mi_formulas` (`vcs_row`) + ON DELETE CASCADE, + FOREIGN KEY (`design_group`) + REFERENCES `seddb`.`cvs_design_mi_formulas` (`design_group`) + ON DELETE CASCADE, + FOREIGN KEY (`value_driver`) + REFERENCES `seddb`.`cvs_value_drivers` (`id`) + ON DELETE CASCADE, + FOREIGN KEY (`project`, `value_driver`) + REFERENCES `seddb`.`cvs_project_value_drivers` (`project`, `value_driver`) ON DELETE CASCADE ); -CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); \ No newline at end of file From db0d1c6283ffcb8d2007e982f3affb613900d8b5 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:49:30 +0200 Subject: [PATCH 108/210] removed unused code and irrelevant tests --- sedbackend/libs/formula_parser/expressions.py | 93 ------------------- tests/apps/cvs/simulation/test_sim_utils.py | 58 ------------ 2 files changed, 151 deletions(-) diff --git a/sedbackend/libs/formula_parser/expressions.py b/sedbackend/libs/formula_parser/expressions.py index 0a2ea6e7..c34286bf 100644 --- a/sedbackend/libs/formula_parser/expressions.py +++ b/sedbackend/libs/formula_parser/expressions.py @@ -13,96 +13,3 @@ def replace_all(pattern, replacement, string): """ new_string = re.sub(r'\b' + pattern + r'\b', str(replacement), string) return new_string - - -def get_prefix_ids(prefix, string): - """ - Finds all matching ids with the given prefix. - - :param prefix: The prefix that will be searched for - :param string: - - :return List[int]: A list of all matching ids in the string that have the prefix "prefix" - """ - - ids = re.findall(r'\b' + prefix + r'[0-9]+', string) - - num_ids = [] - for str_id in ids: - id = ''.join([n for n in str_id if n.isdigit()]) - num_ids.append(id) - - return num_ids - - -def get_prefix_variables(prefix, string): - """ - Finds all matching variables with the given prefix. - - :param prefix: The prefix that will be searched for in - :param string: - - :return List[str]: A list of all matching variables in the string that have the prefix "prefix" - """ - - regex_pattern = fr'\"{prefix}\((.*?)\)\"' - matches = re.findall(regex_pattern, string) - - return matches - - -def get_prefix_names(prefix, string): - """ - Finds all matching names with the given prefix. 
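# Illustration (standalone sketch): replace_all, the one helper kept near the top of
# expressions.py in this hunk, substitutes whole-word matches only. The sample strings
# are invented for the example.
import re

def replace_all(pattern, replacement, string):
    # \b anchors restrict the substitution to exact word matches, so "cost"
    # does not clobber longer identifiers such as "cost_total".
    return re.sub(r'\b' + pattern + r'\b', str(replacement), string)

assert replace_all("cost", 5, "cost + cost_total") == "5 + cost_total"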
- - :param prefix: The prefix that will be searched for in - :param string: - - :return List[str]: A list of all matching names in the string that have the prefix "prefix" - """ - - matches = get_prefix_variables(prefix, string) - results = [] - for match in matches: - # Find the last occurrence of square brackets in the match - last_bracket_index = match.rfind('[') - if last_bracket_index != -1: - # Remove the square brackets and their contents only if they're the last ones - if ']' in match[last_bracket_index:]: - match = match[:last_bracket_index].strip() - results.append(match) - - return results - - -def replace_prefix_variables(prefix: str, variable: str, replacement: str, string: str): - """ - Replaces all *exact* matches of pattern in str with replacement. - - :param prefix: The prefix that will be searched for in - :param variable: The name that will be searched for in - :param replacement: - :param string: The string in which to replace - - :return string: - """ - new_string = re.sub(rf"\"{re.escape(prefix)}\({re.escape(variable)}\)\"", replacement, string) - return new_string - - -# Author: ChatGPT -def remove_strings_replace_zero(input_str): - - """ - Removes all strings and replaces all variables with a 0. - - :param input_str: The string to remove strings and replace variables with a 0 - - :return string: The string with all strings removed and all variables replaced with a 0 - """ - - # Replace all strings with a 0 - regex_pattern = r'\".*?\"' - output_str = re.sub(regex_pattern, '0', input_str) - - return output_str diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index eafd2a47..a2a10df2 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -47,61 +47,3 @@ def test_parse_formula_vd_no_exist(): # Assert assert new_formula == "2+0/5" assert nsp.eval(new_formula) == 2 - - -def test_get_prefix_variables(): - # Setup - text_vd = f'2*"VD(Speed [km/h])"+"VD(Test [T])"' - text_mi = f'2*"EF(Test (OK) [T])"+"EF(Test2 [T2])"' - - # Act - variables_vd = get_prefix_variables("VD", text_vd) - variables_mi = get_prefix_variables("EF", text_mi) - - # Assert - assert variables_vd == ["Speed [km/h]", "Test [T]"] - assert variables_mi == ["Test (OK) [T]", "Test2 [T2]"] - - -def test_get_prefix_names(): - # Setup - text_vd = f'2*"VD(Speed King [L] [km/h])"+"VD(Test(L) */&¢€ [T])"' - text_mi = f'2*"EF(Test (OK) [T])"+"EF(Test2 [T2])"' - - # Act - names_vd = get_prefix_names("VD", text_vd) - names_mi = get_prefix_names("EF", text_mi) - - # Assert - assert names_vd == ["Speed King [L]", "Test(L) */&¢€"] - assert names_mi == ["Test (OK)", "Test2"] - - -def test_replace_prefix_variables(): - # Setup - text_vd = f'2*"VD(Speed [km/h])"+"VD(Test [T])"' - text_mi = f'2*"EF(Test (OK) [T])"+"EF(Test2 [T2])"' - - # Act - new_text_vd = replace_prefix_variables("VD", "Speed [km/h]", "2", text_vd) - new_text_mi = replace_prefix_variables("EF", "Test (OK) [T]", "4", text_mi) - - # Assert - assert new_text_vd == f'2*2+"VD(Test [T])"' - assert new_text_mi == f'2*4+"EF(Test2 [T2])"' - - -def test_value_not_found(): - # Setup - vd_values = [{"id": 1, "name": "Speed", "unit": "km/h", "value": 2}] - mi_values = [{"id": 2, "name": "Test 2", "unit": "T", "value": 3}] - formula = f'2*"VD(NO [km/h])"+"EF(NOTFOUND [T])"' - nsp = NumericStringParser() - - # Act - new_formula = parse_formula(formula, vd_values, mi_values) - - # Assert - assert new_formula == "2*0+0" - assert nsp.eval(new_formula) == 0 - From 
2e989c1c7dc1332638067bf92917c98a5e12623f Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 09:53:47 +0200 Subject: [PATCH 109/210] removed nonexisting import --- tests/apps/cvs/simulation/test_sim_utils.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index a2a10df2..0e09f64e 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -1,6 +1,4 @@ from sedbackend.apps.cvs.simulation.storage import parse_formula -from sedbackend.libs.formula_parser.expressions import get_prefix_names, get_prefix_variables, \ - replace_prefix_variables from sedbackend.libs.formula_parser.parser import NumericStringParser From 059a35c622d2783a55586fb363fe0822b0d6a7a5 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 15:10:01 +0200 Subject: [PATCH 110/210] fixed sim not running --- sedbackend/apps/cvs/life_cycle/storage.py | 1 - .../apps/cvs/link_design_lifecycle/storage.py | 15 +++++++------ sedbackend/apps/cvs/simulation/storage.py | 21 +++++++------------ 3 files changed, 17 insertions(+), 20 deletions(-) diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index a3c60674..92b52df3 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -368,7 +368,6 @@ def get_dsm_file_id(db_connection: PooledMySQLConnection, project_id: int, vcs_i def get_multiple_dsm_file_id(db_connection: PooledMySQLConnection, vcs_ids: List[int]) -> list[Tuple[int, int]]: where_statement = "vcs IN (" + ",".join(["%s" for _ in range(len(vcs_ids))]) + ")" - logger.debug(f'where_statement: {where_statement}') select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_DSM_FILES_TABLE, CVS_DSM_FILES_COLUMNS) \ diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 8831f3f9..f7b45624 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -106,8 +106,9 @@ def delete_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_i delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ .delete(CVS_FORMULAS_VALUE_DRIVERS_TABLE) \ - .where('vcs_row = %s and design_group = %s and value_driver in %s', - [vcs_row_id, design_group_id, value_drivers]) \ + .where( + f'vcs_row = %s and design_group = %s and value_driver in ({",".join(["%s" for _ in range(len(value_drivers))])})', + [vcs_row_id, design_group_id] + value_drivers) \ .execute(return_affected_rows=True) @@ -119,10 +120,11 @@ def update_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_i .where(where_statement, [vcs_row_id, design_group_id]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - delete_value_drivers = [value_driver['id'] for value_driver in value_driver_res if value_driver['id'] not in + delete_value_drivers = [value_driver['value_driver'] for value_driver in value_driver_res if + value_driver['value_driver'] not in value_drivers] add_value_drivers = [value_driver_id for value_driver_id in value_drivers if value_driver_id not in - [value_driver['id'] for value_driver in value_driver_res]] + [value_driver['value_driver'] for value_driver in value_driver_res]] if len(add_value_drivers): add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, 
add_value_drivers, project_id) @@ -152,8 +154,9 @@ def delete_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ .delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) \ - .where('vcs_row = %s and design_group = %s and external_factors in %s', - [vcs_row_id, design_group_id, external_factors]) \ + .where( + f'vcs_row = %s and design_group = %s and external_factors in ({",".join(["%s" for _ in range(len(external_factors))])})', + [vcs_row_id, design_group_id] + external_factors) \ .execute(return_affected_rows=True) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 3cf8bf58..8db6a92f 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -300,7 +300,10 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): try: - query = f'SELECT * FROM cvs_vd_design_values \ + query = f'SELECT cvs_vd_design_values.value_driver, design, value, vcs_row \ + FROM cvs_vd_design_values \ + INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_vd_design_values.value_driver \ + INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ WHERE design IN ({",".join(["%s" for _ in range(len(designs))])})' with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, designs) @@ -393,16 +396,6 @@ def create_sim_settings(db_connection: PooledMySQLConnection, project_id: int, return True -def get_market_values(db_connection: PooledMySQLConnection, vcs: int): - select_statement = MySQLStatementBuilder(db_connection) - res = select_statement \ - .select('cvs_market_input_values', ['id', 'name', 'value', 'unit']) \ - .inner_join('cvs_market_inputs', 'cvs_market_input_values.market_input = cvs_market_inputs.id') \ - .where('vcs = %s', [vcs]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return res - - def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[int]): try: query = f'SELECT * FROM cvs_market_input_values \ @@ -425,16 +418,18 @@ def replace(match): id_number = int(id_number) if tag == "vd": for vd in vd_values: - if vd["id"] == id_number: + if vd["value_driver"] == id_number: return str(vd["value"]) elif tag == "ef": for ef in ef_values: - if ef["id"] == id_number: + if ef["market_input"] == id_number: return str(ef["value"]) return match.group() replaced_text = re.sub(pattern, replace, formula) replaced_text = re.sub(pattern, '0', replaced_text) # If there are any tags left, replace them with 0 + + logger.debug(f'Parsed formula: {replaced_text}') return replaced_text From 4c8b4d8a75aa75e4c6eaa6c8dfafaeea34f9bd8a Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 15:24:28 +0200 Subject: [PATCH 111/210] fixed failing test --- tests/apps/cvs/simulation/test_sim_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index 0e09f64e..2ccc1edb 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -19,8 +19,8 @@ def test_parse_formula_simple(): def test_parse_formula_values(): # Setup - vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] - mi_values = [{"id": 114, "name": "Fuel Cost", "unit": 
"k€/liter", "value": 5}] + vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() From 0021d056b93abd539ef1885bc60b61e7cda1936b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 31 Jul 2023 15:29:13 +0200 Subject: [PATCH 112/210] forgot the second failing test --- tests/apps/cvs/simulation/test_sim_utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index 2ccc1edb..416c7c09 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -34,8 +34,8 @@ def test_parse_formula_values(): def test_parse_formula_vd_no_exist(): # Setup - vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] - mi_values = [{"id": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:1,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() From 267f2383cd7aa423b623d67af69f97b9b14c3406 Mon Sep 17 00:00:00 2001 From: = <=> Date: Mon, 31 Jul 2023 23:36:26 +0200 Subject: [PATCH 113/210] update external factor value started --- .../apps/cvs/market_input/implementation.py | 34 +++++----- sedbackend/apps/cvs/market_input/models.py | 6 +- sedbackend/apps/cvs/market_input/router.py | 10 +-- sedbackend/apps/cvs/market_input/storage.py | 66 ++++++++++++------- .../cvs/market_input/test_market_input.py | 8 +-- .../market_input/test_market_input_values.py | 6 +- tests/apps/cvs/testutils.py | 6 +- 7 files changed, 80 insertions(+), 56 deletions(-) diff --git a/sedbackend/apps/cvs/market_input/implementation.py b/sedbackend/apps/cvs/market_input/implementation.py index b8e5a12c..96ae4d98 100644 --- a/sedbackend/apps/cvs/market_input/implementation.py +++ b/sedbackend/apps/cvs/market_input/implementation.py @@ -15,7 +15,7 @@ ######################################################################################################################## -def get_all_market_inputs(project_id: int) -> List[models.ExternalFactor]: +def get_all_external_factors(project_id: int) -> List[models.ExternalFactor]: try: with get_connection() as con: db_result = storage.get_all_external_factors(con, project_id) @@ -38,10 +38,10 @@ def get_all_market_inputs(project_id: int) -> List[models.ExternalFactor]: ) -def get_market_input(project_id: int, market_input_id: int) -> models.ExternalFactor: +def get_external_factor(project_id: int, external_factor_id: int) -> models.ExternalFactor: try: with get_connection() as con: - db_result = storage.get_external_factor(con, project_id, market_input_id) + db_result = storage.get_external_factor(con, project_id, external_factor_id) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -61,10 +61,10 @@ def get_market_input(project_id: int, market_input_id: int) -> models.ExternalFa ) -def create_market_input(project_id: int, market_input: models.ExternalFactorPost) -> models.ExternalFactor: +def create_external_factor(project_id: int, external_factor_post: models.ExternalFactorPost) -> models.ExternalFactor: try: with get_connection() as con: - db_result 
= storage.create_external_factor(con, project_id, market_input) + db_result = storage.create_external_factor(con, project_id, external_factor_post) con.commit() return db_result except auth_ex.UnauthorizedOperationException: @@ -102,16 +102,16 @@ def update_external_factor(project_id: int, external_factor: models.ExternalFact ) -def delete_market_input(project_id: int, mi_id: int) -> bool: +def delete_external_factor(project_id: int, external_factor_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_external_factor(con, project_id, mi_id) + res = storage.delete_external_factor(con, project_id, external_factor_id) con.commit() return res except exceptions.ExternalFactorFailedDeletionException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not delete market input with id: {mi_id}' + detail=f'Could not delete market input with id: {external_factor_id}' ) except proj_exceptions.CVSProjectNotFoundException: raise HTTPException( @@ -121,7 +121,7 @@ def delete_market_input(project_id: int, mi_id: int) -> bool: except proj_exceptions.CVSProjectNoMatchException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Market input with id={mi_id} is not a part from project with id={project_id}.', + detail=f'Market input with id={external_factor_id} is not a part from project with id={project_id}.', ) @@ -146,7 +146,7 @@ def get_all_formula_market_inputs(formulas_id: int) -> List[models.ExternalFacto def update_market_input_value(project_id: int, mi_value: models.ExternalFactorValue) -> bool: try: with get_connection() as con: - res = storage.update_market_input_value(con, project_id, mi_value) + res = storage.update_external_factor_value(con, project_id, mi_value) con.commit() return res except exceptions.ExternalFactorNotFoundException: @@ -171,10 +171,10 @@ def update_market_input_value(project_id: int, mi_value: models.ExternalFactorVa ) -def update_market_input_values(project_id: int, mi_values: List[models.ExternalFactorValue]) -> bool: +def update_external_factor_values(project_id: int, external_factor_values: List[models.ExternalFactorValue]) -> bool: try: with get_connection() as con: - res = storage.update_external_factor_values(con, project_id, mi_values) + res = storage.update_external_factor_values(con, project_id, external_factor_values) con.commit() return res except exceptions.ExternalFactorNotFoundException: @@ -194,7 +194,7 @@ def update_market_input_values(project_id: int, mi_values: List[models.ExternalF ) -def get_all_market_values(project_id: int) -> List[models.ExternalFactorValue]: +def get_all_external_factor_values(project_id: int) -> List[models.ExternalFactorValue]: try: with get_connection() as con: res = storage.get_all_external_factor_values(con, project_id) @@ -207,16 +207,16 @@ def get_all_market_values(project_id: int) -> List[models.ExternalFactorValue]: ) -def delete_market_value(project_id: int, vcs_id: int, mi_id: int) -> bool: +def delete_external_factor_value(project_id: int, vcs_id: int, external_factor_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_market_value(con, project_id, vcs_id, mi_id) + res = storage.delete_market_value(con, project_id, vcs_id, external_factor_id) con.commit() return res except exceptions.ExternalFactorFailedDeletionException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not delete market input value with market input id: {mi_id} and vcs id: {vcs_id}' + detail=f'Could not delete external factor value 
with external factor id: {external_factor_id} and vcs id: {vcs_id}' ) except proj_exceptions.CVSProjectNotFoundException: raise HTTPException( @@ -226,5 +226,5 @@ def delete_market_value(project_id: int, vcs_id: int, mi_id: int) -> bool: except proj_exceptions.CVSProjectNoMatchException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Market input with id={mi_id} is not a part from project with id={project_id}.', + detail=f'External factor with id={external_factor_id} is not a part from project with id={project_id}.', ) diff --git a/sedbackend/apps/cvs/market_input/models.py b/sedbackend/apps/cvs/market_input/models.py index 49543166..6e3e2418 100644 --- a/sedbackend/apps/cvs/market_input/models.py +++ b/sedbackend/apps/cvs/market_input/models.py @@ -24,5 +24,7 @@ class VcsEFValuePair(BaseModel): class ExternalFactorValue(BaseModel): - external_factor: ExternalFactor - external_factor_value: Optional[List[VcsEFValuePair]] + id: int + name: str + unit: str + external_factor_values: Optional[List[VcsEFValuePair]] diff --git a/sedbackend/apps/cvs/market_input/router.py b/sedbackend/apps/cvs/market_input/router.py index 840a40dd..48b01653 100644 --- a/sedbackend/apps/cvs/market_input/router.py +++ b/sedbackend/apps/cvs/market_input/router.py @@ -21,7 +21,7 @@ dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def get_all_market_input(native_project_id: int) -> List[models.ExternalFactor]: - return implementation.get_all_market_inputs(native_project_id) + return implementation.get_all_external_factors(native_project_id) @router.post( @@ -31,7 +31,7 @@ async def get_all_market_input(native_project_id: int) -> List[models.ExternalFa dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) async def create_market_input(native_project_id: int, market_input: models.ExternalFactorPost) -> models.ExternalFactor: - return implementation.create_market_input(native_project_id, market_input) + return implementation.create_external_factor(native_project_id, market_input) @router.put( @@ -51,7 +51,7 @@ async def update_market_input(native_project_id: int, external_factor: models.Ex dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) async def delete_market_input(native_project_id: int, market_input_id: int) -> bool: - return implementation.delete_market_input(native_project_id, market_input_id) + return implementation.delete_external_factor(native_project_id, market_input_id) ######################################################################################################################## @@ -63,7 +63,7 @@ async def delete_market_input(native_project_id: int, market_input_id: int) -> b response_model=bool ) async def update_market_values(native_project_id: int, mi_values: List[models.ExternalFactorValue]) -> bool: - return implementation.update_market_input_values(native_project_id, mi_values) + return implementation.update_external_factor_values(native_project_id, mi_values) @router.get( @@ -72,4 +72,4 @@ async def update_market_values(native_project_id: int, mi_values: List[models.Ex response_model=List[models.ExternalFactorValue] ) async def get_all_market_values(native_project_id: int) -> List[models.ExternalFactorValue]: - return implementation.get_all_market_values(native_project_id) + return implementation.get_all_external_factor_values(native_project_id) diff --git a/sedbackend/apps/cvs/market_input/storage.py 
b/sedbackend/apps/cvs/market_input/storage.py index 8e10d491..492b9b5d 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -5,6 +5,7 @@ from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.market_input import models, exceptions +from sedbackend.apps.cvs.market_input.models import ExternalFactorValue from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.project import exceptions as project_exceptions @@ -127,19 +128,39 @@ def get_all_formula_external_factors(db_connection: PooledMySQLConnection, # External Factor values ######################################################################################################################## -def populate_external_factor_values(db_result) -> models.ExternalFactorValue: - return models.ExternalFactorValue( - vcs_id=db_result['vcs'], - market_input_id=db_result['market_input'], - value=db_result['value'] - ) - - -def update_market_input_value(db_connection: PooledMySQLConnection, project_id: int, - external_factor_value: models.ExternalFactorValue) -> bool: +def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: + data_dict = {} + + for item in db_result: + external_factor = item["market_input"] + if external_factor not in data_dict: + data_dict[external_factor] = ExternalFactorValue( + id=external_factor, + name=item["name"], + unit=item["unit"], + external_factor_values=[ + { + "vcs_id": item["vcs"], + "value": item["value"], + } + ], + ) + else: + data_dict[external_factor].external_factor_values.append( + { + "vcs_id": item["vcs"], + "value": item["value"], + } + ) + + return list(data_dict.values()) + + +def update_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, + external_factor_value: models.ExternalFactorValue) -> bool: logger.debug(f'Update market input value') - vcs_storage.check_vcs(db_connection, project_id, mi_value.vcs_id) # check if vcs exists - get_external_factor(db_connection, project_id, mi_value.market_input_id) # check if market input exists + vcs_storage.check_vcs(db_connection, project_id, external_factor_value.vcs_id) # check if vcs exists + get_external_factor(db_connection, project_id, external_factor_value.market_input_id) # check if market input exists count_statement = MySQLStatementBuilder(db_connection) count_result = count_statement \ @@ -165,27 +186,28 @@ def update_market_input_value(db_connection: PooledMySQLConnection, project_id: def update_external_factor_values(db_connection: PooledMySQLConnection, project_id: int, - mi_values: List[models.ExternalFactorValue]) -> bool: + ef_values: List[models.ExternalFactorValue]) -> bool: logger.debug(f'Update market input values') - curr_mi_values = get_all_external_factor_values(db_connection, project_id) + curr_ef_values = get_all_external_factor_values(db_connection, project_id) # delete if no longer exists - for value in curr_mi_values: - if [value.vcs_id, value.market_input_id] not in [[v.vcs_id, v.market_input_id] for v in mi_values]: - delete_market_value(db_connection, project_id, value.vcs_id, value.market_input_id) + for currEFV in curr_ef_values: + for curr_vcs_val_pair in currEFV.external_factor_values: + if curr_vcs_val_pair.vcs_id not in [[[v.vcs_id] for v in efv.external_factor_values] for efv in ef_values]: + delete_market_value(db_connection, project_id, curr_vcs_val_pair.vcs_id, currEFV.id) - for mi_value in mi_values: - update_market_input_value(db_connection, project_id, mi_value) + 
for ef_value in ef_values: + update_external_factor_value(db_connection, project_id, ef_value) return True def get_all_external_factor_values(db_connection: PooledMySQLConnection, project_id: int) -> List[models.ExternalFactorValue]: - logger.debug(f'Fetching all market values for project with id: {project_id}') + logger.debug(f'Fetching all external factors for project with id: {project_id}') - columns = CVS_MARKET_VALUES_COLUMN + columns = ['vcs', 'market_input', 'value', 'name', 'unit'] select_statement = MySQLStatementBuilder(db_connection) res = select_statement \ @@ -194,7 +216,7 @@ def get_all_external_factor_values(db_connection: PooledMySQLConnection, .where('project = %s', [project_id]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - return [populate_external_factor_values(r) for r in res] + return populate_external_factor_values(res) def delete_market_value(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, mi_id: int) -> bool: diff --git a/tests/apps/cvs/market_input/test_market_input.py b/tests/apps/cvs/market_input/test_market_input.py index 0eacb5ed..fb863a8b 100644 --- a/tests/apps/cvs/market_input/test_market_input.py +++ b/tests/apps/cvs/market_input/test_market_input.py @@ -13,7 +13,7 @@ def test_create_market_input(client, std_headers, std_user): 'unit': "new unit", }) # Assert - market_inputs = impl_market_input.get_all_market_inputs(project.id) + market_inputs = impl_market_input.get_all_external_factors(project.id) assert res.status_code == 200 # 200 OK assert res.json()["name"] == "new market input" assert res.json()["unit"] == "new unit" @@ -80,7 +80,7 @@ def test_edit_market_input(client, std_headers, std_user): 'unit': "new unit", }) # Assert - market_input_updated = impl_market_input.get_market_input(project.id, market_input.id) + market_input_updated = impl_market_input.get_external_factor(project.id, market_input.id) assert res.status_code == 200 # 200 OK assert market_input_updated.name == "new market input" assert market_input_updated.unit == "new unit" @@ -100,7 +100,7 @@ def test_edit_market_input_no_changes(client, std_headers, std_user): 'unit': market_input.unit, }) # Assert - market_input_updated = impl_market_input.get_market_input(project.id, market_input.id) + market_input_updated = impl_market_input.get_external_factor(project.id, market_input.id) assert res.status_code == 200 # 200 OK assert market_input_updated.name == market_input.name assert market_input_updated.unit == market_input.unit @@ -134,7 +134,7 @@ def test_delete_market_input(client, std_headers, std_user): # Act res = client.delete(f'/api/cvs/project/{project.id}/market-input/{market_input.id}', headers=std_headers) # Assert - market_inputs = impl_market_input.get_all_market_inputs(project.id) + market_inputs = impl_market_input.get_all_external_factors(project.id) assert res.status_code == 200 # 200 OK assert len(market_inputs) == 0 # Cleanup diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index 278687e7..201d2a75 100644 --- a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -21,7 +21,7 @@ def test_create_market_input(client, std_headers, std_user): } ]) # Assert - market_input_values = impl_market_input.get_all_market_values(project.id) + market_input_values = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK assert len(market_input_values) == 1 assert 
market_input_values[0].market_input_id == market_input.id @@ -73,7 +73,7 @@ def test_edit_market_input_value(client, std_headers, std_user): } ]) # Assert - market_input_values = impl_market_input.get_all_market_values(project.id) + market_input_values = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK assert len(market_input_values) == 1 assert market_input_values[0].market_input_id == market_input_value.market_input_id @@ -95,7 +95,7 @@ def test_delete_market_input_value(client, std_headers, std_user): # Act res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[]) # Assert - market_input_values = impl_market_input.get_all_market_values(project.id) + market_input_values = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK assert len(market_input_values) == 0 diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 1e79ff0e..2132061d 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -601,16 +601,16 @@ def seed_random_market_input(project_id: int): name=name, unit=unit ) - return market_input_impl.create_market_input(project_id, market_input_post) + return market_input_impl.create_external_factor(project_id, market_input_post) def seed_random_market_input_values(project_id: int, vcs_id: int, market_input_id: int): - market_input_impl.update_market_input_values(project_id, [market_input_model.ExternalFactorValue( + market_input_impl.update_external_factor_values(project_id, [market_input_model.ExternalFactorValue( vcs_id=vcs_id, market_input_id=market_input_id, value=random.random() * 100)]) - return market_input_impl.get_all_market_values(project_id) + return market_input_impl.get_all_external_factor_values(project_id) # ====================================================================================================================== From 9253696c474f32c49482244ce3e77dcde6962de2 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 1 Aug 2023 16:01:26 +0200 Subject: [PATCH 114/210] return vd and ef object instead of id --- .../apps/cvs/link_design_lifecycle/models.py | 4 +-- .../apps/cvs/link_design_lifecycle/storage.py | 29 +++++++++++++------ .../test_connect_vcs_design.py | 8 ++--- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 51660864..3062e46a 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -33,8 +33,8 @@ class FormulaGet(BaseModel): cost: str revenue: str rate: Rate - used_value_drivers: List[int] = [] - used_external_factors: List[int] = [] + used_value_drivers: List[ValueDriver] = [] + used_external_factors: List[MarketInputGet] = [] class FormulaPost(BaseModel): diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index f7b45624..09834e41 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -4,7 +4,8 @@ from mysql.connector.pooling import PooledMySQLConnection import re from sedbackend.apps.cvs.design.storage import get_design_group -from sedbackend.apps.cvs.vcs.storage import get_vcs_row +from sedbackend.apps.cvs.market_input.storage import populate_market_input +from sedbackend.apps.cvs.vcs.storage import get_vcs_row, populate_value_driver 
from sedbackend.apps.cvs.vcs.storage import get_vcs from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions from mysqlsb import FetchType, MySQLStatementBuilder @@ -12,6 +13,9 @@ CVS_FORMULAS_TABLE = 'cvs_design_mi_formulas' CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_unit', 'cost', 'revenue', 'rate'] +CVS_VALUE_DRIVERS_TABLE = 'cvs_value_drivers' +CVS_VALUE_DRIVERS_COLUMNS = ['id', 'user', 'name', 'unit'] + CVS_FORMULAS_VALUE_DRIVERS_TABLE = 'cvs_formulas_value_drivers' CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['vcs_row', 'design_group', 'value_driver', 'project'] @@ -226,20 +230,24 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ prepared_list += [r['vcs_row'], r['design_group']] with db_connection.cursor(prepared=True) as cursor: - cursor.execute(f"SELECT * FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} WHERE {where_statement}", - prepared_list) + cursor.execute( + f"SELECT id, name, unit, vcs_row, design_group FROM cvs_formulas_value_drivers " + f"INNER JOIN cvs_value_drivers ON cvs_formulas_value_drivers.value_driver = cvs_value_drivers.id WHERE {where_statement}", + prepared_list) all_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] with db_connection.cursor(prepared=True) as cursor: - cursor.execute(f"SELECT * FROM {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} WHERE {where_statement}", - prepared_list) + cursor.execute( + f"SELECT id, name, unit, vcs_row, design_group FROM {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} " + f"INNER JOIN cvs_market_inputs ON {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE}.external_factor = cvs_market_inputs.id WHERE {where_statement}", + prepared_list) all_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] formulas = [] for r in res: - r['value_drivers'] = [vd['value_driver'] for vd in all_vds if vd['vcs_row'] == r['vcs_row'] and + r['value_drivers'] = [vd for vd in all_vds if vd['vcs_row'] == r['vcs_row'] and vd['design_group'] == r['design_group']] - r['external_factors'] = [ef['external_factor'] for ef in all_efs if ef['vcs_row'] == r['vcs_row'] and + r['external_factors'] = [ef for ef in all_efs if ef['vcs_row'] == r['vcs_row'] and ef['design_group'] == r['design_group']] formulas.append(populate_formula(r)) @@ -255,8 +263,11 @@ def populate_formula(db_result) -> models.FormulaGet: cost=db_result['cost'], revenue=db_result['revenue'], rate=db_result['rate'], - used_value_drivers=db_result['value_drivers'] if db_result['value_drivers'] is not None else [], - used_external_factors=db_result['external_factors'] if db_result['external_factors'] is not None else [] + used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['value_drivers']] if + db_result['value_drivers'] is not None else [], + used_external_factors=[populate_market_input(externalFactor) for externalFactor in + db_result['external_factors']] if + db_result['external_factors'] is not None else [], ) diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 43409071..4bf561b8 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -47,8 +47,8 @@ def test_create_formulas(client, std_headers, std_user): # Assert assert res.status_code == 200 - assert res_get.json()[0]['used_value_drivers'][0] == value_driver.id - assert res_get.json()[0]['used_external_factors'][0] == external_factor.id + assert 
res_get.json()[0]['used_value_drivers'][0]['id'] == value_driver.id + assert res_get.json()[0]['used_external_factors'][0]['id'] == external_factor.id # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -235,8 +235,8 @@ def test_edit_formulas(client, std_headers, std_user): # Assert assert res.status_code == 200 - assert res_get.json()[0]['used_value_drivers'][0] == value_driver.id - assert res_get.json()[0]['used_external_factors'][0] == external_factor.id + assert res_get.json()[0]['used_value_drivers'][0]['id'] == value_driver.id + assert res_get.json()[0]['used_external_factors'][0]['id'] == external_factor.id # Cleanup tu.delete_project_by_id(project.id, current_user.id) From 229aca628a5c20624a583d2fc8a95a0c82fd0754 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 1 Aug 2023 16:18:21 +0200 Subject: [PATCH 115/210] adding, updating, deleting external factors fixed --- .../apps/cvs/market_input/implementation.py | 2 +- sedbackend/apps/cvs/market_input/storage.py | 144 +++++++++++------- 2 files changed, 88 insertions(+), 58 deletions(-) diff --git a/sedbackend/apps/cvs/market_input/implementation.py b/sedbackend/apps/cvs/market_input/implementation.py index 96ae4d98..09b0874a 100644 --- a/sedbackend/apps/cvs/market_input/implementation.py +++ b/sedbackend/apps/cvs/market_input/implementation.py @@ -210,7 +210,7 @@ def get_all_external_factor_values(project_id: int) -> List[models.ExternalFacto def delete_external_factor_value(project_id: int, vcs_id: int, external_factor_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_market_value(con, project_id, vcs_id, external_factor_id) + res = storage.delete_external_factor_value(con, project_id, vcs_id, external_factor_id) con.commit() return res except exceptions.ExternalFactorFailedDeletionException: diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 492b9b5d..29f8cbeb 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -5,7 +5,8 @@ from mysqlsb import MySQLStatementBuilder, FetchType, Sort from sedbackend.apps.cvs.market_input import models, exceptions -from sedbackend.apps.cvs.market_input.models import ExternalFactorValue +from sedbackend.apps.cvs.market_input.models import ExternalFactorValue, VcsEFValuePair, ExternalFactor, \ + ExternalFactorPost from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.project import exceptions as project_exceptions @@ -76,7 +77,8 @@ def update_external_factor(db_connection: PooledMySQLConnection, project_id: int external_factor: models.ExternalFactor) -> bool: logger.debug(f'Update external factor with id={external_factor.id}') - get_external_factor(db_connection, project_id, external_factor.id) # check if external factor exists and belongs to project + get_external_factor(db_connection, project_id, + external_factor.id) # check if external factor exists and belongs to project update_statement = MySQLStatementBuilder(db_connection) update_statement.update( @@ -93,7 +95,8 @@ def update_external_factor(db_connection: PooledMySQLConnection, project_id: int def delete_external_factor(db_connection: PooledMySQLConnection, project_id: int, external_factor_id: int) -> bool: logger.debug(f'Deleting external factor with id: {external_factor_id}') - get_external_factor(db_connection, project_id, external_factor_id) # check if external factor exists and belongs to project + get_external_factor(db_connection, project_id, + 
external_factor_id) # check if external factor exists and belongs to project delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ @@ -138,66 +141,92 @@ def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: id=external_factor, name=item["name"], unit=item["unit"], - external_factor_values=[ - { - "vcs_id": item["vcs"], - "value": item["value"], - } - ], + external_factor_values=[], ) - else: + if item["vcs"] is not None and item["value"] is not None: data_dict[external_factor].external_factor_values.append( - { - "vcs_id": item["vcs"], - "value": item["value"], - } + VcsEFValuePair(vcs_id=item["vcs"], value=item["value"]) ) - return list(data_dict.values()) def update_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, external_factor_value: models.ExternalFactorValue) -> bool: - logger.debug(f'Update market input value') - vcs_storage.check_vcs(db_connection, project_id, external_factor_value.vcs_id) # check if vcs exists - get_external_factor(db_connection, project_id, external_factor_value.market_input_id) # check if market input exists - - count_statement = MySQLStatementBuilder(db_connection) - count_result = count_statement \ - .count(CVS_MARKET_VALUES_TABLE) \ - .where('market_input = %s AND vcs = %s', [mi_value.market_input_id, mi_value.vcs_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - count = count_result['count'] - - if count == 0: - insert_statement = MySQLStatementBuilder(db_connection) - insert_statement \ - .insert(table=CVS_MARKET_VALUES_TABLE, columns=CVS_MARKET_VALUES_COLUMN) \ - .set_values([mi_value.vcs_id, mi_value.market_input_id, mi_value.value]) \ - .execute(fetch_type=FetchType.FETCH_NONE) - else: - update_statement = MySQLStatementBuilder(db_connection) - update_statement \ - .update(table=CVS_MARKET_VALUES_TABLE, set_statement='value = %s', values=[mi_value.value]) \ - .where('vcs = %s AND market_input = %s', [mi_value.vcs_id, mi_value.market_input_id]) \ - .execute(fetch_type=FetchType.FETCH_NONE) + logger.debug(f'Update external factor value') + + if len(external_factor_value.external_factor_values) == 0: + return True + prepared_values = [] + prepared_statement = '' + for index, value in enumerate(external_factor_value.external_factor_values): + if index != len(external_factor_value.external_factor_values) - 1: + prepared_statement += '(%s, %s, %s),' + else: + prepared_statement += '(%s, %s, %s)' + prepared_values += [value.vcs_id, external_factor_value.id, value.value] + + query = f'INSERT INTO cvs_market_input_values (vcs, market_input, value) \ + VALUES ' + prepared_statement + ' ON DUPLICATE KEY UPDATE value = VALUES(value);' + + with db_connection.cursor(prepared=True) as cursor: + res = cursor.execute(query, prepared_values) + logger.debug(res) + + return True + + +def compare_and_delete_external_factor_values(db_connection: PooledMySQLConnection, project_id: int, + prev_ef_values: List[models.ExternalFactorValue], + new_ef_values: List[models.ExternalFactorValue]): + # Delete external factor values that does not exist in the new table but did in the previous one + efv_dict2 = {efv.id: {vcs_pair.vcs_id for vcs_pair in efv.external_factor_values} for efv in new_ef_values} + logger.debug(efv_dict2) + for efv in prev_ef_values: + parent_id = efv.id + if parent_id in efv_dict2: + vcs_ids_set1 = {vcs_pair.vcs_id for vcs_pair in efv.external_factor_values} + vcs_ids_set2 = efv_dict2[parent_id] + for vcs_id in vcs_ids_set1 - vcs_ids_set2: + 
delete_external_factor_value(db_connection, project_id, vcs_id, parent_id) return True +def sync_new_external_factors(db_connection: PooledMySQLConnection, project_id: int, + prev_ef_values: List[models.ExternalFactorValue], + new_ef_values: List[models.ExternalFactorValue]): + ef_ids_to_remove = {efv.id for efv in prev_ef_values} - {efv.id for efv in new_ef_values} + for ef_remove_id in ef_ids_to_remove: + delete_external_factor(db_connection, project_id, ef_remove_id) + updated_ef_values = [efv for efv in prev_ef_values if efv.id not in ef_ids_to_remove] + + for new_efv in new_ef_values: + matching_efv = next((efv for efv in updated_ef_values if efv.id == new_efv.id), None) + if matching_efv: + if matching_efv.name != new_efv.name or matching_efv.unit != new_efv.unit: + update_external_factor(db_connection, project_id, + ExternalFactor(id=new_efv.id, name=new_efv.name, unit=new_efv.unit)) + else: + new_ef = create_external_factor(db_connection, project_id, + ExternalFactorPost(name=new_efv.name, unit=new_efv.unit)) + updated_ef_values.append(ExternalFactorValue(id=new_ef.id, name=new_ef.name, unit=new_ef.unit, + external_factor_values=new_efv.external_factor_values)) + return updated_ef_values + + def update_external_factor_values(db_connection: PooledMySQLConnection, project_id: int, ef_values: List[models.ExternalFactorValue]) -> bool: - logger.debug(f'Update market input values') + logger.debug(f'Update external factor values for project={project_id}') + + old_ef_values = get_all_external_factor_values(db_connection, project_id) - curr_ef_values = get_all_external_factor_values(db_connection, project_id) + compare_and_delete_external_factor_values(db_connection, project_id, old_ef_values, ef_values) - # delete if no longer exists - for currEFV in curr_ef_values: - for curr_vcs_val_pair in currEFV.external_factor_values: - if curr_vcs_val_pair.vcs_id not in [[[v.vcs_id] for v in efv.external_factor_values] for efv in ef_values]: - delete_market_value(db_connection, project_id, curr_vcs_val_pair.vcs_id, currEFV.id) + # Add, update or remove External Factors that has changed since previously + ef_values_new_ids = sync_new_external_factors(db_connection, project_id, old_ef_values, ef_values) - for ef_value in ef_values: + # Update values for External factors + for ef_value in ef_values_new_ids: update_external_factor_value(db_connection, project_id, ef_value) return True @@ -207,27 +236,28 @@ def get_all_external_factor_values(db_connection: PooledMySQLConnection, project_id: int) -> List[models.ExternalFactorValue]: logger.debug(f'Fetching all external factors for project with id: {project_id}') - columns = ['vcs', 'market_input', 'value', 'name', 'unit'] + query = f'SELECT vcs, cvs_market_inputs.id AS market_input, value, name, unit \ + FROM cvs_market_inputs \ + LEFT JOIN cvs_market_input_values ON cvs_market_input_values.market_input = cvs_market_inputs.id \ + WHERE cvs_market_inputs.project = %s;' - select_statement = MySQLStatementBuilder(db_connection) - res = select_statement \ - .select(CVS_MARKET_VALUES_TABLE, columns) \ - .inner_join('cvs_market_inputs', 'market_input = cvs_market_inputs.id') \ - .where('project = %s', [project_id]) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + with db_connection.cursor(prepared=True, dictionary=True) as cursor: + cursor.execute(query, [project_id]) + res = cursor.fetchall() return populate_external_factor_values(res) -def delete_market_value(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, mi_id: int) -> 
bool: - logger.debug(f'Deleting market input value with vcs id: {vcs_id} and market input id: {mi_id}') +def delete_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, + ef_id: int) -> bool: + logger.debug(f'Deleting external factor value with vcs id: {vcs_id} and market input id: {ef_id}') - get_external_factor(db_connection, project_id, mi_id) # check if market input exists and belongs to project + get_external_factor(db_connection, project_id, ef_id) # check if market input exists and belongs to project delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ .delete(CVS_MARKET_VALUES_TABLE) \ - .where('vcs = %s AND market_input = %s', [vcs_id, mi_id]) \ + .where('vcs = %s AND market_input = %s', [vcs_id, ef_id]) \ .execute(return_affected_rows=True) if rows != 1: From 632e2a107a3ba64d559fd215caba300b5b591f34 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 1 Aug 2023 16:31:06 +0200 Subject: [PATCH 116/210] revert port and host --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 8c277aeb6875356d7731fdcc7ae0965d69e4983f Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 1 Aug 2023 16:48:17 +0200 Subject: [PATCH 117/210] test fix --- sedbackend/apps/cvs/market_input/router.py | 8 ++++++-- tests/apps/cvs/market_input/test_market_input_values.py | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/sedbackend/apps/cvs/market_input/router.py b/sedbackend/apps/cvs/market_input/router.py index 48b01653..5da52bcc 100644 --- a/sedbackend/apps/cvs/market_input/router.py +++ b/sedbackend/apps/cvs/market_input/router.py @@ -5,6 +5,7 @@ from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.cvs.market_input import models, implementation +from sedbackend.apps.cvs.market_input.models import ExternalFactor from sedbackend.apps.cvs.project.router import CVS_APP_SID router = APIRouter() @@ -40,8 +41,11 @@ async def create_market_input(native_project_id: int, market_input: models.Exter response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def update_market_input(native_project_id: int, external_factor: models.ExternalFactor) -> bool: - return implementation.update_external_factor(native_project_id, external_factor) +async def update_market_input(native_project_id: int, market_input_id: int, + external_factor: models.ExternalFactorPost) -> bool: + return implementation.update_external_factor(native_project_id, + ExternalFactor(id=market_input_id, name=external_factor.name, + unit=external_factor.unit)) @router.delete( diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index 201d2a75..588867ed 100644 --- a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -5,7 +5,7 @@ import sedbackend.apps.cvs.market_input.implementation as impl_market_input -def 
test_create_market_input(client, std_headers, std_user): +def test_create_market_input_value(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) From d68c774859a58e1c4f3f38ce721f94ce2bcb824c Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 09:22:53 +0200 Subject: [PATCH 118/210] add comment to formula --- sedbackend/apps/core/db.py | 6 ++- .../link_design_lifecycle/implementation.py | 4 +- .../apps/cvs/link_design_lifecycle/models.py | 21 +++++--- .../apps/cvs/link_design_lifecycle/router.py | 6 +-- .../apps/cvs/link_design_lifecycle/storage.py | 41 ++++++++------- sql/V230721_cvs.sql | 5 ++ .../test_connect_vcs_design.py | 51 +++++++++++-------- tests/apps/cvs/testutils.py | 32 ++++++------ 8 files changed, 97 insertions(+), 69 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index e0161c28..59a48d82 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -11,7 +11,7 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions -def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_formulas: models.FormulaPost) -> bool: +def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_formulas: models.FormulaRowPost) -> bool: with get_connection() as con: try: res = storage.update_formulas(con, project_id, vcs_row_id, design_group_id, new_formulas) @@ -54,7 +54,7 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo ) -def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> List[models.FormulaGet]: +def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> List[models.FormulaRowGet]: with get_connection() as con: try: res = storage.get_all_formulas(con, project_id, vcs_id, design_group_id, user_id) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 3062e46a..57f702e9 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -25,23 +25,28 @@ class Rate(Enum): PROJECT = 'per_project' -class FormulaGet(BaseModel): +class Formula(BaseModel): + formula: str + comment: str + + +class FormulaRowGet(BaseModel): vcs_row_id: int design_group_id: int - time: str + time: Formula time_unit: TimeFormat - cost: str - revenue: str + cost: Formula + revenue: Formula rate: Rate used_value_drivers: List[ValueDriver] = [] used_external_factors: List[MarketInputGet] = [] -class FormulaPost(BaseModel): - time: str +class FormulaRowPost(BaseModel): + time: Formula time_unit: TimeFormat - cost: str - revenue: str + cost: Formula + revenue: Formula rate: Rate diff --git a/sedbackend/apps/cvs/link_design_lifecycle/router.py b/sedbackend/apps/cvs/link_design_lifecycle/router.py index f4ec9c0d..7d90d337 
100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/router.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/router.py @@ -15,11 +15,11 @@ @router.get( '/project/{native_project_id}/vcs/{vcs_id}/design-group/{dg_id}/formulas/all', summary=f'Get all formulas for a single vcs and design group', - response_model=List[models.FormulaGet], + response_model=List[models.FormulaRowGet], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, - user: User = Depends(get_current_active_user)) -> List[models.FormulaGet]: + user: User = Depends(get_current_active_user)) -> List[models.FormulaRowGet]: return implementation.get_all_formulas(native_project_id, vcs_id, dg_id, user.id) @@ -29,7 +29,7 @@ async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def edit_formulas(native_project_id: int, vcs_row_id: int, dg_id: int, new_formulas: models.FormulaPost) -> bool: +async def edit_formulas(native_project_id: int, vcs_row_id: int, dg_id: int, new_formulas: models.FormulaRowPost) -> bool: return implementation.edit_formulas(native_project_id, vcs_row_id, dg_id, new_formulas) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 09834e41..1d8f38c8 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -11,7 +11,8 @@ from mysqlsb import FetchType, MySQLStatementBuilder CVS_FORMULAS_TABLE = 'cvs_design_mi_formulas' -CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_unit', 'cost', 'revenue', 'rate'] +CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_comment', 'time_unit', 'cost', + 'cost_comment', 'revenue', 'revenue_comment', 'rate'] CVS_VALUE_DRIVERS_TABLE = 'cvs_value_drivers' CVS_VALUE_DRIVERS_COLUMNS = ['id', 'user', 'name', 'unit'] @@ -24,13 +25,16 @@ def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formulas: models.FormulaPost): + formula_row: models.FormulaRowPost): logger.debug(f'Creating formulas') - value_driver_ids, external_factor_ids = find_vd_and_ef([formulas.time, formulas.cost, formulas.revenue]) + value_driver_ids, external_factor_ids = find_vd_and_ef( + [formula_row.time.formula, formula_row.cost.formula, formula_row.revenue.formula]) - values = [project_id, vcs_row_id, design_group_id, formulas.time, formulas.time_unit.value, formulas.cost, - formulas.revenue, formulas.rate.value] + values = [project_id, vcs_row_id, design_group_id, formula_row.time.formula, formula_row.time.comment, + formula_row.time_unit.value, + formula_row.cost.formula, formula_row.cost.comment, + formula_row.revenue.formula, formula_row.revenue.comment, formula_row.rate.value] try: insert_statement = MySQLStatementBuilder(db_connection) @@ -66,15 +70,18 @@ def find_vd_and_ef(texts: List[str]) -> (List[str], List[int]): def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, project_id: int, - formulas: models.FormulaPost): + formula_row: models.FormulaRowPost): logger.debug(f'Editing formulas') - value_driver_ids, external_factor_ids = find_vd_and_ef([formulas.time, formulas.cost, formulas.revenue]) + value_driver_ids, external_factor_ids = find_vd_and_ef( + 
[formula_row.time.formula, formula_row.cost.formula, formula_row.revenue.formula]) columns = CVS_FORMULAS_COLUMNS[3:] set_statement = ', '.join([col + ' = %s' for col in columns]) - values = [formulas.time, formulas.time_unit.value, formulas.cost, formulas.revenue, formulas.rate.value] + values = [formula_row.time.formula, formula_row.time.comment, formula_row.time_unit.value, formula_row.cost.formula, + formula_row.cost.comment, formula_row.revenue.formula, formula_row.revenue.comment, + formula_row.rate.value] # Update formula row update_statement = MySQLStatementBuilder(db_connection) @@ -187,7 +194,7 @@ def update_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formulas: models.FormulaPost) -> bool: + formula_row: models.FormulaRowPost) -> bool: get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project get_vcs_row(db_connection, project_id, vcs_row_id) @@ -198,9 +205,9 @@ def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r count = count['count'] if count == 0: - create_formulas(db_connection, project_id, vcs_row_id, design_group_id, formulas) + create_formulas(db_connection, project_id, vcs_row_id, design_group_id, formula_row) elif count == 1: - edit_formulas(db_connection, vcs_row_id, design_group_id, project_id, formulas) + edit_formulas(db_connection, vcs_row_id, design_group_id, project_id, formula_row) else: raise exceptions.FormulasFailedUpdateException @@ -208,7 +215,7 @@ def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, - design_group_id: int, user_id: int) -> List[models.FormulaGet]: + design_group_id: int, user_id: int) -> List[models.FormulaRowGet]: logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project @@ -254,14 +261,14 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ return formulas -def populate_formula(db_result) -> models.FormulaGet: - return models.FormulaGet( +def populate_formula(db_result) -> models.FormulaRowGet: + return models.FormulaRowGet( vcs_row_id=db_result['vcs_row'], design_group_id=db_result['design_group'], - time=db_result['time'], + time=models.Formula(formula=db_result['time'], comment=db_result['time_comment']), time_unit=db_result['time_unit'], - cost=db_result['cost'], - revenue=db_result['revenue'], + cost=models.Formula(formula=db_result['cost'], comment=db_result['cost_comment']), + revenue=models.Formula(formula=db_result['revenue'], comment=db_result['revenue_comment']), rate=db_result['rate'], used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['value_drivers']] if db_result['value_drivers'] is not None else [], diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index b2bfbc56..aa356c1c 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -71,3 +71,8 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` REFERENCES `seddb`.`cvs_project_value_drivers` (`project`, `value_driver`) ON DELETE CASCADE ); + +ALTER TABLE `seddb`.`cvs_design_mi_formulas` + ADD COLUMN `time_comment` TEXT NULL AFTER `time`, + ADD COLUMN `cost_comment` TEXT NULL AFTER `cost`, + ADD COLUMN `revenue_comment` TEXT NULL AFTER 
`revenue` diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 4bf561b8..94ff8237 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -4,6 +4,7 @@ import tests.apps.cvs.testutils as tu import sedbackend.apps.core.users.implementation as impl_users +from sedbackend.apps.cvs.link_design_lifecycle.models import Formula def test_create_formulas(client, std_headers, std_user): @@ -27,6 +28,9 @@ def test_create_formulas(client, std_headers, std_user): cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' revenue = '20+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{ef:' + str( external_factor.id) + ',"' + str(external_factor.name) + '"}' + time_comment = testutils.random_str(10, 200) + cost_comment = testutils.random_str(10, 200) + revenue_comment = testutils.random_str(10, 200) rate = tu.random_rate_choice() @@ -35,10 +39,10 @@ def test_create_formulas(client, std_headers, std_user): headers=std_headers, json={ "project": project.id, - "time": time, + "time": {"formula": time, "comment": time_comment}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": cost_comment}, + "revenue": {"formula": revenue, "comment": revenue_comment}, "rate": rate }) @@ -80,10 +84,10 @@ def test_create_formulas_no_optional(client, std_headers, std_user): headers=std_headers, json={ "project": project.id, - "time": time, + "time": {"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) @@ -223,10 +227,11 @@ def test_edit_formulas(client, std_headers, std_user): f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, json={ - "time": time, + "project": project.id, + "time": {"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) @@ -264,10 +269,11 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, json={ - "time": time, + "project": project.id, + "time": {"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) @@ -306,10 +312,11 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{dg_invalid_id}/formulas', headers=std_headers, json={ - "time": time, + "project": project.id, + "time": {"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) @@ -345,10 +352,11 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', headers=std_headers, json={ - "time": time, + "project": project.id, + "time": 
{"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) @@ -385,10 +393,11 @@ def test_edit_formulas_invalid_project(client, std_headers, std_user): res = client.put(f'/api/cvs/project/{invalid_proj_id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', headers=std_headers, json={ - "time": time, + "project": project.id, + "time": {"formula": time, "comment": ""}, "time_unit": time_unit, - "cost": cost, - "revenue": revenue, + "cost": {"formula": cost, "comment": ""}, + "revenue": {"formula": revenue, "comment": ""}, "rate": rate }) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index a7743bc7..e100238b 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -15,7 +15,7 @@ import sedbackend.apps.cvs.project.models import sedbackend.apps.cvs.vcs.implementation as vcs_impl import sedbackend.apps.cvs.vcs.models as vcs_model -from sedbackend.apps.cvs.link_design_lifecycle.models import FormulaGet, TimeFormat, Rate +from sedbackend.apps.cvs.link_design_lifecycle.models import FormulaRowGet, TimeFormat, Rate from sedbackend.apps.cvs.market_input import models as market_input_model, implementation as market_input_impl import tests.testutils as tu @@ -404,7 +404,7 @@ def seed_random_designs(project_id: int, dg_id: int, amount: int = 10): # ====================================================================================================================== def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int, - amount: int = 10) -> List[connect_model.FormulaGet]: + amount: int = 10) -> List[connect_model.FormulaRowGet]: vcs_rows = seed_vcs_table_rows(user_id, project_id, vcs_id, amount) for i, vcs_row in enumerate(vcs_rows): @@ -418,11 +418,11 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use else: rate = Rate.PRODUCT - formula_post = connect_model.FormulaPost( - time=time, + formula_post = connect_model.FormulaRowPost( + time=connect_model.Formula(formula=time, comment=""), time_unit=time_unit, - cost=cost, - revenue=revenue, + cost=connect_model.Formula(formula=cost, comment=""), + revenue=connect_model.Formula(formula=revenue, comment=""), rate=rate ) @@ -432,7 +432,7 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id, user_id) -def create_formulas(project_id: int, vcs_rows: List[vcs_model.VcsRow], dg_id: int) -> List[FormulaGet]: +def create_formulas(project_id: int, vcs_rows: List[vcs_model.VcsRow], dg_id: int) -> List[FormulaRowGet]: for row in vcs_rows: time = str(tu.random.randint(1, 200)) time_unit = random_time_unit() @@ -440,11 +440,11 @@ def create_formulas(project_id: int, vcs_rows: List[vcs_model.VcsRow], dg_id: in revenue = str(tu.random.randint(1, 10000)) rate = Rate.PRODUCT.value - formula_post = connect_model.FormulaPost( - time=time, + formula_post = connect_model.FormulaRowPost( + time=connect_model.Formula(formula=time, comment=""), time_unit=time_unit, - cost=cost, - revenue=revenue, + cost=connect_model.Formula(formula=cost, comment=""), + revenue=connect_model.Formula(formula=revenue, comment=""), rate=rate ) connect_impl.edit_formulas(project_id, row.id, dg_id, formula_post) @@ -476,12 +476,12 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, last = 
next(filter(lambda x: x.vcs_row_id == last_id, formulas)) - new_last = connect_model.FormulaPost( - time=last.time, + new_last = connect_model.FormulaRowPost( + time=connect_model.Formula(formula=last.time, comment=""), time_unit=last.time_unit, - cost=last.cost, - revenue=last.revenue, - rate=Rate.PROJECT.value + cost=connect_model.Formula(formula=last.cost, comment=""), + revenue=connect_model.Formula(formula=last.revenue, comment=""), + rate=last.rate ) connect_impl.edit_formulas(project_id, last_id, design_group_id, new_last) From 5bc687c79b2fb39e75438846e97e850e2ad5325f Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 10:27:09 +0200 Subject: [PATCH 119/210] add multiplcation signs where missing --- sedbackend/apps/core/db.py | 15 ++++---- sedbackend/apps/cvs/simulation/storage.py | 17 +++++++++ tests/apps/cvs/simulation/test_sim_utils.py | 39 ++++++++++++++++++++- 3 files changed, 61 insertions(+), 10 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..abf0840d 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -5,16 +5,13 @@ from contextlib import contextmanager - connection_pool = None user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( @@ -24,10 +21,10 @@ database=database, port=port, autocommit=False, - get_warnings=True, # Change for production environments (True in dev) - raise_on_warnings=True, # Change for production environments (True in dev) - pool_size=4, # Change for production environments (as few as possible in dev) - connection_timeout=10 # Might want to increase this for production + get_warnings=True, # Change for production environments (True in dev) + raise_on_warnings=True, # Change for production environments (True in dev) + pool_size=4, # Change for production environments (as few as possible in dev) + connection_timeout=10 # Might want to increase this for production ) except mysql.connector.Error as err: if err.errno == errorcode.ER_ACCESS_DENIED_ERROR: diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 8db6a92f..04b8d5bd 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -410,9 +410,26 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in return res +def add_multiplication_signs(formula: str) -> str: + # Define a regular expression pattern to find the positions where the multiplication sign is missing + pattern = r'(\d)([a-zA-Z({\[<])|([}\])>]|})([a-zA-Z({\[<])|([}\])>]|{)(\d)' + + # Use the re.sub() function to replace the matches with the correct format + def replace(match): + if match.group(2): + return f"{match.group(1)}*{match.group(2)}" + elif match.group(3) and match.group(4): + return f"{match.group(3)}*{match.group(4)}" + + result = re.sub(pattern, replace, formula) + return result + + def parse_formula(formula: str, vd_values, ef_values): pattern = r'\{(?Pvd|ef):(?P\d+),"([^"]+)"\}' + formula = add_multiplication_signs(formula) + def replace(match): tag, id_number, _ = match.groups() id_number = int(id_number) diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index 416c7c09..9a544514 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ 
b/tests/apps/cvs/simulation/test_sim_utils.py @@ -1,4 +1,4 @@ -from sedbackend.apps.cvs.simulation.storage import parse_formula +from sedbackend.apps.cvs.simulation.storage import parse_formula, add_multiplication_signs from sedbackend.libs.formula_parser.parser import NumericStringParser @@ -45,3 +45,40 @@ def test_parse_formula_vd_no_exist(): # Assert assert new_formula == "2+0/5" assert nsp.eval(new_formula) == 2 + + +def test_add_multiplication_signs(): + # Setup + formula = '2{vd:47241,"Design Similarity [0-1]"}{ef:114,"Fuel Cost [k€/liter]"}' + + # Act + new_formula = add_multiplication_signs(formula) + + # Assert + assert new_formula == '2*{vd:47241,"Design Similarity [0-1]"}*{ef:114,"Fuel Cost [k€/liter]"}' + + +def test_add_multiplication_valid_formula(): + # Setup + formula = '2*{vd:47241,"Design Similarity [0-1]"}*{ef:114,"Fuel Cost [k€/liter]"}' + + # Act + new_formula = add_multiplication_signs(formula) + + # Assert + assert new_formula == '2*{vd:47241,"Design Similarity [0-1]"}*{ef:114,"Fuel Cost [k€/liter]"}' + + +def test_parse_without_multiplication_signs(): + # Setup + vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + formula = '2{vd:47241,"Design Similarity [0-1]"}{ef:114,"Fuel Cost [k€/liter]"}' + nsp = NumericStringParser() + + # Act + new_formula = parse_formula(formula, vd_values, mi_values) + + # Assert + assert new_formula == "2*10*5" + assert nsp.eval(new_formula) == 100 From c55e4b013df9f55ae6b92b5f948e91e20f84914d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 10:44:17 +0200 Subject: [PATCH 120/210] fixed failing test --- sedbackend/apps/core/db.py | 9 +- tests/apps/cvs/simulation/test_simulation.py | 354 ------------------- tests/apps/cvs/testutils.py | 8 +- 3 files changed, 9 insertions(+), 362 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index abf0840d..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -5,6 +5,7 @@ from contextlib import contextmanager + connection_pool = None user = 'rw' @@ -21,10 +22,10 @@ database=database, port=port, autocommit=False, - get_warnings=True, # Change for production environments (True in dev) - raise_on_warnings=True, # Change for production environments (True in dev) - pool_size=4, # Change for production environments (as few as possible in dev) - connection_timeout=10 # Might want to increase this for production + get_warnings=True, # Change for production environments (True in dev) + raise_on_warnings=True, # Change for production environments (True in dev) + pool_size=4, # Change for production environments (as few as possible in dev) + connection_timeout=10 # Might want to increase this for production ) except mysql.connector.Error as err: if err.errno == errorcode.ER_ACCESS_DENIED_ERROR: diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 46e489f1..63b0b0c7 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -260,357 +260,3 @@ def test_run_sim_invalid_proj(client, std_headers, std_user): tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) - -#Temporarly disabled -''' -def test_run_single_xlsx_sim(client, std_headers, std_user): - #Setup - current_user = 
impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id, current_user.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - row3 = tu.vcs_model.VcsRowPost( - index=2, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=22, - subprocess=None - ) - row4 = tu.vcs_model.VcsRowPost( - index=3, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=24, - subprocess=None - ) - - rows = [row1, row2, row3, row4] - - table = tu.create_vcs_table(project.id, vcs.id, rows) - design_group = tu.seed_random_design_group(project.id) - design = tu.seed_random_designs(project.id, design_group.id, 1)[0] - formulas = tu.create_formulas(project.id, table, design_group.id) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/simulation/files/input-example.xlsx') - _file = {'dsm_file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} - - sim_data = { - "time_unit": tu.TimeFormat.YEAR.value, - "flow_process": "Verification", - "flow_start_time": None, - "flow_time": 5, - "interarrival_time": 10, - "start_time": 1, - "end_time": 30, - "discount_rate": 0.08, - "non_tech_add": tu.NonTechCost.CONTINOUSLY.value, - "monte_carlo": False, - "runs": None, - "vcs_ids": str(vcs.id), - "design_ids": str(design.id), - "normalized_npv": False - } - - #Act - res = client.post(f'/api/cvs/project/{project.id}/sim/upload-dsm', - headers=std_headers, - files=_file, - data = sim_data) - - #Assert - assert res.status_code == 200 - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) - - -def test_run_xlsx_sim(client, std_headers, std_user): - #Setup - amount = 3 - - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - row3 = tu.vcs_model.VcsRowPost( - index=2, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=22, - subprocess=None - ) - row4 = tu.vcs_model.VcsRowPost( - index=3, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=24, - subprocess=None - ) - - rows = [row1, row2, row3, row4] - - - vcss = [] - designs = [] - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id, current_user.id) - vcss.append(vcs.id) - table = tu.create_vcs_table(project.id, 
vcs.id, rows) - design_group = tu.seed_random_design_group(project.id) - design = tu.seed_random_designs(project.id, design_group.id, 1) - designs.append(design[0]) - formulas = tu.create_formulas(project.id, table, design_group.id) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/simulation/files/input-example.xlsx') - _file = {'dsm_file': ('input-example.xlsx', _test_upload_file.open('rb'), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')} - - sim_data = { - "time_unit": tu.TimeFormat.YEAR.value, - "flow_process": "Verification", - "flow_start_time": None, - "flow_time": 5, - "interarrival_time": 10, - "start_time": 1, - "end_time": 30, - "discount_rate": 0.08, - "non_tech_add": tu.NonTechCost.CONTINOUSLY.value, - "monte_carlo": False, - "runs": None, - "vcs_ids": ','.join([str(vcs) for vcs in vcss]), - "design_ids": ','.join([str(design.id) for design in designs]), - "normalized_npv": False - } - - #Act - res = client.post(f'/api/cvs/project/{project.id}/sim/upload-dsm', - headers=std_headers, - files=_file, - data = sim_data) - - - #Assert - assert res.status_code == 200 - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, vcss) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) - - -def test_run_single_csv_sim(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - vcs = tu.seed_random_vcs(project.id, current_user.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - row3 = tu.vcs_model.VcsRowPost( - index=2, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=22, - subprocess=None - ) - row4 = tu.vcs_model.VcsRowPost( - index=3, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=24, - subprocess=None - ) - - rows = [row1, row2, row3, row4] - - table = tu.create_vcs_table(project.id, vcs.id, rows) - design_group = tu.seed_random_design_group(project.id) - design = tu.seed_random_designs(project.id, design_group.id, 1)[0] - formulas = tu.create_formulas(project.id, table, design_group.id) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/simulation/files/input.csv') - _file = {'dsm_file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} - - sim_data = { - "time_unit": tu.TimeFormat.YEAR.value, - "flow_process": "Verification", - "flow_start_time": None, - "flow_time": 5, - "interarrival_time": 10, - "start_time": 1, - "end_time": 30, - "discount_rate": 0.08, - "non_tech_add": tu.NonTechCost.CONTINOUSLY.value, - "monte_carlo": False, - "runs": None, - "vcs_ids": str(vcs.id), - "design_ids": str(design.id), - "normalized_npv": False - } - - #Act - res = client.post(f'/api/cvs/project/{project.id}/sim/upload-dsm', - headers=std_headers, - files=_file, - data = sim_data) - - - #Assert - assert res.status_code == 200 - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - 
tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) - - -def test_run_csv_sim(client, std_headers, std_user): - #Setup - amount = 3 - - current_user = impl_users.impl_get_user_with_username(std_user.username) - project = tu.seed_random_project(current_user.id) - - row1 = tu.vcs_model.VcsRowPost( - index=0, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=17, - subprocess=None - ) - row2 = tu.vcs_model.VcsRowPost( - index=1, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=20, - subprocess=None - ) - row3 = tu.vcs_model.VcsRowPost( - index=2, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=22, - subprocess=None - ) - row4 = tu.vcs_model.VcsRowPost( - index=3, - stakeholder=tu.tu.random_str(5,50), - stakeholder_needs=None, - stakeholder_expectations=tu.tu.random_str(5,50), - iso_process=24, - subprocess=None - ) - - rows = [row1, row2, row3, row4] - design_group = tu.seed_random_design_group(project.id) - designs = tu.seed_random_designs(project.id, design_group.id, 3) - vcss = [] - for _ in range(amount): - vcs = tu.seed_random_vcs(project.id, current_user.id) - vcss.append(vcs.id) - table = tu.create_vcs_table(project.id, vcs.id, rows) - formulas = tu.create_formulas(project.id, table, design_group.id) - - cwd = os.getcwd() - _test_upload_file = Path(cwd + '/tests/apps/cvs/simulation/files/input.csv') - _file = {'dsm_file': ('input.csv', _test_upload_file.open('rb'), 'text/csv')} - - sim_data = { - "time_unit": tu.TimeFormat.YEAR.value, - "flow_process": "Verification", - "flow_start_time": None, - "flow_time": 5, - "interarrival_time": 10, - "start_time": 1, - "end_time": 30, - "discount_rate": 0.08, - "non_tech_add": tu.NonTechCost.CONTINOUSLY.value, - "monte_carlo": False, - "runs": None, - "vcs_ids": ','.join([str(vcs) for vcs in vcss]), - "design_ids": ','.join([str(design.id) for design in designs]), - "normalized_npv": False - } - - #Act - res = client.post(f'/api/cvs/project/{project.id}/sim/upload-dsm', - headers=std_headers, - files=_file, - data = sim_data) - - - #Assert - assert res.status_code == 200 - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(project.id, vcss) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) - - ''' \ No newline at end of file diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index e100238b..25fefa3c 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -477,11 +477,11 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, last = next(filter(lambda x: x.vcs_row_id == last_id, formulas)) new_last = connect_model.FormulaRowPost( - time=connect_model.Formula(formula=last.time, comment=""), + time=last.time, time_unit=last.time_unit, - cost=connect_model.Formula(formula=last.cost, comment=""), - revenue=connect_model.Formula(formula=last.revenue, comment=""), - rate=last.rate + cost=last.cost, + revenue=last.revenue, + rate=Rate.PROJECT.value ) connect_impl.edit_formulas(project_id, last_id, design_group_id, new_last) From 99cc923949f2668cb75afc10ba7bbb700e0e310d Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 2 Aug 2023 10:54:41 +0200 Subject: [PATCH 
121/210] test --- .../apps/cvs/market_input/implementation.py | 12 ++-- sedbackend/apps/cvs/market_input/router.py | 4 +- sedbackend/apps/cvs/market_input/storage.py | 1 + .../cvs/market_input/test_market_input.py | 10 ++-- .../market_input/test_market_input_values.py | 59 +++++++++++-------- tests/apps/cvs/testutils.py | 24 +++++--- 6 files changed, 64 insertions(+), 46 deletions(-) diff --git a/sedbackend/apps/cvs/market_input/implementation.py b/sedbackend/apps/cvs/market_input/implementation.py index 09b0874a..2343d61d 100644 --- a/sedbackend/apps/cvs/market_input/implementation.py +++ b/sedbackend/apps/cvs/market_input/implementation.py @@ -143,21 +143,21 @@ def get_all_formula_market_inputs(formulas_id: int) -> List[models.ExternalFacto ######################################################################################################################## -def update_market_input_value(project_id: int, mi_value: models.ExternalFactorValue) -> bool: +def update_exterrnal_factor_value(project_id: int, ef_value: models.ExternalFactorValue) -> bool: try: with get_connection() as con: - res = storage.update_external_factor_value(con, project_id, mi_value) + res = storage.update_external_factor_value(con, project_id, ef_value) con.commit() return res except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find market input with id={mi_value.market_input_id}.', + detail=f'Could not find external factor with id={ef_value.id}.', ) except vcs_exceptions.VCSNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find vcs with id={mi_value.vcs_id}.', + detail=f'Could not find vcs with id={ef_value.external_factor_values[0].vcs_id}.', ) except proj_exceptions.CVSProjectNotFoundException: raise HTTPException( @@ -167,7 +167,7 @@ def update_market_input_value(project_id: int, mi_value: models.ExternalFactorVa except proj_exceptions.CVSProjectNoMatchException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Market input with id={mi_value.market_input_id} is not a part from project with id={project_id}.', + detail=f'External factor with id={ef_value.id} is not a part from project with id={project_id}.', ) @@ -180,7 +180,7 @@ def update_external_factor_values(project_id: int, external_factor_values: List[ except exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find market input', + detail=f'Could not find external factor', ) except vcs_exceptions.VCSNotFoundException: raise HTTPException( diff --git a/sedbackend/apps/cvs/market_input/router.py b/sedbackend/apps/cvs/market_input/router.py index 5da52bcc..1a36eda8 100644 --- a/sedbackend/apps/cvs/market_input/router.py +++ b/sedbackend/apps/cvs/market_input/router.py @@ -66,8 +66,8 @@ async def delete_market_input(native_project_id: int, market_input_id: int) -> b summary='Create or update values for market inputs', response_model=bool ) -async def update_market_values(native_project_id: int, mi_values: List[models.ExternalFactorValue]) -> bool: - return implementation.update_external_factor_values(native_project_id, mi_values) +async def update_market_values(native_project_id: int, ef_values: List[models.ExternalFactorValue]) -> bool: + return implementation.update_external_factor_values(native_project_id, ef_values) @router.get( diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py 
index 29f8cbeb..2e10c4c5 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -153,6 +153,7 @@ def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: def update_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, external_factor_value: models.ExternalFactorValue) -> bool: logger.debug(f'Update external factor value') + get_external_factor(db_connection, project_id, external_factor_value.id) if len(external_factor_value.external_factor_values) == 0: return True diff --git a/tests/apps/cvs/market_input/test_market_input.py b/tests/apps/cvs/market_input/test_market_input.py index fb863a8b..df3f21da 100644 --- a/tests/apps/cvs/market_input/test_market_input.py +++ b/tests/apps/cvs/market_input/test_market_input.py @@ -43,7 +43,7 @@ def test_get_all_market_inputs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) # Act res = client.get(f'/api/cvs/project/{project.id}/market-input/all', headers=std_headers) # Assert @@ -73,7 +73,7 @@ def test_edit_market_input(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/market-input/{market_input.id}', headers=std_headers, json={ 'name': "new market input", @@ -93,7 +93,7 @@ def test_edit_market_input_no_changes(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/market-input/{market_input.id}', headers=std_headers, json={ 'name': market_input.name, @@ -113,7 +113,7 @@ def test_edit_market_input_no_name(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) # Act res = client.put(f'/api/cvs/project/{project.id}/market-input/{market_input.id}', headers=std_headers, json={ 'name': None, @@ -130,7 +130,7 @@ def test_delete_market_input(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) # Act res = client.delete(f'/api/cvs/project/{project.id}/market-input/{market_input.id}', headers=std_headers) # Assert diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index 588867ed..c526d07c 100644 --- a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -5,12 +5,12 @@ import sedbackend.apps.cvs.market_input.implementation as impl_market_input -def test_create_market_input_value(client, std_headers, 
std_user): +def test_create_external_factor_value(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) value = random.random() * 100 # Act res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[ @@ -24,28 +24,32 @@ def test_create_market_input_value(client, std_headers, std_user): market_input_values = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK assert len(market_input_values) == 1 - assert market_input_values[0].market_input_id == market_input.id - assert market_input_values[0].vcs_id == vcs.id - assert abs(market_input_values[0].value-value) < 0.0001 + assert market_input_values[0].id == market_input.id + assert market_input_values[0].external_factor_values[0].vcs_id == vcs.id + assert abs(market_input_values[0].external_factor_values[0].value-value) < 0.0001 # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) -def test_create_market_input_invalid_vcs_id(client, std_headers, std_user): +def test_create_external_factor_value_invalid_vcs_id(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_market_input(project.id) + market_input = tu.seed_random_external_factor(project.id) value = random.random() * 100 # Act - res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[ + res = client.put(f'/api/cvs/project/{project.id}/market-input-value', headers=std_headers, json=[ { - 'market_input_id': market_input.id, - 'vcs_id': vcs.id+1, - 'value': value + 'id': market_input.id, + 'name': market_input.name, + 'unit': market_input.unit, + 'external_factor_values': { + 'vcs_id': vcs.id+1, + 'value': value + } } ]) # Assert @@ -56,29 +60,32 @@ def test_create_market_input_invalid_vcs_id(client, std_headers, std_user): tu.delete_vd_from_user(current_user.id) -def test_edit_market_input_value(client, std_headers, std_user): +def test_edit_external_factor_value(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_market_input(project.id) - market_input_value = tu.seed_random_market_input_values(project.id, vcs.id, market_input.id)[0] + external_factor = tu.seed_random_external_factor(project.id) + external_factor_value = tu.seed_random_external_factor_values(project.id, vcs.id, external_factor.id)[0] new_value = random.random() * 100 # Act res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[ { - 'market_input_id': market_input.id, - 'vcs_id': vcs.id, - 'value': new_value + 'id': external_factor_value.id, + 'name': external_factor_value.name, + 'unit': external_factor_value.unit, + 'external_factor_values': [ + {'vcs_id': vcs.id, 'value': new_value} + ] } ]) # Assert - market_input_values = impl_market_input.get_all_external_factor_values(project.id) + external_factor_values = 
impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK - assert len(market_input_values) == 1 - assert market_input_values[0].market_input_id == market_input_value.market_input_id - assert market_input_values[0].vcs_id == market_input_value.vcs_id - assert abs(market_input_values[0].value-new_value) < 0.0001 + assert len(external_factor_values) == 1 + assert external_factor_values[0].id == external_factor_value.id + assert external_factor_values[0].external_factor_values[0].vcs_id == external_factor_value.external_factor_values[0].vcs_id + assert abs(external_factor_values[0].external_factor_values[0].value-new_value) < 0.0001 # Cleanup tu.delete_project_by_id(project.id, current_user.id) @@ -90,8 +97,8 @@ def test_delete_market_input_value(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_market_input(project.id) - tu.seed_random_market_input_values(project.id, vcs.id, market_input.id) + market_input = tu.seed_random_external_factor(project.id) + tu.seed_random_external_factor_values(project.id, vcs.id, market_input.id) # Act res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[]) # Assert @@ -109,8 +116,8 @@ def test_get_market_input_values(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_market_input(project.id) - market_input_value = tu.seed_random_market_input_values(project.id, vcs.id, market_input.id)[0] + market_input = tu.seed_random_external_factor(project.id) + market_input_value = tu.seed_random_external_factor_values(project.id, vcs.id, market_input.id)[0] # Act res = client.get(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers) # Assert diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 2132061d..ffd284c9 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -4,6 +4,7 @@ from sedbackend.apps.core.files import implementation as impl_files import sedbackend.apps.cvs.simulation.implementation as sim_impl import sedbackend.apps.cvs.simulation.models as sim_model +from sedbackend.apps.cvs.market_input.models import ExternalFactorValue, VcsEFValuePair from sedbackend.apps.cvs.simulation.models import NonTechCost import sedbackend.apps.cvs.design.implementation as design_impl import sedbackend.apps.cvs.design.models as design_model @@ -591,10 +592,10 @@ def seed_random_sim_settings(user_id: int, project_id: int) -> sim_model.SimSett # ====================================================================================================================== -# Market Input +# External factors # ====================================================================================================================== -def seed_random_market_input(project_id: int): +def seed_random_external_factor(project_id: int): name = tu.random_str(5, 50) unit = tu.random_str(5, 50) market_input_post = market_input_model.ExternalFactorPost( @@ -604,11 +605,20 @@ def seed_random_market_input(project_id: int): return market_input_impl.create_external_factor(project_id, market_input_post) -def seed_random_market_input_values(project_id: int, vcs_id: int, market_input_id: 
int): - market_input_impl.update_external_factor_values(project_id, [market_input_model.ExternalFactorValue( - vcs_id=vcs_id, - market_input_id=market_input_id, - value=random.random() * 100)]) +def seed_random_external_factor_values(project_id: int, vcs_id: int, ef_id: int): + name = tu.random_str(5, 50) + unit = tu.random_str(5, 50) + value = random.random() * 100 + market_input_impl.update_external_factor_values(project_id, [ + ExternalFactorValue( + id=ef_id, + name=name, + unit=unit, + external_factor_values=[ + VcsEFValuePair(vcs_id=vcs_id, value=value) + ] + ) + ]) return market_input_impl.get_all_external_factor_values(project_id) From 2d9759935aef9daaea1e92dd7f8d5b0f495117e1 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 11:06:23 +0200 Subject: [PATCH 122/210] allow formula comment to be none --- sedbackend/apps/cvs/link_design_lifecycle/models.py | 4 ++-- .../apps/cvs/connect_design_vcs/test_connect_vcs_design.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 57f702e9..fec05444 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -1,4 +1,4 @@ -from typing import List +from typing import List, Optional from pydantic import BaseModel from enum import Enum @@ -27,7 +27,7 @@ class Rate(Enum): class Formula(BaseModel): formula: str - comment: str + comment: Optional[str] = None class FormulaRowGet(BaseModel): diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 94ff8237..5fe52542 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -1,10 +1,6 @@ -import pytest - import tests.testutils as testutils import tests.apps.cvs.testutils as tu - import sedbackend.apps.core.users.implementation as impl_users -from sedbackend.apps.cvs.link_design_lifecycle.models import Formula def test_create_formulas(client, std_headers, std_user): @@ -30,7 +26,7 @@ def test_create_formulas(client, std_headers, std_user): external_factor.id) + ',"' + str(external_factor.name) + '"}' time_comment = testutils.random_str(10, 200) cost_comment = testutils.random_str(10, 200) - revenue_comment = testutils.random_str(10, 200) + revenue_comment = None rate = tu.random_rate_choice() From 54eb1ee234f0124e2a14bce1e4b3c61bc90461f6 Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 2 Aug 2023 11:07:25 +0200 Subject: [PATCH 123/210] test fix --- sedbackend/apps/cvs/market_input/storage.py | 2 +- .../market_input/test_market_input_values.py | 35 +++++++++++-------- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 2e10c4c5..13d64e4e 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -245,7 +245,7 @@ def get_all_external_factor_values(db_connection: PooledMySQLConnection, with db_connection.cursor(prepared=True, dictionary=True) as cursor: cursor.execute(query, [project_id]) res = cursor.fetchall() - + logger.debug(res) return populate_external_factor_values(res) diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index c526d07c..52c0cf29 100644 --- 
a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -10,23 +10,28 @@ def test_create_external_factor_value(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_external_factor(project.id) + external_factor = tu.seed_random_external_factor(project.id) value = random.random() * 100 # Act res = client.put(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers, json=[ { - 'market_input_id': market_input.id, - 'vcs_id': vcs.id, - 'value': value + 'id': external_factor.id, + 'name': external_factor.name, + 'unit': external_factor.unit, + 'external_factor_values': [ + {'vcs_id': vcs.id, 'value': value} + ] } ]) # Assert - market_input_values = impl_market_input.get_all_external_factor_values(project.id) + efvs = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK - assert len(market_input_values) == 1 - assert market_input_values[0].id == market_input.id - assert market_input_values[0].external_factor_values[0].vcs_id == vcs.id - assert abs(market_input_values[0].external_factor_values[0].value-value) < 0.0001 + assert len(efvs) == 1 + assert efvs[0].id == external_factor.id + assert efvs[0].name == external_factor.name + assert efvs[0].unit == external_factor.unit + assert efvs[0].external_factor_values[0].vcs_id == vcs.id + assert abs(efvs[0].external_factor_values[0].value - value) < 0.0001 # Cleanup tu.delete_project_by_id(project.id, current_user.id) @@ -80,12 +85,14 @@ def test_edit_external_factor_value(client, std_headers, std_user): } ]) # Assert - external_factor_values = impl_market_input.get_all_external_factor_values(project.id) + efvs = impl_market_input.get_all_external_factor_values(project.id) assert res.status_code == 200 # 200 OK - assert len(external_factor_values) == 1 - assert external_factor_values[0].id == external_factor_value.id - assert external_factor_values[0].external_factor_values[0].vcs_id == external_factor_value.external_factor_values[0].vcs_id - assert abs(external_factor_values[0].external_factor_values[0].value-new_value) < 0.0001 + assert len(efvs) == 1 + assert efvs[0].id == external_factor_value.id + assert efvs[0].name == external_factor_value.name + assert efvs[0].unit == external_factor_value.unit + assert efvs[0].external_factor_values[0].vcs_id == external_factor_value.external_factor_values[0].vcs_id + assert abs(efvs[0].external_factor_values[0].value-new_value) < 0.0001 # Cleanup tu.delete_project_by_id(project.id, current_user.id) From 8b14124bd2b08fde6e07b98f58f24b942c0cb81e Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 2 Aug 2023 13:17:25 +0200 Subject: [PATCH 124/210] tests fixed --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/market_input/storage.py | 23 +++++++++++-------- .../market_input/test_market_input_values.py | 12 ++++++---- 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git 
a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 13d64e4e..0d71b991 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -132,8 +132,9 @@ def get_all_formula_external_factors(db_connection: PooledMySQLConnection, ######################################################################################################################## def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: - data_dict = {} + logger.debug(f'Populating external factor values') + data_dict = {} for item in db_result: external_factor = item["market_input"] if external_factor not in data_dict: @@ -147,12 +148,14 @@ def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: data_dict[external_factor].external_factor_values.append( VcsEFValuePair(vcs_id=item["vcs"], value=item["value"]) ) - return list(data_dict.values()) + result = list(data_dict.values()) + return result def update_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, external_factor_value: models.ExternalFactorValue) -> bool: logger.debug(f'Update external factor value') + get_external_factor(db_connection, project_id, external_factor_value.id) if len(external_factor_value.external_factor_values) == 0: @@ -170,8 +173,7 @@ def update_external_factor_value(db_connection: PooledMySQLConnection, project_i VALUES ' + prepared_statement + ' ON DUPLICATE KEY UPDATE value = VALUES(value);' with db_connection.cursor(prepared=True) as cursor: - res = cursor.execute(query, prepared_values) - logger.debug(res) + cursor.execute(query, prepared_values) return True @@ -181,7 +183,6 @@ def compare_and_delete_external_factor_values(db_connection: PooledMySQLConnecti new_ef_values: List[models.ExternalFactorValue]): # Delete external factor values that does not exist in the new table but did in the previous one efv_dict2 = {efv.id: {vcs_pair.vcs_id for vcs_pair in efv.external_factor_values} for efv in new_ef_values} - logger.debug(efv_dict2) for efv in prev_ef_values: parent_id = efv.id if parent_id in efv_dict2: @@ -196,22 +197,25 @@ def compare_and_delete_external_factor_values(db_connection: PooledMySQLConnecti def sync_new_external_factors(db_connection: PooledMySQLConnection, project_id: int, prev_ef_values: List[models.ExternalFactorValue], new_ef_values: List[models.ExternalFactorValue]): + ef_ids_to_remove = {efv.id for efv in prev_ef_values} - {efv.id for efv in new_ef_values} for ef_remove_id in ef_ids_to_remove: delete_external_factor(db_connection, project_id, ef_remove_id) updated_ef_values = [efv for efv in prev_ef_values if efv.id not in ef_ids_to_remove] - for new_efv in new_ef_values: + for index, new_efv in enumerate(new_ef_values): matching_efv = next((efv for efv in updated_ef_values if efv.id == new_efv.id), None) if matching_efv: if matching_efv.name != new_efv.name or matching_efv.unit != new_efv.unit: update_external_factor(db_connection, project_id, ExternalFactor(id=new_efv.id, name=new_efv.name, unit=new_efv.unit)) + updated_ef_values[index] = new_efv else: new_ef = create_external_factor(db_connection, project_id, ExternalFactorPost(name=new_efv.name, unit=new_efv.unit)) updated_ef_values.append(ExternalFactorValue(id=new_ef.id, name=new_ef.name, unit=new_ef.unit, external_factor_values=new_efv.external_factor_values)) + return updated_ef_values @@ -220,8 +224,9 @@ def update_external_factor_values(db_connection: PooledMySQLConnection, project_ logger.debug(f'Update 
external factor values for project={project_id}') old_ef_values = get_all_external_factor_values(db_connection, project_id) - - compare_and_delete_external_factor_values(db_connection, project_id, old_ef_values, ef_values) + # Delete external factor values that have been removed + if len(old_ef_values) > 0: + compare_and_delete_external_factor_values(db_connection, project_id, old_ef_values, ef_values) # Add, update or remove External Factors that has changed since previously ef_values_new_ids = sync_new_external_factors(db_connection, project_id, old_ef_values, ef_values) @@ -245,7 +250,7 @@ def get_all_external_factor_values(db_connection: PooledMySQLConnection, with db_connection.cursor(prepared=True, dictionary=True) as cursor: cursor.execute(query, [project_id]) res = cursor.fetchall() - logger.debug(res) + return populate_external_factor_values(res) diff --git a/tests/apps/cvs/market_input/test_market_input_values.py b/tests/apps/cvs/market_input/test_market_input_values.py index 52c0cf29..0997776a 100644 --- a/tests/apps/cvs/market_input/test_market_input_values.py +++ b/tests/apps/cvs/market_input/test_market_input_values.py @@ -123,16 +123,18 @@ def test_get_market_input_values(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) vcs = tu.seed_random_vcs(project.id, current_user.id) - market_input = tu.seed_random_external_factor(project.id) - market_input_value = tu.seed_random_external_factor_values(project.id, vcs.id, market_input.id)[0] + external_factor = tu.seed_random_external_factor(project.id) + efv = tu.seed_random_external_factor_values(project.id, vcs.id, external_factor.id)[0] # Act res = client.get(f'/api/cvs/project/{project.id}/market-input-values', headers=std_headers) # Assert assert res.status_code == 200 # 200 OK assert len(res.json()) == 1 - assert res.json()[0]['market_input_id'] == market_input_value.market_input_id - assert res.json()[0]['vcs_id'] == market_input_value.vcs_id - assert abs(res.json()[0]['value']-market_input_value.value) < 0.0001 + assert res.json()[0]['id'] == efv.id + assert res.json()[0]['name'] == efv.name + assert res.json()[0]['unit'] == efv.unit + assert res.json()[0]['external_factor_values'][0]['vcs_id'] == efv.external_factor_values[0].vcs_id + assert abs(res.json()[0]['external_factor_values'][0]['value'] - efv.external_factor_values[0].value) < 0.0001 # Cleanup tu.delete_project_by_id(project.id, current_user.id) From 87ec345f2e5cdd4444c6e054f6583fee47a57d1a Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 2 Aug 2023 13:23:18 +0200 Subject: [PATCH 125/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 6cf6442797978d2b8de128e007a7fa9d50f2cd55 Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 2 Aug 2023 13:28:21 +0200 Subject: [PATCH 126/210] added some comments --- sedbackend/apps/cvs/market_input/models.py | 1 + sedbackend/apps/cvs/market_input/storage.py | 12 +++++++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git 
a/sedbackend/apps/cvs/market_input/models.py b/sedbackend/apps/cvs/market_input/models.py index 6e3e2418..9e3e02a7 100644 --- a/sedbackend/apps/cvs/market_input/models.py +++ b/sedbackend/apps/cvs/market_input/models.py @@ -23,6 +23,7 @@ class VcsEFValuePair(BaseModel): value: float +# Combined External Factor and the connected value pairs of vcs and value class ExternalFactorValue(BaseModel): id: int name: str diff --git a/sedbackend/apps/cvs/market_input/storage.py b/sedbackend/apps/cvs/market_input/storage.py index 0d71b991..0de2161a 100644 --- a/sedbackend/apps/cvs/market_input/storage.py +++ b/sedbackend/apps/cvs/market_input/storage.py @@ -148,10 +148,11 @@ def populate_external_factor_values(db_result) -> list[ExternalFactorValue]: data_dict[external_factor].external_factor_values.append( VcsEFValuePair(vcs_id=item["vcs"], value=item["value"]) ) - result = list(data_dict.values()) - return result + return list(data_dict.values()) + +# Updates all value pairs to match input external_factor_value.external_factor_values def update_external_factor_value(db_connection: PooledMySQLConnection, project_id: int, external_factor_value: models.ExternalFactorValue) -> bool: logger.debug(f'Update external factor value') @@ -178,6 +179,8 @@ def update_external_factor_value(db_connection: PooledMySQLConnection, project_i return True +# Compare previously stored list of ExternalFactorValues with an updated version and +# deletes any values that don't appear in the new one def compare_and_delete_external_factor_values(db_connection: PooledMySQLConnection, project_id: int, prev_ef_values: List[models.ExternalFactorValue], new_ef_values: List[models.ExternalFactorValue]): @@ -194,6 +197,8 @@ def compare_and_delete_external_factor_values(db_connection: PooledMySQLConnecti return True +# Removes, updates or adds ExternalFactors based on differences between stored and new +# so everything matches the new list def sync_new_external_factors(db_connection: PooledMySQLConnection, project_id: int, prev_ef_values: List[models.ExternalFactorValue], new_ef_values: List[models.ExternalFactorValue]): @@ -223,7 +228,8 @@ def update_external_factor_values(db_connection: PooledMySQLConnection, project_ ef_values: List[models.ExternalFactorValue]) -> bool: logger.debug(f'Update external factor values for project={project_id}') - old_ef_values = get_all_external_factor_values(db_connection, project_id) + old_ef_values = get_all_external_factor_values(db_connection, project_id) # get stored values for comparisons + # Delete external factor values that have been removed if len(old_ef_values) > 0: compare_and_delete_external_factor_values(db_connection, project_id, old_ef_values, ef_values) From 0cd951c2d673f6a1d3525b1469d39391394d9b67 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 14:31:14 +0200 Subject: [PATCH 127/210] merge develop --- sedbackend/apps/core/db.py | 6 ++++-- sedbackend/apps/cvs/link_design_lifecycle/models.py | 4 ++-- sedbackend/apps/cvs/link_design_lifecycle/storage.py | 4 ++-- .../apps/cvs/connect_design_vcs/test_connect_vcs_design.py | 4 ++-- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = 
mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index fec05444..7a8b9dd1 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -3,7 +3,7 @@ from pydantic import BaseModel from enum import Enum -from sedbackend.apps.cvs.market_input.models import MarketInputGet +from sedbackend.apps.cvs.market_input.models import ExternalFactor from sedbackend.apps.cvs.vcs.models import ValueDriver @@ -39,7 +39,7 @@ class FormulaRowGet(BaseModel): revenue: Formula rate: Rate used_value_drivers: List[ValueDriver] = [] - used_external_factors: List[MarketInputGet] = [] + used_external_factors: List[ExternalFactor] = [] class FormulaRowPost(BaseModel): diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 1d8f38c8..a9a9a45e 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -4,7 +4,7 @@ from mysql.connector.pooling import PooledMySQLConnection import re from sedbackend.apps.cvs.design.storage import get_design_group -from sedbackend.apps.cvs.market_input.storage import populate_market_input +from sedbackend.apps.cvs.market_input.storage import populate_external_factor from sedbackend.apps.cvs.vcs.storage import get_vcs_row, populate_value_driver from sedbackend.apps.cvs.vcs.storage import get_vcs from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions @@ -272,7 +272,7 @@ def populate_formula(db_result) -> models.FormulaRowGet: rate=db_result['rate'], used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['value_drivers']] if db_result['value_drivers'] is not None else [], - used_external_factors=[populate_market_input(externalFactor) for externalFactor in + used_external_factors=[populate_external_factor(externalFactor) for externalFactor in db_result['external_factors']] if db_result['external_factors'] is not None else [], ) diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 5fe52542..b8cbd4ac 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -15,7 +15,7 @@ def test_create_formulas(client, std_headers, std_user): row_id = vcs_rows[0].id design_group = tu.seed_random_design_group(project.id) value_driver = tu.seed_random_value_driver(current_user.id, project.id) - external_factor = tu.seed_random_market_input(project.id) + external_factor = tu.seed_random_external_factor(project.id) # Act @@ -206,7 +206,7 @@ def test_edit_formulas(client, std_headers, std_user): formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) value_driver = tu.seed_random_value_driver(current_user.id, project.id) - external_factor = tu.seed_random_market_input(project.id) + external_factor = tu.seed_random_external_factor(project.id) # Act From 9f28a47170951f92153b951f782fb8fcc71eecb9 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Wed, 2 Aug 2023 14:34:40 +0200 Subject: [PATCH 128/210] update db --- sedbackend/apps/core/db.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ 
b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 0abbb966161274b3f4a3d3386297f0c29c8ce6c8 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 3 Aug 2023 09:45:02 +0200 Subject: [PATCH 129/210] possible to use value drivers outside of project --- .../cvs/link_design_lifecycle/exceptions.py | 4 +++ .../link_design_lifecycle/implementation.py | 17 +++++---- .../apps/cvs/link_design_lifecycle/storage.py | 36 +++++++++++++++++-- 3 files changed, 47 insertions(+), 10 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py b/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py index 7fc8d07c..a071e8bc 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py @@ -21,3 +21,7 @@ class FormulasFailedDeletionException(Exception): class TooManyFormulasUpdatedException(Exception): pass + + +class CouldNotAddValueDriverToProjectException(Exception): + pass diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index 59a48d82..9f8e2738 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -3,9 +3,7 @@ from starlette import status from sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.link_design_lifecycle import models, storage -from sedbackend.apps.cvs.link_design_lifecycle.exceptions import FormulasFailedDeletionException, \ - FormulasFailedUpdateException, TooManyFormulasUpdatedException, \ - WrongTimeUnitException +from sedbackend.apps.cvs.link_design_lifecycle import exceptions from sedbackend.apps.cvs.project import exceptions as project_exceptions from sedbackend.apps.cvs.design import exceptions as design_exceptions from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions @@ -22,12 +20,12 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find vcs' ) - except FormulasFailedUpdateException: + except exceptions.FormulasFailedUpdateException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'No formulas updated. Are the formulas changed?' ) - except TooManyFormulasUpdatedException: + except exceptions.TooManyFormulasUpdatedException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Too many formulas tried to be updated.' @@ -52,6 +50,11 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find vcs row with id={vcs_row_id}.', ) + except exceptions.CouldNotAddValueDriverToProjectException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f'Failed to add value driver from another project' + ) def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> List[models.FormulaRowGet]: @@ -65,7 +68,7 @@ def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find VCS with id {vcs_id}' ) - except WrongTimeUnitException as e: # Where exactly does this fire???? 
+ except exceptions.WrongTimeUnitException as e: # Where exactly does this fire???? raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Wrong time unit. Given unit: {e.time_unit}' @@ -88,7 +91,7 @@ def delete_formulas(project_id: int, vcs_row_id: int, design_group_id: int) -> b res = storage.delete_formulas(con, project_id, vcs_row_id, design_group_id) con.commit() return res - except FormulasFailedDeletionException: + except exceptions.FormulasFailedDeletionException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=f'Could not delete formulas with row id: {vcs_row_id}' diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index a9a9a45e..902b3ba7 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -23,6 +23,9 @@ CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] +CVS_PROJECT_VALUE_DRIVERS_TABLE = 'cvs_project_value_drivers' +CVS_PROJECT_VALUE_DRIVERS_COLUMNS = ['project', 'value_driver'] + def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, formula_row: models.FormulaRowPost): @@ -97,6 +100,33 @@ def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_ def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, value_drivers: List[int], project_id: int): + + # Add value driver to project if not already added + select_statement = MySQLStatementBuilder(db_connection) + project_value_driver_res = select_statement \ + .select(CVS_PROJECT_VALUE_DRIVERS_TABLE, CVS_PROJECT_VALUE_DRIVERS_COLUMNS) \ + .where(f'project = %s and value_driver in ({",".join(["%s" for _ in range(len(value_drivers))])})', + [project_id] + value_drivers) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + value_drivers_outside_project = [vd_id for vd_id in value_drivers if + vd_id not in [res['value_driver'] for res in project_value_driver_res]] + + if value_drivers_outside_project: + try: + prepared_list = [] + insert_statement = f'INSERT INTO {CVS_PROJECT_VALUE_DRIVERS_TABLE} (project, value_driver) VALUES' + for value_driver_id in value_drivers_outside_project: + insert_statement += f'(%s, %s),' + prepared_list += [project_id, value_driver_id] + insert_statement = insert_statement[:-1] + with db_connection.cursor(prepared=True) as cursor: + cursor.execute(insert_statement, prepared_list) + except Exception as e: + logger.error(f'Error adding value driver to project: {e}') + raise exceptions.CouldNotAddValueDriverToProjectException + + # Add value driver to formulas try: prepared_list = [] insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver, project) VALUES' @@ -180,12 +210,12 @@ def update_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro .where(where_statement, [vcs_row_id, design_group_id]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - delete_external_factors = [external_factor['id'] for external_factor in external_factor_res if - external_factor['id'] not in + delete_external_factors = [external_factor['external_factor'] for external_factor in external_factor_res if + external_factor['external_factor'] not in external_factors] add_external_factors = [external_factor_id for external_factor_id in 
external_factors if external_factor_id not in - [external_factor['id'] for external_factor in external_factor_res]] + [external_factor['external_factor'] for external_factor in external_factor_res]] if len(add_external_factors): add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, add_external_factors) From 99c66e2f985723ff12c2f2ecb2abf94e1df0be15 Mon Sep 17 00:00:00 2001 From: jyborn Date: Thu, 3 Aug 2023 13:21:42 +0200 Subject: [PATCH 130/210] new SimulationResult which includes designs, vcss, vds --- .../apps/cvs/simulation/implementation.py | 9 +++-- sedbackend/apps/cvs/simulation/models.py | 14 +++++++ sedbackend/apps/cvs/simulation/router.py | 13 ++++--- sedbackend/apps/cvs/simulation/storage.py | 39 ++++++++++++++----- 4 files changed, 56 insertions(+), 19 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 12a12fd9..a7dbf8da 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -14,6 +14,7 @@ RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ SimSettingsNotFoundException, CouldNotFetchSimulationDataException, CouldNotFetchMarketInputValuesException, \ CouldNotFetchValueDriverDesignValuesException, NoTechnicalProcessException +from sedbackend.apps.cvs.simulation.models import SimulationResult from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions @@ -21,12 +22,13 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id: int, - normalized_npv: bool = False, is_multiprocessing: bool = False) -> List[models.Simulation]: + design_group_ids: List[int], user_id: int, project_id: int, + normalized_npv: bool = False, is_multiprocessing: bool = False) -> SimulationResult: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, design_group_ids, user_id, + result = storage.run_simulation(con, sim_settings, vcs_ids, project_id, design_group_ids, user_id, normalized_npv, is_multiprocessing) + logger.debug(result) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( @@ -105,7 +107,6 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], ) - def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, dsm_file: UploadFile) -> List[models.Simulation]: try: diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index 3da9d356..8e43666e 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -3,9 +3,13 @@ from pydantic import BaseModel from typing import Optional from fastapi import Form + +from sedbackend.apps.cvs.design.models import DesignGroup, Design, ValueDriverDesignValue from sedbackend.apps.cvs.link_design_lifecycle import models as link_model from dataclasses import dataclass +from sedbackend.apps.cvs.vcs.models import VCS, ValueDriver + class NonTechCost(str, Enum): """ @@ -36,6 +40,16 @@ class Simulation(BaseModel): max_NPVs: List[float] mean_payback_time: float all_npvs: List[List[float]] + payback_time: float + design_id: int + vcs_id: int + + +class SimulationResult(BaseModel): + designs: List[Design] + vcss: List[VCS] + vds: List[ValueDriver] + runs: List[Simulation] class EditSimSettings(BaseModel): 
diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 64be845a..472a18ce 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -6,6 +6,7 @@ from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.core.users.models import User from sedbackend.apps.cvs.simulation import implementation, models +from sedbackend.apps.cvs.simulation.models import SimulationResult router = APIRouter() @@ -13,13 +14,13 @@ @router.post( '/project/{native_project_id}/simulation/run', summary='Run simulation', - response_model=List[models.Simulation], + response_model=models.SimulationResult, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> List[models.Simulation]: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, normalized_npv) + native_project_id: int, normalized_npv: Optional[bool] = False, + user: User = Depends(get_current_active_user)) -> SimulationResult: + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, native_project_id, normalized_npv) # Temporary disabled ''' @@ -43,13 +44,13 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil @router.post( '/project/{native_project_id}/simulation/run-multiprocessing', summary='Run monte carlo simulation with multiprocessing', - response_model=List[models.Simulation], + response_model=models.SimulationResult, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) async def run_multiprocessing(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> List[models.Simulation]: + user: User = Depends(get_current_active_user)) -> SimulationResult: return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, normalized_npv, True) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 3ee828c0..efdd8ac7 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -16,11 +16,13 @@ from typing import List from sedbackend.apps.cvs.design.models import ValueDriverDesignValue -from sedbackend.apps.cvs.design.storage import get_all_designs +from sedbackend.apps.cvs.design.storage import get_all_designs, get_designs from mysqlsb import FetchType, MySQLStatementBuilder from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id, get_dsm_from_csv +from sedbackend.apps.cvs.simulation.models import SimulationResult +from sedbackend.apps.cvs.vcs.storage import get_vcs, get_value_driver from sedbackend.libs.formula_parser.parser import NumericStringParser from sedbackend.libs.formula_parser import expressions as expr from sedbackend.apps.cvs.simulation import models @@ -88,14 +90,14 @@ def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, projec def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, - vcs_ids: List[int], + vcs_ids: List[int], project_id: int, design_group_ids: List[int], user_id, normalized_npv: bool = False, - is_multiprocessing: bool = False - ) -> 
List[models.Simulation]: - design_results = [] + is_multiprocessing: bool = False, + ) -> SimulationResult: if not check_sim_settings(sim_settings): raise e.BadlyFormattedSettingsException + interarrival = sim_settings.interarrival_time flow_time = sim_settings.flow_time runtime = sim_settings.end_time - sim_settings.start_time @@ -116,6 +118,22 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) + all_vcss = [] + for vcs_id in vcs_ids: + all_vcss.append(get_vcs(db_connection, project_id, vcs_id, user_id)) + + all_designs_with_values = [] + for dg_id in design_group_ids: + all_designs_with_values += get_designs(db_connection, project_id, dg_id) + logger.debug(all_designs_with_values) + unique_vd_ids = {vd.vd_id for design in all_designs_with_values for vd in design.vd_design_values} + unique_vd_ids_list = list(unique_vd_ids) + all_vds = [] + for vd_id in unique_vd_ids_list: + all_vds.append(get_value_driver(db_connection, vd_id, user_id)) + + sim_result = SimulationResult(designs=all_designs_with_values, vcss=all_vcss, vds=all_vds, runs=[]) + for vcs_id in vcs_ids: market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] @@ -172,17 +190,20 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed print(f'{exc.__class__}, {exc}') raise e.SimulationFailedException - sim_res = models.Simulation( + sim_run_res = models.Simulation( time=results.timesteps[-1], mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), - all_npvs=results.npvs + all_npvs=results.npvs, + payback_time=0, + design_id=design, + vcs_id=vcs_id, ) - design_results.append(sim_res) + sim_result.runs.append(sim_run_res) logger.debug('Returning the results') - return design_results + return sim_result def populate_processes(non_tech_add: NonTechCost, db_results, design: int, From ff444ba1cbf1a3649b45b16c16a24129a5b50eb1 Mon Sep 17 00:00:00 2001 From: jyborn Date: Thu, 3 Aug 2023 19:51:55 +0200 Subject: [PATCH 131/210] simulation model update --- sedbackend/apps/cvs/simulation/models.py | 1 + sedbackend/apps/cvs/simulation/storage.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index 8e43666e..e658df0a 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -41,6 +41,7 @@ class Simulation(BaseModel): mean_payback_time: float all_npvs: List[List[float]] payback_time: float + surplus_value_end_result: float design_id: int vcs_id: int diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index efdd8ac7..88ed2c05 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -125,7 +125,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_designs_with_values = [] for dg_id in design_group_ids: all_designs_with_values += get_designs(db_connection, project_id, dg_id) - logger.debug(all_designs_with_values) + unique_vd_ids = {vd.vd_id for design in all_designs_with_values for vd in design.vd_design_values} unique_vd_ids_list = list(unique_vd_ids) all_vds = [] @@ -197,6 +197,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: 
models.Ed mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs, payback_time=0, + surplus_value_end_result=0, design_id=design, vcs_id=vcs_id, ) From 610a7e144513f23ad70a42fd0202d28c4cb151d1 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 4 Aug 2023 09:54:15 +0200 Subject: [PATCH 132/210] fixed bug formula with external factor not saving --- sedbackend/apps/cvs/link_design_lifecycle/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 902b3ba7..a688d811 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -196,7 +196,7 @@ def delete_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro _, rows = delete_statement \ .delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) \ .where( - f'vcs_row = %s and design_group = %s and external_factors in ({",".join(["%s" for _ in range(len(external_factors))])})', + f'vcs_row = %s and design_group = %s and external_factor in ({",".join(["%s" for _ in range(len(external_factors))])})', [vcs_row_id, design_group_id] + external_factors) \ .execute(return_affected_rows=True) From 2b030d5f99b585f9788325195bed8205f1cc340a Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 7 Aug 2023 14:15:37 +0200 Subject: [PATCH 133/210] parse time, cost, revenue variables --- sedbackend/apps/cvs/simulation/storage.py | 30 ++++++++++++--------- tests/apps/cvs/simulation/test_sim_utils.py | 24 +++++++++++++++++ 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 04b8d5bd..9ebab99f 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -200,9 +200,9 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, if row['category'] != 'Technical processes': try: non_tech = models.NonTechnicalProcess( - cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values)), + cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values, row)), revenue=nsp.eval( - parse_formula(row['revenue'], vd_values_row, mi_values)), + parse_formula(row['revenue'], vd_values_row, mi_values, row)), name=row['iso_name']) except Exception as exc: logger.debug(f'{exc.__class__}, {exc}') @@ -212,10 +212,10 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, elif row['iso_name'] is not None and row['sub_name'] is None: try: time = nsp.eval(parse_formula( - row['time'], vd_values, mi_values)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values) + row['time'], vd_values, mi_values, row)) + cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values) + row['revenue'], vd_values, mi_values, row) p = Process(row['id'], time, nsp.eval(expr.replace_all( @@ -235,10 +235,10 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, sub_name = f'{row["sub_name"]} ({row["iso_name"]})' try: time = nsp.eval(parse_formula( - row['time'], vd_values, mi_values)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values) + row['time'], vd_values, mi_values, row)) + cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values) + row['revenue'], vd_values, mi_values, row) p = 
Process(row['id'], time, nsp.eval(expr.replace_all( @@ -425,28 +425,32 @@ def replace(match): return result -def parse_formula(formula: str, vd_values, ef_values): - pattern = r'\{(?P<tag>vd|ef):(?P<id>\d+),"([^"]+)"\}' +def parse_formula(formula: str, vd_values, ef_values, formula_row: dict = None) -> str: + pattern = r'\{(?P<tag>vd|ef|process):(?P<id>[a-zA-Z0-9_]+),"([^"]+)"\}' formula = add_multiplication_signs(formula) def replace(match): - tag, id_number, _ = match.groups() - id_number = int(id_number) + tag, value, _ = match.groups() if tag == "vd": + id_number = int(value) for vd in vd_values: if vd["value_driver"] == id_number: return str(vd["value"]) elif tag == "ef": for ef in ef_values: + id_number = int(value) if ef["market_input"] == id_number: return str(ef["value"]) + elif formula_row and tag == "process": + return f'({formula_row[value.lower()]})' + return match.group() replaced_text = re.sub(pattern, replace, formula) + replaced_text = re.sub(pattern, replace, replaced_text) replaced_text = re.sub(pattern, '0', replaced_text) # If there are any tags left, replace them with 0 - logger.debug(f'Parsed formula: {replaced_text}') return replaced_text diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index 9a544514..d9bd4d21 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -32,6 +32,30 @@ def test_parse_formula_values(): assert nsp.eval(new_formula) == 4 +def test_parse_formula_process_variable(): + # Setup + vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + + formula = '{vd:47241,"Design Similarity [0-1]"}*{process:COST,"COST"}' + time = 5 + cost = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' + revenue = 10 + formula_row = { + "time": time, + "cost": cost, + "revenue": revenue, + } + nsp = NumericStringParser() + + # Act + new_formula = parse_formula(formula, vd_values, mi_values, formula_row) + + # Assert + assert new_formula == "10*(2+10/5)" + assert nsp.eval(new_formula) == 40 + + def test_parse_formula_vd_no_exist(): # Setup vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] From 490a373b9d5ddfc0166a9592451931c3434e7076 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 8 Aug 2023 12:07:40 +0200 Subject: [PATCH 134/210] removed unnecessary db calls --- sedbackend/apps/core/db.py | 4 +-- sedbackend/apps/cvs/design/storage.py | 8 ++++- sedbackend/apps/cvs/simulation/storage.py | 42 ++++++++++------------- sedbackend/apps/cvs/vcs/storage.py | 21 ++++++++++++ 4 files changed, 49 insertions(+), 26 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..9a0b9d0e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/design/storage.py b/sedbackend/apps/cvs/design/storage.py index 98154043..afc0c89e 100644 --- a/sedbackend/apps/cvs/design/storage.py +++ b/sedbackend/apps/cvs/design/storage.py @@ -230,7 +230,13 @@ def get_all_designs(db_connection: PooledMySQLConnection, design_group_ids: List logger.debug(f'Error msg: {e.msg}') raise exceptions.DesignGroupNotFoundException - 
return [populate_design(result) for result in res] + designs = [] + for result in res: + vd_design_values = get_all_vd_design_values(db_connection, result['id']) + result.update({'vd_values': vd_design_values}) + designs.append(populate_design_with_values(result)) + + return designs def create_design(db_connection: PooledMySQLConnection, design_group_id: int, diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 88ed2c05..394e0f0b 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -22,7 +22,7 @@ from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id, get_dsm_from_csv from sedbackend.apps.cvs.simulation.models import SimulationResult -from sedbackend.apps.cvs.vcs.storage import get_vcs, get_value_driver +from sedbackend.apps.cvs.vcs.storage import get_vcs, get_value_driver, get_vcss from sedbackend.libs.formula_parser.parser import NumericStringParser from sedbackend.libs.formula_parser import expressions as expr from sedbackend.apps.cvs.simulation import models @@ -116,23 +116,18 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) - all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) - - all_vcss = [] - for vcs_id in vcs_ids: - all_vcss.append(get_vcs(db_connection, project_id, vcs_id, user_id)) + unique_vds = {} + for vd in all_vd_design_values: + element_id = vd["id"] + if element_id not in unique_vds: + unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"]} + all_vds = list(unique_vds.values()) - all_designs_with_values = [] - for dg_id in design_group_ids: - all_designs_with_values += get_designs(db_connection, project_id, dg_id) + all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) - unique_vd_ids = {vd.vd_id for design in all_designs_with_values for vd in design.vd_design_values} - unique_vd_ids_list = list(unique_vd_ids) - all_vds = [] - for vd_id in unique_vd_ids_list: - all_vds.append(get_value_driver(db_connection, vd_id, user_id)) + all_vcss = get_vcss(db_connection, project_id, vcs_ids, user_id) - sim_result = SimulationResult(designs=all_designs_with_values, vcss=all_vcss, vds=all_vds, runs=[]) + sim_result = SimulationResult(designs=all_designs, vcss=all_vcss, vds=all_vds, runs=[]) for vcs_id in vcs_ids: market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] @@ -190,14 +185,15 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed print(f'{exc.__class__}, {exc}') raise e.SimulationFailedException + # TODO: payback_time and mean_payback_time is the same checks for first time it goes above 0, or should something be different? 
sim_run_res = models.Simulation( time=results.timesteps[-1], mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs, - payback_time=0, - surplus_value_end_result=0, + payback_time=results.mean_npv_payback_time(), + surplus_value_end_result=results.npvs[0][-1], design_id=design, vcs_id=vcs_id, ) @@ -337,12 +333,12 @@ def get_vd_design_values(db_connection: PooledMySQLConnection, vcs_row_id: int, def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): try: - query = f'SELECT cvs_value_drivers.id, design, name, value, unit, vcs_row \ - FROM cvs_vd_design_values \ - INNER JOIN cvs_value_drivers ON cvs_vd_design_values.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_vcs_need_drivers ON cvs_vcs_need_drivers.value_driver = cvs_value_drivers.id \ - INNER JOIN cvs_stakeholder_needs ON cvs_stakeholder_needs.id = cvs_vcs_need_drivers.stakeholder_need \ - WHERE design IN ({",".join(["%s" for _ in range(len(designs))])})' + query = f'SELECT design, value, vcs_row, cvd.name, cvd.unit, cvd.id \ + FROM cvs_vd_design_values cvdv \ + INNER JOIN cvs_value_drivers cvd ON cvdv.value_driver = cvd.id \ + INNER JOIN cvs_vcs_need_drivers cvnd ON cvnd.value_driver = cvd.id \ + INNER JOIN cvs_stakeholder_needs csn ON csn.id = cvnd.stakeholder_need \ + WHERE design IN ({",".join(["%s" for _ in range(len(designs))])})' with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, designs) res = cursor.fetchall() diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index ceb51af6..285eac8a 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -78,6 +78,27 @@ def get_all_vcs(db_connection: PooledMySQLConnection, project_id: int, user_id: return chunk +def get_vcss(db_connection: PooledMySQLConnection, project_id: int, vcs_ids: List[int], user_id: int) -> List[models.VCS]: + logger.debug(f'Fetching vcss with ids={vcs_ids}') + + get_cvs_project(db_connection, project_id, user_id) # perform checks: project and user + + where_statement = "id IN (" + ",".join(["%s" for _ in range(len(vcs_ids))]) + ")" + where_values = vcs_ids + + select_statement = MySQLStatementBuilder(db_connection) + results = select_statement.select(CVS_VCS_TABLE, CVS_VCS_COLUMNS) \ + .where(where_statement, where_values) \ + .order_by(['id'], Sort.ASCENDING) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + + vcs_list = [] + for result in results: + vcs_list.append(populate_vcs(db_connection, result, user_id)) + + return vcs_list + + def get_vcs(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, user_id: int) -> models.VCS: logger.debug(f'Fetching VCS with id={vcs_id}.') From 8904b351659225795d23ab0d8a0fcfba4bbda5c3 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 8 Aug 2023 12:08:07 +0200 Subject: [PATCH 135/210] revert host and db --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 9a0b9d0e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 2725adefd64809e6c094a838aa2fc2477dad1d4b Mon Sep 17 
00:00:00 2001 From: jyborn Date: Tue, 8 Aug 2023 12:59:57 +0200 Subject: [PATCH 136/210] tests fixed --- sedbackend/apps/cvs/simulation/implementation.py | 1 - sedbackend/apps/cvs/simulation/router.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index a7dbf8da..1f5840f8 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -28,7 +28,6 @@ def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], with get_connection() as con: result = storage.run_simulation(con, sim_settings, vcs_ids, project_id, design_group_ids, user_id, normalized_npv, is_multiprocessing) - logger.debug(result) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 472a18ce..ce0a8d44 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -48,11 +48,11 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) async def run_multiprocessing(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], + design_group_ids: List[int], native_project_id: int, normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> SimulationResult: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, normalized_npv, - True) + return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, native_project_id, + normalized_npv, True) @router.get( From 23694d2cc61b1e0b526ec4a7b82ef8906d95b71f Mon Sep 17 00:00:00 2001 From: jyborn Date: Thu, 10 Aug 2023 22:16:15 +0200 Subject: [PATCH 137/210] param hierarchy fixed --- sedbackend/apps/cvs/simulation/implementation.py | 6 +++--- sedbackend/apps/cvs/simulation/router.py | 11 +++++------ sedbackend/apps/cvs/simulation/storage.py | 4 +--- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 1f5840f8..e457dafc 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -21,12 +21,12 @@ from sedbackend.apps.core.files import exceptions as file_ex -def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], user_id: int, project_id: int, +def run_simulation(sim_settings: models.EditSimSettings, project_id: int, vcs_ids: List[int], + design_group_ids: List[int], user_id: int, normalized_npv: bool = False, is_multiprocessing: bool = False) -> SimulationResult: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, vcs_ids, project_id, design_group_ids, user_id, + result = storage.run_simulation(con, sim_settings, project_id, vcs_ids, design_group_ids, user_id, normalized_npv, is_multiprocessing) return result except auth_ex.UnauthorizedOperationException: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index ce0a8d44..031b60e0 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -17,10 +17,10 @@ response_model=models.SimulationResult, 
dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_simulation(sim_settings: models.EditSimSettings, vcs_ids: List[int], design_group_ids: List[int], - native_project_id: int, normalized_npv: Optional[bool] = False, +async def run_simulation(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], + design_group_ids: List[int], normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> SimulationResult: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, native_project_id, normalized_npv) + return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, user.id, normalized_npv) # Temporary disabled ''' @@ -47,9 +47,8 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil response_model=models.SimulationResult, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] ) -async def run_multiprocessing(sim_settings: models.EditSimSettings, vcs_ids: List[int], - design_group_ids: List[int], native_project_id: int, - normalized_npv: Optional[bool] = False, +async def run_multiprocessing(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], + design_group_ids: List[int], normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> SimulationResult: return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, native_project_id, normalized_npv, True) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 394e0f0b..dac55dc6 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -90,11 +90,10 @@ def get_dsm_from_file(db_connection: PooledMySQLConnection, user_id: int, projec def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, - vcs_ids: List[int], project_id: int, + project_id: int, vcs_ids: List[int], design_group_ids: List[int], user_id, normalized_npv: bool = False, is_multiprocessing: bool = False, ) -> SimulationResult: - if not check_sim_settings(sim_settings): raise e.BadlyFormattedSettingsException @@ -185,7 +184,6 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed print(f'{exc.__class__}, {exc}') raise e.SimulationFailedException - # TODO: payback_time and mean_payback_time is the same checks for first time it goes above 0, or should something be different? 
sim_run_res = models.Simulation( time=results.timesteps[-1], mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), From 6e213a33f03b3434c82fd3cb419f42334e13ce0b Mon Sep 17 00:00:00 2001 From: jyborn Date: Thu, 10 Aug 2023 22:18:26 +0200 Subject: [PATCH 138/210] small fix --- sedbackend/apps/cvs/simulation/router.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 031b60e0..ef9f575a 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -50,7 +50,7 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil async def run_multiprocessing(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> SimulationResult: - return implementation.run_simulation(sim_settings, vcs_ids, design_group_ids, user.id, native_project_id, + return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, user.id, normalized_npv, True) From 3aa769ab021bfd10e3fa95637fff0dd15ace8793 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 14 Aug 2023 15:00:39 +0200 Subject: [PATCH 139/210] added row value drivers to formula --- .../apps/cvs/link_design_lifecycle/models.py | 1 + .../apps/cvs/link_design_lifecycle/storage.py | 48 +++++++++++++------ 2 files changed, 34 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index 7a8b9dd1..f5fde10e 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -38,6 +38,7 @@ class FormulaRowGet(BaseModel): cost: Formula revenue: Formula rate: Rate + row_value_drivers: List[ValueDriver] = [] used_value_drivers: List[ValueDriver] = [] used_external_factors: List[ExternalFactor] = [] diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index a688d811..68c4d29a 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -5,7 +5,7 @@ import re from sedbackend.apps.cvs.design.storage import get_design_group from sedbackend.apps.cvs.market_input.storage import populate_external_factor -from sedbackend.apps.cvs.vcs.storage import get_vcs_row, populate_value_driver +from sedbackend.apps.cvs.vcs.storage import get_vcs_row, populate_value_driver, get_all_value_drivers_vcs_row from sedbackend.apps.cvs.vcs.storage import get_vcs from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions from mysqlsb import FetchType, MySQLStatementBuilder @@ -26,6 +26,11 @@ CVS_PROJECT_VALUE_DRIVERS_TABLE = 'cvs_project_value_drivers' CVS_PROJECT_VALUE_DRIVERS_COLUMNS = ['project', 'value_driver'] +CVS_EXTERNAL_FACTORS_TABLE = 'cvs_market_inputs' +CVS_STAKEHOLDER_NEEDS_TABLE = 'cvs_stakeholder_needs' +CVS_VCS_ROWS_TABLE = 'cvs_vcs_rows' +CVS_VCS_NEED_DRIVERS_TABLE = 'cvs_vcs_need_drivers' + def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, formula_row: models.FormulaRowPost): @@ -100,7 +105,6 @@ def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_ def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, 
design_group_id: int, value_drivers: List[int], project_id: int): - # Add value driver to project if not already added select_statement = MySQLStatementBuilder(db_connection) project_value_driver_res = select_statement \ @@ -268,24 +272,36 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ with db_connection.cursor(prepared=True) as cursor: cursor.execute( - f"SELECT id, name, unit, vcs_row, design_group FROM cvs_formulas_value_drivers " - f"INNER JOIN cvs_value_drivers ON cvs_formulas_value_drivers.value_driver = cvs_value_drivers.id WHERE {where_statement}", + f"SELECT id, name, unit, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " + f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_FORMULAS_VALUE_DRIVERS_TABLE}.value_driver = cvs_value_drivers.id WHERE {where_statement}", prepared_list) - all_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + all_used_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] with db_connection.cursor(prepared=True) as cursor: cursor.execute( f"SELECT id, name, unit, vcs_row, design_group FROM {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} " - f"INNER JOIN cvs_market_inputs ON {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE}.external_factor = cvs_market_inputs.id WHERE {where_statement}", + f"INNER JOIN {CVS_EXTERNAL_FACTORS_TABLE} ON {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE}.external_factor = cvs_market_inputs.id WHERE {where_statement}", prepared_list) - all_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + all_used_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + + # TODO - get all value drivers from all vcs rows + with db_connection.cursor(prepared=True) as cursor: + cursor.execute( + f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " + f"INNER JOIN {CVS_STAKEHOLDER_NEEDS_TABLE} ON {CVS_STAKEHOLDER_NEEDS_TABLE}.vcs_row = {CVS_VCS_ROWS_TABLE}.id " + f"INNER JOIN {CVS_VCS_NEED_DRIVERS_TABLE} ON {CVS_VCS_NEED_DRIVERS_TABLE}.stakeholder_need = {CVS_STAKEHOLDER_NEEDS_TABLE}.id " + f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_VALUE_DRIVERS_TABLE}.id = {CVS_VCS_NEED_DRIVERS_TABLE}.value_driver " + f"WHERE {CVS_VCS_ROWS_TABLE}.id IN ({','.join(['%s' for _ in range(len(res))])})", + [r['vcs_row'] for r in res]) + all_row_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] formulas = [] for r in res: - r['value_drivers'] = [vd for vd in all_vds if vd['vcs_row'] == r['vcs_row'] and - vd['design_group'] == r['design_group']] - r['external_factors'] = [ef for ef in all_efs if ef['vcs_row'] == r['vcs_row'] and - ef['design_group'] == r['design_group']] + r['row_value_drivers'] = [vd for vd in all_row_vds if vd['vcs_row'] == r['vcs_row']] + r['used_value_drivers'] = [vd for vd in all_used_vds if vd['vcs_row'] == r['vcs_row'] and + vd['design_group'] == r['design_group']] + r['used_external_factors'] = [ef for ef in all_used_efs if ef['vcs_row'] == r['vcs_row'] and + ef['design_group'] == r['design_group']] formulas.append(populate_formula(r)) return formulas @@ -300,11 +316,13 @@ def populate_formula(db_result) -> models.FormulaRowGet: cost=models.Formula(formula=db_result['cost'], comment=db_result['cost_comment']), revenue=models.Formula(formula=db_result['revenue'], comment=db_result['revenue_comment']), rate=db_result['rate'], - used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in 
db_result['value_drivers']] if - db_result['value_drivers'] is not None else [], + row_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['row_value_drivers']] if + db_result['row_value_drivers'] is not None else [], + used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['used_value_drivers']] if + db_result['used_value_drivers'] is not None else [], used_external_factors=[populate_external_factor(externalFactor) for externalFactor in - db_result['external_factors']] if - db_result['external_factors'] is not None else [], + db_result['used_external_factors']] if + db_result['used_external_factors'] is not None else [], ) From 0f36a6f4bfad8958bbd202b49cb67d755e73b985 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 14 Aug 2023 16:17:26 +0200 Subject: [PATCH 140/210] save formulas table instead of single row --- .../link_design_lifecycle/implementation.py | 8 +-- .../apps/cvs/link_design_lifecycle/models.py | 1 + .../apps/cvs/link_design_lifecycle/router.py | 7 ++- .../apps/cvs/link_design_lifecycle/storage.py | 48 ++++++++------- .../test_connect_vcs_design.py | 58 +++++++++---------- tests/apps/cvs/testutils.py | 45 +++++--------- 6 files changed, 78 insertions(+), 89 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index 9f8e2738..5ccf6751 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -9,10 +9,10 @@ from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions -def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_formulas: models.FormulaRowPost) -> bool: +def edit_formulas(project_id: int, vcs_id: int, design_group_id: int, formulas: List[models.FormulaRowPost]) -> bool: with get_connection() as con: try: - res = storage.update_formulas(con, project_id, vcs_row_id, design_group_id, new_formulas) + res = storage.update_formulas(con, project_id, vcs_id, design_group_id, formulas) con.commit() return res except vcs_exceptions.VCSNotFoundException: @@ -33,7 +33,7 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo except design_exceptions.DesignGroupNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find designgroup with id {design_group_id}' + detail=f'Could not find design group with id {design_group_id}' ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( @@ -48,7 +48,7 @@ def edit_formulas(project_id: int, vcs_row_id: int, design_group_id: int, new_fo except vcs_exceptions.VCSTableRowNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find vcs row with id={vcs_row_id}.', + detail=f'Could not find vcs row.', ) except exceptions.CouldNotAddValueDriverToProjectException: raise HTTPException( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index f5fde10e..cb8d3db5 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -44,6 +44,7 @@ class FormulaRowGet(BaseModel): class FormulaRowPost(BaseModel): + vcs_row_id: int time: Formula time_unit: TimeFormat cost: Formula diff --git a/sedbackend/apps/cvs/link_design_lifecycle/router.py b/sedbackend/apps/cvs/link_design_lifecycle/router.py index 7d90d337..32def91f 100644 --- 
a/sedbackend/apps/cvs/link_design_lifecycle/router.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/router.py @@ -24,13 +24,14 @@ async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, @router.put( - '/project/{native_project_id}/vcs-row/{vcs_row_id}/design-group/{dg_id}/formulas', + '/project/{native_project_id}/vcs/{vcs_id}/design-group/{dg_id}/formulas', summary='Edit or create the formulas for time, cost, and revenue', response_model=bool, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) -async def edit_formulas(native_project_id: int, vcs_row_id: int, dg_id: int, new_formulas: models.FormulaRowPost) -> bool: - return implementation.edit_formulas(native_project_id, vcs_row_id, dg_id, new_formulas) +async def edit_formula_table(native_project_id: int, vcs_id: int, dg_id: int, + formulas: List[models.FormulaRowPost]) -> bool: + return implementation.edit_formulas(native_project_id, vcs_id, dg_id, formulas) @router.delete( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 68c4d29a..b86e9455 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -5,8 +5,7 @@ import re from sedbackend.apps.cvs.design.storage import get_design_group from sedbackend.apps.cvs.market_input.storage import populate_external_factor -from sedbackend.apps.cvs.vcs.storage import get_vcs_row, populate_value_driver, get_all_value_drivers_vcs_row -from sedbackend.apps.cvs.vcs.storage import get_vcs +from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions from mysqlsb import FetchType, MySQLStatementBuilder @@ -77,7 +76,7 @@ def find_vd_and_ef(texts: List[str]) -> (List[str], List[int]): return value_driver_ids, external_factor_ids -def edit_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, project_id: int, +def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, formula_row: models.FormulaRowPost): logger.debug(f'Editing formulas') @@ -227,23 +226,26 @@ def update_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_ro delete_external_factor_formulas(db_connection, vcs_row_id, design_group_id, delete_external_factors) -def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formula_row: models.FormulaRowPost) -> bool: +def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, design_group_id: int, + formula_rows: List[models.FormulaRowPost]) -> bool: + vcs_storage.check_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - get_vcs_row(db_connection, project_id, vcs_row_id) - count_statement = MySQLStatementBuilder(db_connection) - count = count_statement.count(CVS_FORMULAS_TABLE) \ - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - count = count['count'] - - if count == 0: - create_formulas(db_connection, project_id, vcs_row_id, design_group_id, formula_row) - elif count == 1: - edit_formulas(db_connection, vcs_row_id, design_group_id, project_id, formula_row) - else: - raise 
exceptions.FormulasFailedUpdateException + for formula_row in formula_rows: + vcs_storage.get_vcs_row(db_connection, project_id, formula_row.vcs_row_id) # Check if vcs row exists + + count_statement = MySQLStatementBuilder(db_connection) + count = count_statement.count(CVS_FORMULAS_TABLE) \ + .where('vcs_row = %s and design_group = %s', [formula_row.vcs_row_id, design_group_id]) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + count = count['count'] + + if count == 0: + create_formulas(db_connection, project_id, formula_row.vcs_row_id, design_group_id, formula_row) + elif count == 1: + edit_formulas(db_connection, project_id, formula_row.vcs_row_id, design_group_id, formula_row) + else: + raise exceptions.FormulasFailedUpdateException return True @@ -253,7 +255,7 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - get_vcs(db_connection, project_id, vcs_id, user_id) + vcs_storage.get_vcs(db_connection, project_id, vcs_id, user_id) select_statement = MySQLStatementBuilder(db_connection) res = select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) \ @@ -316,9 +318,11 @@ def populate_formula(db_result) -> models.FormulaRowGet: cost=models.Formula(formula=db_result['cost'], comment=db_result['cost_comment']), revenue=models.Formula(formula=db_result['revenue'], comment=db_result['revenue_comment']), rate=db_result['rate'], - row_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['row_value_drivers']] if + row_value_drivers=[vcs_storage.populate_value_driver(valueDriver) for valueDriver in + db_result['row_value_drivers']] if db_result['row_value_drivers'] is not None else [], - used_value_drivers=[populate_value_driver(valueDriver) for valueDriver in db_result['used_value_drivers']] if + used_value_drivers=[vcs_storage.populate_value_driver(valueDriver) for valueDriver in + db_result['used_value_drivers']] if db_result['used_value_drivers'] is not None else [], used_external_factors=[populate_external_factor(externalFactor) for externalFactor in db_result['used_external_factors']] if @@ -331,7 +335,7 @@ def delete_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r logger.debug(f'Deleting formulas with vcs_row_id: {vcs_row_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - get_vcs_row(db_connection, project_id, vcs_row_id) + vcs_storage.get_vcs_row(db_connection, project_id, vcs_row_id) delete_statement = MySQLStatementBuilder(db_connection) _, rows = delete_statement \ diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index b8cbd4ac..b3b92724 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -12,7 +12,6 @@ def test_create_formulas(client, std_headers, std_user): vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows is None: raise Exception - row_id = vcs_rows[0].id design_group = tu.seed_random_design_group(project.id) value_driver = tu.seed_random_value_driver(current_user.id, project.id) external_factor = tu.seed_random_external_factor(project.id) @@ -31,16 +30,16 @@ def test_create_formulas(client, std_headers, std_user): rate = 
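# The formulas endpoint now takes the whole table in one PUT: a list with one
# FormulaRowPost per VCS row, each carrying its own vcs_row_id, and update_formulas
# creates or edits row by row. A minimal sketch of such a request (the row id, formula
# strings and enum values below are placeholders, not values from the tests):
#
#   PUT /api/cvs/project/{project_id}/vcs/{vcs_id}/design-group/{dg_id}/formulas
#   [
#     {"vcs_row_id": <row id>,
#      "time":    {"formula": "10",  "comment": ""},
#      "time_unit": <TimeFormat value>,
#      "cost":    {"formula": "100", "comment": ""},
#      "revenue": {"formula": "200", "comment": ""},
#      "rate":    <Rate value>}
#   ]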
tu.random_rate_choice() time_unit = tu.random_time_unit() - res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', + res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": vcs_rows[0].id, "time": {"formula": time, "comment": time_comment}, "time_unit": time_unit, "cost": {"formula": cost, "comment": cost_comment}, "revenue": {"formula": revenue, "comment": revenue_comment}, "rate": rate - }) + }]) res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', headers=std_headers) @@ -66,7 +65,6 @@ def test_create_formulas_no_optional(client, std_headers, std_user): vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) if vcs_rows is None: raise Exception - row_id = vcs_rows[0].id design_group = tu.seed_random_design_group(project.id) # Act @@ -76,16 +74,16 @@ def test_create_formulas_no_optional(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', + res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": vcs_rows[0].id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) # Assert assert res.status_code == 200 @@ -220,16 +218,16 @@ def test_edit_formulas(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": formulas[0].vcs_row_id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', headers=std_headers) @@ -262,16 +260,16 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": formulas[0].vcs_row_id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) # Assert assert res.status_code == 200 @@ -305,16 +303,16 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{dg_invalid_id}/formulas', + res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{dg_invalid_id}/formulas', headers=std_headers, - json={ - 
"project": project.id, + json=[{ + "vcs_row_id": row_id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) # Assert assert res.status_code == 404 @@ -345,16 +343,16 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', + res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": row_id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) # Assert assert res.status_code == 404 @@ -386,16 +384,16 @@ def test_edit_formulas_invalid_project(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{invalid_proj_id}/vcs-row/{row_id}/design-group/{design_group.id}/formulas', + res = client.put(f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', headers=std_headers, - json={ - "project": project.id, + json=[{ + "vcs_row_id": row_id, "time": {"formula": time, "comment": ""}, "time_unit": time_unit, "cost": {"formula": cost, "comment": ""}, "revenue": {"formula": revenue, "comment": ""}, "rate": rate - }) + }]) # Assert assert res.status_code == 404 diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index ff17073b..eba348a8 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -147,7 +147,7 @@ def random_table_row( subprocess = random_subprocess(project_id) subprocess_id = subprocess.id else: - if random.randint(1, 5) == 1: #Give 1/5 chance to produce non-tech process + if random.randint(1, 5) == 1: # Give 1/5 chance to produce non-tech process iso_process_id = random.randint(1, 14) else: iso_process_id = random.randint(15, 25) @@ -253,6 +253,7 @@ def create_vcs_table(project_id, vcs_id, rows: List[vcs_model.VcsRowPost]) -> Li vcs_impl.edit_vcs_table(project_id, vcs_id, rows) return vcs_impl.get_vcs_table(project_id, vcs_id) + # ====================================================================================================================== # BPMN Table # ====================================================================================================================== @@ -346,6 +347,7 @@ def delete_dsm_file_from_vcs_id(proj_id, vcs_id, user_id): file_id = impl_life_cycle.get_dsm_file_id(proj_id, vcs_id) impl_files.impl_delete_file(file_id, user_id) + # ====================================================================================================================== # Designs # ====================================================================================================================== @@ -393,7 +395,6 @@ def random_design(value_driver_ids: int = None): def seed_random_designs(project_id: int, dg_id: int, amount: int = 10): - design_impl.edit_designs(project_id, dg_id, [design_model.DesignPut(name=tu.random_str(5, 50)) for _ in range(amount)]) @@ -406,7 +407,7 @@ def seed_random_designs(project_id: int, dg_id: int, amount: int = 10): def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int, amount: int = 10) 
-> List[connect_model.FormulaRowGet]: - vcs_rows = seed_vcs_table_rows(user_id, project_id, vcs_id, amount) + vcs_rows = seed_vcs_table_rows(user_id, project_id, vcs_id, amount) for i, vcs_row in enumerate(vcs_rows): @@ -420,6 +421,7 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use rate = Rate.PRODUCT formula_post = connect_model.FormulaRowPost( + vcs_row_id=vcs_row.id, time=connect_model.Formula(formula=time, comment=""), time_unit=time_unit, cost=connect_model.Formula(formula=cost, comment=""), @@ -428,31 +430,11 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use ) connect_impl.edit_formulas( - project_id, vcs_row.id, design_group_id, formula_post) + project_id, vcs_row.id, design_group_id, [formula_post]) return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id, user_id) -def create_formulas(project_id: int, vcs_rows: List[vcs_model.VcsRow], dg_id: int) -> List[FormulaRowGet]: - for row in vcs_rows: - time = str(tu.random.randint(1, 200)) - time_unit = random_time_unit() - cost = str(tu.random.randint(1, 2000)) - revenue = str(tu.random.randint(1, 10000)) - rate = Rate.PRODUCT.value - - formula_post = connect_model.FormulaRowPost( - time=connect_model.Formula(formula=time, comment=""), - time_unit=time_unit, - cost=connect_model.Formula(formula=cost, comment=""), - revenue=connect_model.Formula(formula=revenue, comment=""), - rate=rate - ) - connect_impl.edit_formulas(project_id, row.id, dg_id, formula_post) - - return connect_impl.get_all_formulas(project_id, vcs_rows[0].vcs_id, dg_id) - - def delete_formulas(project_id: int, vcsRow_Dg_ids: List[Tuple[int, int]]): for (vcs_row, dg) in vcsRow_Dg_ids: connect_impl.delete_formulas(project_id, vcs_row, dg) @@ -463,7 +445,7 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, project_id, vcs_id), key=lambda row: row.index)) formulas = connect_impl.get_all_formulas( project_id, vcs_id, design_group_id, user_id) - + last_id = -1 rows.reverse() # Reverse to find last technical process for row in rows: if row.iso_process is not None: @@ -478,6 +460,7 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, last = next(filter(lambda x: x.vcs_row_id == last_id, formulas)) new_last = connect_model.FormulaRowPost( + vcs_row_id=last.vcs_row_id, time=last.time, time_unit=last.time_unit, cost=last.cost, @@ -485,7 +468,7 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, rate=Rate.PROJECT.value ) - connect_impl.edit_formulas(project_id, last_id, design_group_id, new_last) + connect_impl.edit_formulas(project_id, last_id, design_group_id, [new_last]) rows.reverse() # reverse back to find first technical process for row in rows: @@ -502,12 +485,14 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, # ====================================================================================================================== def seed_simulation_settings(project_id: int, vcs_ids: List[int], design_ids: List[int]) -> sim_model.SimSettings: - rows = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in vcs_impl.get_vcs_table( - project_id, vcs_ids[0])] + rows = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in + vcs_impl.get_vcs_table( + project_id, vcs_ids[0])] print("Seed settings vcs rows", rows) for vcs_id in vcs_ids: - new_rows = [row.iso_process.name if row.iso_process is not None 
else row.subprocess.name for row in vcs_impl.get_vcs_table( - project_id, vcs_id)] + new_rows = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in + vcs_impl.get_vcs_table( + project_id, vcs_id)] print("New rows", new_rows) rows = list(filter(lambda x: x in rows, new_rows)) print("Common elements", rows) From 18a6dc0fef649c3c69c8d0664a5e1a93ef17b1f2 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 14 Aug 2023 16:22:46 +0200 Subject: [PATCH 141/210] new value driver refactor --- sedbackend/apps/cvs/vcs/implementation.py | 21 +---- sedbackend/apps/cvs/vcs/models.py | 2 +- sedbackend/apps/cvs/vcs/router.py | 9 -- sedbackend/apps/cvs/vcs/storage.py | 100 ++++------------------ sql/V230721_cvs.sql | 15 +--- 5 files changed, 23 insertions(+), 124 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/implementation.py b/sedbackend/apps/cvs/vcs/implementation.py index aecf936a..fe28a94b 100644 --- a/sedbackend/apps/cvs/vcs/implementation.py +++ b/sedbackend/apps/cvs/vcs/implementation.py @@ -8,6 +8,7 @@ from sedbackend.apps.core.db import get_connection import sedbackend.apps.cvs.project.exceptions as project_exceptions from sedbackend.apps.cvs.vcs import models, storage, exceptions +from sedbackend.apps.cvs.vcs.models import ValueDriverPost from sedbackend.libs.datastructures.pagination import ListChunk from sedbackend.apps.core.files import exceptions as file_ex @@ -270,7 +271,7 @@ def edit_value_driver(value_driver_id: int, def delete_value_driver(project_id: int, value_driver_id: int) -> bool: try: with get_connection() as con: - res = storage.delete_project_value_driver(con, project_id, value_driver_id) + res = storage.delete_value_driver(con, value_driver_id) con.commit() return res except exceptions.ValueDriverNotFoundException: @@ -331,24 +332,6 @@ def add_vcs_multiple_needs_drivers(need_driver_ids: List[Tuple[int, int]]): ) -def add_project_multiple_value_drivers(project_id: int, value_driver_ids: List[int]): - try: - with get_connection() as con: - res = storage.add_project_value_drivers(con, project_id, value_driver_ids) - con.commit() - return res - except exceptions.GenericDatabaseException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Badly formatted request' - ) - except exceptions.ProjectValueDriverFailedToCreateException: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Failed to create project={project_id} and value driver={value_driver_ids} relation' - ) - - # ====================================================================================================================== # VCS ISO Processes # ====================================================================================================================== diff --git a/sedbackend/apps/cvs/vcs/models.py b/sedbackend/apps/cvs/vcs/models.py index e51c069c..47b801c8 100644 --- a/sedbackend/apps/cvs/vcs/models.py +++ b/sedbackend/apps/cvs/vcs/models.py @@ -84,7 +84,7 @@ class ValueDriver(BaseModel): id: int name: str unit: Optional[str] = None - projects: Optional[List[int]] = None + project_id: int class ValueDriverPut(BaseModel): diff --git a/sedbackend/apps/cvs/vcs/router.py b/sedbackend/apps/cvs/vcs/router.py index c62b719d..a15cd748 100644 --- a/sedbackend/apps/cvs/vcs/router.py +++ b/sedbackend/apps/cvs/vcs/router.py @@ -147,15 +147,6 @@ async def create_value_driver(value_driver_post: models.ValueDriverPost, user: User = Depends(get_current_active_user)) -> models.ValueDriver: return 
implementation.create_value_driver(user.id, value_driver_post) -@router.post( - '/project/{native_project_id}/value-driver', - summary=f'Add value drivers to project', - response_model=bool, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] -) -async def add_drivers_to_project(native_project_id: int, value_driver_ids: List[int]): - return implementation.add_project_multiple_value_drivers(native_project_id, value_driver_ids) - @router.post( '/project/{native_project_id}/value-driver/need', summary=f'Add value drivers to stakeholder needs', diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 285eac8a..e88bb666 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -7,7 +7,7 @@ from sedbackend.apps.cvs.project.storage import get_cvs_project from sedbackend.apps.cvs.vcs import models, exceptions from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage, models as life_cycle_models -from sedbackend.apps.cvs.vcs.models import ValueDriver +from sedbackend.apps.cvs.vcs.models import ValueDriver, ValueDriverPost from sedbackend.libs.datastructures.pagination import ListChunk from sedbackend.apps.core.files import storage as file_storage, exceptions as file_exceptions from mysqlsb import MySQLStatementBuilder, Sort, FetchType @@ -21,10 +21,7 @@ CVS_VALUE_DIMENSION_COLUMNS = ['id', 'name', 'priority', 'vcs_row'] CVS_VALUE_DRIVER_TABLE = 'cvs_value_drivers' -CVS_VALUE_DRIVER_COLUMNS = ['id', 'user', 'name', 'unit'] - -CVS_PROJECT_VALUE_DRIVER_TABLE = 'cvs_project_value_drivers' -CVS_PROJECT_VALUE_DRIVER_COLUMNS = ['project', 'value_driver'] +CVS_VALUE_DRIVER_COLUMNS = ['id', 'user', 'name', 'unit', 'project_id'] CVS_VCS_ROW_DRIVERS_TABLE = 'cvs_rowDrivers' CVS_VCS_ROW_DRIVERS_COLUMNS = ['vcs_row', 'value_driver'] @@ -315,12 +312,12 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> logger.debug(f'Fetching all value drivers for user with id={user_id}.') try: - query = f'SELECT DISTINCT cvd.*, cpvd.project \ + query = f'SELECT DISTINCT cvd.*\ FROM cvs_value_drivers cvd \ - LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ - LEFT JOIN projects_participants pp ON cpvd.project = pp.project_id \ - LEFT JOIN projects_subprojects ps ON cpvd.project = ps.native_project_id \ - WHERE (pp.user_id = %s OR (cvd.user = %s))' + LEFT JOIN cvs_projects p ON cvd.project_id = p.id \ + LEFT JOIN projects_participants pp ON p.id = pp.project_id \ + LEFT JOIN projects_subprojects ps ON p.id = ps.native_project_id \ + WHERE (pp.user_id = %s OR p.owner_id = %s)' with db_connection.cursor(prepared=True, dictionary=True) as cursor: cursor.execute(query, [user_id, user_id]) @@ -329,7 +326,7 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverNotFoundException - return combine_value_drivers([populate_value_driver(result) for result in res]) + return [populate_value_driver(result) for result in res] def get_all_value_drivers_vcs_row(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, @@ -415,58 +412,21 @@ def update_vcs_need_driver(db_connection: PooledMySQLConnection, need_id: int, v return True -def add_project_value_drivers(db_connection: PooledMySQLConnection, project_id: int, - value_driver_ids: List[int]) -> bool: - logger.debug(f'Adding relation between project_id={project_id} and value_driver_ids={value_driver_ids}') - - try: - 
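# After this refactor a value driver belongs to a single project through
# cvs_value_drivers.project_id, so the cvs_project_value_drivers join table and the
# add/delete helpers removed below become redundant. A minimal sketch of creating a
# driver under the new model (ids and field values are placeholders, and ValueDriverPost
# may carry further fields not shown here):
#
#   driver = ValueDriverPost(name="Fuel efficiency", unit="km/l", project_id=42)
#   create_value_driver(db_connection, user_id=7, value_driver_post=driver)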
insert_statement = f'INSERT INTO {CVS_PROJECT_VALUE_DRIVER_TABLE} (project, value_driver) VALUES (%s, %s) ON DUPLICATE KEY UPDATE `project`=`project`' - prepared_list = [] - for index, value_driver_id in enumerate(value_driver_ids): - prepared_list.append((project_id, value_driver_id)) - with db_connection.cursor(prepared=True) as cursor: - cursor.executemany(insert_statement, prepared_list) - except Error as e: - logger.debug(f'Error {e.errno} {e.msg}') - raise exceptions.ProjectValueDriverFailedToCreateException - - return True - - def get_value_driver(db_connection: PooledMySQLConnection, value_driver_id: int, user_id: int) -> ValueDriver: logger.debug(f'User={user_id} fetching value driver with id={value_driver_id}.') - query = f'SELECT cvd.*, cpvd.project \ + query = f'SELECT cvd.* \ FROM cvs_value_drivers cvd \ - INNER JOIN cvs_project_value_drivers cpvd ON cpvd.value_driver = cvd.id \ - INNER JOIN projects_subprojects ps ON cpvd.project = ps.native_project_id \ - WHERE cvd.id = %s AND (ps.owner_id = %s OR ps.id IN (SELECT project_id FROM projects_participants WHERE user_id = %s));' + WHERE cvd.id = %s;' with db_connection.cursor(prepared=True, dictionary=True) as cursor: - cursor.execute(query, [value_driver_id, user_id, user_id]) - res = cursor.fetchall() + cursor.execute(query, [value_driver_id]) + res = cursor.fetchone() - if len(res) == 0: + if res is None: raise exceptions.ValueDriverNotFoundException(value_driver_id=value_driver_id) - vds = combine_value_drivers([populate_value_driver(result) for result in res]) - - return vds[0] - - -def combine_value_drivers(data: list[ValueDriver]) -> list[ValueDriver]: - combined_dict = {} - - for entry in data: - key = (entry.id, entry.name) - if key not in combined_dict: - combined_dict[key] = ValueDriver(id=entry.id, name=entry.name, unit=None, projects=[]) - combined_dict[key].unit = entry.unit if entry.unit is not None else combined_dict[key].unit - if entry.projects is not None: - combined_dict[key].projects.extend(entry.projects) - - combined_data = list(combined_dict.values()) - return combined_data + return populate_value_driver(res) def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, @@ -476,11 +436,11 @@ def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, try: insert_statement = MySQLStatementBuilder(db_connection) insert_statement \ - .insert(table=CVS_VALUE_DRIVER_TABLE, columns=['user', 'name', 'unit']) \ - .set_values([user_id, value_driver_post.name, value_driver_post.unit]) \ + .insert(table=CVS_VALUE_DRIVER_TABLE, columns=['user', 'name', 'unit', 'project_id']) \ + .set_values([user_id, value_driver_post.name, value_driver_post.unit, value_driver_post.project_id]) \ .execute(fetch_type=FetchType.FETCH_NONE) value_driver_id = insert_statement.last_insert_id - add_project_value_drivers(db_connection, value_driver_post.project_id, [value_driver_id]) + except Error as e: logger.debug(f'Error msg: {e.msg}') raise exceptions.ValueDriverFailedToCreateException @@ -521,28 +481,6 @@ def delete_value_driver(db_connection: PooledMySQLConnection, value_driver_id: i return True -def delete_project_value_driver(db_connection: PooledMySQLConnection, project_id: int, value_driver_id: int) -> bool: - logger.debug(f'Deleting relation with project={project_id} AND value_driver={value_driver_id}.') - - delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement.delete(CVS_PROJECT_VALUE_DRIVER_TABLE) \ - .where('project = %s AND value_driver = %s', [project_id, 
value_driver_id]) \ - .execute(return_affected_rows=True) - - if rows == 0: - raise exceptions.ProjectValueDriverNotFoundException(project_id=project_id, value_driver_id=value_driver_id) - - count_statement = MySQLStatementBuilder(db_connection) - result = count_statement.count(CVS_PROJECT_VALUE_DRIVER_TABLE) \ - .where('value_driver = %s', [value_driver_id]) \ - .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - if result['count'] == 0: - return delete_value_driver(db_connection, value_driver_id) - - return True - - def delete_all_value_drivers(db_connection: PooledMySQLConnection, user_id: int) -> bool: logger.debug(f'Deleting all value drivers for user with id={user_id}.') @@ -556,13 +494,11 @@ def delete_all_value_drivers(db_connection: PooledMySQLConnection, user_id: int) def populate_value_driver(db_result) -> models.ValueDriver: logger.debug(f'Populating value driver with: {db_result}') - project = None - if 'project' in db_result and db_result['project']: project = [db_result['project']] return models.ValueDriver( id=db_result['id'], name=db_result['name'], unit=db_result['unit'], - projects=project + project_id=db_result['project_id'] ) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 79a08553..a31b2144 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,14 +1,3 @@ # Value driver to project relation -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_project_value_drivers` -( - `project` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, - PRIMARY KEY (`project`, `value_driver`), - FOREIGN KEY (`project`) - REFERENCES `seddb`.`cvs_projects`(`id`) - ON DELETE CASCADE, - FOREIGN KEY (`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) - ON DELETE CASCADE -); -CREATE UNIQUE INDEX `project_value_driver_index` ON `seddb`.`cvs_project_value_drivers` (project, value_driver); \ No newline at end of file +ALTER TABLE `seddb`.`cvs_value_drivers` + ADD COLUMN `project_id` INT \ No newline at end of file From 4c570767ac6a418f21e2337599fe25b01853300a Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 14 Aug 2023 16:38:55 +0200 Subject: [PATCH 142/210] fix remove delete vds without project --- sedbackend/apps/cvs/project/storage.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 95687fa8..5994d698 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -108,24 +108,6 @@ def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, us if subproject_rows == 0: raise exceptions.SubProjectFailedDeletionException - delete_value_drivers_without_project(db_connection) - - return True - - -# This function could be a mysql trigger instead -def delete_value_drivers_without_project(db_connection: PooledMySQLConnection) -> bool: - logger.debug(f'Checking and deleting if there are any value drivers without project relations') - - query = f'DELETE cvd FROM cvs_value_drivers cvd \ - LEFT JOIN cvs_project_value_drivers cpvd ON cvd.id = cpvd.value_driver \ - WHERE cpvd.value_driver IS NULL;' - - with db_connection.cursor() as cursor: - cursor.execute(query) - rows = cursor.rowcount - logger.debug(f'Removed {rows} value drivers') - return True From 6cc115616cf22bbc9f99f485cea7eaaeab372823 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 14 Aug 2023 16:47:09 +0200 Subject: [PATCH 143/210] tests fix --- sedbackend/apps/cvs/vcs/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index e88bb666..e0c2af09 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -291,7 +291,7 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i try: select_statement = MySQLStatementBuilder(db_connection) results = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, ['cvs_value_drivers.id'] + CVS_VALUE_DRIVER_COLUMNS[1:]) \ + .select(CVS_VALUE_DRIVER_TABLE, CVS_VALUE_DRIVER_COLUMNS) \ .inner_join('cvs_vcs_need_drivers', 'value_driver = cvs_value_drivers.id') \ .inner_join('cvs_stakeholder_needs', 'stakeholder_need = cvs_stakeholder_needs.id') \ .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') \ From 5cfcd962956264fa497ec8d4a7f1db0597e694cd Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 14 Aug 2023 16:49:59 +0200 Subject: [PATCH 144/210] fixed failing tests --- tests/apps/cvs/testutils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 4b7fbc9d..e5cff881 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -468,7 +468,7 @@ def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, rate=Rate.PROJECT.value ) - connect_impl.edit_formulas(project_id, last_id, design_group_id, [new_last]) + connect_impl.edit_formulas(project_id, vcs_id, design_group_id, [new_last]) rows.reverse() # reverse back to find first technical process for row in rows: From bad5bc9038366b546e4ac3ad98e8150e927ef97b Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 14 Aug 2023 17:18:26 +0200 Subject: [PATCH 145/210] get_all_vd for vcs % design fix --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/design/storage.py | 11 ++--------- sedbackend/apps/cvs/vcs/storage.py | 7 +++---- 3 files changed, 7 insertions(+), 15 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..ff8bfe0c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' #'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/design/storage.py b/sedbackend/apps/cvs/design/storage.py index afc0c89e..3f482e85 100644 --- a/sedbackend/apps/cvs/design/storage.py +++ b/sedbackend/apps/cvs/design/storage.py @@ -350,7 +350,7 @@ def delete_design(db_connection: PooledMySQLConnection, design_id: int) -> bool: def get_all_drivers_design_group(db_connection: PooledMySQLConnection, design_group_id: int) -> List[ValueDriver]: logger.debug(f'Fetching all value drivers for design group {design_group_id}') - columns = DESIGN_GROUP_DRIVER_COLUMNS + ['id', 'name', 'unit'] + columns = DESIGN_GROUP_DRIVER_COLUMNS + CVS_VALUE_DRIVER_COLUMNS select_statement = MySQLStatementBuilder(db_connection) res = select_statement \ .select(DESIGN_GROUP_DRIVER_TABLE, columns) \ @@ -358,14 +358,7 @@ def get_all_drivers_design_group(db_connection: PooledMySQLConnection, design_gr .where('design_group = %s', [design_group_id]) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - vds = [] - for result in res: - vd = ValueDriver( - id=result['id'], - name=result['name'], - unit=result['unit'] - ) - vds.append(vd) + vds = [populate_value_driver(r) for r in res] return vds diff --git a/sedbackend/apps/cvs/vcs/storage.py 
b/sedbackend/apps/cvs/vcs/storage.py index e0c2af09..836d46cf 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -291,7 +291,7 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i try: select_statement = MySQLStatementBuilder(db_connection) results = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, CVS_VALUE_DRIVER_COLUMNS) \ + .select(CVS_VALUE_DRIVER_TABLE, ['cvs_value_drivers.id', 'user', 'name', 'unit', 'project_id']) \ .inner_join('cvs_vcs_need_drivers', 'value_driver = cvs_value_drivers.id') \ .inner_join('cvs_stakeholder_needs', 'stakeholder_need = cvs_stakeholder_needs.id') \ .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') \ @@ -302,9 +302,8 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i raise exceptions.ValueDriverNotFoundException value_drivers = [] - [value_drivers.append(populate_value_driver(res)) for res in results if res['id'] not in - [vd.id for vd in value_drivers]] - + [value_drivers.append(populate_value_driver(res)) for res in results] + logger.debug(value_drivers) return value_drivers From 466daa357a1b41c7e1f7959dc0703cab6fa8f725 Mon Sep 17 00:00:00 2001 From: jyborn Date: Mon, 14 Aug 2023 17:22:00 +0200 Subject: [PATCH 146/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index ff8bfe0c..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' #'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 0a38cd227a71ee0ece3b171e5e0eff2e03fce849 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 12:32:09 +0200 Subject: [PATCH 147/210] merge with develop fixes --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/link_design_lifecycle/storage.py | 2 +- sql/V230721_cvs.sql | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..e48b34af 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost'#'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index b86e9455..d7fcbda8 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -289,7 +289,7 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ # TODO - get all value drivers from all vcs rows with db_connection.cursor(prepared=True) as cursor: cursor.execute( - f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " + f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VALUE_DRIVERS_TABLE}.project_id, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " f"INNER JOIN {CVS_STAKEHOLDER_NEEDS_TABLE} ON {CVS_STAKEHOLDER_NEEDS_TABLE}.vcs_row = {CVS_VCS_ROWS_TABLE}.id " 
f"INNER JOIN {CVS_VCS_NEED_DRIVERS_TABLE} ON {CVS_VCS_NEED_DRIVERS_TABLE}.stakeholder_need = {CVS_STAKEHOLDER_NEEDS_TABLE}.id " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_VALUE_DRIVERS_TABLE}.id = {CVS_VCS_NEED_DRIVERS_TABLE}.value_driver " diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 825f547f..9bc951e6 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,6 +1,6 @@ # Value driver to project relation ALTER TABLE `seddb`.`cvs_value_drivers` - ADD COLUMN `project_id` INT + ADD COLUMN `project_id` INT; SET FOREIGN_KEY_CHECKS = 0; From 9df60577a589d6cd6f7bc6384c715fec2dc80d23 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 12:33:21 +0200 Subject: [PATCH 148/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e48b34af..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost'#'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 26087747f1b141e37329be66e25f29552457e0ad Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 12:57:54 +0200 Subject: [PATCH 149/210] tests fix --- sedbackend/apps/core/db.py | 4 +-- .../apps/cvs/link_design_lifecycle/storage.py | 29 +------------------ sql/V230721_cvs.sql | 3 -- 3 files changed, 3 insertions(+), 33 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..e48b34af 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost'#'core-db' database = 'seddb' -port = 3306 +port = 3001 #3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index d7fcbda8..2b071768 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -22,9 +22,6 @@ CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] -CVS_PROJECT_VALUE_DRIVERS_TABLE = 'cvs_project_value_drivers' -CVS_PROJECT_VALUE_DRIVERS_COLUMNS = ['project', 'value_driver'] - CVS_EXTERNAL_FACTORS_TABLE = 'cvs_market_inputs' CVS_STAKEHOLDER_NEEDS_TABLE = 'cvs_stakeholder_needs' CVS_VCS_ROWS_TABLE = 'cvs_vcs_rows' @@ -104,30 +101,6 @@ def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, value_drivers: List[int], project_id: int): - # Add value driver to project if not already added - select_statement = MySQLStatementBuilder(db_connection) - project_value_driver_res = select_statement \ - .select(CVS_PROJECT_VALUE_DRIVERS_TABLE, CVS_PROJECT_VALUE_DRIVERS_COLUMNS) \ - .where(f'project = %s and value_driver in ({",".join(["%s" for _ in range(len(value_drivers))])})', - [project_id] + value_drivers) \ - .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) - - value_drivers_outside_project = [vd_id for vd_id in value_drivers if - vd_id not in [res['value_driver'] for res in project_value_driver_res]] - - if 
value_drivers_outside_project: - try: - prepared_list = [] - insert_statement = f'INSERT INTO {CVS_PROJECT_VALUE_DRIVERS_TABLE} (project, value_driver) VALUES' - for value_driver_id in value_drivers_outside_project: - insert_statement += f'(%s, %s),' - prepared_list += [project_id, value_driver_id] - insert_statement = insert_statement[:-1] - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(insert_statement, prepared_list) - except Exception as e: - logger.error(f'Error adding value driver to project: {e}') - raise exceptions.CouldNotAddValueDriverToProjectException # Add value driver to formulas try: @@ -274,7 +247,7 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ with db_connection.cursor(prepared=True) as cursor: cursor.execute( - f"SELECT id, name, unit, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " + f"SELECT id, name, unit, project_id, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_FORMULAS_VALUE_DRIVERS_TABLE}.value_driver = cvs_value_drivers.id WHERE {where_statement}", prepared_list) all_used_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 9bc951e6..18a254ae 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -56,9 +56,6 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` FOREIGN KEY (`value_driver`) REFERENCES `seddb`.`cvs_value_drivers` (`id`) ON DELETE CASCADE, - FOREIGN KEY (`project`, `value_driver`) - REFERENCES `seddb`.`cvs_project_value_drivers` (`project`, `value_driver`) - ON DELETE CASCADE ); ALTER TABLE `seddb`.`cvs_design_mi_formulas` From 53b77a80b3aa598099994c70236c57615710ea53 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 12:58:53 +0200 Subject: [PATCH 150/210] revert host and port --- sedbackend/apps/core/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e48b34af..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost'#'core-db' +host = 'core-db' database = 'seddb' -port = 3001 #3306 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From fa6873769bdf848c7abb65266b758e4c547a3c48 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 13:48:31 +0200 Subject: [PATCH 151/210] updated sql file test --- sql/V230721_cvs.sql | 21 ++++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 18a254ae..ce8f3d21 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,7 +1,19 @@ # Value driver to project relation -ALTER TABLE `seddb`.`cvs_value_drivers` - ADD COLUMN `project_id` INT; - +DROP TABLE IF EXISTS `seddb`.`cvs_value_drivers`; +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` +( + `id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, + `user` INT UNSIGNED NOT NULL, + `name` TEXT NOT NULL, + `unit` VARCHAR(10) NULL, + `project_id` INT + FOREIGN KEY(`user`) + REFERENCES `seddb`.`users`(`id`) + ON DELETE CASCADE, + FOREIGN KEY(`project_id`) + REFERENCES `seddb`.`cvs_projects` (`id`) + ON DELETE CASCADE; +); SET FOREIGN_KEY_CHECKS = 0; ALTER TABLE `seddb`.`cvs_subprocesses` @@ -56,6 +68,9 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` FOREIGN KEY 
(`value_driver`) REFERENCES `seddb`.`cvs_value_drivers` (`id`) ON DELETE CASCADE, + FOREIGN KEY (`project`, `value_driver`) + REFERENCES `seddb`.`cvs_value_drivers` (`project_id`, `id`) + ON DELETE CASCADE ); ALTER TABLE `seddb`.`cvs_design_mi_formulas` From b46205c30c0ae8214e7cd2f2ea19bd6e5bbbabb3 Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 13:49:08 +0200 Subject: [PATCH 152/210] syntax fix --- sql/V230721_cvs.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index ce8f3d21..68edc1c6 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -6,13 +6,13 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` `user` INT UNSIGNED NOT NULL, `name` TEXT NOT NULL, `unit` VARCHAR(10) NULL, - `project_id` INT + `project_id` INT, FOREIGN KEY(`user`) REFERENCES `seddb`.`users`(`id`) ON DELETE CASCADE, FOREIGN KEY(`project_id`) REFERENCES `seddb`.`cvs_projects` (`id`) - ON DELETE CASCADE; + ON DELETE CASCADE ); SET FOREIGN_KEY_CHECKS = 0; From fc35760a587315b617aae2d787071e7b249fec1d Mon Sep 17 00:00:00 2001 From: jyborn Date: Tue, 15 Aug 2023 13:56:01 +0200 Subject: [PATCH 153/210] new sql test --- sql/V230721_cvs.sql | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 68edc1c6..5c7bbb9b 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -1,12 +1,14 @@ # Value driver to project relation +SET FOREIGN_KEY_CHECKS = 0; DROP TABLE IF EXISTS `seddb`.`cvs_value_drivers`; +SET FOREIGN_KEY_CHECKS = 1; CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` ( `id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, `user` INT UNSIGNED NOT NULL, `name` TEXT NOT NULL, `unit` VARCHAR(10) NULL, - `project_id` INT, + `project_id` INT UNSIGNED NOT NULL, FOREIGN KEY(`user`) REFERENCES `seddb`.`users`(`id`) ON DELETE CASCADE, From 0f41e95104b2ccaa60fa8bb053b2431480e93a00 Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 16 Aug 2023 09:43:40 +0200 Subject: [PATCH 154/210] constraint for project_id, name, unit added --- sql/V230721_cvs.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 5c7bbb9b..296c1ccd 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -16,6 +16,8 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` REFERENCES `seddb`.`cvs_projects` (`id`) ON DELETE CASCADE ); +ALTER TABLE `seddb`.`cvs_value_drivers` ADD CONSTRAINT unq_project_name_unit + UNIQUE (project_id, name(20), unit); SET FOREIGN_KEY_CHECKS = 0; ALTER TABLE `seddb`.`cvs_subprocesses` From 6b8e394c0cd2d048ffbba06d143cc6bfebd0b021 Mon Sep 17 00:00:00 2001 From: jyborn Date: Wed, 16 Aug 2023 10:07:06 +0200 Subject: [PATCH 155/210] sql file fix --- sql/V230721_cvs.sql | 3 --- 1 file changed, 3 deletions(-) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 296c1ccd..077b92e6 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -71,9 +71,6 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` ON DELETE CASCADE, FOREIGN KEY (`value_driver`) REFERENCES `seddb`.`cvs_value_drivers` (`id`) - ON DELETE CASCADE, - FOREIGN KEY (`project`, `value_driver`) - REFERENCES `seddb`.`cvs_value_drivers` (`project_id`, `id`) ON DELETE CASCADE ); From 9fd10210d03dfae83c6153a369189c32d4772bf5 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 17 Aug 2023 12:13:17 +0200 Subject: [PATCH 156/210] fixed error on duplicate vd when creating dg --- sedbackend/apps/cvs/vcs/storage.py | 5 +++-- 1 file changed, 3 
insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 836d46cf..859e33b9 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -302,8 +302,9 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i raise exceptions.ValueDriverNotFoundException value_drivers = [] - [value_drivers.append(populate_value_driver(res)) for res in results] - logger.debug(value_drivers) + [value_drivers.append(populate_value_driver(res)) for res in results if res['id'] not in + [vd.id for vd in value_drivers]] + return value_drivers From 8d87a48505beab685c6c5505ecaa83de40240d3d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 17 Aug 2023 14:18:50 +0200 Subject: [PATCH 157/210] return default formulas if dont exist --- .../link_design_lifecycle/implementation.py | 4 +- .../apps/cvs/link_design_lifecycle/router.py | 2 +- .../apps/cvs/link_design_lifecycle/storage.py | 48 ++++++++++++++----- tests/apps/cvs/testutils.py | 5 +- 4 files changed, 40 insertions(+), 19 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index 5ccf6751..b64c894c 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -57,10 +57,10 @@ def edit_formulas(project_id: int, vcs_id: int, design_group_id: int, formulas: ) -def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> List[models.FormulaRowGet]: +def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int) -> List[models.FormulaRowGet]: with get_connection() as con: try: - res = storage.get_all_formulas(con, project_id, vcs_id, design_group_id, user_id) + res = storage.get_all_formulas(con, project_id, vcs_id, design_group_id) con.commit() return res except vcs_exceptions.VCSNotFoundException: diff --git a/sedbackend/apps/cvs/link_design_lifecycle/router.py b/sedbackend/apps/cvs/link_design_lifecycle/router.py index 32def91f..c16186a7 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/router.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/router.py @@ -20,7 +20,7 @@ ) async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, user: User = Depends(get_current_active_user)) -> List[models.FormulaRowGet]: - return implementation.get_all_formulas(native_project_id, vcs_id, dg_id, user.id) + return implementation.get_all_formulas(native_project_id, vcs_id, dg_id) @router.put( diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 2b071768..9fc9c3ca 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -4,6 +4,7 @@ from mysql.connector.pooling import PooledMySQLConnection import re from sedbackend.apps.cvs.design.storage import get_design_group +from sedbackend.apps.cvs.link_design_lifecycle.models import Rate, TimeFormat from sedbackend.apps.cvs.market_input.storage import populate_external_factor from sedbackend.apps.cvs.vcs import storage as vcs_storage from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions @@ -101,7 +102,6 @@ def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, value_drivers: List[int], project_id: 
int): - # Add value driver to formulas try: prepared_list = [] @@ -224,11 +224,11 @@ def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_i def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, - design_group_id: int, user_id: int) -> List[models.FormulaRowGet]: + design_group_id: int) -> List[models.FormulaRowGet]: logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project - vcs_storage.get_vcs(db_connection, project_id, vcs_id, user_id) + vcs_rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) # Check if vcs exists and matches project select_statement = MySQLStatementBuilder(db_connection) res = select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) \ @@ -239,6 +239,8 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ if res is None: raise exceptions.FormulasNotFoundException + all_used_vds, all_used_efs, all_row_vds = [], [], [] + if len(res): where_statement = "(vcs_row, design_group) IN (" + ",".join(["(%s, %s)" for _ in range(len(res))]) + ")" prepared_list = [] @@ -259,23 +261,40 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ prepared_list) all_used_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] - # TODO - get all value drivers from all vcs rows + if vcs_rows: with db_connection.cursor(prepared=True) as cursor: + logger.debug(f'Running') cursor.execute( f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VALUE_DRIVERS_TABLE}.project_id, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " f"INNER JOIN {CVS_STAKEHOLDER_NEEDS_TABLE} ON {CVS_STAKEHOLDER_NEEDS_TABLE}.vcs_row = {CVS_VCS_ROWS_TABLE}.id " f"INNER JOIN {CVS_VCS_NEED_DRIVERS_TABLE} ON {CVS_VCS_NEED_DRIVERS_TABLE}.stakeholder_need = {CVS_STAKEHOLDER_NEEDS_TABLE}.id " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_VALUE_DRIVERS_TABLE}.id = {CVS_VCS_NEED_DRIVERS_TABLE}.value_driver " - f"WHERE {CVS_VCS_ROWS_TABLE}.id IN ({','.join(['%s' for _ in range(len(res))])})", - [r['vcs_row'] for r in res]) + f"WHERE {CVS_VCS_ROWS_TABLE}.id IN ({','.join(['%s' for _ in range(len(vcs_rows))])})", + [row.id for row in vcs_rows]) all_row_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + logger.debug(f'All row vds: {all_row_vds}') formulas = [] - for r in res: - r['row_value_drivers'] = [vd for vd in all_row_vds if vd['vcs_row'] == r['vcs_row']] - r['used_value_drivers'] = [vd for vd in all_used_vds if vd['vcs_row'] == r['vcs_row'] and + for row in vcs_rows: + row_res = [r for r in res if r['vcs_row'] == row.id] + r = {} + if row_res: + r = row_res[0] + else: + r['vcs_row'] = row.id + r['design_group'] = design_group_id + r['time'] = '0' + r['time_comment'] = '' + r['cost'] = '0' + r['cost_comment'] = '' + r['revenue'] = '0' + r['revenue_comment'] = '' + r['time_unit'] = TimeFormat.YEAR + r['rate'] = Rate.PRODUCT + r['row_value_drivers'] = [vd for vd in all_row_vds if vd['vcs_row'] == row.id] + r['used_value_drivers'] = [vd for vd in all_used_vds if vd['vcs_row'] == row.id and vd['design_group'] == r['design_group']] - r['used_external_factors'] = [ef for ef in all_used_efs if ef['vcs_row'] == r['vcs_row'] and + r['used_external_factors'] = [ef for ef in all_used_efs if ef['vcs_row'] == row.id and ef['design_group'] == r['design_group']] 
formulas.append(populate_formula(r)) @@ -286,10 +305,13 @@ def populate_formula(db_result) -> models.FormulaRowGet: return models.FormulaRowGet( vcs_row_id=db_result['vcs_row'], design_group_id=db_result['design_group'], - time=models.Formula(formula=db_result['time'], comment=db_result['time_comment']), + time=models.Formula(formula=db_result['time'], + comment=db_result['time_comment']), time_unit=db_result['time_unit'], - cost=models.Formula(formula=db_result['cost'], comment=db_result['cost_comment']), - revenue=models.Formula(formula=db_result['revenue'], comment=db_result['revenue_comment']), + cost=models.Formula(formula=db_result['cost'], + comment=db_result['cost_comment']), + revenue=models.Formula(formula=db_result['revenue'], + comment=db_result['revenue_comment']), rate=db_result['rate'], row_value_drivers=[vcs_storage.populate_value_driver(valueDriver) for valueDriver in db_result['row_value_drivers']] if diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index e5cff881..1df1f7e6 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -432,7 +432,7 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use connect_impl.edit_formulas( project_id, vcs_id, design_group_id, [formula_post]) - return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id, user_id) + return connect_impl.get_all_formulas(project_id, vcs_id, design_group_id) def delete_formulas(project_id: int, vcsRow_Dg_ids: List[Tuple[int, int]]): @@ -443,8 +443,7 @@ def delete_formulas(project_id: int, vcsRow_Dg_ids: List[Tuple[int, int]]): def edit_rate_order_formulas(project_id: int, vcs_id: int, design_group_id: int, user_id: int) -> vcs_model.VcsRow: rows = list(sorted(vcs_impl.get_vcs_table( project_id, vcs_id), key=lambda row: row.index)) - formulas = connect_impl.get_all_formulas( - project_id, vcs_id, design_group_id, user_id) + formulas = connect_impl.get_all_formulas(project_id, vcs_id, design_group_id) last_id = -1 rows.reverse() # Reverse to find last technical process for row in rows: From 098041054cf0d6acfe2f4c1e7c50b8ea2fad2221 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 18 Aug 2023 10:07:19 +0200 Subject: [PATCH 158/210] changed column name from project_id to project --- sedbackend/apps/cvs/link_design_lifecycle/storage.py | 4 ++-- sedbackend/apps/cvs/vcs/storage.py | 12 +++++------- sql/V230721_cvs.sql | 6 +++--- tests/apps/cvs/testutils.py | 4 ++-- tests/apps/cvs/vcs/test_value_drivers.py | 6 +++--- 5 files changed, 15 insertions(+), 17 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 9fc9c3ca..6c54a97d 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -249,7 +249,7 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ with db_connection.cursor(prepared=True) as cursor: cursor.execute( - f"SELECT id, name, unit, project_id, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " + f"SELECT id, name, unit, {CVS_VALUE_DRIVERS_TABLE}.project, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_FORMULAS_VALUE_DRIVERS_TABLE}.value_driver = cvs_value_drivers.id WHERE {where_statement}", prepared_list) all_used_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] @@ -265,7 +265,7 @@ def get_all_formulas(db_connection: 
PooledMySQLConnection, project_id: int, vcs_ with db_connection.cursor(prepared=True) as cursor: logger.debug(f'Running') cursor.execute( - f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VALUE_DRIVERS_TABLE}.project_id, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " + f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VALUE_DRIVERS_TABLE}.project, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " f"INNER JOIN {CVS_STAKEHOLDER_NEEDS_TABLE} ON {CVS_STAKEHOLDER_NEEDS_TABLE}.vcs_row = {CVS_VCS_ROWS_TABLE}.id " f"INNER JOIN {CVS_VCS_NEED_DRIVERS_TABLE} ON {CVS_VCS_NEED_DRIVERS_TABLE}.stakeholder_need = {CVS_STAKEHOLDER_NEEDS_TABLE}.id " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_VALUE_DRIVERS_TABLE}.id = {CVS_VCS_NEED_DRIVERS_TABLE}.value_driver " diff --git a/sedbackend/apps/cvs/vcs/storage.py b/sedbackend/apps/cvs/vcs/storage.py index 859e33b9..2ef00f7a 100644 --- a/sedbackend/apps/cvs/vcs/storage.py +++ b/sedbackend/apps/cvs/vcs/storage.py @@ -21,10 +21,8 @@ CVS_VALUE_DIMENSION_COLUMNS = ['id', 'name', 'priority', 'vcs_row'] CVS_VALUE_DRIVER_TABLE = 'cvs_value_drivers' -CVS_VALUE_DRIVER_COLUMNS = ['id', 'user', 'name', 'unit', 'project_id'] +CVS_VALUE_DRIVER_COLUMNS = ['id', 'user', 'name', 'unit', 'project'] -CVS_VCS_ROW_DRIVERS_TABLE = 'cvs_rowDrivers' -CVS_VCS_ROW_DRIVERS_COLUMNS = ['vcs_row', 'value_driver'] CVS_ISO_PROCESS_TABLE = 'cvs_iso_processes' CVS_ISO_PROCESS_COLUMNS = ['id', 'name', 'category'] @@ -291,7 +289,7 @@ def get_all_value_driver_vcs(db_connection: PooledMySQLConnection, project_id: i try: select_statement = MySQLStatementBuilder(db_connection) results = select_statement \ - .select(CVS_VALUE_DRIVER_TABLE, ['cvs_value_drivers.id', 'user', 'name', 'unit', 'project_id']) \ + .select(CVS_VALUE_DRIVER_TABLE, ['cvs_value_drivers.id', 'user', 'name', 'unit', 'project']) \ .inner_join('cvs_vcs_need_drivers', 'value_driver = cvs_value_drivers.id') \ .inner_join('cvs_stakeholder_needs', 'stakeholder_need = cvs_stakeholder_needs.id') \ .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') \ @@ -314,7 +312,7 @@ def get_all_value_driver(db_connection: PooledMySQLConnection, user_id: int) -> try: query = f'SELECT DISTINCT cvd.*\ FROM cvs_value_drivers cvd \ - LEFT JOIN cvs_projects p ON cvd.project_id = p.id \ + LEFT JOIN cvs_projects p ON cvd.project = p.id \ LEFT JOIN projects_participants pp ON p.id = pp.project_id \ LEFT JOIN projects_subprojects ps ON p.id = ps.native_project_id \ WHERE (pp.user_id = %s OR p.owner_id = %s)' @@ -436,7 +434,7 @@ def create_value_driver(db_connection: PooledMySQLConnection, user_id: int, try: insert_statement = MySQLStatementBuilder(db_connection) insert_statement \ - .insert(table=CVS_VALUE_DRIVER_TABLE, columns=['user', 'name', 'unit', 'project_id']) \ + .insert(table=CVS_VALUE_DRIVER_TABLE, columns=['user', 'name', 'unit', 'project']) \ .set_values([user_id, value_driver_post.name, value_driver_post.unit, value_driver_post.project_id]) \ .execute(fetch_type=FetchType.FETCH_NONE) value_driver_id = insert_statement.last_insert_id @@ -498,7 +496,7 @@ def populate_value_driver(db_result) -> models.ValueDriver: id=db_result['id'], name=db_result['name'], unit=db_result['unit'], - project_id=db_result['project_id'] + project_id=db_result['project'] ) diff --git a/sql/V230721_cvs.sql b/sql/V230721_cvs.sql index 077b92e6..3b9b2a1e 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230721_cvs.sql @@ -8,16 
+8,16 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` `user` INT UNSIGNED NOT NULL, `name` TEXT NOT NULL, `unit` VARCHAR(10) NULL, - `project_id` INT UNSIGNED NOT NULL, + `project` INT UNSIGNED NOT NULL, FOREIGN KEY(`user`) REFERENCES `seddb`.`users`(`id`) ON DELETE CASCADE, - FOREIGN KEY(`project_id`) + FOREIGN KEY(`project`) REFERENCES `seddb`.`cvs_projects` (`id`) ON DELETE CASCADE ); ALTER TABLE `seddb`.`cvs_value_drivers` ADD CONSTRAINT unq_project_name_unit - UNIQUE (project_id, name(20), unit); + UNIQUE (project, name(20), unit); SET FOREIGN_KEY_CHECKS = 0; ALTER TABLE `seddb`.`cvs_subprocesses` diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 1df1f7e6..5c1b8163 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -104,12 +104,12 @@ def random_value_driver_post(user_id: int, project_id: int, name: str = None, un return sedbackend.apps.cvs.vcs.models.ValueDriverPost( name=name, unit=unit, - project_id=project_id + project=project_id ) def seed_random_value_driver(user_id: int, project_id: int) -> sedbackend.apps.cvs.vcs.models.ValueDriver: - value_driver_post = random_value_driver_post(user_id=user_id, project_id=project_id) + value_driver_post = random_value_driver_post(user_id=user_id, project=project_id) new_value_driver = sedbackend.apps.cvs.vcs.implementation.create_value_driver( user_id, value_driver_post) diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index 54df00e2..1fcd5615 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -67,7 +67,7 @@ def test_create_value_driver(client, std_headers, std_user): res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'name': vd.name, 'unit': vd.unit, - 'project_id': vd.project_id + 'project': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK @@ -85,7 +85,7 @@ def test_create_value_driver_missing_name(client, std_headers, std_user): # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'unit': vdPost.unit, - 'project_id': vdPost.project_id + 'project': vdPost.project_id }) # Assert assert res.status_code == 422 # 422 Unprocessable Entity @@ -101,7 +101,7 @@ def test_create_value_driver_missing_unit(client, std_headers, std_user): # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'name': vd.name, - 'project_id': vd.project_id + 'project': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK From 784b7d34a0b1d7ebf5fb5548dcd421e084f359e0 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 18 Aug 2023 10:46:53 +0200 Subject: [PATCH 159/210] fixed failing tests --- sedbackend/apps/cvs/simulation/router.py | 4 +++- sedbackend/apps/cvs/simulation/storage.py | 8 ++++---- tests/apps/cvs/testutils.py | 4 ++-- tests/apps/cvs/vcs/test_value_drivers.py | 6 +++--- 4 files changed, 12 insertions(+), 10 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index ef9f575a..c7dee458 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -20,7 +20,9 @@ async def run_simulation(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, user: User = Depends(get_current_active_user)) -> SimulationResult: - return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, 
user.id, normalized_npv) + return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, user.id, + normalized_npv) + # Temporary disabled ''' diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 85e93e04..93368647 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -76,7 +76,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed for vd in all_vd_design_values: element_id = vd["id"] if element_id not in unique_vds: - unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"]} + unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"], "project": vd["project"]} all_vds = list(unique_vds.values()) all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) @@ -273,7 +273,7 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List[int]): try: - query = f'SELECT design, value, vcs_row, cvd.name, cvd.unit, cvd.id \ + query = f'SELECT design, value, vcs_row, cvd.name, cvd.unit, cvd.id, cvd.project \ FROM cvs_vd_design_values cvdv \ INNER JOIN cvs_value_drivers cvd ON cvdv.value_driver = cvd.id \ INNER JOIN cvs_vcs_need_drivers cvnd ON cvnd.value_driver = cvd.id \ @@ -342,8 +342,8 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i sim_settings.interarrival_time, sim_settings.start_time, sim_settings.end_time, sim_settings.discount_rate, sim_settings.non_tech_add.value, sim_settings.monte_carlo, sim_settings.runs] - update_Statement = MySQLStatementBuilder(db_connection) - _, rows = update_Statement \ + update_statement = MySQLStatementBuilder(db_connection) + _, rows = update_statement \ .update(table=SIM_SETTINGS_TABLE, set_statement=set_statement, values=values) \ .where('project = %s', [project_id]) \ .execute(return_affected_rows=True) diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 5c1b8163..1df1f7e6 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -104,12 +104,12 @@ def random_value_driver_post(user_id: int, project_id: int, name: str = None, un return sedbackend.apps.cvs.vcs.models.ValueDriverPost( name=name, unit=unit, - project=project_id + project_id=project_id ) def seed_random_value_driver(user_id: int, project_id: int) -> sedbackend.apps.cvs.vcs.models.ValueDriver: - value_driver_post = random_value_driver_post(user_id=user_id, project=project_id) + value_driver_post = random_value_driver_post(user_id=user_id, project_id=project_id) new_value_driver = sedbackend.apps.cvs.vcs.implementation.create_value_driver( user_id, value_driver_post) diff --git a/tests/apps/cvs/vcs/test_value_drivers.py b/tests/apps/cvs/vcs/test_value_drivers.py index 1fcd5615..54df00e2 100644 --- a/tests/apps/cvs/vcs/test_value_drivers.py +++ b/tests/apps/cvs/vcs/test_value_drivers.py @@ -67,7 +67,7 @@ def test_create_value_driver(client, std_headers, std_user): res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'name': vd.name, 'unit': vd.unit, - 'project': vd.project_id + 'project_id': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK @@ -85,7 +85,7 @@ def test_create_value_driver_missing_name(client, std_headers, std_user): # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'unit': vdPost.unit, - 'project': 
vdPost.project_id + 'project_id': vdPost.project_id }) # Assert assert res.status_code == 422 # 422 Unprocessable Entity @@ -101,7 +101,7 @@ def test_create_value_driver_missing_unit(client, std_headers, std_user): # Act res = client.post(f'/api/cvs/value-driver', headers=std_headers, json={ 'name': vd.name, - 'project': vd.project_id + 'project_id': vd.project_id }) # Assert assert res.status_code == 200 # 200 OK From 548659ae9d657be19b2ddbddf5e319fa340720c6 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 18 Aug 2023 11:17:24 +0200 Subject: [PATCH 160/210] fixed failing simulation --- sedbackend/apps/cvs/simulation/storage.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 93368647..6c29e7af 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -1,9 +1,7 @@ import re import sys -import tempfile from math import isnan -from fastapi import UploadFile from mysql.connector.pooling import PooledMySQLConnection import pandas as pd from mysql.connector import Error @@ -13,14 +11,13 @@ from desim import interface as des from desim.data import NonTechCost, TimeFormat from desim.simulation import Process -import os from typing import List from sedbackend.apps.cvs.design.storage import get_all_designs from mysqlsb import FetchType, MySQLStatementBuilder -from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id, get_dsm_from_csv +from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id from sedbackend.apps.cvs.simulation.models import SimulationResult from sedbackend.apps.cvs.vcs.storage import get_vcss from sedbackend.libs.formula_parser.parser import NumericStringParser @@ -76,7 +73,7 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed for vd in all_vd_design_values: element_id = vd["id"] if element_id not in unique_vds: - unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"], "project": vd["project"]} + unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"], "project_id": vd["project"]} all_vds = list(unique_vds.values()) all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) @@ -409,7 +406,7 @@ def replace(match): if tag == "vd": id_number = int(value) for vd in vd_values: - if vd["value_driver"] == id_number: + if vd["id"] == id_number: return str(vd["value"]) elif tag == "ef": for ef in ef_values: From d3ad649916e7e440833c4b9712127f55625f9539 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 18 Aug 2023 11:27:42 +0200 Subject: [PATCH 161/210] fixed failing tests --- tests/apps/cvs/simulation/test_sim_utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index d9bd4d21..24c3736d 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -19,7 +19,7 @@ def test_parse_formula_simple(): def test_parse_formula_values(): # Setup - vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() @@ -34,7 +34,7 @@ def 
test_parse_formula_values(): def test_parse_formula_process_variable(): # Setup - vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '{vd:47241,"Design Similarity [0-1]"}*{process:COST,"COST"}' @@ -58,7 +58,7 @@ def test_parse_formula_process_variable(): def test_parse_formula_vd_no_exist(): # Setup - vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:1,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() @@ -95,7 +95,7 @@ def test_add_multiplication_valid_formula(): def test_parse_without_multiplication_signs(): # Setup - vd_values = [{"value_driver": 47241, "name": "Speed", "unit": "0-1", "value": 10}] + vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2{vd:47241,"Design Similarity [0-1]"}{ef:114,"Fuel Cost [k€/liter]"}' nsp = NumericStringParser() From e11294d89c0e56251032152ec80d80c695f7736a Mon Sep 17 00:00:00 2001 From: = <=> Date: Tue, 5 Sep 2023 14:10:18 +0200 Subject: [PATCH 162/210] test prod --- docker-compose.prod.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index be1d2bd3..ded7489f 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -8,15 +8,14 @@ services: - "80:80" - "443:443" environment: - - NGINX_HOST=sedlab.ppd.chalmers.se + - NGINX_HOST=143.198.251.72 - NGINX_PORT=443 build: context: ./deployment/nginx/ dockerfile: Dockerfile-tls-termination-proxy restart: unless-stopped volumes: - - C:\Certbot\live\sedlab.ppd.chalmers.se\:/etc/nginx/certs/live/sedlab.ppd.chalmers.se # TLS Certs syslink location - - C:\Certbot\archive\sedlab.ppd.chalmers.se\:/etc/nginx/certs/archive/sedlab.ppd.chalmers.se # TLS Certs actual location + - ./nginx.conf:/etc/nginx/nginx.conf depends_on: - "core-db" - "backend-api" From 027fe7bb357bc4f6d378872fa29c20a58ce546df Mon Sep 17 00:00:00 2001 From: = <=> Date: Tue, 5 Sep 2023 14:15:41 +0200 Subject: [PATCH 163/210] test prod nginx host --- docker-compose.prod.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index ded7489f..3086f70c 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -8,7 +8,7 @@ services: - "80:80" - "443:443" environment: - - NGINX_HOST=143.198.251.72 + - NGINX_HOST=clubdesign.se - NGINX_PORT=443 build: context: ./deployment/nginx/ From 22a6eebe6f3f2c2d44c3de72ed7ecf78d2a08bb9 Mon Sep 17 00:00:00 2001 From: = <=> Date: Tue, 5 Sep 2023 17:23:15 +0200 Subject: [PATCH 164/210] new ssl test --- deployment/nginx/nginx.conf | 4 ++-- docker-compose.prod.yml | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/deployment/nginx/nginx.conf b/deployment/nginx/nginx.conf index c50aa254..616aec42 100644 --- a/deployment/nginx/nginx.conf +++ b/deployment/nginx/nginx.conf @@ -1,7 +1,7 @@ server { listen 443 ssl; - ssl_certificate /etc/nginx/certs/live/sedlab.ppd.chalmers.se/fullchain.pem; - ssl_certificate_key /etc/nginx/certs/live/sedlab.ppd.chalmers.se/privkey.pem; + 
ssl_certificate /etc/letsencrypt/live/clubdesign.systemsengineering.design/fullchain.pem; + ssl_certificate_key /etc/letsencrypt/live/clubdesign.systemsengineering.design/privkey.pem; location / { proxy_pass http://backend-api:80; diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index 3086f70c..d2be06e4 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -8,14 +8,12 @@ services: - "80:80" - "443:443" environment: - - NGINX_HOST=clubdesign.se + - NGINX_HOST=clubdesign.systemsengineering.design - NGINX_PORT=443 build: context: ./deployment/nginx/ dockerfile: Dockerfile-tls-termination-proxy restart: unless-stopped - volumes: - - ./nginx.conf:/etc/nginx/nginx.conf depends_on: - "core-db" - "backend-api" From 2d91375a88327f54c99ff6634db8f9702b309310 Mon Sep 17 00:00:00 2001 From: = <=> Date: Tue, 5 Sep 2023 18:41:52 +0200 Subject: [PATCH 165/210] test volumes --- docker-compose.prod.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index d2be06e4..09c4babe 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -14,6 +14,9 @@ services: context: ./deployment/nginx/ dockerfile: Dockerfile-tls-termination-proxy restart: unless-stopped + volumes: + - /etc/letsencrypt/live/clubdesign.systemsengineering.design/fullchain.pem:/etc/letsencrypt/live/clubdesign.systemsengineering.design/fullchain.pem + - /etc/letsencrypt/live/clubdesign.systemsengineering.design/privkey.pem:/etc/letsencrypt/live/clubdesign.systemsengineering.design/privkey.pem depends_on: - "core-db" - "backend-api" From 353d6f7e98fdfb58baed5420e25a8ebd37a270a6 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 25 Sep 2023 17:18:03 +0200 Subject: [PATCH 166/210] Sql file fix (#118) * removed dsm sql file * changes not added * removed drop table lines * sql syntax error * sql file name change --- sql/V220608_cvs.sql | 38 +++++------------------- sql/V230529_cvs_dsm_files.sql | 12 -------- sql/{V230721_cvs.sql => V230925_cvs.sql} | 31 +++++++------------ 3 files changed, 17 insertions(+), 64 deletions(-) delete mode 100644 sql/V230529_cvs_dsm_files.sql rename sql/{V230721_cvs.sql => V230925_cvs.sql} (70%) diff --git a/sql/V220608_cvs.sql b/sql/V220608_cvs.sql index e107290d..7313f16d 100644 --- a/sql/V220608_cvs.sql +++ b/sql/V220608_cvs.sql @@ -107,8 +107,6 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_vcs_rows` REFERENCES `seddb`.`cvs_vcss`(`id`) ON DELETE CASCADE ON UPDATE NO ACTION - # CONSTRAINT `unique_index` - # UNIQUE (`index`, `vcs`) ); # Stakeholder need @@ -146,9 +144,15 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` `user` INT UNSIGNED NOT NULL, `name` TEXT NOT NULL, `unit` VARCHAR(10) NULL, + `project` INT UNSIGNED NOT NULL, FOREIGN KEY(`user`) REFERENCES `seddb`.`users`(`id`) - ON DELETE CASCADE + ON DELETE CASCADE, + FOREIGN KEY(`project`) + REFERENCES `seddb`.`cvs_projects` (`id`) + ON DELETE CASCADE, + CONSTRAINT unq_project_name_unit + UNIQUE (project, name(20), unit) ); #Vcs row and value driver connection @@ -305,31 +309,3 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_market_input_values` REFERENCES `seddb`.`cvs_market_inputs`(`id`) ON DELETE CASCADE ); - -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_market_inputs` -( - `formulas` INT UNSIGNED NOT NULL, - `market_input` INT UNSIGNED NOT NULL, - PRIMARY KEY(`formulas`, `market_input`), - FOREIGN KEY (`formulas`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY(`market_input`) - REFERENCES 
`seddb`.`cvs_market_inputs`(`id`) - ON DELETE CASCADE -); - - -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` -( - `formulas` INT UNSIGNED NOT NULL, - `value_driver` INT UNSIGNED NOT NULL, - PRIMARY KEY(`formulas`, `value_driver`), - FOREIGN KEY (`formulas`) - REFERENCES `seddb`.`cvs_design_mi_formulas`(`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY(`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers`(`id`) - ON DELETE CASCADE -); - diff --git a/sql/V230529_cvs_dsm_files.sql b/sql/V230529_cvs_dsm_files.sql deleted file mode 100644 index bbd07694..00000000 --- a/sql/V230529_cvs_dsm_files.sql +++ /dev/null @@ -1,12 +0,0 @@ -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` -( - `vcs` INT UNSIGNED NOT NULL, - `file` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs`), - FOREIGN KEY (`vcs`) - REFERENCES `seddb`.`cvs_vcss`(`id`) - ON DELETE CASCADE, - FOREIGN KEY(`file`) - REFERENCES `seddb`.`files`(`id`) - ON DELETE CASCADE -); \ No newline at end of file diff --git a/sql/V230721_cvs.sql b/sql/V230925_cvs.sql similarity index 70% rename from sql/V230721_cvs.sql rename to sql/V230925_cvs.sql index 3b9b2a1e..c936f7e2 100644 --- a/sql/V230721_cvs.sql +++ b/sql/V230925_cvs.sql @@ -1,23 +1,16 @@ -# Value driver to project relation -SET FOREIGN_KEY_CHECKS = 0; -DROP TABLE IF EXISTS `seddb`.`cvs_value_drivers`; -SET FOREIGN_KEY_CHECKS = 1; -CREATE TABLE IF NOT EXISTS `seddb`.`cvs_value_drivers` +# DSM +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_dsm_files` ( - `id` INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY, - `user` INT UNSIGNED NOT NULL, - `name` TEXT NOT NULL, - `unit` VARCHAR(10) NULL, - `project` INT UNSIGNED NOT NULL, - FOREIGN KEY(`user`) - REFERENCES `seddb`.`users`(`id`) + `vcs` INT UNSIGNED NOT NULL, + `file` INT UNSIGNED NOT NULL, + PRIMARY KEY (`vcs`), + FOREIGN KEY (`vcs`) + REFERENCES `seddb`.`cvs_vcss`(`id`) ON DELETE CASCADE, - FOREIGN KEY(`project`) - REFERENCES `seddb`.`cvs_projects` (`id`) - ON DELETE CASCADE + FOREIGN KEY(`file`) + REFERENCES `seddb`.`files`(`id`) + ON DELETE CASCADE ); -ALTER TABLE `seddb`.`cvs_value_drivers` ADD CONSTRAINT unq_project_name_unit - UNIQUE (project, name(20), unit); SET FOREIGN_KEY_CHECKS = 0; ALTER TABLE `seddb`.`cvs_subprocesses` @@ -34,10 +27,6 @@ ALTER TABLE `seddb`.`cvs_design_mi_formulas` REFERENCES `seddb`.`cvs_projects` (`id`) ON DELETE CASCADE; -DROP TABLE IF EXISTS `seddb`.`cvs_formulas_market_inputs`; -DROP TABLE IF EXISTS `seddb`.`cvs_formulas_value_drivers`; - - CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_external_factors` ( `vcs_row` INT UNSIGNED NOT NULL, From 58e57b1b3e06a5d5536182072540b7cf635979b1 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 2 Oct 2023 14:26:21 +0200 Subject: [PATCH 167/210] parse if statement --- sedbackend/apps/cvs/simulation/storage.py | 21 ++++ tests/apps/cvs/simulation/test_sim_utils.py | 109 +++++++++++++++++++- 2 files changed, 126 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 6c29e7af..ecbebcdd 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -396,6 +396,24 @@ def replace(match): return result +def parse_if_statement(formula: str) -> str: + # The pattern is if(condition, true_value, false_value) + pattern = r'if\(([^,]+),([^,]+),([^,]+)\)' + match = re.search(pattern, formula) + + if match: + condition, true_value, false_value = match.groups() + condition = condition.replace('=', '==') + if eval(condition): + value = true_value + 
+        else:
+            value = false_value
+
+    formula = re.sub(pattern, value.strip(), formula).strip()
+
+    return formula
+
+
 def parse_formula(formula: str, vd_values, ef_values, formula_row: dict = None) -> str:
     pattern = r'\{(?P<tag>vd|ef|process):(?P<value>[a-zA-Z0-9_]+),"([^"]+)"\}'
@@ -420,6 +438,9 @@ def replace(match):
 
     replaced_text = re.sub(pattern, replace, formula)
     replaced_text = re.sub(pattern, replace, replaced_text)
+
+    replaced_text = parse_if_statement(replaced_text)
+
     replaced_text = re.sub(pattern, '0', replaced_text)  # If there are any tags left, replace them with 0
 
     return replaced_text
diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py
index 24c3736d..d580c279 100644
--- a/tests/apps/cvs/simulation/test_sim_utils.py
+++ b/tests/apps/cvs/simulation/test_sim_utils.py
@@ -19,17 +19,18 @@ def test_parse_formula_simple():
 def test_parse_formula_values():
     # Setup
-    vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}]
+    vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10},
+                 {"id": 1, "name": "Test", "unit": "T", "value": 20}]
     mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}]
-    formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}'
+    formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}+{vd:1,"Test [T]"}'
     nsp = NumericStringParser()
 
     # Act
     new_formula = parse_formula(formula, vd_values, mi_values)
 
     # Assert
-    assert new_formula == "2+10/5"
-    assert nsp.eval(new_formula) == 4
+    assert new_formula == "2+10/5+20"
+    assert nsp.eval(new_formula) == 24
 
 
 def test_parse_formula_process_variable():
@@ -106,3 +107,103 @@ def test_parse_without_multiplication_signs():
     # Assert
     assert new_formula == "2*10*5"
     assert nsp.eval(new_formula) == 100
+
+
+def test_if_statement_true():
+    formula = 'if(1, 1, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "1"
+    assert nsp.eval(new_formula) == 1
+
+
+def test_if_statement_false():
+    formula = 'if(0, 1, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "0"
+    assert nsp.eval(new_formula) == 0
+
+
+def test_if_statement_true_condition():
+    formula = 'if("10=10", 1, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "1"
+    assert nsp.eval(new_formula) == 1
+
+
+def test_if_statement_false_condition():
+    formula = 'if(10=11, 1, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "0"
+    assert nsp.eval(new_formula) == 0
+
+
+def test_if_statement_whitespace():
+    formula = 'if(10 = 10, 1, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "1"
+    assert nsp.eval(new_formula) == 1
+
+
+def test_if_statement_string():
+    formula = 'if("Speed" = "Speed", 10, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "10"
+    assert nsp.eval(new_formula) == 10
+
+
+def test_if_statement_greater_than():
+    formula = 'if(10 > 9, 10, 0)'
+    nsp = NumericStringParser()
+
+    # Act
+    new_formula = parse_formula(formula, [], [])
+
+    # Assert
+    assert new_formula == "10"
+    assert nsp.eval(new_formula) == 10
+
+
+def test_if_statement_formula():
+    # Setup
+    vd_values =
[{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}, + {"id": 1, "name": "Test", "unit": "T", "value": 20}] + mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] + formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}+if({vd:1,"Test [T]"}=20, {vd:47241,"Design Similarity [0-1]"}, 0)' + nsp = NumericStringParser() + + # Act + new_formula = parse_formula(formula, vd_values, mi_values) + + # Assert + assert new_formula == "2+10/5+10" + assert nsp.eval(new_formula) == 14 From c12de0d1531dc36a87e426cc361c089025297d12 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 10 Oct 2023 11:04:42 +0200 Subject: [PATCH 168/210] allow string for vd design values --- sedbackend/apps/core/db.py | 6 +++-- sedbackend/apps/cvs/design/models.py | 2 +- tests/apps/cvs/design/test_design.py | 33 +++++++++++++++++++++++++++- 3 files changed, 37 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/design/models.py b/sedbackend/apps/cvs/design/models.py index b0fc4855..a1775069 100644 --- a/sedbackend/apps/cvs/design/models.py +++ b/sedbackend/apps/cvs/design/models.py @@ -30,7 +30,7 @@ class DesignGroupPost(BaseModel): class ValueDriverDesignValue(BaseModel): vd_id: int - value: float + value: str def __eq__(self, other: Any) -> bool: return self.vd_id == other.vd_id diff --git a/tests/apps/cvs/design/test_design.py b/tests/apps/cvs/design/test_design.py index 12c0fd0c..9b6ef974 100644 --- a/tests/apps/cvs/design/test_design.py +++ b/tests/apps/cvs/design/test_design.py @@ -63,7 +63,6 @@ def test_create_design_no_values(client, std_headers, std_user): tu.delete_vd_from_user(current_user.id) - def test_edit_designs(client, std_headers, std_user): # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) @@ -97,6 +96,38 @@ def test_edit_designs(client, std_headers, std_user): tu.delete_vd_from_user(current_user.id) +def test_edit_design_string_value(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) + tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 2) # To get value drivers to vcs + design_group = tu.seed_random_design_group(project.id, None, vcs.id) + designs = tu.seed_random_designs(project.id, design_group.id, 1) + # Act + res = client.put(f'/api/cvs/project/{project.id}/design-group/{design_group.id}/designs', headers=std_headers, + json=[{ + 'id': designs[0].id, + 'name': "new design", + 'vd_design_values': [ + {'vd_id': vd.id, + 'value': "This is a text"} for vd in design_group.vds + ] + } + ]) + + # Assert + assert res.status_code == 200 # 200 OK + designs = impl_design.get_designs(project.id, design_group.id) + assert designs[0].name == "new design" + assert len(designs) == 1 + assert len(designs[0].vd_design_values) == len(design_group.vds) + + # Cleanup + tu.delete_vd_from_user(current_user.id) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) + 
def test_delete_designs(client, std_headers, std_user): # Setup From 9bc7462a0a222cc8492dac7cf651c2c5834c4a78 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 15 Oct 2023 14:29:45 +0200 Subject: [PATCH 169/210] replaced parsing package --- sedbackend/apps/core/db.py | 4 +- sedbackend/apps/cvs/simulation/storage.py | 23 ++-- sedbackend/libs/formula_parser/parser.py | 116 -------------------- tests/apps/cvs/simulation/test_sim_utils.py | 57 +++++----- 4 files changed, 42 insertions(+), 158 deletions(-) delete mode 100644 sedbackend/libs/formula_parser/parser.py diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..7f8542e7 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,10 +10,8 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 port = 3001 try: diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index ecbebcdd..16f7762c 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -11,6 +11,7 @@ from desim import interface as des from desim.data import NonTechCost, TimeFormat from desim.simulation import Process +from plusminus import BaseArithmeticParser from typing import List from sedbackend.apps.cvs.design.storage import get_all_designs @@ -20,7 +21,6 @@ from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id from sedbackend.apps.cvs.simulation.models import SimulationResult from sedbackend.apps.cvs.vcs.storage import get_vcss -from sedbackend.libs.formula_parser.parser import NumericStringParser from sedbackend.libs.formula_parser import expressions as expr from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e @@ -160,7 +160,7 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, vd_values=None): if mi_values is None: mi_values = [] - nsp = NumericStringParser() + parser = BaseArithmeticParser() technical_processes = [] non_tech_processes = [] @@ -170,8 +170,8 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, if row['category'] != 'Technical processes': try: non_tech = models.NonTechnicalProcess( - cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values, row)), - revenue=nsp.eval( + cost=parser.evaluate(parse_formula(row['cost'], vd_values_row, mi_values, row)), + revenue=parser.evaluate( parse_formula(row['revenue'], vd_values_row, mi_values, row)), name=row['iso_name']) except Exception as exc: @@ -181,16 +181,16 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, elif row['iso_name'] is not None and row['sub_name'] is None: try: - time = nsp.eval(parse_formula( + time = parser.evaluate(parse_formula( row['time'], vd_values, mi_values, row)) cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) revenue_formula = parse_formula( row['revenue'], vd_values, mi_values, row) p = Process(row['id'], time, - nsp.eval(expr.replace_all( + parser.evaluate(expr.replace_all( 'time', time, cost_formula)), - nsp.eval(expr.replace_all( + parser.evaluate(expr.replace_all( 'time', time, revenue_formula)), row['iso_name'], non_tech_add, TIME_FORMAT_DICT.get( row['time_unit'].lower()) @@ -204,16 +204,16 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, elif row['sub_name'] is not None: sub_name = f'{row["sub_name"]} ({row["iso_name"]})' try: - time = 
nsp.eval(parse_formula( + time = parser.evaluate(parse_formula( row['time'], vd_values, mi_values, row)) cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) revenue_formula = parse_formula( row['revenue'], vd_values, mi_values, row) p = Process(row['id'], time, - nsp.eval(expr.replace_all( + parser.evaluate(expr.replace_all( 'time', time, cost_formula)), - nsp.eval(expr.replace_all( + parser.evaluate(expr.replace_all( 'time', time, revenue_formula)), sub_name, non_tech_add, TIME_FORMAT_DICT.get( row['time_unit'].lower()) @@ -400,11 +400,12 @@ def parse_if_statement(formula: str) -> str: # The pattern is if(condition, true_value, false_value) pattern = r'if\(([^,]+),([^,]+),([^,]+)\)' match = re.search(pattern, formula) + parser = BaseArithmeticParser() if match: condition, true_value, false_value = match.groups() condition = condition.replace('=', '==') - if eval(condition): + if parser.evaluate(condition): value = true_value else: value = false_value diff --git a/sedbackend/libs/formula_parser/parser.py b/sedbackend/libs/formula_parser/parser.py deleted file mode 100644 index 78b1c3ad..00000000 --- a/sedbackend/libs/formula_parser/parser.py +++ /dev/null @@ -1,116 +0,0 @@ -from __future__ import division -import pyparsing as pyp -import math -import operator - - -# See https://stackoverflow.com/questions/11951701/safe-way-to-parse-user-supplied-mathematical-formula-in-python -# and https://stackoverflow.com/questions/23879784/parse-mathematical-expressions-with-pyparsing -# and https://stackoverflow.com/questions/65287009/parse-math-expression-as-3-2-temp-humidity-where-replace-alpha-values-f -# Read up on regex: https://docs.python.org/3/library/re.html -# https://www.w3schools.com/python/python_regex.asp - - -class NumericStringParser(object): - """ - Most of this code comes from the fourFn.py pyparsing example - http://pyparsing.wikispaces.com/file/view/fourFn.py - http://pyparsing.wikispaces.com/message/view/home/15549426 - __author__='Paul McGuire' - """ - - def push_first(self, strg, loc, toks): - self.exprStack.append(toks[0]) - - def push_u_minus(self, strg, loc, toks): - if toks and toks[0] == '-': - self.exprStack.append('unary -') - - def __init__(self): - """ - expop :: '^' - multop :: '*' | '/' - addop :: '+' | '-' - integer :: ['+' | '-'] '0'..'9'+ - atom :: PI | E | real | fn '(' expr ')' | '(' expr ')' - factor :: atom [ expop factor ]* - term :: factor [ multop factor ]* - expr :: term [ addop term ]* - """ - point = pyp.Literal(".") - e = pyp.CaselessLiteral("E") - fnumber = pyp.Combine(pyp.Word("+-" + pyp.nums, pyp.nums) + - pyp.Optional(point + pyp.Optional(pyp.Word(pyp.nums))) + - pyp.Optional(e + pyp.Word("+-" + pyp.nums, pyp.nums))) - ident = pyp.Word(pyp.alphas, pyp.alphas + pyp.nums + "_$") - plus = pyp.Literal("+") - minus = pyp.Literal("-") - mult = pyp.Literal("*") - div = pyp.Literal("/") - lpar = pyp.Literal("(").suppress() - rpar = pyp.Literal(")").suppress() - addop = plus | minus - multop = mult | div - expop = pyp.Literal("^") - pi = pyp.CaselessLiteral("PI") - expr = pyp.Forward() - atom = ((pyp.Optional(pyp.oneOf("- +")) + - (pi | e | fnumber | ident + lpar + expr + rpar).setParseAction(self.push_first)) - | pyp.Optional(pyp.oneOf("- +")) + pyp.Group(lpar + expr + rpar) - ).setParseAction(self.push_u_minus) - # by defining exponentiation as "atom [ ^ factor ]..." instead of - # "atom [ ^ atom ]...", we get right-to-left exponents, instead of left-to-right - # that is, 2^3^2 = 2^(3^2), not (2^3)^2. 
- factor = pyp.Forward() - factor << atom + pyp.ZeroOrMore((expop + factor).setParseAction( - self.push_first)) - term = factor + pyp.ZeroOrMore((multop + factor).setParseAction( - self.push_first)) - expr << term + pyp.ZeroOrMore((addop + term).setParseAction(self.push_first)) - self.bnf = expr - # map operator symbols to corresponding arithmetic operations - epsilon = 1e-12 - self.opn = {"+": operator.add, - "-": operator.sub, - "*": operator.mul, - "/": operator.truediv, - "^": operator.pow} - self.fn = {"sin": math.sin, - "cos": math.cos, - "tan": math.tan, - "abs": abs, - "trunc": lambda a: int(a), - "round": round, - # For Python3 compatibility, cmp replaced by ((a > 0) - (a < 0)). See - # https://docs.python.org/3.0/whatsnew/3.0.html#ordering-comparisons - "sgn": lambda a: abs(a) > epsilon and ((a > 0) - (a < 0)) or 0} - self.exprStack = [] - - def evaluate_stack(self, s): - op = s.pop() - if op == 'unary -': - return -self.evaluate_stack(s) - if op in "+-*/^": - op2 = self.evaluate_stack(s) - op1 = self.evaluate_stack(s) - return self.opn[op](op1, op2) - elif op == "PI": - return math.pi # 3.1415926535 - elif op == "E": - return math.e # 2.718281828 - elif op in self.fn: - return self.fn[op](self.evaluate_stack(s)) - elif op[0].isalpha(): - return 0 - else: - return float(op) - - def eval(self, num_string, parse_all=True): - """ - Evaluates a mathematical expression consisting of numbers and mathematical operators. - - """ - self.exprStack = [] - results = self.bnf.parseString(num_string, parse_all) - val = self.evaluate_stack(self.exprStack[:]) - return val diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index d580c279..126525bc 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -1,5 +1,6 @@ from sedbackend.apps.cvs.simulation.storage import parse_formula, add_multiplication_signs -from sedbackend.libs.formula_parser.parser import NumericStringParser +from plusminus import BaseArithmeticParser + def test_parse_formula_simple(): @@ -7,14 +8,14 @@ def test_parse_formula_simple(): formula = f'(3+1)/2' vd_values = [] mi_values = [] - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert assert new_formula == formula - assert nsp.eval(new_formula) == 2 + assert parser.evaluate(new_formula) == 2 def test_parse_formula_values(): @@ -23,14 +24,14 @@ def test_parse_formula_values(): {"id": 1, "name": "Test", "unit": "T", "value": 20}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}+{vd:1,"Test [T]"}' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert assert new_formula == "2+10/5+20" - assert nsp.eval(new_formula) == 24 + assert parser.evaluate(new_formula) == 24 def test_parse_formula_process_variable(): @@ -47,14 +48,14 @@ def test_parse_formula_process_variable(): "cost": cost, "revenue": revenue, } - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values, formula_row) # Assert assert new_formula == "10*(2+10/5)" - assert nsp.eval(new_formula) == 40 + assert parser.evaluate(new_formula) == 40 def test_parse_formula_vd_no_exist(): @@ -62,14 +63,14 @@ def test_parse_formula_vd_no_exist(): vd_values = [{"id": 47241, 
"name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:1,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert assert new_formula == "2+0/5" - assert nsp.eval(new_formula) == 2 + assert parser.evaluate(new_formula) == 2 def test_add_multiplication_signs(): @@ -99,98 +100,98 @@ def test_parse_without_multiplication_signs(): vd_values = [{"id": 47241, "name": "Speed", "unit": "0-1", "value": 10}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2{vd:47241,"Design Similarity [0-1]"}{ef:114,"Fuel Cost [k€/liter]"}' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert assert new_formula == "2*10*5" - assert nsp.eval(new_formula) == 100 + assert parser.evaluate(new_formula) == 100 def test_if_statement_true(): formula = 'if(1, 1, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "1" - assert nsp.eval(new_formula) == 1 + assert parser.evaluate(new_formula) == 1 def test_if_statement_false(): formula = 'if(0, 1, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "0" - assert nsp.eval(new_formula) == 0 + assert parser.evaluate(new_formula) == 0 def test_if_statement_true_condition(): - formula = 'if("10=10", 1, 0)' - nsp = NumericStringParser() + formula = 'if(10=10, 1, 0)' + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "1" - assert nsp.eval(new_formula) == 1 + assert parser.evaluate(new_formula) == 1 def test_if_statement_false_condition(): formula = 'if(10=11, 1, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "0" - assert nsp.eval(new_formula) == 0 + assert parser.evaluate(new_formula) == 0 def test_if_statement_whitespace(): formula = 'if(10 = 10, 1, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "1" - assert nsp.eval(new_formula) == 1 + assert parser.evaluate(new_formula) == 1 def test_if_statement_string(): formula = 'if("Speed" = "Speed", 10, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "10" - assert nsp.eval(new_formula) == 10 + assert parser.evaluate(new_formula) == 10 def test_if_statement_greater_than(): formula = 'if(10 > 9, 10, 0)' - nsp = NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, [], []) # Assert assert new_formula == "10" - assert nsp.eval(new_formula) == 10 + assert parser.evaluate(new_formula) == 10 def test_if_statement_formula(): @@ -199,11 +200,11 @@ def test_if_statement_formula(): {"id": 1, "name": "Test", "unit": "T", "value": 20}] mi_values = [{"market_input": 114, "name": "Fuel Cost", "unit": "k€/liter", "value": 5}] formula = '2+{vd:47241,"Design Similarity [0-1]"}/{ef:114,"Fuel Cost [k€/liter]"}+if({vd:1,"Test [T]"}=20, {vd:47241,"Design Similarity [0-1]"}, 0)' - nsp = 
NumericStringParser() + parser = BaseArithmeticParser() # Act new_formula = parse_formula(formula, vd_values, mi_values) # Assert assert new_formula == "2+10/5+10" - assert nsp.eval(new_formula) == 14 + assert parser.evaluate(new_formula) == 14 From f4398f2d3e156423d17d3ad377c6499f768d24dd Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 15 Oct 2023 15:34:27 +0200 Subject: [PATCH 170/210] run simulation with conditional statements and text --- sedbackend/apps/cvs/simulation/storage.py | 442 ++++++++++++++-------- 1 file changed, 288 insertions(+), 154 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 16f7762c..4314c2c0 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -29,25 +29,43 @@ from sedbackend.apps.core.files import exceptions as file_exceptions SIM_SETTINGS_TABLE = "cvs_simulation_settings" -SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', - 'interarrival_time', 'start_time', 'end_time', 'discount_rate', 'non_tech_add', 'monte_carlo', - 'runs'] - -TIME_FORMAT_DICT = dict({ - 'year': TimeFormat.YEAR, - 'month': TimeFormat.MONTH, - 'week': TimeFormat.WEEK, - 'day': TimeFormat.DAY, - 'hour': TimeFormat.HOUR, - 'minutes': TimeFormat.MINUTES -}) - - -def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, - project_id: int, vcs_ids: List[int], - design_group_ids: List[int], user_id, normalized_npv: bool = False, - is_multiprocessing: bool = False, - ) -> SimulationResult: +SIM_SETTINGS_COLUMNS = [ + "project", + "time_unit", + "flow_process", + "flow_start_time", + "flow_time", + "interarrival_time", + "start_time", + "end_time", + "discount_rate", + "non_tech_add", + "monte_carlo", + "runs", +] + +TIME_FORMAT_DICT = dict( + { + "year": TimeFormat.YEAR, + "month": TimeFormat.MONTH, + "week": TimeFormat.WEEK, + "day": TimeFormat.DAY, + "hour": TimeFormat.HOUR, + "minutes": TimeFormat.MINUTES, + } +) + + +def run_simulation( + db_connection: PooledMySQLConnection, + sim_settings: models.EditSimSettings, + project_id: int, + vcs_ids: List[int], + design_group_ids: List[int], + user_id, + normalized_npv: bool = False, + is_multiprocessing: bool = False, +) -> SimulationResult: if not check_sim_settings(sim_settings): raise e.BadlyFormattedSettingsException @@ -67,23 +85,32 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed all_designs = get_all_designs(db_connection, design_group_ids) - all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) + all_vd_design_values = get_all_vd_design_values( + db_connection, [design.id for design in all_designs] + ) unique_vds = {} for vd in all_vd_design_values: element_id = vd["id"] if element_id not in unique_vds: - unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"], "project_id": vd["project"]} + unique_vds[element_id] = { + "id": vd["id"], + "name": vd["name"], + "unit": vd["unit"], + "project_id": vd["project"], + } all_vds = list(unique_vds.values()) all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) all_vcss = get_vcss(db_connection, project_id, vcs_ids, user_id) - sim_result = SimulationResult(designs=all_designs, vcss=all_vcss, vds=all_vds, runs=[]) + sim_result = SimulationResult( + designs=all_designs, vcss=all_vcss, vds=all_vds, runs=[] + ) for vcs_id in vcs_ids: - market_values = [mi for mi in 
all_market_values if mi['vcs'] == vcs_id] + market_values = [mi for mi in all_market_values if mi["vcs"] == vcs_id] dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] dsm = None if len(dsm_id) > 0: @@ -93,22 +120,33 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed except file_exceptions.FileNotFoundException: pass for design_group_id in design_group_ids: - sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] + sim_data = [ + sd + for sd in all_sim_data + if sd["vcs"] == vcs_id and sd["design_group"] == design_group_id + ] if sim_data is None or sim_data == []: raise e.VcsFailedException if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - designs = [design.id for design in all_designs if design.design_group_id == design_group_id] + designs = [ + design.id + for design in all_designs + if design.design_group_id == design_group_id + ] if designs is None or []: raise e.DesignIdsNotFoundException for design in designs: - vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, - vd_values) + vd_values = [ + vd for vd in all_vd_design_values if vd["design"] == design + ] + processes, non_tech_processes = populate_processes( + non_tech_add, sim_data, design, market_values, vd_values + ) if dsm is None: dsm = create_simple_dsm(processes) @@ -117,30 +155,58 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed try: if is_monte_carlo and not is_multiprocessing: - results = sim.run_monte_carlo_simulation(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, - runs) + results = sim.run_monte_carlo_simulation( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + runs, + ) elif is_monte_carlo and is_multiprocessing: - results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, - runs) + results = sim.run_parallell_simulations( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + runs, + ) else: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) + results = sim.run_simulation( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + ) except Exception as exc: tb = sys.exc_info()[2] - logger.debug( - f'{exc.__class__}, {exc}, {exc.with_traceback(tb)}') - print(f'{exc.__class__}, {exc}') + logger.debug(f"{exc.__class__}, {exc}, {exc.with_traceback(tb)}") + print(f"{exc.__class__}, {exc}") raise e.SimulationFailedException sim_run_res = models.Simulation( time=results.timesteps[-1], - mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), + mean_NPV=results.normalize_npv() + if normalized_npv + else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs, @@ -151,13 +217,13 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed ) sim_result.runs.append(sim_run_res) - logger.debug('Returning the results') 
+ logger.debug("Returning the results") return sim_result -def populate_processes(non_tech_add: NonTechCost, db_results, design: int, - mi_values=None, - vd_values=None): +def populate_processes( + non_tech_add: NonTechCost, db_results, design: int, mi_values=None, vd_values=None +): if mi_values is None: mi_values = [] parser = BaseArithmeticParser() @@ -166,64 +232,76 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, non_tech_processes = [] for row in db_results: - vd_values_row = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] - if row['category'] != 'Technical processes': + vd_values_row = [ + vd + for vd in vd_values + if vd["vcs_row"] == row["id"] and vd["design"] == design + ] + if row["category"] != "Technical processes": try: non_tech = models.NonTechnicalProcess( - cost=parser.evaluate(parse_formula(row['cost'], vd_values_row, mi_values, row)), + cost=parser.evaluate( + parse_formula(row["cost"], vd_values_row, mi_values, row) + ), revenue=parser.evaluate( - parse_formula(row['revenue'], vd_values_row, mi_values, row)), - name=row['iso_name']) + parse_formula(row["revenue"], vd_values_row, mi_values, row) + ), + name=row["iso_name"], + ) except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(row["id"]) non_tech_processes.append(non_tech) - elif row['iso_name'] is not None and row['sub_name'] is None: + elif row["iso_name"] is not None and row["sub_name"] is None: try: - time = parser.evaluate(parse_formula( - row['time'], vd_values, mi_values, row)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) + time = parser.evaluate( + parse_formula(row["time"], vd_values, mi_values, row) + ) + cost_formula = parse_formula(row["cost"], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values, row) - p = Process(row['id'], - time, - parser.evaluate(expr.replace_all( - 'time', time, cost_formula)), - parser.evaluate(expr.replace_all( - 'time', time, revenue_formula)), - row['iso_name'], non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) - ) + row["revenue"], vd_values, mi_values, row + ) + p = Process( + row["id"], + time, + parser.evaluate(expr.replace_all("time", time, cost_formula)), + parser.evaluate(expr.replace_all("time", time, revenue_formula)), + row["iso_name"], + non_tech_add, + TIME_FORMAT_DICT.get(row["time_unit"].lower()), + ) if p.time < 0: - raise e.NegativeTimeException(row['id']) + raise e.NegativeTimeException(row["id"]) except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(row["id"]) technical_processes.append(p) - elif row['sub_name'] is not None: + elif row["sub_name"] is not None: sub_name = f'{row["sub_name"]} ({row["iso_name"]})' try: - time = parser.evaluate(parse_formula( - row['time'], vd_values, mi_values, row)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) + time = parser.evaluate( + parse_formula(row["time"], vd_values, mi_values, row) + ) + cost_formula = parse_formula(row["cost"], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values, row) - p = Process(row['id'], - time, - parser.evaluate(expr.replace_all( - 'time', time, cost_formula)), - parser.evaluate(expr.replace_all( - 'time', time, 
revenue_formula)), - sub_name, non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) - ) + row["revenue"], vd_values, mi_values, row + ) + p = Process( + row["id"], + time, + parser.evaluate(expr.replace_all("time", time, cost_formula)), + parser.evaluate(expr.replace_all("time", time, revenue_formula)), + sub_name, + non_tech_add, + TIME_FORMAT_DICT.get(row["time_unit"].lower()), + ) if p.time < 0: - raise e.NegativeTimeException(row['id']) + raise e.NegativeTimeException(row["id"]) except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(row["id"]) technical_processes.append(p) else: raise e.ProcessNotFoundException @@ -231,14 +309,16 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, return technical_processes, non_tech_processes -def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group_id: int): - query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ +def get_sim_data( + db_connection: PooledMySQLConnection, vcs_id: int, design_group_id: int +): + query = f"SELECT cvs_vcs_rows.id, cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ - WHERE cvs_vcs_rows.vcs = %s AND cvs_design_mi_formulas.design_group = %s ORDER BY `index`' + WHERE cvs_vcs_rows.vcs = %s AND cvs_design_mi_formulas.design_group = %s ORDER BY `index`" with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, [vcs_id, design_group_id]) res = cursor.fetchall() @@ -246,7 +326,11 @@ def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group return res -def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], design_group_ids: List[int]): +def get_all_sim_data( + db_connection: PooledMySQLConnection, + vcs_ids: List[int], + design_group_ids: List[int], +): try: query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ @@ -263,7 +347,7 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchSimulationDataException return res @@ -281,18 +365,20 @@ def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchValueDriverDesignValuesException return res def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: int): - logger.debug(f'Fetching simulation settings for project {project_id}') + logger.debug(f"Fetching simulation settings for project {project_id}") select_statement = MySQLStatementBuilder(db_connection) 
- res = select_statement.select(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS) \ - .where('project = %s', [project_id]) \ + res = ( + select_statement.select(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS) + .where("project = %s", [project_id]) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + ) if res is None: raise e.SimSettingsNotFoundException @@ -300,20 +386,28 @@ def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: in return populate_sim_settings(res) -def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: int, - sim_settings: models.EditSimSettings, user_id: int): - logger.debug(f'Editing simulation settings for project {project_id}') +def edit_simulation_settings( + db_connection: PooledMySQLConnection, + project_id: int, + sim_settings: models.EditSimSettings, + user_id: int, +): + logger.debug(f"Editing simulation settings for project {project_id}") - if (sim_settings.flow_process is None and sim_settings.flow_start_time is None) \ - or (sim_settings.flow_process is not None and sim_settings.flow_start_time is not None): + if (sim_settings.flow_process is None and sim_settings.flow_start_time is None) or ( + sim_settings.flow_process is not None + and sim_settings.flow_start_time is not None + ): raise e.InvalidFlowSettingsException count_sim = MySQLStatementBuilder(db_connection) - count = count_sim.count(SIM_SETTINGS_TABLE) \ - .where('project = %s', [project_id]) \ + count = ( + count_sim.count(SIM_SETTINGS_TABLE) + .where("project = %s", [project_id]) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + ) - count = count['count'] + count = count["count"] if sim_settings.flow_process is not None: flow_process_exists = False @@ -321,48 +415,75 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i for vcs in vcss: rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs.id) for row in rows: - if (row.iso_process is not None and row.iso_process.name == sim_settings.flow_process) or \ - (row.subprocess is not None and f'{row.subprocess.name} ({row.subprocess.parent_process.name})' - == sim_settings.flow_process): + if ( + row.iso_process is not None + and row.iso_process.name == sim_settings.flow_process + ) or ( + row.subprocess is not None + and f"{row.subprocess.name} ({row.subprocess.parent_process.name})" + == sim_settings.flow_process + ): flow_process_exists = True break if not flow_process_exists: raise e.FlowProcessNotFoundException - if (count == 1): + if count == 1: columns = SIM_SETTINGS_COLUMNS[1:] - set_statement = ','.join([col + ' = %s' for col in columns]) - - values = [sim_settings.time_unit.value, sim_settings.flow_process, sim_settings.flow_start_time, - sim_settings.flow_time, - sim_settings.interarrival_time, sim_settings.start_time, sim_settings.end_time, - sim_settings.discount_rate, sim_settings.non_tech_add.value, sim_settings.monte_carlo, - sim_settings.runs] + set_statement = ",".join([col + " = %s" for col in columns]) + + values = [ + sim_settings.time_unit.value, + sim_settings.flow_process, + sim_settings.flow_start_time, + sim_settings.flow_time, + sim_settings.interarrival_time, + sim_settings.start_time, + sim_settings.end_time, + sim_settings.discount_rate, + sim_settings.non_tech_add.value, + sim_settings.monte_carlo, + sim_settings.runs, + ] update_statement = MySQLStatementBuilder(db_connection) - _, rows = update_statement \ - .update(table=SIM_SETTINGS_TABLE, set_statement=set_statement, values=values) \ - .where('project = %s', [project_id]) \ + _, 
rows = ( + update_statement.update( + table=SIM_SETTINGS_TABLE, set_statement=set_statement, values=values + ) + .where("project = %s", [project_id]) .execute(return_affected_rows=True) + ) - elif (count == 0): + elif count == 0: create_sim_settings(db_connection, project_id, sim_settings) return True -def create_sim_settings(db_connection: PooledMySQLConnection, project_id: int, - sim_settings: models.EditSimSettings) -> bool: - values = [project_id] + [sim_settings.time_unit.value, sim_settings.flow_process, sim_settings.flow_start_time, - sim_settings.flow_time, - sim_settings.interarrival_time, sim_settings.start_time, sim_settings.end_time, - sim_settings.discount_rate, sim_settings.non_tech_add.value, sim_settings.monte_carlo, - sim_settings.runs] +def create_sim_settings( + db_connection: PooledMySQLConnection, + project_id: int, + sim_settings: models.EditSimSettings, +) -> bool: + values = [project_id] + [ + sim_settings.time_unit.value, + sim_settings.flow_process, + sim_settings.flow_start_time, + sim_settings.flow_time, + sim_settings.interarrival_time, + sim_settings.start_time, + sim_settings.end_time, + sim_settings.discount_rate, + sim_settings.non_tech_add.value, + sim_settings.monte_carlo, + sim_settings.runs, + ] insert_statement = MySQLStatementBuilder(db_connection) - insert_statement.insert(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS) \ - .set_values(values) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement.insert(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS).set_values( + values + ).execute(fetch_type=FetchType.FETCH_NONE) return True @@ -376,14 +497,14 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchMarketInputValuesException return res def add_multiplication_signs(formula: str) -> str: # Define a regular expression pattern to find the positions where the multiplication sign is missing - pattern = r'(\d)([a-zA-Z({\[<])|([}\])>]|})([a-zA-Z({\[<])|([}\])>]|{)(\d)' + pattern = r"(\d)([a-zA-Z({\[<])|([}\])>]|})([a-zA-Z({\[<])|([}\])>]|{)(\d)" # Use the re.sub() function to replace the matches with the correct format def replace(match): @@ -398,13 +519,14 @@ def replace(match): def parse_if_statement(formula: str) -> str: # The pattern is if(condition, true_value, false_value) - pattern = r'if\(([^,]+),([^,]+),([^,]+)\)' + pattern = r"if\(([^,]+),([^,]+),([^,]+)\)" match = re.search(pattern, formula) parser = BaseArithmeticParser() if match: condition, true_value, false_value = match.groups() - condition = condition.replace('=', '==') + condition = condition.replace("=", "==") + logger.debug(f"Parsing if statement {condition}, {true_value}, {false_value}") if parser.evaluate(condition): value = true_value else: @@ -426,14 +548,16 @@ def replace(match): id_number = int(value) for vd in vd_values: if vd["id"] == id_number: - return str(vd["value"]) + vd_value = str(vd["value"]) + return vd_value if vd_value.replace('.','').isnumeric() else '"' + vd_value + '"' elif tag == "ef": for ef in ef_values: id_number = int(value) if ef["market_input"] == id_number: - return str(ef["value"]) + ef_value = str(ef["value"]) + return ef_value if ef_value.replace('.','').isnumeric() else '"' + ef_value + '"' elif formula_row and tag == "process": - return f'({formula_row[value.lower()]})' + return f"({formula_row[value.lower()]})" return 
match.group() @@ -442,7 +566,9 @@ def replace(match): replaced_text = parse_if_statement(replaced_text) - replaced_text = re.sub(pattern, '0', replaced_text) # If there are any tags left, replace them with 0 + replaced_text = re.sub( + pattern, "0", replaced_text + ) # If there are any tags left, replace them with 0 return replaced_text @@ -452,12 +578,18 @@ def check_entity_rate(db_results, flow_process_name: str): # Set the flow_process_index to be highest possible. flow_process_index = len(db_results) for i in range(len(db_results)): - if db_results[i]['sub_name'] == flow_process_name or db_results[i]['iso_name'] == flow_process_name: + if ( + db_results[i]["sub_name"] == flow_process_name + or db_results[i]["iso_name"] == flow_process_name + ): flow_process_index = i if i > flow_process_index: for j in range(i, len(db_results)): - if db_results[j]['rate'] == 'per_project' and db_results[j]['category'] == 'Technical processes': + if ( + db_results[j]["rate"] == "per_project" + and db_results[j]["category"] == "Technical processes" + ): print("Rate check false") rate_check = False break @@ -496,7 +628,9 @@ def create_simple_dsm(processes: List[Process]) -> dict: else: name = processes[i - 1].name - dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) + dsm.update( + {name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]} + ) return dsm @@ -518,18 +652,18 @@ def fill_dsm_with_zeros(dsm: dict) -> dict: def populate_sim_settings(db_result) -> models.SimSettings: - logger.debug(f'Populating simulation settings') + logger.debug(f"Populating simulation settings") return models.SimSettings( - project=db_result['project'], - time_unit=db_result['time_unit'], - flow_process=db_result['flow_process'], - flow_start_time=db_result['flow_start_time'], - flow_time=db_result['flow_time'], - interarrival_time=db_result['interarrival_time'], - start_time=db_result['start_time'], - end_time=db_result['end_time'], - discount_rate=db_result['discount_rate'], - non_tech_add=db_result['non_tech_add'], - monte_carlo=db_result['monte_carlo'], - runs=db_result['runs'] + project=db_result["project"], + time_unit=db_result["time_unit"], + flow_process=db_result["flow_process"], + flow_start_time=db_result["flow_start_time"], + flow_time=db_result["flow_time"], + interarrival_time=db_result["interarrival_time"], + start_time=db_result["start_time"], + end_time=db_result["end_time"], + discount_rate=db_result["discount_rate"], + non_tech_add=db_result["non_tech_add"], + monte_carlo=db_result["monte_carlo"], + runs=db_result["runs"], ) From af77eeb0273a3598806755105153251af8d9feb9 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 15 Oct 2023 15:41:05 +0200 Subject: [PATCH 171/210] update port --- sedbackend/apps/core/db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 7f8542e7..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -12,7 +12,7 @@ password = Environment.get_variable('MYSQL_PWD_RW') host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 23ef46fa8c53533470eb967123721073ac164d43 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 15 Oct 2023 15:51:01 +0200 Subject: [PATCH 172/210] added plusminus to requirements.txt --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c9e7a649..01e37e6f 
100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,4 +14,5 @@ openpyxl==3.1.2 mysql-statement-builder==0.* python-magic==0.4.27 pytest==7.3.1 -httpx==0.24.0 \ No newline at end of file +httpx==0.24.0 +plusminus==0.7.0 \ No newline at end of file From f4056a4c5f5e2ec9c75d0044ae217056edfbd6be Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 16 Oct 2023 13:27:38 +0200 Subject: [PATCH 173/210] VARCHAR to CHAR --- sql/V231010_cvs.sql | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 sql/V231010_cvs.sql diff --git a/sql/V231010_cvs.sql b/sql/V231010_cvs.sql new file mode 100644 index 00000000..058ab0bc --- /dev/null +++ b/sql/V231010_cvs.sql @@ -0,0 +1,2 @@ +ALTER TABLE `seddb`.`cvs_vd_design_values` + MODIFY COLUMN `value` CHAR(255); \ No newline at end of file From 5163181b4790c102b79ed9d981e372a75e74cc0c Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 23 Oct 2023 14:41:35 +0200 Subject: [PATCH 174/210] run simulation with 0 if time, cost, revenue not specified --- sedbackend/apps/cvs/simulation/storage.py | 48 +- tests/apps/cvs/simulation/test_simulation.py | 526 +++++++++++-------- 2 files changed, 326 insertions(+), 248 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 4314c2c0..87caaa07 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -269,7 +269,7 @@ def populate_processes( parser.evaluate(expr.replace_all("time", time, revenue_formula)), row["iso_name"], non_tech_add, - TIME_FORMAT_DICT.get(row["time_unit"].lower()), + TIME_FORMAT_DICT.get(row["time_unit"].lower() if row["time_unit"] else "year"), ) if p.time < 0: raise e.NegativeTimeException(row["id"]) @@ -294,7 +294,7 @@ def populate_processes( parser.evaluate(expr.replace_all("time", time, revenue_formula)), sub_name, non_tech_add, - TIME_FORMAT_DICT.get(row["time_unit"].lower()), + TIME_FORMAT_DICT.get(row["time_unit"].lower() if row["time_unit"] else "year"), ) if p.time < 0: @@ -309,41 +309,26 @@ def populate_processes( return technical_processes, non_tech_processes -def get_sim_data( - db_connection: PooledMySQLConnection, vcs_id: int, design_group_id: int -): - query = f"SELECT cvs_vcs_rows.id, cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ - subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ - LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ - LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ - OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ - LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ - WHERE cvs_vcs_rows.vcs = %s AND cvs_design_mi_formulas.design_group = %s ORDER BY `index`" - with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, [vcs_id, design_group_id]) - res = cursor.fetchall() - res = [dict(zip(cursor.column_names, row)) for row in res] - return res - - def get_all_sim_data( db_connection: PooledMySQLConnection, vcs_ids: List[int], design_group_ids: List[int], ): try: - query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ + query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_groups.id as design_group, \ cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ subprocess, cvs_subprocesses.name as sub_name, time, time_unit, cost, revenue, rate FROM cvs_vcs_rows \ + LEFT JOIN 
cvs_design_groups ON cvs_design_groups.id IN ({",".join(["%s" for _ in range(len(design_group_ids))])}) \ LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ - LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ + LEFT JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ + AND cvs_design_mi_formulas.design_group \ + IN ({",".join(["%s" for _ in range(len(design_group_ids))])}) \ WHERE cvs_vcs_rows.vcs IN ({",".join(["%s" for _ in range(len(vcs_ids))])}) \ - AND cvs_design_mi_formulas.design_group \ - IN ({",".join(["%s" for _ in range(len(design_group_ids))])}) ORDER BY `index`' + ORDER BY `index`' with db_connection.cursor(prepared=True) as cursor: - cursor.execute(query, vcs_ids + design_group_ids) + cursor.execute(query, design_group_ids + design_group_ids + vcs_ids) res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: @@ -538,6 +523,8 @@ def parse_if_statement(formula: str) -> str: def parse_formula(formula: str, vd_values, ef_values, formula_row: dict = None) -> str: + if not formula: + return "0" pattern = r'\{(?Pvd|ef|process):(?P[a-zA-Z0-9_]+),"([^"]+)"\}' formula = add_multiplication_signs(formula) @@ -549,13 +536,21 @@ def replace(match): for vd in vd_values: if vd["id"] == id_number: vd_value = str(vd["value"]) - return vd_value if vd_value.replace('.','').isnumeric() else '"' + vd_value + '"' + return ( + vd_value + if vd_value.replace(".", "").isnumeric() + else '"' + vd_value + '"' + ) elif tag == "ef": for ef in ef_values: id_number = int(value) if ef["market_input"] == id_number: ef_value = str(ef["value"]) - return ef_value if ef_value.replace('.','').isnumeric() else '"' + ef_value + '"' + return ( + ef_value + if ef_value.replace(".", "").isnumeric() + else '"' + ef_value + '"' + ) elif formula_row and tag == "process": return f"({formula_row[value.lower()]})" @@ -590,7 +585,6 @@ def check_entity_rate(db_results, flow_process_name: str): db_results[j]["rate"] == "per_project" and db_results[j]["category"] == "Technical processes" ): - print("Rate check false") rate_check = False break break diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 63b0b0c7..3f83ab7a 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -4,259 +4,343 @@ def test_run_single_simulation(client, std_headers, std_user): - #Setup - - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 200 - - #Should probably assert some other stuff about the output to ensure that it is correct. 
- - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 200 + + # Should probably assert some other stuff about the output to ensure that it is correct. + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_invalid_design_group(client, std_headers, std_user): - # Setup + # Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) + current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False - # Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id + 9999] - }) + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id + 9999], + }, + ) - # Assert - assert res.status_code == 400 + # Assert + assert res.status_code == 400 - # Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_invalid_vcss(client, std_headers, std_user): - # Setup + # Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) + current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False - # Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id + 9999], - "design_group_ids": [design_group.id] - }) + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id + 9999], + "design_group_ids": [design_group.id], + }, + ) - # Assert - assert res.status_code == 400 + # Assert + assert res.status_code == 400 - # Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, 
current_user.id) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_end_time_before_start_time(client, std_headers, std_user): - #Setup - - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.end_time = settings.start_time - 1 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.end_time = settings.start_time - 1 + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_flow_time_above_total_time(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_time = settings.start_time * settings.end_time - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_time = settings.start_time * settings.end_time + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": 
"Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_no_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = None - settings.flow_process = None - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id,[vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_start_time = None + settings.flow_process = None + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_both_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = 5 - settings.flow_process = 10 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_start_time = 5 + settings.flow_process = 10 + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": 
[design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_rate_invalid_order(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - flow_proc = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id, current_user.id) - - settings.monte_carlo = False - settings.flow_process = flow_proc.iso_process.name if flow_proc.iso_process is not None else flow_proc.subprocess.name - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Wrong order of rate of entities. Per project assigned after per product'} #RateWrongOrderException - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + flow_proc = tu.edit_rate_order_formulas( + project.id, vcs.id, design_group.id, current_user.id + ) + + settings.monte_carlo = False + settings.flow_process = ( + flow_proc.iso_process.name + if flow_proc.iso_process is not None + else flow_proc.subprocess.name + ) + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Wrong order of rate of entities. Per project assigned after per product" + } # RateWrongOrderException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_invalid_proj(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - - settings.monte_carlo = False - project_id = project.id + 10000 - - #Act - res = client.post(f'/api/cvs/project/{project_id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 404 - assert res.json() == {'detail': 'Sub-project not found.'} - - #Should probably assert some other stuff about the output to ensure that it is correct. 
- - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + + settings.monte_carlo = False + project_id = project.id + 10000 + + # Act + res = client.post( + f"/api/cvs/project/{project_id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 404 + assert res.json() == {"detail": "Sub-project not found."} + + # Should probably assert some other stuff about the output to ensure that it is correct. + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) + + +def test_run_single_simulation_no_values(client, std_headers, std_user): + # Setup + + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + vcs = tu.seed_random_vcs(project.id, current_user.id) + tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 10) + design_group = tu.seed_random_design_group(project.id) + designs = tu.seed_random_designs(project.id, design_group.id, 1) + settings = tu.seed_simulation_settings(project.id, [vcs.id], [designs[0].id]) + settings.monte_carlo = False + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + print(res.json()) + + # Assert + assert res.status_code == 200 + assert res.json()["runs"][0]["max_NPVs"][-1] == 0 + + # Should probably assert some other stuff about the output to ensure that it is correct. 
+ + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) From 596c94b0f69ef646e768bd0e86238cc5e30a2606 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 24 Oct 2023 11:14:07 +0200 Subject: [PATCH 175/210] improved error handling for simulation --- sedbackend/apps/cvs/simulation/exceptions.py | 15 +- .../apps/cvs/simulation/implementation.py | 146 ++++-- sedbackend/apps/cvs/simulation/storage.py | 467 ++++++++++------- tests/apps/cvs/simulation/test_simulation.py | 488 ++++++++++-------- 4 files changed, 662 insertions(+), 454 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/exceptions.py b/sedbackend/apps/cvs/simulation/exceptions.py index c19363df..dbcb8b0f 100644 --- a/sedbackend/apps/cvs/simulation/exceptions.py +++ b/sedbackend/apps/cvs/simulation/exceptions.py @@ -14,8 +14,9 @@ class EntityRateOutOfOrderException(Exception): class FormulaEvalException(Exception): - def __init__(self, process_id) -> None: - self.process_id = process_id + def __init__(self, exception, sim_data) -> None: + self.name = sim_data['iso_name'] if sim_data['iso_name'] is not None else sim_data['sub_name'] + self.message = str(exception) class RateWrongOrderException(Exception): @@ -23,12 +24,13 @@ class RateWrongOrderException(Exception): class NegativeTimeException(Exception): - def __init__(self, process_id) -> None: - self.process_id = process_id + def __init__(self, sim_data) -> None: + self.name = sim_data['iso_name'] if sim_data['iso_name'] is not None else sim_data['sub_name'] class SimulationFailedException(Exception): - pass + def __init__(self, exception) -> None: + self.message = str(exception) class DesignIdsNotFoundException(Exception): @@ -44,7 +46,8 @@ class VcsFailedException(Exception): class BadlyFormattedSettingsException(Exception): - pass + def __init__(self, message) -> None: + self.message = message class FlowProcessNotFoundException(Exception): diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index e457dafc..45367cee 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -9,11 +9,24 @@ from sedbackend.apps.core.authentication import exceptions as auth_ex from sedbackend.apps.core.db import get_connection from sedbackend.apps.cvs.project import exceptions as project_exceptions -from sedbackend.apps.cvs.simulation.exceptions import BadlyFormattedSettingsException, DSMFileNotFoundException, \ - DesignIdsNotFoundException, FormulaEvalException, NegativeTimeException, ProcessNotFoundException, \ - RateWrongOrderException, InvalidFlowSettingsException, VcsFailedException, FlowProcessNotFoundException, \ - SimSettingsNotFoundException, CouldNotFetchSimulationDataException, CouldNotFetchMarketInputValuesException, \ - CouldNotFetchValueDriverDesignValuesException, NoTechnicalProcessException +from sedbackend.apps.cvs.simulation.exceptions import ( + BadlyFormattedSettingsException, + DSMFileNotFoundException, + DesignIdsNotFoundException, + FormulaEvalException, + NegativeTimeException, + ProcessNotFoundException, + RateWrongOrderException, + InvalidFlowSettingsException, + SimulationFailedException, + VcsFailedException, + FlowProcessNotFoundException, + SimSettingsNotFoundException, + CouldNotFetchSimulationDataException, + CouldNotFetchMarketInputValuesException, + CouldNotFetchValueDriverDesignValuesException, + 
NoTechnicalProcessException, +) from sedbackend.apps.cvs.simulation.models import SimulationResult from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions @@ -21,146 +34,172 @@ from sedbackend.apps.core.files import exceptions as file_ex -def run_simulation(sim_settings: models.EditSimSettings, project_id: int, vcs_ids: List[int], - design_group_ids: List[int], user_id: int, - normalized_npv: bool = False, is_multiprocessing: bool = False) -> SimulationResult: +def run_simulation( + sim_settings: models.EditSimSettings, + project_id: int, + vcs_ids: List[int], + design_group_ids: List[int], + user_id: int, + normalized_npv: bool = False, + is_multiprocessing: bool = False, +) -> SimulationResult: try: with get_connection() as con: - result = storage.run_simulation(con, sim_settings, project_id, vcs_ids, design_group_ids, user_id, - normalized_npv, is_multiprocessing) + result = storage.run_simulation( + con, + sim_settings, + project_id, + vcs_ids, + design_group_ids, + user_id, + normalized_npv, + is_multiprocessing, + ) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, - detail='Unauthorized user.', + detail="Unauthorized user.", ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find project.', + detail=f"Could not find project.", ) except market_input_exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find market input', + detail=f"Could not find market input", ) except ProcessNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find process', + detail=f"Could not find process", ) except FormulaEvalException as e: + # TODO: Add vcs name and design group name. Have to update get_all_sim_data raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not evaluate formulas of process with id: {e.process_id}' + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Failed to evaluate formulas of process {e.name}. {e.message.capitalize()}.", ) except RateWrongOrderException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Wrong order of rate of entities. Per project assigned after per product' + detail=f"Wrong order of rate of entities. Total sum cannot come after per product. Check your VCS table.", ) except NegativeTimeException as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Formula at process with id: {e.process_id} evaluated to negative time' + detail=f"Negative time for process {e.name}. 
Check your formulas.", ) except DesignIdsNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No design ids or empty array supplied' + detail=f"No designs in chosen design group", ) except VcsFailedException: raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Invalid vcs ids' + status_code=status.HTTP_400_BAD_REQUEST, detail=f"Invalid vcs ids" ) - except BadlyFormattedSettingsException: + except BadlyFormattedSettingsException as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Settings are not correct' + detail=f"Settings are not correct: \n {e.message}", ) except CouldNotFetchSimulationDataException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch simulation data' + detail=f"Could not fetch simulation data. Check your VCSs and Design Groups.", ) except CouldNotFetchMarketInputValuesException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch market input values' + detail=f"Could not fetch market input values", ) except CouldNotFetchValueDriverDesignValuesException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not fetch value driver design values' + detail=f"Could not fetch value driver design values", ) except NoTechnicalProcessException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No technical processes found' + detail=f"No technical processes found", ) except file_ex.FileNotFoundException: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find DSM file' + status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find DSM file" + ) + except FailedToFetchSimulationDataException: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Could not fetch simulation data. 
Check your VCSs and Design Groups.", + ) + except SimulationFailedException as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, detail=e.message.capitalize() ) -def run_dsm_file_simulation(user_id: int, project_id: int, sim_params: models.FileParams, - dsm_file: UploadFile) -> List[models.Simulation]: +def run_dsm_file_simulation( + user_id: int, project_id: int, sim_params: models.FileParams, dsm_file: UploadFile +) -> List[models.Simulation]: try: with get_connection() as con: - res = storage.run_sim_with_dsm_file(con, user_id, project_id, sim_params, dsm_file) # Wtf saknar xlsx file + res = storage.run_sim_with_dsm_file( + con, user_id, project_id, sim_params, dsm_file + ) # Wtf saknar xlsx file return res except auth_ex.UnauthorizedOperationException: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, - detail='Unauthorized user.', + detail="Unauthorized user.", ) except vcs_exceptions.VCSNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find vcs with id=.', + detail=f"Could not find vcs with id=.", ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find project.', + detail=f"Could not find project.", ) except market_input_exceptions.ExternalFactorNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find market input', + detail=f"Could not find market input", ) except ProcessNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not find process', + detail=f"Could not find process", ) except DSMFileNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Could not read uploaded file' + detail=f"Could not read uploaded file", ) except FormulaEvalException as e: + # TODO: Add vcs name and design group name. Have to update get_all_sim_data raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not evaluate formulas of process with id: {e.process_id}' + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Failed to evaluate formulas of process {e.name}. {e.message.capitalize()}.", ) except RateWrongOrderException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Wrong order of rate of entities. Per project assigned after per product' + detail=f"Wrong order of rate of entities. Total sum cannot come after per product. 
Check your VCS table.", ) except NegativeTimeException as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Formula at process with id: {e.process_id} evaluated to negative time' + detail=f"Formula at process with id: {e.process_id} evaluated to negative time", ) except DesignIdsNotFoundException: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'No design ids or empty array supplied' + detail=f"No designs in chosen design group", ) @@ -172,42 +211,45 @@ def get_sim_settings(project_id: int) -> models.SimSettings: return result except project_exceptions.CVSProjectNotFoundException: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find project' + status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find project" ) except SimSettingsNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'Could not find simulation settings' + detail=f"Could not find simulation settings", ) except Exception as e: logger.debug(e) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not send simulation settings' + detail=f"Could not send simulation settings", ) -def edit_sim_settings(project_id: int, sim_settings: models.EditSimSettings, user_id: int) -> bool: +def edit_sim_settings( + project_id: int, sim_settings: models.EditSimSettings, user_id: int +) -> bool: try: with get_connection() as con: - res = storage.edit_simulation_settings(con, project_id, sim_settings, user_id) + res = storage.edit_simulation_settings( + con, project_id, sim_settings, user_id + ) con.commit() return res except InvalidFlowSettingsException: logger.debug("Invalid flow settings") raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Both flow process and flow start time supplied or neither supplied' + detail=f"Both flow process and flow start time supplied or neither supplied", ) except FlowProcessNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, - detail=f'The supplied flow process can not be found in any vcs' + detail=f"The supplied flow process can not be found in any vcs", ) except Exception as e: logger.debug(e) raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f'Could not update simulation settings' + detail=f"Could not update simulation settings", ) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index ecbebcdd..d0b50288 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -29,27 +29,46 @@ from sedbackend.apps.core.files import exceptions as file_exceptions SIM_SETTINGS_TABLE = "cvs_simulation_settings" -SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', - 'interarrival_time', 'start_time', 'end_time', 'discount_rate', 'non_tech_add', 'monte_carlo', - 'runs'] - -TIME_FORMAT_DICT = dict({ - 'year': TimeFormat.YEAR, - 'month': TimeFormat.MONTH, - 'week': TimeFormat.WEEK, - 'day': TimeFormat.DAY, - 'hour': TimeFormat.HOUR, - 'minutes': TimeFormat.MINUTES -}) - - -def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, - project_id: int, vcs_ids: List[int], - design_group_ids: List[int], user_id, normalized_npv: bool = False, - is_multiprocessing: bool = False, - ) -> SimulationResult: - if not check_sim_settings(sim_settings): - raise e.BadlyFormattedSettingsException +SIM_SETTINGS_COLUMNS = [ + "project", + 
"time_unit", + "flow_process", + "flow_start_time", + "flow_time", + "interarrival_time", + "start_time", + "end_time", + "discount_rate", + "non_tech_add", + "monte_carlo", + "runs", +] + +TIME_FORMAT_DICT = dict( + { + "year": TimeFormat.YEAR, + "month": TimeFormat.MONTH, + "week": TimeFormat.WEEK, + "day": TimeFormat.DAY, + "hour": TimeFormat.HOUR, + "minutes": TimeFormat.MINUTES, + } +) + + +def run_simulation( + db_connection: PooledMySQLConnection, + sim_settings: models.EditSimSettings, + project_id: int, + vcs_ids: List[int], + design_group_ids: List[int], + user_id, + normalized_npv: bool = False, + is_multiprocessing: bool = False, +) -> SimulationResult: + settings_msg = check_sim_settings(sim_settings) + if settings_msg: + raise e.BadlyFormattedSettingsException(settings_msg) interarrival = sim_settings.interarrival_time flow_time = sim_settings.flow_time @@ -62,28 +81,31 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed runs = sim_settings.runs all_sim_data = get_all_sim_data(db_connection, vcs_ids, design_group_ids) - all_market_values = get_all_market_values(db_connection, vcs_ids) - all_designs = get_all_designs(db_connection, design_group_ids) - - all_vd_design_values = get_all_vd_design_values(db_connection, [design.id for design in all_designs]) + all_vd_design_values = get_all_vd_design_values( + db_connection, [design.id for design in all_designs] + ) unique_vds = {} for vd in all_vd_design_values: element_id = vd["id"] if element_id not in unique_vds: - unique_vds[element_id] = {"id": vd["id"], "name": vd["name"], "unit": vd["unit"], "project_id": vd["project"]} + unique_vds[element_id] = { + "id": vd["id"], + "name": vd["name"], + "unit": vd["unit"], + "project_id": vd["project"], + } all_vds = list(unique_vds.values()) - all_dsm_ids = life_cycle_storage.get_multiple_dsm_file_id(db_connection, vcs_ids) - all_vcss = get_vcss(db_connection, project_id, vcs_ids, user_id) - - sim_result = SimulationResult(designs=all_designs, vcss=all_vcss, vds=all_vds, runs=[]) + sim_result = SimulationResult( + designs=all_designs, vcss=all_vcss, vds=all_vds, runs=[] + ) for vcs_id in vcs_ids: - market_values = [mi for mi in all_market_values if mi['vcs'] == vcs_id] + market_values = [mi for mi in all_market_values if mi["vcs"] == vcs_id] dsm_id = [dsm for dsm in all_dsm_ids if dsm[0] == vcs_id] dsm = None if len(dsm_id) > 0: @@ -93,22 +115,33 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed except file_exceptions.FileNotFoundException: pass for design_group_id in design_group_ids: - sim_data = [sd for sd in all_sim_data if sd['vcs'] == vcs_id and sd['design_group'] == design_group_id] + sim_data = [ + sd + for sd in all_sim_data + if sd["vcs"] == vcs_id and sd["design_group"] == design_group_id + ] if sim_data is None or sim_data == []: - raise e.VcsFailedException + raise e.CouldNotFetchSimulationDataException if not check_entity_rate(sim_data, process): raise e.RateWrongOrderException - designs = [design.id for design in all_designs if design.design_group_id == design_group_id] + designs = [ + design.id + for design in all_designs + if design.design_group_id == design_group_id + ] if designs is None or []: raise e.DesignIdsNotFoundException for design in designs: - vd_values = [vd for vd in all_vd_design_values if vd['design'] == design] - processes, non_tech_processes = populate_processes(non_tech_add, sim_data, design, market_values, - vd_values) + vd_values = [ + vd for vd in all_vd_design_values if vd["design"] 
== design + ] + processes, non_tech_processes = populate_processes( + non_tech_add, sim_data, design, market_values, vd_values + ) if dsm is None: dsm = create_simple_dsm(processes) @@ -117,30 +150,58 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed try: if is_monte_carlo and not is_multiprocessing: - results = sim.run_monte_carlo_simulation(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, - runs) + results = sim.run_monte_carlo_simulation( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + runs, + ) elif is_monte_carlo and is_multiprocessing: - results = sim.run_parallell_simulations(flow_time, interarrival, process, processes, - non_tech_processes, - non_tech_add, dsm, time_unit, discount_rate, runtime, - runs) + results = sim.run_parallell_simulations( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + runs, + ) else: - results = sim.run_simulation(flow_time, interarrival, process, processes, non_tech_processes, - non_tech_add, dsm, time_unit, - discount_rate, runtime) + results = sim.run_simulation( + flow_time, + interarrival, + process, + processes, + non_tech_processes, + non_tech_add, + dsm, + time_unit, + discount_rate, + runtime, + ) except Exception as exc: tb = sys.exc_info()[2] - logger.debug( - f'{exc.__class__}, {exc}, {exc.with_traceback(tb)}') - print(f'{exc.__class__}, {exc}') - raise e.SimulationFailedException + logger.debug(f"{exc.__class__}, {exc}, {exc.with_traceback(tb)}") + print(f"{exc.__class__}, {exc}") + raise e.SimulationFailedException(exc) sim_run_res = models.Simulation( time=results.timesteps[-1], - mean_NPV=results.normalize_npv() if normalized_npv else results.mean_npv(), + mean_NPV=results.normalize_npv() + if normalized_npv + else results.mean_npv(), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs, @@ -151,13 +212,13 @@ def run_simulation(db_connection: PooledMySQLConnection, sim_settings: models.Ed ) sim_result.runs.append(sim_run_res) - logger.debug('Returning the results') + logger.debug("Returning the results") return sim_result -def populate_processes(non_tech_add: NonTechCost, db_results, design: int, - mi_values=None, - vd_values=None): +def populate_processes( + non_tech_add: NonTechCost, db_results, design: int, mi_values=None, vd_values=None +): if mi_values is None: mi_values = [] nsp = NumericStringParser() @@ -166,64 +227,71 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, non_tech_processes = [] for row in db_results: - vd_values_row = [vd for vd in vd_values if vd['vcs_row'] == row['id'] and vd['design'] == design] - if row['category'] != 'Technical processes': + vd_values_row = [ + vd + for vd in vd_values + if vd["vcs_row"] == row["id"] and vd["design"] == design + ] + if row["category"] != "Technical processes": try: non_tech = models.NonTechnicalProcess( - cost=nsp.eval(parse_formula(row['cost'], vd_values_row, mi_values, row)), + cost=nsp.eval( + parse_formula(row["cost"], vd_values_row, mi_values, row) + ), revenue=nsp.eval( - parse_formula(row['revenue'], vd_values_row, mi_values, row)), - name=row['iso_name']) + parse_formula(row["revenue"], vd_values_row, mi_values, row) + ), + name=row["iso_name"], + ) except Exception as exc: - logger.debug(f'{exc.__class__}, 
{exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(exc, row) non_tech_processes.append(non_tech) - elif row['iso_name'] is not None and row['sub_name'] is None: + elif row["iso_name"] is not None and row["sub_name"] is None: try: - time = nsp.eval(parse_formula( - row['time'], vd_values, mi_values, row)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) + time = nsp.eval(parse_formula(row["time"], vd_values, mi_values, row)) + cost_formula = parse_formula(row["cost"], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values, row) - p = Process(row['id'], - time, - nsp.eval(expr.replace_all( - 'time', time, cost_formula)), - nsp.eval(expr.replace_all( - 'time', time, revenue_formula)), - row['iso_name'], non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) - ) - if p.time < 0: - raise e.NegativeTimeException(row['id']) + row["revenue"], vd_values, mi_values, row + ) + p = Process( + row["id"], + time, + nsp.eval(expr.replace_all("time", time, cost_formula)), + nsp.eval(expr.replace_all("time", time, revenue_formula)), + row["iso_name"], + non_tech_add, + TIME_FORMAT_DICT.get(row["time_unit"].lower()), + ) except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(exc, row) + if p.time < 0: + raise e.NegativeTimeException(row) technical_processes.append(p) - elif row['sub_name'] is not None: + elif row["sub_name"] is not None: sub_name = f'{row["sub_name"]} ({row["iso_name"]})' try: - time = nsp.eval(parse_formula( - row['time'], vd_values, mi_values, row)) - cost_formula = parse_formula(row['cost'], vd_values, mi_values, row) + time = nsp.eval(parse_formula(row["time"], vd_values, mi_values, row)) + cost_formula = parse_formula(row["cost"], vd_values, mi_values, row) revenue_formula = parse_formula( - row['revenue'], vd_values, mi_values, row) - p = Process(row['id'], - time, - nsp.eval(expr.replace_all( - 'time', time, cost_formula)), - nsp.eval(expr.replace_all( - 'time', time, revenue_formula)), - sub_name, non_tech_add, TIME_FORMAT_DICT.get( - row['time_unit'].lower()) - ) - - if p.time < 0: - raise e.NegativeTimeException(row['id']) + row["revenue"], vd_values, mi_values, row + ) + p = Process( + row["id"], + time, + nsp.eval(expr.replace_all("time", time, cost_formula)), + nsp.eval(expr.replace_all("time", time, revenue_formula)), + sub_name, + non_tech_add, + TIME_FORMAT_DICT.get(row["time_unit"].lower()), + ) except Exception as exc: - logger.debug(f'{exc.__class__}, {exc}') - raise e.FormulaEvalException(row['id']) + logger.debug(f"{exc.__class__}, {exc}") + raise e.FormulaEvalException(exc, row) + if p.time < 0: + raise e.NegativeTimeException(row) technical_processes.append(p) else: raise e.ProcessNotFoundException @@ -231,14 +299,16 @@ def populate_processes(non_tech_add: NonTechCost, db_results, design: int, return technical_processes, non_tech_processes -def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group_id: int): - query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ +def get_sim_data( + db_connection: PooledMySQLConnection, vcs_id: int, design_group_id: int +): + query = f"SELECT cvs_vcs_rows.id, cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ subprocess, cvs_subprocesses.name as sub_name, time, 
time_unit, cost, revenue, rate FROM cvs_vcs_rows \ LEFT OUTER JOIN cvs_subprocesses ON cvs_vcs_rows.subprocess = cvs_subprocesses.id \ LEFT OUTER JOIN cvs_iso_processes ON cvs_vcs_rows.iso_process = cvs_iso_processes.id \ OR cvs_subprocesses.iso_process = cvs_iso_processes.id \ LEFT OUTER JOIN cvs_design_mi_formulas ON cvs_vcs_rows.id = cvs_design_mi_formulas.vcs_row \ - WHERE cvs_vcs_rows.vcs = %s AND cvs_design_mi_formulas.design_group = %s ORDER BY `index`' + WHERE cvs_vcs_rows.vcs = %s AND cvs_design_mi_formulas.design_group = %s ORDER BY `index`" with db_connection.cursor(prepared=True) as cursor: cursor.execute(query, [vcs_id, design_group_id]) res = cursor.fetchall() @@ -246,7 +316,11 @@ def get_sim_data(db_connection: PooledMySQLConnection, vcs_id: int, design_group return res -def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], design_group_ids: List[int]): +def get_all_sim_data( + db_connection: PooledMySQLConnection, + vcs_ids: List[int], + design_group_ids: List[int], +): try: query = f'SELECT cvs_vcs_rows.id, cvs_vcs_rows.vcs, cvs_design_mi_formulas.design_group, \ cvs_vcs_rows.iso_process, cvs_iso_processes.name as iso_name, category, \ @@ -263,7 +337,7 @@ def get_all_sim_data(db_connection: PooledMySQLConnection, vcs_ids: List[int], d res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchSimulationDataException return res @@ -281,18 +355,20 @@ def get_all_vd_design_values(db_connection: PooledMySQLConnection, designs: List res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchValueDriverDesignValuesException return res def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: int): - logger.debug(f'Fetching simulation settings for project {project_id}') + logger.debug(f"Fetching simulation settings for project {project_id}") select_statement = MySQLStatementBuilder(db_connection) - res = select_statement.select(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS) \ - .where('project = %s', [project_id]) \ + res = ( + select_statement.select(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS) + .where("project = %s", [project_id]) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + ) if res is None: raise e.SimSettingsNotFoundException @@ -300,20 +376,28 @@ def get_simulation_settings(db_connection: PooledMySQLConnection, project_id: in return populate_sim_settings(res) -def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: int, - sim_settings: models.EditSimSettings, user_id: int): - logger.debug(f'Editing simulation settings for project {project_id}') +def edit_simulation_settings( + db_connection: PooledMySQLConnection, + project_id: int, + sim_settings: models.EditSimSettings, + user_id: int, +): + logger.debug(f"Editing simulation settings for project {project_id}") - if (sim_settings.flow_process is None and sim_settings.flow_start_time is None) \ - or (sim_settings.flow_process is not None and sim_settings.flow_start_time is not None): + if (sim_settings.flow_process is None and sim_settings.flow_start_time is None) or ( + sim_settings.flow_process is not None + and sim_settings.flow_start_time is not None + ): raise e.InvalidFlowSettingsException count_sim = MySQLStatementBuilder(db_connection) - count 
= count_sim.count(SIM_SETTINGS_TABLE) \ - .where('project = %s', [project_id]) \ + count = ( + count_sim.count(SIM_SETTINGS_TABLE) + .where("project = %s", [project_id]) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + ) - count = count['count'] + count = count["count"] if sim_settings.flow_process is not None: flow_process_exists = False @@ -321,48 +405,75 @@ def edit_simulation_settings(db_connection: PooledMySQLConnection, project_id: i for vcs in vcss: rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs.id) for row in rows: - if (row.iso_process is not None and row.iso_process.name == sim_settings.flow_process) or \ - (row.subprocess is not None and f'{row.subprocess.name} ({row.subprocess.parent_process.name})' - == sim_settings.flow_process): + if ( + row.iso_process is not None + and row.iso_process.name == sim_settings.flow_process + ) or ( + row.subprocess is not None + and f"{row.subprocess.name} ({row.subprocess.parent_process.name})" + == sim_settings.flow_process + ): flow_process_exists = True break if not flow_process_exists: raise e.FlowProcessNotFoundException - if (count == 1): + if count == 1: columns = SIM_SETTINGS_COLUMNS[1:] - set_statement = ','.join([col + ' = %s' for col in columns]) - - values = [sim_settings.time_unit.value, sim_settings.flow_process, sim_settings.flow_start_time, - sim_settings.flow_time, - sim_settings.interarrival_time, sim_settings.start_time, sim_settings.end_time, - sim_settings.discount_rate, sim_settings.non_tech_add.value, sim_settings.monte_carlo, - sim_settings.runs] + set_statement = ",".join([col + " = %s" for col in columns]) + + values = [ + sim_settings.time_unit.value, + sim_settings.flow_process, + sim_settings.flow_start_time, + sim_settings.flow_time, + sim_settings.interarrival_time, + sim_settings.start_time, + sim_settings.end_time, + sim_settings.discount_rate, + sim_settings.non_tech_add.value, + sim_settings.monte_carlo, + sim_settings.runs, + ] update_statement = MySQLStatementBuilder(db_connection) - _, rows = update_statement \ - .update(table=SIM_SETTINGS_TABLE, set_statement=set_statement, values=values) \ - .where('project = %s', [project_id]) \ + _, rows = ( + update_statement.update( + table=SIM_SETTINGS_TABLE, set_statement=set_statement, values=values + ) + .where("project = %s", [project_id]) .execute(return_affected_rows=True) + ) - elif (count == 0): + elif count == 0: create_sim_settings(db_connection, project_id, sim_settings) return True -def create_sim_settings(db_connection: PooledMySQLConnection, project_id: int, - sim_settings: models.EditSimSettings) -> bool: - values = [project_id] + [sim_settings.time_unit.value, sim_settings.flow_process, sim_settings.flow_start_time, - sim_settings.flow_time, - sim_settings.interarrival_time, sim_settings.start_time, sim_settings.end_time, - sim_settings.discount_rate, sim_settings.non_tech_add.value, sim_settings.monte_carlo, - sim_settings.runs] +def create_sim_settings( + db_connection: PooledMySQLConnection, + project_id: int, + sim_settings: models.EditSimSettings, +) -> bool: + values = [project_id] + [ + sim_settings.time_unit.value, + sim_settings.flow_process, + sim_settings.flow_start_time, + sim_settings.flow_time, + sim_settings.interarrival_time, + sim_settings.start_time, + sim_settings.end_time, + sim_settings.discount_rate, + sim_settings.non_tech_add.value, + sim_settings.monte_carlo, + sim_settings.runs, + ] insert_statement = MySQLStatementBuilder(db_connection) - insert_statement.insert(SIM_SETTINGS_TABLE, 
SIM_SETTINGS_COLUMNS) \ - .set_values(values) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement.insert(SIM_SETTINGS_TABLE, SIM_SETTINGS_COLUMNS).set_values( + values + ).execute(fetch_type=FetchType.FETCH_NONE) return True @@ -376,14 +487,14 @@ def get_all_market_values(db_connection: PooledMySQLConnection, vcs_ids: List[in res = cursor.fetchall() res = [dict(zip(cursor.column_names, row)) for row in res] except Error as error: - logger.debug(f'Error msg: {error.msg}') + logger.debug(f"Error msg: {error.msg}") raise e.CouldNotFetchMarketInputValuesException return res def add_multiplication_signs(formula: str) -> str: # Define a regular expression pattern to find the positions where the multiplication sign is missing - pattern = r'(\d)([a-zA-Z({\[<])|([}\])>]|})([a-zA-Z({\[<])|([}\])>]|{)(\d)' + pattern = r"(\d)([a-zA-Z({\[<])|([}\])>]|})([a-zA-Z({\[<])|([}\])>]|{)(\d)" # Use the re.sub() function to replace the matches with the correct format def replace(match): @@ -398,12 +509,12 @@ def replace(match): def parse_if_statement(formula: str) -> str: # The pattern is if(condition, true_value, false_value) - pattern = r'if\(([^,]+),([^,]+),([^,]+)\)' + pattern = r"if\(([^,]+),([^,]+),([^,]+)\)" match = re.search(pattern, formula) if match: condition, true_value, false_value = match.groups() - condition = condition.replace('=', '==') + condition = condition.replace("=", "==") if eval(condition): value = true_value else: @@ -432,7 +543,7 @@ def replace(match): if ef["market_input"] == id_number: return str(ef["value"]) elif formula_row and tag == "process": - return f'({formula_row[value.lower()]})' + return f"({formula_row[value.lower()]})" return match.group() @@ -441,7 +552,9 @@ def replace(match): replaced_text = parse_if_statement(replaced_text) - replaced_text = re.sub(pattern, '0', replaced_text) # If there are any tags left, replace them with 0 + replaced_text = re.sub( + pattern, "0", replaced_text + ) # If there are any tags left, replace them with 0 return replaced_text @@ -451,12 +564,18 @@ def check_entity_rate(db_results, flow_process_name: str): # Set the flow_process_index to be highest possible. flow_process_index = len(db_results) for i in range(len(db_results)): - if db_results[i]['sub_name'] == flow_process_name or db_results[i]['iso_name'] == flow_process_name: + if ( + db_results[i]["sub_name"] == flow_process_name + or db_results[i]["iso_name"] == flow_process_name + ): flow_process_index = i if i > flow_process_index: for j in range(i, len(db_results)): - if db_results[j]['rate'] == 'per_project' and db_results[j]['category'] == 'Technical processes': + if ( + db_results[j]["rate"] == "per_project" + and db_results[j]["category"] == "Technical processes" + ): print("Rate check false") rate_check = False break @@ -465,22 +584,18 @@ def check_entity_rate(db_results, flow_process_name: str): return rate_check -def check_sim_settings(settings: models.EditSimSettings) -> bool: - settings_check = True - +def check_sim_settings(settings: models.EditSimSettings) -> str: + settings_check_msg = "" if settings.end_time - settings.start_time <= 0: - settings_check = False + settings_check_msg += "End simulation is less than start simulation. \n" if settings.flow_time > settings.end_time - settings.start_time: - settings_check = False - - if settings.flow_start_time is not None and settings.flow_process is not None: - settings_check = False + settings_check_msg += "Flow time is longer than simulation time. 
\n" if settings.flow_start_time is None and settings.flow_process is None: - settings_check = False + settings_check_msg += "Flow start time is not set. \n" - return settings_check + return settings_check_msg # Create DSM that only goes from one process to the other following the order of the index in the VCS @@ -495,7 +610,9 @@ def create_simple_dsm(processes: List[Process]) -> dict: else: name = processes[i - 1].name - dsm.update({name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]}) + dsm.update( + {name: [1 if i + 1 == j else "X" if i == j else 0 for j in range(n)]} + ) return dsm @@ -517,18 +634,18 @@ def fill_dsm_with_zeros(dsm: dict) -> dict: def populate_sim_settings(db_result) -> models.SimSettings: - logger.debug(f'Populating simulation settings') + logger.debug(f"Populating simulation settings") return models.SimSettings( - project=db_result['project'], - time_unit=db_result['time_unit'], - flow_process=db_result['flow_process'], - flow_start_time=db_result['flow_start_time'], - flow_time=db_result['flow_time'], - interarrival_time=db_result['interarrival_time'], - start_time=db_result['start_time'], - end_time=db_result['end_time'], - discount_rate=db_result['discount_rate'], - non_tech_add=db_result['non_tech_add'], - monte_carlo=db_result['monte_carlo'], - runs=db_result['runs'] + project=db_result["project"], + time_unit=db_result["time_unit"], + flow_process=db_result["flow_process"], + flow_start_time=db_result["flow_start_time"], + flow_time=db_result["flow_time"], + interarrival_time=db_result["interarrival_time"], + start_time=db_result["start_time"], + end_time=db_result["end_time"], + discount_rate=db_result["discount_rate"], + non_tech_add=db_result["non_tech_add"], + monte_carlo=db_result["monte_carlo"], + runs=db_result["runs"], ) diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 63b0b0c7..099f101c 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -4,259 +4,305 @@ def test_run_single_simulation(client, std_headers, std_user): - #Setup - - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 200 - - #Should probably assert some other stuff about the output to ensure that it is correct. - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 200 + + # Should probably assert some other stuff about the output to ensure that it is correct. 
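For reference, the fallback DSM built by create_simple_dsm in the storage.py hunk above simply chains the processes in VCS order: each row marks "X" on its own diagonal and a 1 for the hand-off to the next process. A stripped-down, self-contained sketch of that idea (illustrative only -- the repository version indexes Process objects and treats the first row specially, which is skipped here):

from typing import Dict, List

def sequential_dsm(names: List[str]) -> Dict[str, list]:
    # One row per process; 1 marks the transition to the next process in order.
    n = len(names)
    return {
        name: [1 if j == i + 1 else "X" if j == i else 0 for j in range(n)]
        for i, name in enumerate(names)
    }

# sequential_dsm(["Design", "Manufacture", "Use"]) ->
# {"Design":      ["X", 1, 0],
#  "Manufacture": [0, "X", 1],
#  "Use":         [0, 0, "X"]}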
+ + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_invalid_design_group(client, std_headers, std_user): - # Setup + # Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) + current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False - # Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id + 9999] - }) + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id + 9999], + }, + ) - # Assert - assert res.status_code == 400 + # Assert + assert res.status_code == 400 - # Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_invalid_vcss(client, std_headers, std_user): - # Setup + # Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) + current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False - # Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json={ - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id + 9999], - "design_group_ids": [design_group.id] - }) + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id + 9999], + "design_group_ids": [design_group.id], + }, + ) - # Assert - assert res.status_code == 400 + # Assert + assert res.status_code == 400 - # Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) def test_run_sim_end_time_before_start_time(client, std_headers, std_user): - #Setup - - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.end_time = settings.start_time - 1 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - 
assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.end_time = settings.start_time - 1 + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_flow_time_above_total_time(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_time = settings.start_time * settings.end_time - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_time = settings.start_time * settings.end_time + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_no_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = None - settings.flow_process = None - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - 
"sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id,[vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_start_time = None + settings.flow_process = None + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_both_flows(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - settings.monte_carlo = False - settings.flow_start_time = 5 - settings.flow_process = 10 - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} #Should raise BadlyFormattedSettingsException - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + settings.monte_carlo = False + settings.flow_start_time = 5 + settings.flow_process = 10 + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Settings are not correct" + } # Should raise BadlyFormattedSettingsException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_rate_invalid_order(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - flow_proc = tu.edit_rate_order_formulas(project.id, vcs.id, design_group.id, current_user.id) - - 
settings.monte_carlo = False - settings.flow_process = flow_proc.iso_process.name if flow_proc.iso_process is not None else flow_proc.subprocess.name - - #Act - res = client.post(f'/api/cvs/project/{project.id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 400 - assert res.json() == {'detail': 'Wrong order of rate of entities. Per project assigned after per product'} #RateWrongOrderException - - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + flow_proc = tu.edit_rate_order_formulas( + project.id, vcs.id, design_group.id, current_user.id + ) + + settings.monte_carlo = False + settings.flow_process = ( + flow_proc.iso_process.name + if flow_proc.iso_process is not None + else flow_proc.subprocess.name + ) + + # Act + res = client.post( + f"/api/cvs/project/{project.id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 400 + assert res.json() == { + "detail": "Wrong order of rate of entities. Per project assigned after per product" + } # RateWrongOrderException + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) def test_run_sim_invalid_proj(client, std_headers, std_user): - #Setup - current_user = impl_users.impl_get_user_with_username(std_user.username) - - project, vcs, design_group, design, settings = sim_tu.setup_single_simulation(current_user.id) - - settings.monte_carlo = False - project_id = project.id + 10000 - - #Act - res = client.post(f'/api/cvs/project/{project_id}/simulation/run', - headers=std_headers, - json = { - "sim_settings": settings.dict(), - "vcs_ids": [vcs.id], - "design_group_ids": [design_group.id] - }) - - #Assert - assert res.status_code == 404 - assert res.json() == {'detail': 'Sub-project not found.'} - - #Should probably assert some other stuff about the output to ensure that it is correct. 
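The flow-settings tests above all drive the same validation path: patch 175 replaces the old boolean check_sim_settings with a check that accumulates human-readable messages (the "both flow process and flow start time set" case is dropped here and restored with its own message in the next patch). A self-contained sketch of that contract; the messages follow the storage.py hunk, while the stand-alone dataclass is an assumption for illustration:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Settings:
    start_time: float
    end_time: float
    flow_time: float
    flow_start_time: Optional[float] = None
    flow_process: Optional[str] = None

def settings_problems(s: Settings) -> str:
    # Accumulate one line per violated rule; empty string means the settings are usable.
    msg = ""
    if s.end_time - s.start_time <= 0:
        msg += "End simulation is less than start simulation. \n"
    if s.flow_time > s.end_time - s.start_time:
        msg += "Flow time is longer than simulation time. \n"
    if s.flow_start_time is not None and s.flow_process is not None:
        msg += "Cannot have flow start time on flow process. \n"
    if s.flow_start_time is None and s.flow_process is None:
        msg += "Flow start time is not set. \n"
    return msg

A non-empty result is raised as BadlyFormattedSettingsException and surfaces as a 400 whose detail starts with "Settings are not correct", which is why the follow-up patch relaxes these tests to a substring match on the detail field.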
- - - #Cleanup - tu.delete_design_group(project.id, design_group.id) - tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) - tu.delete_project_by_id(project.id, current_user.id) - tu.delete_vd_from_user(current_user.id) + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + + project, vcs, design_group, design, settings = sim_tu.setup_single_simulation( + current_user.id + ) + + settings.monte_carlo = False + project_id = project.id + 10000 + + # Act + res = client.post( + f"/api/cvs/project/{project_id}/simulation/run", + headers=std_headers, + json={ + "sim_settings": settings.dict(), + "vcs_ids": [vcs.id], + "design_group_ids": [design_group.id], + }, + ) + + # Assert + assert res.status_code == 404 + assert res.json() == {"detail": "Sub-project not found."} + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) From 49a7522e65d096a9aeb1fceb62b0f89249301276 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 24 Oct 2023 11:54:15 +0200 Subject: [PATCH 176/210] fixed failing tests --- sedbackend/apps/cvs/simulation/storage.py | 3 +++ .../cvs/simulation/test_sim_multiprocessing.py | 6 +++--- tests/apps/cvs/simulation/test_simulation.py | 18 +++++------------- 3 files changed, 11 insertions(+), 16 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index d0b50288..98500dea 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -592,6 +592,9 @@ def check_sim_settings(settings: models.EditSimSettings) -> str: if settings.flow_time > settings.end_time - settings.start_time: settings_check_msg += "Flow time is longer than simulation time. \n" + if settings.flow_start_time is not None and settings.flow_process is not None: + settings_check_msg += "Cannot have flow start time on flow process. \n" + if settings.flow_start_time is None and settings.flow_process is None: settings_check_msg += "Flow start time is not set. 
\n" diff --git a/tests/apps/cvs/simulation/test_sim_multiprocessing.py b/tests/apps/cvs/simulation/test_sim_multiprocessing.py index da5436e4..09faec4e 100644 --- a/tests/apps/cvs/simulation/test_sim_multiprocessing.py +++ b/tests/apps/cvs/simulation/test_sim_multiprocessing.py @@ -110,7 +110,7 @@ def test_run_mc_sim_end_time_before_start_time(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -139,7 +139,7 @@ def test_run_mc_sim_no_flows(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -168,7 +168,7 @@ def test_run_mc_sim_both_flows(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == {'detail': 'Settings are not correct'} # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 099f101c..a4016f64 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -119,9 +119,7 @@ def test_run_sim_end_time_before_start_time(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == { - "detail": "Settings are not correct" - } # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -153,9 +151,7 @@ def test_run_sim_flow_time_above_total_time(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == { - "detail": "Settings are not correct" - } # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -188,9 +184,7 @@ def test_run_sim_no_flows(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == { - "detail": "Settings are not correct" - } # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -223,9 +217,7 @@ def test_run_sim_both_flows(client, std_headers, std_user): # Assert assert res.status_code == 400 - assert res.json() == { - "detail": "Settings are not correct" - } # Should raise BadlyFormattedSettingsException + assert "Settings are not correct" in res.json()["detail"] # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -265,7 +257,7 @@ def test_run_sim_rate_invalid_order(client, std_headers, std_user): # Assert assert res.status_code == 400 assert res.json() == { - "detail": "Wrong order of rate of entities. Per project assigned after per product" + "detail": "Wrong order of rate of entities. Total sum cannot come after per product. Check your VCS table." 
} # RateWrongOrderException # Cleanup From 62745c6044615d8c248fcc385a1ca9d110001738 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 24 Oct 2023 13:27:40 +0200 Subject: [PATCH 177/210] better messages for failed parsing --- sedbackend/apps/cvs/simulation/exceptions.py | 9 ++++++++- tests/apps/cvs/simulation/test_sim_utils.py | 1 + 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/simulation/exceptions.py b/sedbackend/apps/cvs/simulation/exceptions.py index dbcb8b0f..ff04727b 100644 --- a/sedbackend/apps/cvs/simulation/exceptions.py +++ b/sedbackend/apps/cvs/simulation/exceptions.py @@ -1,4 +1,5 @@ from tkinter import E +import re class ProcessNotFoundException(Exception): @@ -16,7 +17,13 @@ class EntityRateOutOfOrderException(Exception): class FormulaEvalException(Exception): def __init__(self, exception, sim_data) -> None: self.name = sim_data['iso_name'] if sim_data['iso_name'] is not None else sim_data['sub_name'] - self.message = str(exception) + pattern = r"found '([^']+)'" + match = re.search(pattern, str(exception)) + if match: + found_char = match.group(1) + self.message = f"Check your formula for \"{found_char}\"" + else: + self.message = str(exception) class RateWrongOrderException(Exception): diff --git a/tests/apps/cvs/simulation/test_sim_utils.py b/tests/apps/cvs/simulation/test_sim_utils.py index d580c279..3aaa0e7e 100644 --- a/tests/apps/cvs/simulation/test_sim_utils.py +++ b/tests/apps/cvs/simulation/test_sim_utils.py @@ -207,3 +207,4 @@ def test_if_statement_formula(): # Assert assert new_formula == "2+10/5+10" assert nsp.eval(new_formula) == 14 + \ No newline at end of file From 558608e203aad9801ebb5499883022cebd2cbd44 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 27 Oct 2023 13:57:25 +0200 Subject: [PATCH 178/210] project and subproject and participants added to create cvs project --- sedbackend/apps/core/db.py | 6 +- sedbackend/apps/cvs/project/models.py | 9 +- sedbackend/apps/cvs/project/storage.py | 173 +++++++++----- tests/apps/cvs/projects/test_projects.py | 274 +++++++++++++---------- 4 files changed, 282 insertions(+), 180 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/project/models.py b/sedbackend/apps/cvs/project/models.py index 52e5ac51..78e88e78 100644 --- a/sedbackend/apps/cvs/project/models.py +++ b/sedbackend/apps/cvs/project/models.py @@ -1,7 +1,9 @@ from datetime import datetime -from typing import Optional +from typing import Optional, List, Dict from pydantic import BaseModel, Field +from sedbackend.apps.core.projects.models import AccessLevel from sedbackend.apps.core.users.models import User +import sedbackend.apps.core.projects.models as proj_models class CVSProject(BaseModel): @@ -12,9 +14,14 @@ class CVSProject(BaseModel): owner: User datetime_created: datetime my_access_right: int + project: proj_models.Project = None + subproject: proj_models.SubProject = None + class CVSProjectPost(BaseModel): name: str = Field(..., max_length=255) description: Optional[str] = None currency: Optional[str] = Field(None, max_length=10) + participants: Optional[List[int]] = [] + 
participants_access: Optional[Dict[int, AccessLevel]] = {} diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 5994d698..c5937d5a 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -9,21 +9,38 @@ import sedbackend.apps.core.projects.storage as proj_storage CVS_APPLICATION_SID = "MOD.CVS" -CVS_PROJECT_TABLE = 'cvs_projects' -CVS_PROJECT_COLUMNS = ['id', 'name', 'description', 'currency', 'owner_id', 'datetime_created'] - -PROJECTS_SUBPROJECTS_TABLE = 'projects_subprojects' -PROJECTS_SUBPROJECTS_COLUMNS = ['id', 'name', 'application_sid', 'project_id', 'native_project_id', - 'owner_id', 'datetime_created'] - -def get_all_cvs_project(db_connection: PooledMySQLConnection, user_id: int) -> ListChunk[models.CVSProject]: - logger.debug(f'Fetching all CVS projects for user with id={user_id}.') - - query = f'SELECT DISTINCT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ +CVS_PROJECT_TABLE = "cvs_projects" +CVS_PROJECT_COLUMNS = [ + "id", + "name", + "description", + "currency", + "owner_id", + "datetime_created", +] + +PROJECTS_SUBPROJECTS_TABLE = "projects_subprojects" +PROJECTS_SUBPROJECTS_COLUMNS = [ + "id", + "name", + "application_sid", + "project_id", + "native_project_id", + "owner_id", + "datetime_created", +] + + +def get_all_cvs_project( + db_connection: PooledMySQLConnection, user_id: int +) -> ListChunk[models.CVSProject]: + logger.debug(f"Fetching all CVS projects for user with id={user_id}.") + + query = f"SELECT DISTINCT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ FROM cvs_projects p \ LEFT JOIN projects_subprojects ps ON p.id = ps.project_id AND ps.owner_id = %s \ LEFT JOIN projects_participants pp ON p.id = pp.project_id AND pp.user_id = %s \ - WHERE p.owner_id = %s OR ps.owner_id = %s OR pp.user_id = %s;' + WHERE p.owner_id = %s OR ps.owner_id = %s OR pp.user_id = %s;" with db_connection.cursor(prepared=True, dictionary=True) as cursor: cursor.execute(query, [user_id, user_id, user_id, user_id, user_id]) @@ -31,95 +48,139 @@ def get_all_cvs_project(db_connection: PooledMySQLConnection, user_id: int) -> L cvs_project_list = [populate_cvs_project(db_connection, res) for res in result] - return ListChunk[models.CVSProject](chunk=cvs_project_list, length_total=len(cvs_project_list)) + return ListChunk[models.CVSProject]( + chunk=cvs_project_list, length_total=len(cvs_project_list) + ) -def get_cvs_project(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> models.CVSProject: - logger.debug(f'Fetching CVS project with id={project_id} user={user_id}.') +def get_cvs_project( + db_connection: PooledMySQLConnection, + cvs_project_id: int, + user_id: int, + project: proj_models.Project = None, + subproject: proj_models.SubProject = None, +) -> models.CVSProject: + logger.debug(f"Fetching CVS project with id={cvs_project_id} user={user_id}.") - query = f'SELECT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ + query = f"SELECT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ FROM cvs_projects p \ LEFT JOIN projects_participants pp ON pp.project_id = %s AND pp.user_id = %s \ - WHERE p.id = %s;' + WHERE p.id = %s;" with db_connection.cursor(prepared=True, dictionary=True) as cursor: - cursor.execute(query, [project_id, user_id, project_id]) + cursor.execute(query, [cvs_project_id, user_id, cvs_project_id]) result = cursor.fetchone() logger.debug(result) if result is None: raise exceptions.CVSProjectNotFoundException - return 
populate_cvs_project(db_connection, result) - + return populate_cvs_project(db_connection, result, project, subproject) -def create_cvs_project(db_connection: PooledMySQLConnection, project: models.CVSProjectPost, - user_id: int) -> models.CVSProject: - logger.debug(f'Creating a CVS project for user with id={user_id}.') +def create_cvs_project( + db_connection: PooledMySQLConnection, + cvs_project: models.CVSProjectPost, + user_id: int, +) -> models.CVSProject: + logger.debug(f"Creating a CVS project for user with id={user_id}.") insert_statement = MySQLStatementBuilder(db_connection) - insert_statement \ - .insert(table=CVS_PROJECT_TABLE, columns=['name', 'description', 'currency', 'owner_id']) \ - .set_values([project.name, project.description, project.currency, user_id]) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement.insert( + table=CVS_PROJECT_TABLE, columns=["name", "description", "currency", "owner_id"] + ).set_values( + [cvs_project.name, cvs_project.description, cvs_project.currency, user_id] + ).execute( + fetch_type=FetchType.FETCH_NONE + ) cvs_project_id = insert_statement.last_insert_id # Insert corresponding subproject row - subproject = proj_models.SubProjectPost(name=project.name, application_sid=CVS_APPLICATION_SID, native_project_id=cvs_project_id) - proj_storage.db_post_subproject(db_connection, subproject, user_id) + subproject_model = proj_models.SubProjectPost( + name=cvs_project.name, + application_sid=CVS_APPLICATION_SID, + native_project_id=cvs_project_id, + ) + project_model = proj_models.ProjectPost( + name=cvs_project.name, + participants=[user_id] + cvs_project.participants, + participants_access={ + user_id: proj_models.AccessLevel.OWNER, + **cvs_project.participants_access, + }, + ) + project = proj_storage.db_post_project(db_connection, project_model, user_id) + subproject = proj_storage.db_post_subproject( + db_connection, subproject_model, user_id, project.id + ) - return get_cvs_project(db_connection, cvs_project_id, user_id) + return get_cvs_project(db_connection, cvs_project_id, user_id, project, subproject) -def edit_cvs_project(db_connection: PooledMySQLConnection, project_id: int, - new_project: models.CVSProjectPost, user_id: int) -> models.CVSProject: - logger.debug(f'Editing CVS project with id={project_id}.') +def edit_cvs_project( + db_connection: PooledMySQLConnection, + project_id: int, + new_project: models.CVSProjectPost, + user_id: int, +) -> models.CVSProject: + logger.debug(f"Editing CVS project with id={project_id}.") # Updating update_statement = MySQLStatementBuilder(db_connection) update_statement.update( table=CVS_PROJECT_TABLE, - set_statement='name = %s, description = %s, currency = %s', + set_statement="name = %s, description = %s, currency = %s", values=[new_project.name, new_project.description, new_project.currency], ) - update_statement.where('id = %s', [project_id]) + update_statement.where("id = %s", [project_id]) update_statement.execute(return_affected_rows=True) return get_cvs_project(db_connection, project_id, user_id) -def delete_cvs_project(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> bool: - logger.debug(f'Deleting CVS project with id={project_id}.') +def delete_cvs_project( + db_connection: PooledMySQLConnection, cvs_project_id: int, user_id: int +) -> bool: + logger.debug(f"Deleting CVS project with id={cvs_project_id}.") + cvs_project = get_cvs_project(db_connection, cvs_project_id, user_id) delete_statement = MySQLStatementBuilder(db_connection) - _, rows = 
delete_statement.delete(CVS_PROJECT_TABLE) \ - .where('id = %s', [project_id]) \ + _, rows = ( + delete_statement.delete(CVS_PROJECT_TABLE) + .where("id = %s", [cvs_project_id]) .execute(return_affected_rows=True) + ) if rows == 0: raise exceptions.CVSProjectFailedDeletionException - delete_subproject_statement = MySQLStatementBuilder(db_connection) - _, subproject_rows = delete_subproject_statement.delete(PROJECTS_SUBPROJECTS_TABLE) \ - .where('application_sid = %s AND native_project_id = %s', ['MOD.CVS', project_id]) \ - .execute(return_affected_rows=True) - - if subproject_rows == 0: - raise exceptions.SubProjectFailedDeletionException + if cvs_project.subproject: + proj_storage.db_delete_subproject( + db_connection, + cvs_project.project.id if cvs_project.project else None, + cvs_project.subproject.id, + ) + if cvs_project.project: + proj_storage.db_delete_project(db_connection, cvs_project.project.id) return True -def populate_cvs_project(db_connection: PooledMySQLConnection, - db_result) -> models.CVSProject: - logger.debug(f'Populating cvs project with {db_result}') +def populate_cvs_project( + db_connection: PooledMySQLConnection, + db_result, + project: proj_models.Project = None, + subproject: proj_models.SubProject = None, +) -> models.CVSProject: + logger.debug(f"Populating cvs project with {db_result}") return models.CVSProject( - id=db_result['id'], - name=db_result['name'], - description=db_result['description'], - currency=db_result['currency'], - owner=db_get_user_safe_with_id(db_connection, db_result['owner_id']), - datetime_created=db_result['datetime_created'], - my_access_right=db_result['my_access_right'] + id=db_result["id"], + name=db_result["name"], + description=db_result["description"], + currency=db_result["currency"], + owner=db_get_user_safe_with_id(db_connection, db_result["owner_id"]), + datetime_created=db_result["datetime_created"], + my_access_right=db_result["my_access_right"], + project=project, + subproject=subproject, ) diff --git a/tests/apps/cvs/projects/test_projects.py b/tests/apps/cvs/projects/test_projects.py index 61e86de5..3589a807 100644 --- a/tests/apps/cvs/projects/test_projects.py +++ b/tests/apps/cvs/projects/test_projects.py @@ -9,22 +9,21 @@ import tests.apps.cvs.testutils as tu import tests.testutils as testutils import sedbackend.apps.core.users.implementation as impl_users +import sedbackend.apps.core.users.models as models_users def test_create_cvs_project(client, admin_headers): - #Setup + # Setup name = testutils.random_str(5, 30) description = testutils.random_str(20, 200) currency = testutils.random_str(0, 10) - - # Act - res = client.post("/api/cvs/project", - headers=admin_headers, - json={ - "name": name, - "description": description, - "currency": currency - }) + + # Act + res = client.post( + "/api/cvs/project", + headers=admin_headers, + json={"name": name, "description": description, "currency": currency}, + ) # Assert assert res.status_code == 200 @@ -33,78 +32,80 @@ def test_create_cvs_project(client, admin_headers): assert res.json()["currency"] == currency # cleanup - sedbackend.apps.cvs.project.implementation.delete_cvs_project(res.json()["id"], res.json()["owner"]["id"]) + tu.delete_project_by_id(res.json()["id"], res.json()["owner"]["id"]) def test_create_only_name_project(client, std_headers): - #Setup + # Setup name = testutils.random_str(3, 255) - #Act - res = client.post("/api/cvs/project", - headers=std_headers, - json={ - "name": name - }) - - #Assert + # Act + res = client.post("/api/cvs/project", 
headers=std_headers, json={"name": name}) + + # Assert assert res.status_code == 200 assert res.json()["name"] == name assert res.json()["currency"] == None assert res.json()["description"] == None - #cleanup + # cleanup tu.delete_project_by_id(res.json()["id"], res.json()["owner"]["id"]) def test_create_no_name_project(client, std_headers): - #Act - res1 = client.post("/api/cvs/project", - headers=std_headers, - json={ - "description": testutils.random_str(20,200), - "currency": testutils.random_str(0,10) - }) - - #Assert + # Act + res1 = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "description": testutils.random_str(20, 200), + "currency": testutils.random_str(0, 10), + }, + ) + + # Assert assert res1.status_code == 422 - + def test_create_too_long_currency_project(client, std_headers): - #Act - res2 = client.post("/api/cvs/project", - headers=std_headers, - json={ - "name": testutils.random_str(3, 30), - "description": testutils.random_str(20, 200), - "currency": testutils.random_str(11, 50) - }) - - #Assert + # Act + res2 = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "name": testutils.random_str(3, 30), + "description": testutils.random_str(20, 200), + "currency": testutils.random_str(11, 50), + }, + ) + + # Assert assert res2.status_code == 422 def test_create_too_long_name_project(client, std_headers): - #Act - res2 = client.post("/api/cvs/project", - headers=std_headers, - json={ - "name": testutils.random_str(256, 300), - "description": testutils.random_str(20, 200), - "currency": testutils.random_str(0, 10) - }) - - #Assert + # Act + res2 = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "name": testutils.random_str(256, 300), + "description": testutils.random_str(20, 200), + "currency": testutils.random_str(0, 10), + }, + ) + + # Assert assert res2.status_code == 422 def test_delete_cvs_project(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) # Act - res = client.delete(f'/api/cvs/project/{project.id}', headers=std_headers) + res = client.delete(f"/api/cvs/project/{project.id}", headers=std_headers) # Assert assert res.status_code == 200 @@ -112,98 +113,94 @@ def test_delete_cvs_project(client, std_headers, std_user): def test_delete_wrong_cvs_project(client, std_headers, std_user): - #setup + # setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) wrong_id = project.id + 1 - #Act - res = client.delete(f'/api/cvs/project/{wrong_id}', headers=std_headers) + # Act + res = client.delete(f"/api/cvs/project/{wrong_id}", headers=std_headers) - #Assert - assert res.status_code == 404 #Should fail on accesslevelchecker + # Assert + assert res.status_code == 404 # Should fail on accesslevelchecker - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) - def test_edit_cvs_project(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - - name = testutils.random_str(5,50) + name = testutils.random_str(5, 50) description = testutils.random_str(20, 200) currency = testutils.random_str(0, 10) - #Act - - res = client.put(f'/api/cvs/project/{project.id}', - headers=std_headers, - json = { - "name": name, - "description": description, - "currency": currency - }) - + # Act + + res = client.put( 
+ f"/api/cvs/project/{project.id}", + headers=std_headers, + json={"name": name, "description": description, "currency": currency}, + ) + # Assert assert res.status_code == 200 assert res.json()["name"] == name - assert res.json()["currency"] == currency and res.json()["currency"] != project.currency + assert ( + res.json()["currency"] == currency + and res.json()["currency"] != project.currency + ) assert res.json()["description"] == description - - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) def test_edit_cvs_project_only_name(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) + name = testutils.random_str(5, 50) + + # Act + res = client.put( + f"/api/cvs/project/{project.id}", headers=std_headers, json={"name": name} + ) - name = testutils.random_str(5,50) - - #Act - res = client.put(f'/api/cvs/project/{project.id}', - headers=std_headers, - json = { - "name": name - }) - - #Assert + # Assert assert res.status_code == 200 assert res.json()["name"] == name assert res.json()["description"] == None - #cleanup + # cleanup tu.delete_project_by_id(project.id, current_user.id) def test_edit_cvs_project_same(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - - #Act - res = client.put(f'/api/cvs/project/{project.id}', - headers=std_headers, - json = { - "name": project.name, - "description": project.description, - "currency": project.currency - }) - - #Assert + # Act + res = client.put( + f"/api/cvs/project/{project.id}", + headers=std_headers, + json={ + "name": project.name, + "description": project.description, + "currency": project.currency, + }, + ) + + # Assert assert res.status_code == 200 assert res.json()["name"] == project.name assert res.json()["description"] == project.description assert res.json()["currency"] == project.currency - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) @@ -212,56 +209,91 @@ def test_get_cvs_project(client, std_headers, std_user): current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) - #Act - res = client.get(f'/api/cvs/project/{project.id}', - headers=std_headers) - + # Act + res = client.get(f"/api/cvs/project/{project.id}", headers=std_headers) + # Assert assert res.status_code == 200 assert res.json()["id"] == project.id assert res.json()["name"] == project.name - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) def test_get_wrong_cvs_project(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) project = tu.seed_random_project(current_user.id) wrong_proj_id = project.id + 1 - - #Act - res = client.get(f'/api/cvs/project/{wrong_proj_id}', - headers=std_headers) - - #Assert + + # Act + res = client.get(f"/api/cvs/project/{wrong_proj_id}", headers=std_headers) + + # Assert assert res.status_code == 404 - #Cleanup + # Cleanup tu.delete_project_by_id(project.id, current_user.id) - def test_get_all_cvs_projects(client, std_headers, std_user): - #Setup + # Setup current_user = impl_users.impl_get_user_with_username(std_user.username) proj1 = tu.seed_random_project(current_user.id) proj2 = tu.seed_random_project(current_user.id) proj3 = tu.seed_random_project(current_user.id) - #Act - res = 
client.get(f'/api/cvs/project/all', headers=std_headers) + # Act + res = client.get(f"/api/cvs/project/all", headers=std_headers) - #Assert + # Assert assert res.status_code == 200 assert res.json()["chunk"][0]["id"] == proj1.id assert res.json()["chunk"][1]["id"] == proj2.id assert res.json()["chunk"][2]["id"] == proj3.id - #Cleanup + # Cleanup tu.delete_project_by_id(proj1.id, current_user.id) tu.delete_project_by_id(proj2.id, current_user.id) tu.delete_project_by_id(proj3.id, current_user.id) - \ No newline at end of file + + +def test_create_project_participants(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + participant = impl_users.impl_post_user( + models_users.UserPost( + username=testutils.random_str(10, 20), + password=testutils.random_str(10, 20), + email=testutils.random_str(10, 20), + full_name="Test User", + ), + ) + + # Act + res = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "name": "Project1", + "description": "Test project", + "currency": "SEK", + "participants": [participant.id], + "participants_access": {participant.id: 2}, + }, + ) + + # Assert + assert res.status_code == 200 + print(res.json()) + assert [current_user.id, participant.id] == [ + user["id"] for user in res.json()["project"]["participants"] + ] + assert res.json()["project"]["participants_access"][str(participant.id)] == 2 + assert res.json()["project"]["participants_access"][str(current_user.id)] == 4 + + # Cleanup + tu.delete_project_by_id(res.json()["id"], res.json()["owner"]["id"]) + impl_users.impl_delete_user_from_db(participant.id) From 61477c14b4a0eb3ea5763db87316011f3b6a2b3d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 27 Oct 2023 15:22:16 +0200 Subject: [PATCH 179/210] edit participant for cvs project --- sedbackend/apps/core/db.py | 6 +- sedbackend/apps/cvs/project/models.py | 1 - sedbackend/apps/cvs/project/storage.py | 48 +++++++- tests/apps/cvs/projects/test_projects.py | 143 +++++++++++++++++++++++ 4 files changed, 188 insertions(+), 10 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/project/models.py b/sedbackend/apps/cvs/project/models.py index 78e88e78..b68c1f3b 100644 --- a/sedbackend/apps/cvs/project/models.py +++ b/sedbackend/apps/cvs/project/models.py @@ -23,5 +23,4 @@ class CVSProjectPost(BaseModel): name: str = Field(..., max_length=255) description: Optional[str] = None currency: Optional[str] = Field(None, max_length=10) - participants: Optional[List[int]] = [] participants_access: Optional[Dict[int, AccessLevel]] = {} diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index c5937d5a..386ccecc 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -74,6 +74,13 @@ def get_cvs_project( if result is None: raise exceptions.CVSProjectNotFoundException + if not subproject: + subproject = proj_storage.db_get_subproject_native( + db_connection, "MOD.CVS", cvs_project_id + ) + if not project: + project = proj_storage.db_get_project(db_connection, subproject.project_id) + 
return populate_cvs_project(db_connection, result, project, subproject) @@ -103,7 +110,7 @@ def create_cvs_project( ) project_model = proj_models.ProjectPost( name=cvs_project.name, - participants=[user_id] + cvs_project.participants, + participants=[user_id] + list(cvs_project.participants_access.keys()), participants_access={ user_id: proj_models.AccessLevel.OWNER, **cvs_project.participants_access, @@ -119,11 +126,13 @@ def create_cvs_project( def edit_cvs_project( db_connection: PooledMySQLConnection, - project_id: int, + cvs_project_id: int, new_project: models.CVSProjectPost, user_id: int, ) -> models.CVSProject: - logger.debug(f"Editing CVS project with id={project_id}.") + logger.debug(f"Editing CVS project with id={cvs_project_id}.") + + cvs_project = get_cvs_project(db_connection, cvs_project_id, user_id) # Updating update_statement = MySQLStatementBuilder(db_connection) @@ -132,10 +141,39 @@ def edit_cvs_project( set_statement="name = %s, description = %s, currency = %s", values=[new_project.name, new_project.description, new_project.currency], ) - update_statement.where("id = %s", [project_id]) + update_statement.where("id = %s", [cvs_project_id]) update_statement.execute(return_affected_rows=True) - return get_cvs_project(db_connection, project_id, user_id) + if cvs_project.project: + old_participants = cvs_project.project.participants_access + old_participants.pop(user_id) + new_participants = new_project.participants_access + participants_to_add = {} + participants_to_remove = [] + participants_to_update = {} + + for user_id, access_level in new_participants.items(): + if user_id not in old_participants: + participants_to_add[user_id] = access_level + elif old_participants[user_id] != access_level: + participants_to_update[user_id] = access_level + + for user_id, access_level in old_participants.items(): + if user_id not in new_participants: + participants_to_remove.append(user_id) + + project = proj_storage.db_update_project( + db_connection, + proj_models.ProjectEdit( + id=cvs_project.project.id, + name=cvs_project.project.name, + participants_to_add=participants_to_add, + participants_to_remove=participants_to_remove, + participants_to_update=participants_to_update, + ), + ) + + return get_cvs_project(db_connection, cvs_project_id, user_id, project) def delete_cvs_project( diff --git a/tests/apps/cvs/projects/test_projects.py b/tests/apps/cvs/projects/test_projects.py index 3589a807..2d359d24 100644 --- a/tests/apps/cvs/projects/test_projects.py +++ b/tests/apps/cvs/projects/test_projects.py @@ -297,3 +297,146 @@ def test_create_project_participants(client, std_headers, std_user): # Cleanup tu.delete_project_by_id(res.json()["id"], res.json()["owner"]["id"]) impl_users.impl_delete_user_from_db(participant.id) + + +def test_add_project_participant(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + participant = impl_users.impl_post_user( + models_users.UserPost( + username=testutils.random_str(10, 20), + password=testutils.random_str(10, 20), + email=testutils.random_str(10, 20), + full_name="Test User", + ), + ) + project = tu.seed_random_project(current_user.id) + + # Act + res = client.put( + f"/api/cvs/project/{project.id}", + headers=std_headers, + json={ + "name": project.name, + "description": project.description, + "currency": project.currency, + "participants_access": {participant.id: 2}, + }, + ) + + # Assert + assert res.status_code == 200 + assert [current_user.id, participant.id] == [ + 
user["id"] for user in res.json()["project"]["participants"] + ] + assert res.json()["project"]["participants_access"][str(participant.id)] == 2 + assert res.json()["project"]["participants_access"][str(current_user.id)] == 4 + + # Cleanup + tu.delete_project_by_id(project.id, current_user.id) + impl_users.impl_delete_user_from_db(participant.id) + + +def test_remove_project_participant(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + name = testutils.random_str(5, 30) + description = testutils.random_str(20, 200) + currency = testutils.random_str(0, 10) + + participant = impl_users.impl_post_user( + models_users.UserPost( + username=testutils.random_str(10, 20), + password=testutils.random_str(10, 20), + email=testutils.random_str(10, 20), + full_name="Test User", + ), + ) + + res = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "name": name, + "description": description, + "currency": currency, + "participants_access": {participant.id: 2}, + }, + ) + cvs_project = res.json() + + # Act + res = client.put( + f"/api/cvs/project/{cvs_project['id']}", + headers=std_headers, + json={ + "name": cvs_project["name"], + "description": cvs_project["description"], + "currency": cvs_project["currency"], + "participants_access": {}, + }, + ) + + # Assert + assert res.status_code == 200 + assert participant.id not in [ + user["id"] for user in res.json()["project"]["participants"] + ] + assert res.json()["project"]["participants_access"][str(current_user.id)] == 4 + + # Cleanup + tu.delete_project_by_id(cvs_project["id"], current_user.id) + impl_users.impl_delete_user_from_db(participant.id) + + +def test_update_project_participant(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + name = testutils.random_str(5, 30) + description = testutils.random_str(20, 200) + currency = testutils.random_str(0, 10) + + participant = impl_users.impl_post_user( + models_users.UserPost( + username=testutils.random_str(10, 20), + password=testutils.random_str(10, 20), + email=testutils.random_str(10, 20), + full_name="Test User", + ), + ) + + res = client.post( + "/api/cvs/project", + headers=std_headers, + json={ + "name": name, + "description": description, + "currency": currency, + "participants_access": {participant.id: 2}, + }, + ) + cvs_project = res.json() + + # Act + res = client.put( + f"/api/cvs/project/{cvs_project['id']}", + headers=std_headers, + json={ + "name": cvs_project["name"], + "description": cvs_project["description"], + "currency": cvs_project["currency"], + "participants_access": {participant.id: 3}, + }, + ) + + # Assert + assert res.status_code == 200 + assert [current_user.id, participant.id] == [ + user["id"] for user in res.json()["project"]["participants"] + ] + assert res.json()["project"]["participants_access"][str(participant.id)] == 3 + assert res.json()["project"]["participants_access"][str(current_user.id)] == 4 + + # Cleanup + tu.delete_project_by_id(cvs_project["id"], current_user.id) + impl_users.impl_delete_user_from_db(participant.id) From eae025945ee5a59167d70ede20f13b928bcd6538 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 27 Oct 2023 15:32:11 +0200 Subject: [PATCH 180/210] fixed failing tests --- sedbackend/apps/cvs/project/storage.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 386ccecc..5be2146c 
100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -55,12 +55,14 @@ def get_all_cvs_project( def get_cvs_project( db_connection: PooledMySQLConnection, - cvs_project_id: int, + project_id: int, user_id: int, project: proj_models.Project = None, subproject: proj_models.SubProject = None, ) -> models.CVSProject: - logger.debug(f"Fetching CVS project with id={cvs_project_id} user={user_id}.") + logger.debug(f"Fetching CVS project with id={project_id} user={user_id}.") + + cvs_project_id = project_id query = f"SELECT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ FROM cvs_projects p \ From 8b0ac96d95f77f88bfd1f9359696104d9ddaedc7 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 28 Oct 2023 14:35:40 +0200 Subject: [PATCH 181/210] fixed failing simulation tests --- sedbackend/apps/core/projects/dependencies.py | 2 ++ sedbackend/apps/cvs/project/storage.py | 6 ++---- sedbackend/apps/cvs/simulation/router.py | 10 +++++----- tests/apps/cvs/testutils.py | 4 ---- 4 files changed, 9 insertions(+), 13 deletions(-) diff --git a/sedbackend/apps/core/projects/dependencies.py b/sedbackend/apps/core/projects/dependencies.py index 8043ee77..b8e20dd1 100644 --- a/sedbackend/apps/core/projects/dependencies.py +++ b/sedbackend/apps/core/projects/dependencies.py @@ -62,6 +62,8 @@ def check_user_subproject_access(subproject: SubProject, access_levels: List[Acc project = impl_get_project(subproject.project_id) # <-- This can throw # Check user access level in that project access = project.participants_access[user_id] + print("access: ", access) + print("access_levels: ", access_levels) if access in access_levels: logger.debug(f"Yes, user {user_id} has access level {access}") return True diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 5be2146c..8a50871e 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -62,15 +62,13 @@ def get_cvs_project( ) -> models.CVSProject: logger.debug(f"Fetching CVS project with id={project_id} user={user_id}.") - cvs_project_id = project_id - query = f"SELECT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ FROM cvs_projects p \ LEFT JOIN projects_participants pp ON pp.project_id = %s AND pp.user_id = %s \ WHERE p.id = %s;" with db_connection.cursor(prepared=True, dictionary=True) as cursor: - cursor.execute(query, [cvs_project_id, user_id, cvs_project_id]) + cursor.execute(query, [project_id, user_id, project_id]) result = cursor.fetchone() logger.debug(result) if result is None: @@ -78,7 +76,7 @@ def get_cvs_project( if not subproject: subproject = proj_storage.db_get_subproject_native( - db_connection, "MOD.CVS", cvs_project_id + db_connection, "MOD.CVS", project_id ) if not project: project = proj_storage.db_get_project(db_connection, subproject.project_id) diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index c7dee458..6ea29f3f 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -15,7 +15,7 @@ '/project/{native_project_id}/simulation/run', summary='Run simulation', response_model=models.SimulationResult, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def run_simulation(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], 
normalized_npv: Optional[bool] = False, @@ -30,7 +30,7 @@ async def run_simulation(sim_settings: models.EditSimSettings, native_project_id '/project/{native_project_id}/sim/upload-dsm', summary='Run simulation with DSM predefined in Excel or CSV file', response_model=List[models.Simulation], - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def run_dsm_file_simulation(native_project_id: int, sim_params: models.FileParams = Depends(), dsm_file: UploadFile = File(default=None), @@ -47,7 +47,7 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil '/project/{native_project_id}/simulation/run-multiprocessing', summary='Run monte carlo simulation with multiprocessing', response_model=models.SimulationResult, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def run_multiprocessing(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, @@ -60,7 +60,7 @@ async def run_multiprocessing(sim_settings: models.EditSimSettings, native_proje '/project/{native_project_id}/simulation/settings', summary='Get settings for project', response_model=models.SimSettings, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read, CVS_APP_SID))] + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def get_sim_settings(native_project_id: int) -> models.SimSettings: return implementation.get_sim_settings(native_project_id) @@ -70,7 +70,7 @@ async def get_sim_settings(native_project_id: int) -> models.SimSettings: '/project/{native_project_id}/simulation/settings', summary='Create or update simulation settings', response_model=bool, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit, CVS_APP_SID))] + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_edit(), CVS_APP_SID))] ) async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings, user: User = Depends(get_current_active_user)) -> bool: diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 1df1f7e6..f6a1c4bc 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -487,20 +487,16 @@ def seed_simulation_settings(project_id: int, vcs_ids: List[int], design_ids: Li rows = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in vcs_impl.get_vcs_table( project_id, vcs_ids[0])] - print("Seed settings vcs rows", rows) for vcs_id in vcs_ids: new_rows = [row.iso_process.name if row.iso_process is not None else row.subprocess.name for row in vcs_impl.get_vcs_table( project_id, vcs_id)] - print("New rows", new_rows) rows = list(filter(lambda x: x in rows, new_rows)) - print("Common elements", rows) time_unit = random_time_unit() interarrival_time = round(tu.random.uniform(1, 255), ndigits=5) start_time = round(tu.random.uniform(1, 300), ndigits=5) end_time = round(tu.random.uniform(300, 1000), ndigits=5) - print("Row len", len(rows)) flow_process = rows[1] flow_start_time = None # Get valid start time flow_time = round(tu.random.uniform(0, end_time - start_time), ndigits=5) From a4907899bdf1f7e53592fbae2dbf640c631de776 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 28 
Oct 2023 14:36:07 +0200 Subject: [PATCH 182/210] removed print statements --- sedbackend/apps/core/projects/dependencies.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sedbackend/apps/core/projects/dependencies.py b/sedbackend/apps/core/projects/dependencies.py index b8e20dd1..8043ee77 100644 --- a/sedbackend/apps/core/projects/dependencies.py +++ b/sedbackend/apps/core/projects/dependencies.py @@ -62,8 +62,6 @@ def check_user_subproject_access(subproject: SubProject, access_levels: List[Acc project = impl_get_project(subproject.project_id) # <-- This can throw # Check user access level in that project access = project.participants_access[user_id] - print("access: ", access) - print("access_levels: ", access_levels) if access in access_levels: logger.debug(f"Yes, user {user_id} has access level {access}") return True From 654a71ed81d951cc23ccf705e5daf599fe1e9ef8 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 28 Oct 2023 14:56:16 +0200 Subject: [PATCH 183/210] fixed error if project does not exist --- sedbackend/apps/cvs/project/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 8a50871e..44ff98d0 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -78,7 +78,7 @@ def get_cvs_project( subproject = proj_storage.db_get_subproject_native( db_connection, "MOD.CVS", project_id ) - if not project: + if not project and subproject and subproject.project_id: project = proj_storage.db_get_project(db_connection, subproject.project_id) return populate_cvs_project(db_connection, result, project, subproject) From 786a7a61f53b6e776eef5cba3aa7827eb1550b86 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 4 Nov 2023 16:00:04 +0100 Subject: [PATCH 184/210] get shared projects --- sedbackend/apps/cvs/project/storage.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/project/storage.py b/sedbackend/apps/cvs/project/storage.py index 44ff98d0..0005b21b 100644 --- a/sedbackend/apps/cvs/project/storage.py +++ b/sedbackend/apps/cvs/project/storage.py @@ -36,17 +36,29 @@ def get_all_cvs_project( ) -> ListChunk[models.CVSProject]: logger.debug(f"Fetching all CVS projects for user with id={user_id}.") + # The issue is that cvs_project.id is not the same as + query = f"SELECT DISTINCT p.*, COALESCE(pp.access_level, 4) AS my_access_right \ FROM cvs_projects p \ - LEFT JOIN projects_subprojects ps ON p.id = ps.project_id AND ps.owner_id = %s \ - LEFT JOIN projects_participants pp ON p.id = pp.project_id AND pp.user_id = %s \ + LEFT JOIN projects_subprojects ps ON p.id = ps.native_project_id \ + LEFT JOIN projects_participants pp ON ps.project_id = pp.project_id AND pp.user_id = %s \ WHERE p.owner_id = %s OR ps.owner_id = %s OR pp.user_id = %s;" with db_connection.cursor(prepared=True, dictionary=True) as cursor: - cursor.execute(query, [user_id, user_id, user_id, user_id, user_id]) + cursor.execute(query, [user_id, user_id, user_id, user_id]) result = cursor.fetchall() - cvs_project_list = [populate_cvs_project(db_connection, res) for res in result] + cvs_project_list = [] + for res in result: + subproject = proj_storage.db_get_subproject_native( + db_connection, "MOD.CVS", res["id"] + ) + project = None + if subproject.project_id: + project = proj_storage.db_get_project(db_connection, subproject.project_id) + cvs_project_list.append( + populate_cvs_project(db_connection, res, project, 
subproject) + ) return ListChunk[models.CVSProject]( chunk=cvs_project_list, length_total=len(cvs_project_list) From 8723609e126ef0b91c8f973ac94f04b82028609d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 20 Nov 2023 20:56:18 +0100 Subject: [PATCH 185/210] added latex row to formula --- .../cvs/link_design_lifecycle/exceptions.py | 5 - .../link_design_lifecycle/implementation.py | 5 - .../apps/cvs/link_design_lifecycle/models.py | 3 +- .../apps/cvs/link_design_lifecycle/router.py | 3 +- .../apps/cvs/link_design_lifecycle/storage.py | 487 +++++++++++++----- sql/V231010_cvs.sql | 7 +- .../test_connect_vcs_design.py | 335 ++++++++---- tests/apps/cvs/testutils.py | 6 +- 8 files changed, 592 insertions(+), 259 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py b/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py index a071e8bc..ae4f0907 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/exceptions.py @@ -2,11 +2,6 @@ class FormulasNotFoundException(Exception): pass -class WrongTimeUnitException(Exception): - def __init__(self, time_unit: str = None) -> None: - self.time_unit = time_unit - - class VCSNotFoundException(Exception): pass diff --git a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py index b64c894c..dfa4ace0 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/implementation.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/implementation.py @@ -68,11 +68,6 @@ def get_all_formulas(project_id: int, vcs_id: int, design_group_id: int) -> List status_code=status.HTTP_404_NOT_FOUND, detail=f'Could not find VCS with id {vcs_id}' ) - except exceptions.WrongTimeUnitException as e: # Where exactly does this fire???? - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f'Wrong time unit. 
Given unit: {e.time_unit}' - ) except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, diff --git a/sedbackend/apps/cvs/link_design_lifecycle/models.py b/sedbackend/apps/cvs/link_design_lifecycle/models.py index cb8d3db5..3158d8a2 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/models.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/models.py @@ -26,7 +26,8 @@ class Rate(Enum): class Formula(BaseModel): - formula: str + text: str + latex: str comment: Optional[str] = None diff --git a/sedbackend/apps/cvs/link_design_lifecycle/router.py b/sedbackend/apps/cvs/link_design_lifecycle/router.py index c16186a7..bb1a3400 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/router.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/router.py @@ -18,8 +18,7 @@ response_model=List[models.FormulaRowGet], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) -async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int, - user: User = Depends(get_current_active_user)) -> List[models.FormulaRowGet]: +async def get_all_formulas(native_project_id: int, vcs_id: int, dg_id: int) -> List[models.FormulaRowGet]: return implementation.get_all_formulas(native_project_id, vcs_id, dg_id) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 6c54a97d..2cbbb661 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -11,14 +11,33 @@ from mysqlsb import FetchType, MySQLStatementBuilder CVS_FORMULAS_TABLE = 'cvs_design_mi_formulas' -CVS_FORMULAS_COLUMNS = ['project', 'vcs_row', 'design_group', 'time', 'time_comment', 'time_unit', 'cost', - 'cost_comment', 'revenue', 'revenue_comment', 'rate'] +CVS_FORMULAS_COLUMNS = [ + 'project', + 'vcs_row', + 'design_group', + 'time', + 'time_latex', + 'time_comment', + 'time_unit', + 'cost', + 'cost_latex', + 'cost_comment', + 'revenue', + 'revenue_latex', + 'revenue_comment', + 'rate', +] CVS_VALUE_DRIVERS_TABLE = 'cvs_value_drivers' CVS_VALUE_DRIVERS_COLUMNS = ['id', 'user', 'name', 'unit'] CVS_FORMULAS_VALUE_DRIVERS_TABLE = 'cvs_formulas_value_drivers' -CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = ['vcs_row', 'design_group', 'value_driver', 'project'] +CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = [ + 'vcs_row', + 'design_group', + 'value_driver', + 'project', +] CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] @@ -29,32 +48,57 @@ CVS_VCS_NEED_DRIVERS_TABLE = 'cvs_vcs_need_drivers' -def create_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formula_row: models.FormulaRowPost): +def create_formulas( + db_connection: PooledMySQLConnection, + project_id: int, + vcs_row_id: int, + design_group_id: int, + formula_row: models.FormulaRowPost, +): logger.debug(f'Creating formulas') value_driver_ids, external_factor_ids = find_vd_and_ef( - [formula_row.time.formula, formula_row.cost.formula, formula_row.revenue.formula]) + [ + formula_row.time.text, + formula_row.cost.text, + formula_row.revenue.text, + ] + ) - values = [project_id, vcs_row_id, design_group_id, formula_row.time.formula, formula_row.time.comment, - formula_row.time_unit.value, - formula_row.cost.formula, formula_row.cost.comment, - formula_row.revenue.formula, formula_row.revenue.comment, formula_row.rate.value] + 
values = [ + project_id, + vcs_row_id, + design_group_id, + formula_row.time.text, + formula_row.time.latex, + formula_row.time.comment, + formula_row.time_unit.value, + formula_row.cost.text, + formula_row.cost.latex, + formula_row.cost.comment, + formula_row.revenue.text, + formula_row.revenue.latex, + formula_row.revenue.comment, + formula_row.rate.value, + ] try: insert_statement = MySQLStatementBuilder(db_connection) - insert_statement \ - .insert(table=CVS_FORMULAS_TABLE, columns=CVS_FORMULAS_COLUMNS) \ - .set_values(values=values) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement.insert( + table=CVS_FORMULAS_TABLE, columns=CVS_FORMULAS_COLUMNS + ).set_values(values=values).execute(fetch_type=FetchType.FETCH_NONE) except Exception as e: logger.error(f'Error while inserting formulas: {e}') raise exceptions.FormulasFailedUpdateException if value_driver_ids: - add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id) + add_value_driver_formulas( + db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id + ) if external_factor_ids: - add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, external_factor_ids) + add_external_factor_formulas( + db_connection, vcs_row_id, design_group_id, external_factor_ids + ) def find_vd_and_ef(texts: List[str]) -> (List[str], List[int]): @@ -74,34 +118,66 @@ def find_vd_and_ef(texts: List[str]) -> (List[str], List[int]): return value_driver_ids, external_factor_ids -def edit_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, design_group_id: int, - formula_row: models.FormulaRowPost): +def edit_formulas( + db_connection: PooledMySQLConnection, + project_id: int, + vcs_row_id: int, + design_group_id: int, + formula_row: models.FormulaRowPost, +): logger.debug(f'Editing formulas') value_driver_ids, external_factor_ids = find_vd_and_ef( - [formula_row.time.formula, formula_row.cost.formula, formula_row.revenue.formula]) + [ + formula_row.time.text, + formula_row.cost.text, + formula_row.revenue.text, + ] + ) columns = CVS_FORMULAS_COLUMNS[3:] set_statement = ', '.join([col + ' = %s' for col in columns]) - values = [formula_row.time.formula, formula_row.time.comment, formula_row.time_unit.value, formula_row.cost.formula, - formula_row.cost.comment, formula_row.revenue.formula, formula_row.revenue.comment, - formula_row.rate.value] + values = [ + formula_row.time.text, + formula_row.time.latex, + formula_row.time.comment, + formula_row.time_unit.value, + formula_row.cost.text, + formula_row.cost.latex, + formula_row.cost.comment, + formula_row.revenue.text, + formula_row.revenue.latex, + formula_row.revenue.comment, + formula_row.rate.value, + ] # Update formula row update_statement = MySQLStatementBuilder(db_connection) - _, rows = update_statement \ - .update(table=CVS_FORMULAS_TABLE, set_statement=set_statement, values=values) \ - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ + _, rows = ( + update_statement.update( + table=CVS_FORMULAS_TABLE, set_statement=set_statement, values=values + ) + .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) .execute(return_affected_rows=True) + ) - update_value_driver_formulas(db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id) + update_value_driver_formulas( + db_connection, vcs_row_id, design_group_id, value_driver_ids, project_id + ) - update_external_factor_formulas(db_connection, vcs_row_id, design_group_id, 
external_factor_ids) + update_external_factor_formulas( + db_connection, vcs_row_id, design_group_id, external_factor_ids + ) -def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - value_drivers: List[int], project_id: int): +def add_value_driver_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + value_drivers: List[int], + project_id: int, +): # Add value driver to formulas try: prepared_list = [] @@ -110,7 +186,9 @@ def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: insert_statement += f'(%s, %s, %s, %s),' prepared_list += [vcs_row_id, design_group_id, value_driver_id, project_id] insert_statement = insert_statement[:-1] - insert_statement += ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + insert_statement += ( + ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + ) with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement, prepared_list) except Exception as e: @@ -118,39 +196,68 @@ def add_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: raise exceptions.FormulasFailedUpdateException -def delete_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - value_drivers: List[int]): +def delete_value_driver_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + value_drivers: List[int], +): delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement \ - .delete(CVS_FORMULAS_VALUE_DRIVERS_TABLE) \ + _, rows = ( + delete_statement.delete(CVS_FORMULAS_VALUE_DRIVERS_TABLE) .where( - f'vcs_row = %s and design_group = %s and value_driver in ({",".join(["%s" for _ in range(len(value_drivers))])})', - [vcs_row_id, design_group_id] + value_drivers) \ + f'vcs_row = %s and design_group = %s and value_driver in ({",".join(["%s" for _ in range(len(value_drivers))])})', + [vcs_row_id, design_group_id] + value_drivers, + ) .execute(return_affected_rows=True) + ) -def update_value_driver_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - value_drivers: List[int], project_id: int): +def update_value_driver_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + value_drivers: List[int], + project_id: int, +): where_statement = "vcs_row = %s and design_group = %s" select_statement = MySQLStatementBuilder(db_connection) - value_driver_res = select_statement.select(CVS_FORMULAS_VALUE_DRIVERS_TABLE, CVS_FORMULAS_VALUE_DRIVERS_COLUMNS) \ - .where(where_statement, [vcs_row_id, design_group_id]) \ + value_driver_res = ( + select_statement.select( + CVS_FORMULAS_VALUE_DRIVERS_TABLE, CVS_FORMULAS_VALUE_DRIVERS_COLUMNS + ) + .where(where_statement, [vcs_row_id, design_group_id]) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + ) - delete_value_drivers = [value_driver['value_driver'] for value_driver in value_driver_res if - value_driver['value_driver'] not in - value_drivers] - add_value_drivers = [value_driver_id for value_driver_id in value_drivers if value_driver_id not in - [value_driver['value_driver'] for value_driver in value_driver_res]] + delete_value_drivers = [ + value_driver['value_driver'] + for value_driver in value_driver_res + if value_driver['value_driver'] not in value_drivers + ] + add_value_drivers = [ + value_driver_id + for value_driver_id in value_drivers + if value_driver_id + not in 
[value_driver['value_driver'] for value_driver in value_driver_res] + ] if len(add_value_drivers): - add_value_driver_formulas(db_connection, vcs_row_id, design_group_id, add_value_drivers, project_id) + add_value_driver_formulas( + db_connection, vcs_row_id, design_group_id, add_value_drivers, project_id + ) if len(delete_value_drivers): - delete_value_driver_formulas(db_connection, vcs_row_id, design_group_id, delete_value_drivers) + delete_value_driver_formulas( + db_connection, vcs_row_id, design_group_id, delete_value_drivers + ) -def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - external_factors: List[int]): +def add_external_factor_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + external_factors: List[int], +): try: prepared_list = [] insert_statement = f'INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, external_factor) VALUES' @@ -158,7 +265,9 @@ def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_i insert_statement += f'(%s, %s, %s),' prepared_list += [vcs_row_id, design_group_id, external_factor_id] insert_statement = insert_statement[:-1] - insert_statement += ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + insert_statement += ( + ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + ) with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement, prepared_list) except Exception as e: @@ -166,75 +275,138 @@ def add_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_i raise exceptions.FormulasFailedUpdateException -def delete_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - external_factors: List[int]): +def delete_external_factor_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + external_factors: List[int], +): delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement \ - .delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) \ + _, rows = ( + delete_statement.delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) .where( - f'vcs_row = %s and design_group = %s and external_factor in ({",".join(["%s" for _ in range(len(external_factors))])})', - [vcs_row_id, design_group_id] + external_factors) \ + f'vcs_row = %s and design_group = %s and external_factor in ({",".join(["%s" for _ in range(len(external_factors))])})', + [vcs_row_id, design_group_id] + external_factors, + ) .execute(return_affected_rows=True) + ) -def update_external_factor_formulas(db_connection: PooledMySQLConnection, vcs_row_id: int, design_group_id: int, - external_factors: List[int]): +def update_external_factor_formulas( + db_connection: PooledMySQLConnection, + vcs_row_id: int, + design_group_id: int, + external_factors: List[int], +): where_statement = "vcs_row = %s and design_group = %s" select_statement = MySQLStatementBuilder(db_connection) - external_factor_res = select_statement.select(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE, - CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS) \ - .where(where_statement, [vcs_row_id, design_group_id]) \ + external_factor_res = ( + select_statement.select( + CVS_FORMULAS_EXTERNAL_FACTORS_TABLE, CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS + ) + .where(where_statement, [vcs_row_id, design_group_id]) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + ) - delete_external_factors = [external_factor['external_factor'] for external_factor in 
external_factor_res if - external_factor['external_factor'] not in - external_factors] - add_external_factors = [external_factor_id for external_factor_id in external_factors if - external_factor_id not in - [external_factor['external_factor'] for external_factor in external_factor_res]] + delete_external_factors = [ + external_factor['external_factor'] + for external_factor in external_factor_res + if external_factor['external_factor'] not in external_factors + ] + add_external_factors = [ + external_factor_id + for external_factor_id in external_factors + if external_factor_id + not in [ + external_factor['external_factor'] + for external_factor in external_factor_res + ] + ] if len(add_external_factors): - add_external_factor_formulas(db_connection, vcs_row_id, design_group_id, add_external_factors) + add_external_factor_formulas( + db_connection, vcs_row_id, design_group_id, add_external_factors + ) if len(delete_external_factors): - delete_external_factor_formulas(db_connection, vcs_row_id, design_group_id, delete_external_factors) - - -def update_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, design_group_id: int, - formula_rows: List[models.FormulaRowPost]) -> bool: - vcs_storage.check_vcs(db_connection, project_id, vcs_id) # Check if vcs exists and matches project - get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project + delete_external_factor_formulas( + db_connection, vcs_row_id, design_group_id, delete_external_factors + ) + + +def update_formulas( + db_connection: PooledMySQLConnection, + project_id: int, + vcs_id: int, + design_group_id: int, + formula_rows: List[models.FormulaRowPost], +) -> bool: + vcs_storage.check_vcs( + db_connection, project_id, vcs_id + ) # Check if vcs exists and matches project + get_design_group( + db_connection, project_id, design_group_id + ) # Check if design group exists and matches project for formula_row in formula_rows: - vcs_storage.get_vcs_row(db_connection, project_id, formula_row.vcs_row_id) # Check if vcs row exists + vcs_storage.get_vcs_row( + db_connection, project_id, formula_row.vcs_row_id + ) # Check if vcs row exists count_statement = MySQLStatementBuilder(db_connection) - count = count_statement.count(CVS_FORMULAS_TABLE) \ - .where('vcs_row = %s and design_group = %s', [formula_row.vcs_row_id, design_group_id]) \ + count = ( + count_statement.count(CVS_FORMULAS_TABLE) + .where( + 'vcs_row = %s and design_group = %s', + [formula_row.vcs_row_id, design_group_id], + ) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + ) count = count['count'] if count == 0: - create_formulas(db_connection, project_id, formula_row.vcs_row_id, design_group_id, formula_row) + create_formulas( + db_connection, + project_id, + formula_row.vcs_row_id, + design_group_id, + formula_row, + ) elif count == 1: - edit_formulas(db_connection, project_id, formula_row.vcs_row_id, design_group_id, formula_row) + edit_formulas( + db_connection, + project_id, + formula_row.vcs_row_id, + design_group_id, + formula_row, + ) else: raise exceptions.FormulasFailedUpdateException return True -def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_id: int, - design_group_id: int) -> List[models.FormulaRowGet]: +def get_all_formulas( + db_connection: PooledMySQLConnection, + project_id: int, + vcs_id: int, + design_group_id: int, +) -> List[models.FormulaRowGet]: logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') - get_design_group(db_connection, 
project_id, design_group_id) # Check if design group exists and matches project - vcs_rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) # Check if vcs exists and matches project + get_design_group( + db_connection, project_id, design_group_id + ) # Check if design group exists and matches project + vcs_rows = vcs_storage.get_vcs_table( + db_connection, project_id, vcs_id + ) # Check if vcs exists and matches project select_statement = MySQLStatementBuilder(db_connection) - res = select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) \ - .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') \ - .where('vcs = %s and design_group = %s', [vcs_id, design_group_id]) \ + res = ( + select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) + .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') + .where('vcs = %s and design_group = %s', [vcs_id, design_group_id]) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + ) if res is None: raise exceptions.FormulasNotFoundException @@ -242,7 +414,11 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ all_used_vds, all_used_efs, all_row_vds = [], [], [] if len(res): - where_statement = "(vcs_row, design_group) IN (" + ",".join(["(%s, %s)" for _ in range(len(res))]) + ")" + where_statement = ( + "(vcs_row, design_group) IN (" + + ",".join(["(%s, %s)" for _ in range(len(res))]) + + ")" + ) prepared_list = [] for r in res: prepared_list += [r['vcs_row'], r['design_group']] @@ -251,15 +427,21 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ cursor.execute( f"SELECT id, name, unit, {CVS_VALUE_DRIVERS_TABLE}.project, vcs_row, design_group FROM {CVS_FORMULAS_VALUE_DRIVERS_TABLE} " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_FORMULAS_VALUE_DRIVERS_TABLE}.value_driver = cvs_value_drivers.id WHERE {where_statement}", - prepared_list) - all_used_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + prepared_list, + ) + all_used_vds = [ + dict(zip(cursor.column_names, row)) for row in cursor.fetchall() + ] with db_connection.cursor(prepared=True) as cursor: cursor.execute( f"SELECT id, name, unit, vcs_row, design_group FROM {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} " f"INNER JOIN {CVS_EXTERNAL_FACTORS_TABLE} ON {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE}.external_factor = cvs_market_inputs.id WHERE {where_statement}", - prepared_list) - all_used_efs = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + prepared_list, + ) + all_used_efs = [ + dict(zip(cursor.column_names, row)) for row in cursor.fetchall() + ] if vcs_rows: with db_connection.cursor(prepared=True) as cursor: @@ -270,8 +452,11 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ f"INNER JOIN {CVS_VCS_NEED_DRIVERS_TABLE} ON {CVS_VCS_NEED_DRIVERS_TABLE}.stakeholder_need = {CVS_STAKEHOLDER_NEEDS_TABLE}.id " f"INNER JOIN {CVS_VALUE_DRIVERS_TABLE} ON {CVS_VALUE_DRIVERS_TABLE}.id = {CVS_VCS_NEED_DRIVERS_TABLE}.value_driver " f"WHERE {CVS_VCS_ROWS_TABLE}.id IN ({','.join(['%s' for _ in range(len(vcs_rows))])})", - [row.id for row in vcs_rows]) - all_row_vds = [dict(zip(cursor.column_names, row)) for row in cursor.fetchall()] + [row.id for row in vcs_rows], + ) + all_row_vds = [ + dict(zip(cursor.column_names, row)) for row in cursor.fetchall() + ] logger.debug(f'All row vds: {all_row_vds}') formulas = [] @@ -283,19 +468,28 @@ def get_all_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_ else: r['vcs_row'] = row.id 
r['design_group'] = design_group_id - r['time'] = '0' + r['time'] = '' + r['time_latex'] = '' r['time_comment'] = '' - r['cost'] = '0' + r['cost'] = '' + r['cost_latex'] = '' r['cost_comment'] = '' - r['revenue'] = '0' + r['revenue'] = '' + r['revenue_latex'] = '' r['revenue_comment'] = '' r['time_unit'] = TimeFormat.YEAR r['rate'] = Rate.PRODUCT r['row_value_drivers'] = [vd for vd in all_row_vds if vd['vcs_row'] == row.id] - r['used_value_drivers'] = [vd for vd in all_used_vds if vd['vcs_row'] == row.id and - vd['design_group'] == r['design_group']] - r['used_external_factors'] = [ef for ef in all_used_efs if ef['vcs_row'] == row.id and - ef['design_group'] == r['design_group']] + r['used_value_drivers'] = [ + vd + for vd in all_used_vds + if vd['vcs_row'] == row.id and vd['design_group'] == r['design_group'] + ] + r['used_external_factors'] = [ + ef + for ef in all_used_efs + if ef['vcs_row'] == row.id and ef['design_group'] == r['design_group'] + ] formulas.append(populate_formula(r)) return formulas @@ -305,38 +499,57 @@ def populate_formula(db_result) -> models.FormulaRowGet: return models.FormulaRowGet( vcs_row_id=db_result['vcs_row'], design_group_id=db_result['design_group'], - time=models.Formula(formula=db_result['time'], - comment=db_result['time_comment']), + time=models.Formula( + text=db_result['time'], latex=db_result['time_latex'], comment=db_result['time_comment'] + ), time_unit=db_result['time_unit'], - cost=models.Formula(formula=db_result['cost'], - comment=db_result['cost_comment']), - revenue=models.Formula(formula=db_result['revenue'], - comment=db_result['revenue_comment']), + cost=models.Formula( + text=db_result['cost'], latex=db_result['cost_latex'], comment=db_result['cost_comment'] + ), + revenue=models.Formula( + text=db_result['revenue'], latex=db_result['revenue_latex'], comment=db_result['revenue_comment'] + ), rate=db_result['rate'], - row_value_drivers=[vcs_storage.populate_value_driver(valueDriver) for valueDriver in - db_result['row_value_drivers']] if - db_result['row_value_drivers'] is not None else [], - used_value_drivers=[vcs_storage.populate_value_driver(valueDriver) for valueDriver in - db_result['used_value_drivers']] if - db_result['used_value_drivers'] is not None else [], - used_external_factors=[populate_external_factor(externalFactor) for externalFactor in - db_result['used_external_factors']] if - db_result['used_external_factors'] is not None else [], + row_value_drivers=[ + vcs_storage.populate_value_driver(valueDriver) + for valueDriver in db_result['row_value_drivers'] + ] + if db_result['row_value_drivers'] is not None + else [], + used_value_drivers=[ + vcs_storage.populate_value_driver(valueDriver) + for valueDriver in db_result['used_value_drivers'] + ] + if db_result['used_value_drivers'] is not None + else [], + used_external_factors=[ + populate_external_factor(externalFactor) + for externalFactor in db_result['used_external_factors'] + ] + if db_result['used_external_factors'] is not None + else [], ) -def delete_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_row_id: int, - design_group_id: int) -> bool: +def delete_formulas( + db_connection: PooledMySQLConnection, + project_id: int, + vcs_row_id: int, + design_group_id: int, +) -> bool: logger.debug(f'Deleting formulas with vcs_row_id: {vcs_row_id}') - get_design_group(db_connection, project_id, design_group_id) # Check if design group exists and matches project + get_design_group( + db_connection, project_id, design_group_id + ) # Check if design group 
exists and matches project vcs_storage.get_vcs_row(db_connection, project_id, vcs_row_id) delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement \ - .delete(CVS_FORMULAS_TABLE) \ - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) \ + _, rows = ( + delete_statement.delete(CVS_FORMULAS_TABLE) + .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) .execute(return_affected_rows=True) + ) if rows != 1: raise exceptions.FormulasFailedDeletionException @@ -344,14 +557,18 @@ def delete_formulas(db_connection: PooledMySQLConnection, project_id: int, vcs_r return True -def get_vcs_dg_pairs(db_connection: PooledMySQLConnection, project_id: int) -> List[models.VcsDgPairs]: - query = "SELECT cvs_vcss.name AS vcs_name, cvs_vcss.id AS vcs_id, cvs_design_groups.name AS design_group_name, " \ - "cvs_design_groups.id AS design_group_id, \ +def get_vcs_dg_pairs( + db_connection: PooledMySQLConnection, project_id: int +) -> List[models.VcsDgPairs]: + query = ( + "SELECT cvs_vcss.name AS vcs_name, cvs_vcss.id AS vcs_id, cvs_design_groups.name AS design_group_name, " + "cvs_design_groups.id AS design_group_id, \ (SELECT count(*) FROM cvs_vcs_rows WHERE cvs_vcs_rows.vcs = cvs_vcss.id) \ - = ((SELECT (count(*)) FROM cvs_design_mi_formulas INNER JOIN cvs_vcs_rows ON cvs_vcs_rows.id = vcs_row WHERE " \ - "cvs_design_mi_formulas.design_group=cvs_design_groups.id AND vcs=cvs_vcss.id)) \ + = ((SELECT (count(*)) FROM cvs_design_mi_formulas INNER JOIN cvs_vcs_rows ON cvs_vcs_rows.id = vcs_row WHERE " + "cvs_design_mi_formulas.design_group=cvs_design_groups.id AND vcs=cvs_vcss.id)) \ AS has_formulas FROM cvs_vcss, cvs_design_groups WHERE cvs_vcss.project = %s AND cvs_design_groups.project = %s \ GROUP BY vcs_id, design_group_id ORDER BY has_formulas DESC;" + ) with db_connection.cursor(prepared=True) as cursor: # Log for sanity check @@ -365,12 +582,14 @@ def get_vcs_dg_pairs(db_connection: PooledMySQLConnection, project_id: int) -> L rs = cursor.fetchall() for res in rs: zip(cursor.column_names, res) - res_dict.append(models.VcsDgPairs( - vcs=res[0], - vcs_id=res[1], - design_group=res[2], - design_group_id=res[3], - has_formulas=res[4] - )) + res_dict.append( + models.VcsDgPairs( + vcs=res[0], + vcs_id=res[1], + design_group=res[2], + design_group_id=res[3], + has_formulas=res[4], + ) + ) return res_dict diff --git a/sql/V231010_cvs.sql b/sql/V231010_cvs.sql index 058ab0bc..5d50689e 100644 --- a/sql/V231010_cvs.sql +++ b/sql/V231010_cvs.sql @@ -1,2 +1,7 @@ ALTER TABLE `seddb`.`cvs_vd_design_values` - MODIFY COLUMN `value` CHAR(255); \ No newline at end of file + MODIFY COLUMN `value` CHAR(255); + +ALTER TABLE `seddb`.`cvs_design_mi_formulas` + ADD COLUMN `time_latex` TEXT NULL AFTER `time`, + ADD COLUMN `cost_latex` TEXT NULL AFTER `cost`, + ADD COLUMN `revenue_latex` TEXT NULL AFTER `revenue`; \ No newline at end of file diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index b3b92724..c664fa23 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -18,11 +18,32 @@ def test_create_formulas(client, std_headers, std_user): # Act - time = '2+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{vd:' + str( - value_driver.id) + ',"' + str(value_driver.name) + '"}' + time = ( + '2+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + 
'"}+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}' + ) + time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}' cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' - revenue = '20+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{ef:' + str( - external_factor.id) + ',"' + str(external_factor.name) + '"}' + cost_latex = f'2+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + revenue = ( + '20+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}+{ef:' + + str(external_factor.id) + + ',"' + + str(external_factor.name) + + '"}' + ) + revenue_latex = f'20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' time_comment = testutils.random_str(10, 200) cost_comment = testutils.random_str(10, 200) revenue_comment = None @@ -30,19 +51,29 @@ def test_create_formulas(client, std_headers, std_user): rate = tu.random_rate_choice() time_unit = tu.random_time_unit() - res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', - headers=std_headers, - json=[{ - "vcs_row_id": vcs_rows[0].id, - "time": {"formula": time, "comment": time_comment}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": cost_comment}, - "revenue": {"formula": revenue, "comment": revenue_comment}, - "rate": rate - }]) - - res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', - headers=std_headers) + res = client.put( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + headers=std_headers, + json=[ + { + "vcs_row_id": vcs_rows[0].id, + "time": {"text": time, "latex": time_latex, "comment": time_comment}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost_latex, "comment": cost_comment}, + "revenue": { + "text": revenue, + "latex": revenue_latex, + "comment": revenue_comment, + }, + "rate": rate, + } + ], + ) + + res_get = client.get( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 200 @@ -74,16 +105,20 @@ def test_create_formulas_no_optional(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', - headers=std_headers, - json=[{ - "vcs_row_id": vcs_rows[0].id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) + res = client.put( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + headers=std_headers, + json=[ + { + "vcs_row_id": vcs_rows[0].id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) # Assert assert res.status_code == 200 @@ -105,17 +140,24 @@ def test_get_all_formulas(client, std_headers, std_user): 
design_group = tu.seed_random_design_group(project.id) # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id + ) - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 200 assert len(res.json()) == len(formulas) # Cleanup - tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) + tu.delete_formulas( + project.id, + [(formula.vcs_row_id, formula.design_group_id) for formula in formulas], + ) tu.delete_design_group(project.id, design_group.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) @@ -134,8 +176,10 @@ def test_get_all_formulas_invalid_project(client, std_headers, std_user): # Act tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) - res = client.get(f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 404 @@ -158,8 +202,10 @@ def test_get_all_formulas_invalid_vcs(client, std_headers, std_user): # Act tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) - res = client.get(f'/api/cvs/project/{project.id}/vcs/{invalid_vcs_id}/design-group/{design_group.id}/formulas/all', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{project.id}/vcs/{invalid_vcs_id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 404 @@ -182,8 +228,10 @@ def get_all_formulas_invalid_design_group(client, std_headers, std_user): # Act tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) - res = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{invalid_dg_id}/formulas/all', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{invalid_dg_id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 404 @@ -201,18 +249,38 @@ def test_edit_formulas(client, std_headers, std_user): vcs = tu.seed_random_vcs(project.id, current_user.id) design_group = tu.seed_random_design_group(project.id) - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) value_driver = tu.seed_random_value_driver(current_user.id, project.id) external_factor = tu.seed_random_external_factor(project.id) # Act - time = '2+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{vd:' + str( - value_driver.id) + ',"' + str(value_driver.name) + '"}' + time = ( + '2+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}' + ) cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' - revenue = '20+{vd:' + str(value_driver.id) + ',"' + str(value_driver.name) + '"}+{ef:' + str( - external_factor.id) + ',"' + 
str(external_factor.name) + '"}' + revenue = ( + '20+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}+{ef:' + + str(external_factor.id) + + ',"' + + str(external_factor.name) + + '"}' + ) time_unit = tu.random_time_unit() rate = tu.random_rate_choice() @@ -220,17 +288,22 @@ def test_edit_formulas(client, std_headers, std_user): res = client.put( f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, - json=[{ - "vcs_row_id": formulas[0].vcs_row_id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) - - res_get = client.get(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', - headers=std_headers) + json=[ + { + "vcs_row_id": formulas[0].vcs_row_id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) + + res_get = client.get( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) # Assert assert res.status_code == 200 @@ -251,7 +324,9 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): design_group = tu.seed_random_design_group(project.id) # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) time = testutils.random_str(10, 200) time_unit = tu.random_time_unit() @@ -262,21 +337,27 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): res = client.put( f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', headers=std_headers, - json=[{ - "vcs_row_id": formulas[0].vcs_row_id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) + json=[ + { + "vcs_row_id": formulas[0].vcs_row_id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) # Assert assert res.status_code == 200 assert res.json() == True # Cleanup - tu.delete_formulas(project.id, [(formula.vcs_row_id, formula.design_group_id) for formula in formulas]) + tu.delete_formulas( + project.id, + [(formula.vcs_row_id, formula.design_group_id) for formula in formulas], + ) tu.delete_design_group(project.id, design_group.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) @@ -303,16 +384,20 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{dg_invalid_id}/formulas', - headers=std_headers, - json=[{ - "vcs_row_id": row_id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) + res = client.put( + 
f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{dg_invalid_id}/formulas', + headers=std_headers, + json=[ + { + "vcs_row_id": row_id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) # Assert assert res.status_code == 404 @@ -343,16 +428,20 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', - headers=std_headers, - json=[{ - "vcs_row_id": row_id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) + res = client.put( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + headers=std_headers, + json=[ + { + "vcs_row_id": row_id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) # Assert assert res.status_code == 404 @@ -384,16 +473,20 @@ def test_edit_formulas_invalid_project(client, std_headers, std_user): revenue = testutils.random_str(10, 200) rate = tu.random_rate_choice() - res = client.put(f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', - headers=std_headers, - json=[{ - "vcs_row_id": row_id, - "time": {"formula": time, "comment": ""}, - "time_unit": time_unit, - "cost": {"formula": cost, "comment": ""}, - "revenue": {"formula": revenue, "comment": ""}, - "rate": rate - }]) + res = client.put( + f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + headers=std_headers, + json=[ + { + "vcs_row_id": row_id, + "time": {"text": time, "latex": time, "comment": ""}, + "time_unit": time_unit, + "cost": {"text": cost, "latex": cost, "comment": ""}, + "revenue": {"text": revenue, "latex": revenue, "comment": ""}, + "rate": rate, + } + ], + ) # Assert assert res.status_code == 404 @@ -414,11 +507,14 @@ def test_delete_formulas(client, std_headers, std_user): design_group = tu.seed_random_design_group(project.id) # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) res = client.delete( f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', - headers=std_headers) + headers=std_headers, + ) # Assert assert res.status_code == 200 @@ -440,12 +536,15 @@ def test_delete_formulas_invalid_project(client, std_headers, std_user): design_group = tu.seed_random_design_group(project.id) # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) res = client.delete( f'/api/cvs/project/{invalid_proj_id}/vcs-row/{formulas[0].vcs_row_id}/design-group/' f'{formulas[0].design_group_id}/formulas', - headers=std_headers) + headers=std_headers, + ) # Assert assert res.status_code == 404 @@ -464,12 +563,15 @@ def test_delete_formulas_invalid_vcs_row(client, 
std_headers, std_user): design_group = tu.seed_random_design_group(project.id) # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) invalid_vcs_row_id = formulas[0].vcs_row_id + 1 res = client.delete( f'/api/cvs/project/{project.id}/vcs-row/{invalid_vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', - headers=std_headers) + headers=std_headers, + ) # Assert assert res.status_code == 404 @@ -489,11 +591,14 @@ def test_delete_formulas_invalid_design_group(client, std_headers, std_user): invalid_dg_id = design_group.id + 1 # Act - formulas = tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id, 1) + formulas = tu.seed_random_formulas( + project.id, vcs.id, design_group.id, current_user.id, 1 + ) res = client.delete( f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{invalid_dg_id}/formulas', - headers=std_headers) + headers=std_headers, + ) # Assert assert res.status_code == 400 @@ -513,18 +618,26 @@ def test_get_vcs_dg_pairs(client, std_headers, std_user): formulas = [] for i in range(4): - formulas.append(tu.seed_random_formulas(project.id, vcss[i].id, dgs[i].id, current_user.id, 1)) + formulas.append( + tu.seed_random_formulas( + project.id, vcss[i].id, dgs[i].id, current_user.id, 1 + ) + ) # Act - res = client.get(f'/api/cvs/project/{project.id}/vcs/design/formula-pairs', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{project.id}/vcs/design/formula-pairs', headers=std_headers + ) # Assert assert res.status_code == 200 assert len(res.json()) == len(vcss) * len(dgs) # Cleanup - tu.delete_formulas(project.id, [(formula[0].vcs_row_id, formula[0].design_group_id) for formula in formulas]) + tu.delete_formulas( + project.id, + [(formula[0].vcs_row_id, formula[0].design_group_id) for formula in formulas], + ) [tu.delete_design_group(project.id, design_group.id) for design_group in dgs] tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id for vcs in vcss]) tu.delete_project_by_id(project.id, current_user.id) @@ -542,11 +655,17 @@ def test_get_vcs_dg_pairs_invalid_project(client, std_headers, std_user): formulas = [] for i in range(4): - formulas.append(tu.seed_random_formulas(project.id, vcss[i].id, dgs[i].id, current_user.id, 1)) + formulas.append( + tu.seed_random_formulas( + project.id, vcss[i].id, dgs[i].id, current_user.id, 1 + ) + ) # Act - res = client.get(f'/api/cvs/project/{invalid_proj_id}/vcs/design/formula-pairs', - headers=std_headers) + res = client.get( + f'/api/cvs/project/{invalid_proj_id}/vcs/design/formula-pairs', + headers=std_headers, + ) # Assert assert res.status_code == 404 diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index f6a1c4bc..5ba1243f 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -422,10 +422,10 @@ def seed_random_formulas(project_id: int, vcs_id: int, design_group_id: int, use formula_post = connect_model.FormulaRowPost( vcs_row_id=vcs_row.id, - time=connect_model.Formula(formula=time, comment=""), + time=connect_model.Formula(text=time, latex=time, comment=""), time_unit=time_unit, - cost=connect_model.Formula(formula=cost, comment=""), - revenue=connect_model.Formula(formula=revenue, comment=""), + cost=connect_model.Formula(text=cost, latex=cost, comment=""), + revenue=connect_model.Formula(text=revenue, latex=revenue, comment=""), rate=rate ) From 
65a81b6ce6650e5df93750d34460384b18dff66b Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 21 Nov 2023 17:13:08 +0100 Subject: [PATCH 186/210] empty string if time, cost, revenue is none --- sedbackend/apps/cvs/link_design_lifecycle/storage.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 2cbbb661..46ecce87 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -500,14 +500,14 @@ def populate_formula(db_result) -> models.FormulaRowGet: vcs_row_id=db_result['vcs_row'], design_group_id=db_result['design_group'], time=models.Formula( - text=db_result['time'], latex=db_result['time_latex'], comment=db_result['time_comment'] + text=db_result['time'] or '', latex=db_result['time_latex'] or '', comment=db_result['time_comment'] ), time_unit=db_result['time_unit'], cost=models.Formula( - text=db_result['cost'], latex=db_result['cost_latex'], comment=db_result['cost_comment'] + text=db_result['cost'] or '', latex=db_result['cost_latex'] or '', comment=db_result['cost_comment'] ), revenue=models.Formula( - text=db_result['revenue'], latex=db_result['revenue_latex'], comment=db_result['revenue_comment'] + text=db_result['revenue'] or '', latex=db_result['revenue_latex'] or '', comment=db_result['revenue_comment'] ), rate=db_result['rate'], row_value_drivers=[ From a9fa47facf6d2ac1cf3f6a13e6345e25b7e87457 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Fri, 24 Nov 2023 14:49:45 +0100 Subject: [PATCH 187/210] fixed delete_simulation/delete_simulations, removed logger.debug and unecessary imports --- designs.csv | 4 + runs.csv | 7 + sedbackend/apps/core/db.py | 4 +- sedbackend/apps/cvs/life_cycle/storage.py | 3 +- sedbackend/apps/cvs/simulation/exceptions.py | 1 + .../apps/cvs/simulation/implementation.py | 88 ++++++++++++- sedbackend/apps/cvs/simulation/models.py | 8 ++ sedbackend/apps/cvs/simulation/router.py | 38 ++++++ sedbackend/apps/cvs/simulation/storage.py | 124 +++++++++++++++++- sql/V231116_cvs.sql | 13 ++ vcss.csv | 3 + vds.csv | 6 + 12 files changed, 284 insertions(+), 15 deletions(-) create mode 100644 designs.csv create mode 100644 runs.csv create mode 100644 sql/V231116_cvs.sql create mode 100644 vcss.csv create mode 100644 vds.csv diff --git a/designs.csv b/designs.csv new file mode 100644 index 00000000..8d5e81b3 --- /dev/null +++ b/designs.csv @@ -0,0 +1,4 @@ +id,name,design_group_id,vd_design_values +1,(2G) HUD,7,"[{'vd_id': 2, 'value': '1.144'}, {'vd_id': 3, 'value': '15'}, {'vd_id': 4, 'value': '0'}, {'vd_id': 5, 'value': '2.08'}, {'vd_id': 6, 'value': '0'}]" +2,(2G) AR HUD,7,"[{'vd_id': 2, 'value': '0.91'}, {'vd_id': 3, 'value': '20'}, {'vd_id': 4, 'value': '15'}, {'vd_id': 5, 'value': '2.15'}, {'vd_id': 6, 'value': '0'}]" +3,(3G) Holographic Wave Guice AR-HUD,7,"[{'vd_id': 2, 'value': '0.45'}, {'vd_id': 3, 'value': '30'}, {'vd_id': 4, 'value': '4'}, {'vd_id': 5, 'value': '2.35'}, {'vd_id': 6, 'value': '0'}]" diff --git a/runs.csv b/runs.csv new file mode 100644 index 00000000..406963e0 --- /dev/null +++ b/runs.csv @@ -0,0 +1,7 @@ +time,mean_NPV,max_NPVs,mean_payback_time,all_npvs,payback_time,surplus_value_end_result,design_id,vcs_id +"[0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 
10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, -31020.683812044346, -31020.683812044346, -31020.683812044346, -31020.683812044346, -60925.467741778804, -60925.467741778804, -60925.467741778804, -60925.467741778804, -87921.17323234044, -87921.17323234044, -87921.17323234044, -87921.17323234044, -113688.20721630525, -113688.20721630525, -113688.20721630525, -113688.20721630525, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166]",[-137070.6397462166],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, -31020.683812044346, -31020.683812044346, -31020.683812044346, -31020.683812044346, -60925.467741778804, -60925.467741778804, -60925.467741778804, -60925.467741778804, -87921.17323234044, -87921.17323234044, -87921.17323234044, -87921.17323234044, -113688.20721630525, -113688.20721630525, -113688.20721630525, -113688.20721630525, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166, -137070.6397462166]]",-1.0,-137070.6397462166,1,23 +"[0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -37779.73845511261, -37779.73845511261, -37779.73845511261, -37779.73845511261, -68021.10948352134, -68021.10948352134, -68021.10948352134, -68021.10948352134, -94621.71157339044, -94621.71157339044, -94621.71157339044, -94621.71157339044, -119414.0130202508, -119414.0130202508, -119414.0130202508, -119414.0130202508, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, 
-142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195]",[-142970.2710131195],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -37779.73845511261, -37779.73845511261, -37779.73845511261, -37779.73845511261, -68021.10948352134, -68021.10948352134, -68021.10948352134, -68021.10948352134, -94621.71157339044, -94621.71157339044, -94621.71157339044, -94621.71157339044, -119414.0130202508, -119414.0130202508, -119414.0130202508, -119414.0130202508, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195, -142970.2710131195]]",-1.0,-142970.2710131195,2,23 +"[0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -48540.79374944504, -48540.79374944504, -48540.79374944504, -48540.79374944504, -93124.28945993421, -93124.28945993421, -93124.28945993421, -93124.28945993421, -132798.16739623205, -132798.16739623205, -132798.16739623205, -132798.16739623205, -174295.1260838775, -174295.1260838775, -174295.1260838775, -174295.1260838775, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528]",[-211891.51964216528],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -48540.79374944504, -48540.79374944504, -48540.79374944504, -48540.79374944504, -93124.28945993421, -93124.28945993421, -93124.28945993421, -93124.28945993421, -132798.16739623205, -132798.16739623205, -132798.16739623205, -132798.16739623205, -174295.1260838775, -174295.1260838775, -174295.1260838775, -174295.1260838775, -211891.51964216528, -211891.51964216528, 
-211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528, -211891.51964216528]]",-1.0,-211891.51964216528,3,23 +"[0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, -3330.3936859077508, -3330.3936859077508, -3330.3936859077508, -3330.3936859077508, -6259.0631057566625, -6259.0631057566625, -6259.0631057566625, -6259.0631057566625, -8970.794054551008, -8970.794054551008, -8970.794054551008, -8970.794054551008, -11481.656048332597, -11481.656048332597, -11481.656048332597, -11481.656048332597, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129]",[-13806.528268646129],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, -3330.3936859077508, -3330.3936859077508, -3330.3936859077508, -3330.3936859077508, -6259.0631057566625, -6259.0631057566625, -6259.0631057566625, -6259.0631057566625, -8970.794054551008, -8970.794054551008, -8970.794054551008, -8970.794054551008, -11481.656048332597, -11481.656048332597, -11481.656048332597, -11481.656048332597, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129, -13806.528268646129]]",-1.0,-13806.528268646129,1,93 +"[0.0, 
0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -4230.574026902625, -4230.574026902625, -4230.574026902625, -4230.574026902625, -7956.2087963758895, -7956.2087963758895, -7956.2087963758895, -7956.2087963758895, -11405.87062567408, -11405.87062567408, -11405.87062567408, -11405.87062567408, -14600.001954386806, -14600.001954386806, -14600.001954386806, -14600.001954386806, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088]",[-17557.53096735088],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -4230.574026902625, -4230.574026902625, -4230.574026902625, -4230.574026902625, -7956.2087963758895, -7956.2087963758895, -7956.2087963758895, -7956.2087963758895, -11405.87062567408, -11405.87062567408, -11405.87062567408, -11405.87062567408, -14600.001954386806, -14600.001954386806, -14600.001954386806, -14600.001954386806, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088, -17557.53096735088]]",-1.0,-17557.53096735088,2,93 +"[0.0, 0.0, 0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0, 3.25, 3.5, 3.75, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5, 5.75, 6.0, 6.25, 6.5, 6.75, 7.0, 7.25, 7.5, 7.75, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75, 10.0, 10.25, 10.5, 10.75, 11.0, 11.25, 11.5, 11.75, 12.0, 12.25, 12.5, 12.75, 13.0, 13.25, 13.5, 13.75, 14.0, 14.25, 14.5, 14.75]","[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -373.77825628024334, -5721.264053573385, -5721.264053573385, -5721.264053573385, -5721.264053573385, -10663.47341479187, -10663.47341479187, -10663.47341479187, -10663.47341479187, -15239.593201274201, -15239.593201274201, -15239.593201274201, -15239.593201274201, -19476.74115873591, -19476.74115873591, -19476.74115873591, -19476.74115873591, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, 
-23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516]",[-23400.026311029516],-1.0,"[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -373.77825628024334, -5721.264053573385, -5721.264053573385, -5721.264053573385, -5721.264053573385, -10663.47341479187, -10663.47341479187, -10663.47341479187, -10663.47341479187, -15239.593201274201, -15239.593201274201, -15239.593201274201, -15239.593201274201, -19476.74115873591, -19476.74115873591, -19476.74115873591, -19476.74115873591, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516, -23400.026311029516]]",-1.0,-23400.026311029516,3,93 diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..daa1d89a 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/life_cycle/storage.py b/sedbackend/apps/cvs/life_cycle/storage.py index 92b52df3..988c7ca4 100644 --- a/sedbackend/apps/cvs/life_cycle/storage.py +++ b/sedbackend/apps/cvs/life_cycle/storage.py @@ -315,7 +315,6 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, dsm_file = pd.read_csv(f) logger.debug(f'File content: {dsm_file}') vcs_table = vcs_storage.get_vcs_table(db_connection, project_id, vcs_id) - vcs_processes = get_process_names_from_rows(vcs_table) if len(dsm_file['Processes'].values[1:-1]) != len(vcs_processes): @@ -342,6 +341,7 @@ def save_dsm_file(db_connection: PooledMySQLConnection, project_id: int, pass f.seek(0) + logger.debug(f'File content: {model_file}') stored_file = file_storage.db_save_file(db_connection, model_file) insert_statement = MySQLStatementBuilder(db_connection) @@ -410,7 +410,6 @@ def delete_dsm_file(db_connection: PooledMySQLConnection, project_id: int, vcs_i return True - def get_dsm_from_file_id(db_connection: PooledMySQLConnection, file_id: int, user_id: int) -> dict: try: path = file_storage.db_get_file_path(db_connection, file_id, user_id) diff --git a/sedbackend/apps/cvs/simulation/exceptions.py b/sedbackend/apps/cvs/simulation/exceptions.py index ff04727b..251de8a5 100644 --- a/sedbackend/apps/cvs/simulation/exceptions.py +++ 
b/sedbackend/apps/cvs/simulation/exceptions.py @@ -40,6 +40,7 @@ def __init__(self, exception) -> None: self.message = str(exception) + class DesignIdsNotFoundException(Exception): pass diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 45367cee..0303f479 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -27,7 +27,7 @@ CouldNotFetchValueDriverDesignValuesException, NoTechnicalProcessException, ) -from sedbackend.apps.cvs.simulation.models import SimulationResult +from sedbackend.apps.cvs.simulation.models import SimulationResult,SimulationFetch from sedbackend.apps.cvs.vcs import exceptions as vcs_exceptions from sedbackend.apps.cvs.market_input import exceptions as market_input_exceptions @@ -55,6 +55,7 @@ def run_simulation( normalized_npv, is_multiprocessing, ) + con.commit() return result except auth_ex.UnauthorizedOperationException: raise HTTPException( @@ -130,11 +131,6 @@ def run_simulation( raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find DSM file" ) - except FailedToFetchSimulationDataException: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=f"Could not fetch simulation data. Check your VCSs and Design Groups.", - ) except SimulationFailedException as e: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=e.message.capitalize() @@ -224,6 +220,86 @@ def get_sim_settings(project_id: int) -> models.SimSettings: status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Could not send simulation settings", ) + +def get_simulations(project_id: int) -> List[models.SimulationFetch]: + try: + with get_connection() as con: + result = storage.get_simulation_files(con, project_id) + return result + except project_exceptions.CVSProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find project" + ) + except Exception as e: + logger.debug(e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Could not send simulations", + ) + + +def remove_simulation_files(project_id: int, user_id: int) -> bool: + try: + with get_connection() as con: + result = storage.delete_all_simulation_files(con, project_id, user_id) + con.commit() + return result + except project_exceptions.CVSProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find project" + ) + except Exception as e: + logger.debug(e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Could not send simulations", + ) + + + +def get_simulation_file_content(user_id: int, file_id) -> SimulationResult: + try: + with get_connection() as con: + result = storage.get_file_content(con, user_id, file_id) + if result is None: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Could not find simulation file", + ) + logger.debug('Successfully retrieved simulation file content') + logger.debug(result) + return result + except project_exceptions.CVSProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail=f"Could not find project" + ) + except Exception as e: + logger.exception(e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Could not retrieve simulation file content", + ) + + + +def remove_simulation_file(project_id: int, user_id, file_id) -> bool: + try: + with 
get_connection() as con: + result = storage.delete_simulation_file(con, project_id, file_id,user_id) + con.commit() + return result + except project_exceptions.CVSProjectNotFoundException: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find project" + ) + except Exception as e: + logger.debug(e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Could not send simulations", + ) + def edit_sim_settings( diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index e658df0a..b66dc7d9 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -4,11 +4,13 @@ from typing import Optional from fastapi import Form +import datetime from sedbackend.apps.cvs.design.models import DesignGroup, Design, ValueDriverDesignValue from sedbackend.apps.cvs.link_design_lifecycle import models as link_model from dataclasses import dataclass from sedbackend.apps.cvs.vcs.models import VCS, ValueDriver +from sedbackend.apps.cvs.design.models import DesignGroup, Design, ValueDriverDesignValue class NonTechCost(str, Enum): @@ -70,6 +72,12 @@ class EditSimSettings(BaseModel): class SimSettings(EditSimSettings): project: int +class SimulationFetch(BaseModel): + project_id: int + file: int + insert_timestamp: datetime.datetime + + @dataclass class FileParams: diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 6ea29f3f..cbd84a81 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -8,6 +8,7 @@ from sedbackend.apps.cvs.simulation import implementation, models from sedbackend.apps.cvs.simulation.models import SimulationResult + router = APIRouter() @@ -66,6 +67,25 @@ async def get_sim_settings(native_project_id: int) -> models.SimSettings: return implementation.get_sim_settings(native_project_id) +@router.get( + '/project/{native_project_id}/simulation/all/', + summary='Get simulations for project', + response_model=List[models.SimulationFetch], + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_simulations(native_project_id: int) -> List[models.SimulationFetch]: + return implementation.get_simulations(native_project_id) + + +@router.delete( + '/project/{native_project_id}/simulation/all/', + summary='Remove all simulation files', + response_model= bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def remove_simulation_files(native_project_id: int, user: User = Depends(get_current_active_user)) -> bool: + return implementation.remove_simulation_files(native_project_id, user.id) + @router.put( '/project/{native_project_id}/simulation/settings', summary='Create or update simulation settings', @@ -75,3 +95,21 @@ async def get_sim_settings(native_project_id: int) -> models.SimSettings: async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings, user: User = Depends(get_current_active_user)) -> bool: return implementation.edit_sim_settings(native_project_id, sim_settings, user.id) + +@router.get( + '/project/{native_project_id}/simulation/file/{file_id}', + summary='Get simulation file', + response_model=models.SimulationResult, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_simulation_file_content(native_project_id,file_id: int, user: User = 
Depends(get_current_active_user)) -> models.SimulationResult: + return implementation.get_simulation_file_content(user.id, file_id) + +@router.delete( + '/project/{native_project_id}/simulation/file/{file_id}', + summary='Remove simulation file', + response_model=bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_simulation_file_content(native_project_id,file_id: int, user: User = Depends(get_current_active_user)) -> bool: + return implementation.remove_simulation_file(native_project_id, user.id, file_id) \ No newline at end of file diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 15556e41..a56ab01c 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -1,32 +1,48 @@ import re import sys from math import isnan +import csv + +import requests + +import io +import magic +import os +import tempfile +from datetime import datetime +from plusminus import BaseArithmeticParser from mysql.connector.pooling import PooledMySQLConnection import pandas as pd +import numpy as np from mysql.connector import Error from fastapi.logger import logger +from fastapi import UploadFile from desim import interface as des from desim.data import NonTechCost, TimeFormat from desim.simulation import Process -from plusminus import BaseArithmeticParser from typing import List from sedbackend.apps.cvs.design.storage import get_all_designs from mysqlsb import FetchType, MySQLStatementBuilder -from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id +from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.cvs.simulation.models import SimulationResult +from sedbackend.apps.cvs.life_cycle.storage import get_dsm_from_file_id from sedbackend.apps.cvs.vcs.storage import get_vcss from sedbackend.libs.formula_parser import expressions as expr from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage -from sedbackend.apps.cvs.life_cycle import storage as life_cycle_storage -from sedbackend.apps.core.files import exceptions as file_exceptions +from sedbackend.apps.cvs.life_cycle import exceptions as life_cycle_exceptions, storage as life_cycle_storage +from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_exceptions +from sedbackend.apps.core.projects import storage as core_project_storage +from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex +from sedbackend.apps.core.files.models import StoredFilePath + SIM_SETTINGS_TABLE = "cvs_simulation_settings" SIM_SETTINGS_COLUMNS = [ @@ -54,6 +70,103 @@ "minutes": TimeFormat.MINUTES, } ) +MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB + +SIM_SETTINGS_TABLE = "cvs_simulation_settings" +SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', + 'interarrival_time', 'start_time', 'end_time', 'discount_rate', 'non_tech_add', 'monte_carlo', + 'runs'] + +CVS_SIMULATION_FILES_TABLE = 'cvs_simulation_files' +CVS_SIMULATION_FILES_COLUMNSS = ['project_id', 'file'] +CVS_SIMULATION_FILES_COLUMNS = ['project_id', 'file', 'insert_timestamp'] + +def csv_from_dataframe(dataframe) -> UploadFile: + dataframe = pd.DataFrame(dataframe) + fd, path = tempfile.mkstemp() + try: + with open(path, "w+") as csv_file: + dataframe.to_json(csv_file,orient='columns') + 
finally: + csv_file = open(path, "r+b") + upload_file = UploadFile(filename=csv_file.name + ".json", file=csv_file) + os.close(fd) + os.remove(path) + + return upload_file + +def save_simulation(db_connection: PooledMySQLConnection, project_id: int, simulation: SimulationResult,user_id: int) -> bool: + upload_file = csv_from_dataframe(simulation) + logger.debug(f'upload_files: {upload_file.read}') + return save_simulation_file(db_connection, project_id, upload_file, user_id) + +def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, + file: UploadFile, user_id) -> bool: + subproject = core_project_storage.db_get_subproject_native(db_connection, CVS_APP_SID, project_id) + model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) + + with model_file.file_object as f: + f.seek(0) + tmp_file = f.read() + mime = magic.from_buffer(tmp_file) + logger.debug(f'Mime: {mime}') + if mime != "JSON data" and "ASCII text" not in mime: + raise life_cycle_exceptions.InvalidFileTypeException + f.seek(0) + csv_file = pd.read_json(f) + logger.debug(f'File content: {model_file}') + f.seek(0) + stored_file = file_storage.db_save_file(db_connection, model_file) + + insert_statement = MySQLStatementBuilder(db_connection) + insert_statement.insert(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNSS) \ + .set_values([project_id, stored_file.id]) \ + .execute(fetch_type=FetchType.FETCH_NONE) + return True + +def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int): + select_statement = MySQLStatementBuilder(db_connection) + file_res = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ + .where('project_id = %s', [project_id]) \ + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + return file_res + +def get_simulation_file_path(db_connection: PooledMySQLConnection, file_id, user_id) -> StoredFilePath: + return file_storage.db_get_file_path(db_connection, file_id, user_id) + + + +def delete_simulation_file(db_connection: PooledMySQLConnection, project_id: int, file_id, user_id: int) -> bool: + if file_id is None: + file_storage.db_delete_file(db_connection, file_id, user_id) + + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_SIMULATION_FILES_TABLE) \ + .where('file = %s', [file_id] ) \ + .execute(return_affected_rows=True) + return True + + +def delete_all_simulation_files(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> bool: + files = get_simulation_files(db_connection, project_id) + + for file in files: + file_storage.db_delete_file(db_connection, file['file'],user_id) + + delete_statement = MySQLStatementBuilder(db_connection) + _, rows = delete_statement.delete(CVS_SIMULATION_FILES_TABLE) \ + .where('project_id = %s', [project_id]) \ + .execute(return_affected_rows=True) + return True + + +def get_file_content(db_connection: PooledMySQLConnection, user_id, file_id) -> SimulationResult: + path = get_simulation_file_path(db_connection, file_id, user_id).path + with open(path, newline='') as f: + data = pd.read_json(f, orient='columns') + designs, vcss, vds, run = data[1] + + return SimulationResult(designs = designs, vcss = vcss,vds = vds,runs = run) def run_simulation( @@ -211,8 +324,9 @@ def run_simulation( ) sim_result.runs.append(sim_run_res) - logger.debug("Returning the results") + save_simulation(db_connection, project_id,sim_result, user_id) return sim_result + def populate_processes( diff --git 
a/sql/V231116_cvs.sql b/sql/V231116_cvs.sql new file mode 100644 index 00000000..1a41a4fd --- /dev/null +++ b/sql/V231116_cvs.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS `seddb`.`cvs_simulation_files` +( + `project_id` INT UNSIGNED NOT NULL, + `file` INT UNSIGNED NOT NULL, + `insert_timestamp` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3), + PRIMARY KEY (`file`), + FOREIGN KEY (`project_id`) + REFERENCES `seddb`.`cvs_projects`(`id`) + ON DELETE CASCADE, + FOREIGN KEY(`file`) + REFERENCES `seddb`.`files`(`id`) + ON DELETE CASCADE +); \ No newline at end of file diff --git a/vcss.csv b/vcss.csv new file mode 100644 index 00000000..9eee5b87 --- /dev/null +++ b/vcss.csv @@ -0,0 +1,3 @@ +id,name,description,project,datetime_created,year_from,year_to +23,Traditional Business,For traditional business,"{'id': 2, 'name': 'Wingquistss', 'description': 'Seminar Wingquist april 19th', 'currency': 'k$', 'owner': {'id': 1, 'username': 'admin', 'email': None, 'full_name': None, 'disabled': False, 'scopes': 'admin'}, 'datetime_created': datetime.datetime(2023, 6, 9, 14, 11, 18, 247000), 'my_access_right': 4, 'project': None, 'subproject': {'name': 'Unnamed sub-project', 'application_sid': 'MOD.CVS', 'native_project_id': 2, 'id': 2, 'owner_id': 1, 'project_id': None, 'datetime_created': datetime.datetime(2023, 8, 3, 10, 42, 58, 131000)}}",2023-07-19 11:15:56.675,2023,2023 +93,Traditional Business (1),For traditional business,"{'id': 2, 'name': 'Wingquistss', 'description': 'Seminar Wingquist april 19th', 'currency': 'k$', 'owner': {'id': 1, 'username': 'admin', 'email': None, 'full_name': None, 'disabled': False, 'scopes': 'admin'}, 'datetime_created': datetime.datetime(2023, 6, 9, 14, 11, 18, 247000), 'my_access_right': 4, 'project': None, 'subproject': {'name': 'Unnamed sub-project', 'application_sid': 'MOD.CVS', 'native_project_id': 2, 'id': 2, 'owner_id': 1, 'project_id': None, 'datetime_created': datetime.datetime(2023, 8, 3, 10, 42, 58, 131000)}}",2023-10-20 11:16:06.782,2023,2023 diff --git a/vds.csv b/vds.csv new file mode 100644 index 00000000..0f3fb347 --- /dev/null +++ b/vds.csv @@ -0,0 +1,6 @@ +id,name,unit,project_id +2,Design Similarity,0-1,2 +3,Volume,Liter,2 +4,Field of View,%,2 +5,Assembly RMS,N/A,2 +6,Volume > Reserved Volume,liter,2 From 7ecd2efcb856c39df0aa1e53ac373388aa4b807f Mon Sep 17 00:00:00 2001 From: Ziidy Date: Sun, 3 Dec 2023 20:43:27 +0100 Subject: [PATCH 188/210] datetime not working still SimulationResult: +) -> models.SimulationFetch: try: with get_connection() as con: result = storage.run_simulation( diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index b66dc7d9..65166258 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -75,7 +75,7 @@ class SimSettings(EditSimSettings): class SimulationFetch(BaseModel): project_id: int file: int - insert_timestamp: datetime.datetime + insert_timestamp: str diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index cbd84a81..3dd35c1f 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -15,12 +15,12 @@ @router.post( '/project/{native_project_id}/simulation/run', summary='Run simulation', - response_model=models.SimulationResult, + response_model=models.SimulationFetch, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def run_simulation(sim_settings: models.EditSimSettings, 
native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> SimulationResult: + user: User = Depends(get_current_active_user)) -> models.SimulationFetch: return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, user.id, normalized_npv) @@ -47,12 +47,12 @@ async def run_dsm_file_simulation(native_project_id: int, sim_params: models.Fil @router.post( '/project/{native_project_id}/simulation/run-multiprocessing', summary='Run monte carlo simulation with multiprocessing', - response_model=models.SimulationResult, + response_model=models.SimulationFetch, dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def run_multiprocessing(sim_settings: models.EditSimSettings, native_project_id: int, vcs_ids: List[int], design_group_ids: List[int], normalized_npv: Optional[bool] = False, - user: User = Depends(get_current_active_user)) -> SimulationResult: + user: User = Depends(get_current_active_user)) -> models.SimulationFetch: return implementation.run_simulation(sim_settings, native_project_id, vcs_ids, design_group_ids, user.id, normalized_npv, True) @@ -68,7 +68,7 @@ async def get_sim_settings(native_project_id: int) -> models.SimSettings: @router.get( - '/project/{native_project_id}/simulation/all/', + '/project/{native_project_id}/simulation/all', summary='Get simulations for project', response_model=List[models.SimulationFetch], dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] @@ -77,12 +77,12 @@ async def get_simulations(native_project_id: int) -> List[models.SimulationFetch return implementation.get_simulations(native_project_id) -@router.delete( - '/project/{native_project_id}/simulation/all/', - summary='Remove all simulation files', - response_model= bool, - dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] -) +#@router.delete( + # '/project/{native_project_id}/simulation/all', + # summary='Remove all simulation files', + # response_model= bool, + #dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +#) async def remove_simulation_files(native_project_id: int, user: User = Depends(get_current_active_user)) -> bool: return implementation.remove_simulation_files(native_project_id, user.id) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index a56ab01c..e67d86ee 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -27,7 +27,7 @@ from typing import List from sedbackend.apps.cvs.design.storage import get_all_designs -from mysqlsb import FetchType, MySQLStatementBuilder +from mysqlsb import FetchType, MySQLStatementBuilder,Sort from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.cvs.simulation.models import SimulationResult @@ -124,18 +124,21 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, .execute(fetch_type=FetchType.FETCH_NONE) return True -def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int): +def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int) -> [models.SimulationFetch]: select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ .where('project_id = %s', [project_id]) \ + 
.order_by(['file'], Sort.DESCENDING) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + for row in file_res: + row['insert_timestamp'] = str(row['insert_timestamp']) + logger.debug(row['insert_timestamp']) return file_res def get_simulation_file_path(db_connection: PooledMySQLConnection, file_id, user_id) -> StoredFilePath: return file_storage.db_get_file_path(db_connection, file_id, user_id) - def delete_simulation_file(db_connection: PooledMySQLConnection, project_id: int, file_id, user_id: int) -> bool: if file_id is None: file_storage.db_delete_file(db_connection, file_id, user_id) @@ -169,6 +172,18 @@ def get_file_content(db_connection: PooledMySQLConnection, user_id, file_id) -> return SimulationResult(designs = designs, vcss = vcss,vds = vds,runs = run) +def get_simulation_content_with_max_file_id(db_connection: PooledMySQLConnection, project_id: int) -> models.SimulationFetch: + select_statement = MySQLStatementBuilder(db_connection) + max_file_id_subquery = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ + .where('project_id = %s', [project_id]) \ + .order_by(['file'], Sort.DESCENDING) \ + .limit(1) \ + .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + return max_file_id_subquery + + + def run_simulation( db_connection: PooledMySQLConnection, sim_settings: models.EditSimSettings, @@ -178,7 +193,7 @@ def run_simulation( user_id, normalized_npv: bool = False, is_multiprocessing: bool = False, -) -> SimulationResult: +) -> models.SimulationFetch: settings_msg = check_sim_settings(sim_settings) if settings_msg: raise e.BadlyFormattedSettingsException(settings_msg) @@ -325,7 +340,10 @@ def run_simulation( sim_result.runs.append(sim_run_res) save_simulation(db_connection, project_id,sim_result, user_id) - return sim_result + sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) + + + return sim_file_info diff --git a/vcss.csv b/vcss.csv deleted file mode 100644 index 9eee5b87..00000000 --- a/vcss.csv +++ /dev/null @@ -1,3 +0,0 @@ -id,name,description,project,datetime_created,year_from,year_to -23,Traditional Business,For traditional business,"{'id': 2, 'name': 'Wingquistss', 'description': 'Seminar Wingquist april 19th', 'currency': 'k$', 'owner': {'id': 1, 'username': 'admin', 'email': None, 'full_name': None, 'disabled': False, 'scopes': 'admin'}, 'datetime_created': datetime.datetime(2023, 6, 9, 14, 11, 18, 247000), 'my_access_right': 4, 'project': None, 'subproject': {'name': 'Unnamed sub-project', 'application_sid': 'MOD.CVS', 'native_project_id': 2, 'id': 2, 'owner_id': 1, 'project_id': None, 'datetime_created': datetime.datetime(2023, 8, 3, 10, 42, 58, 131000)}}",2023-07-19 11:15:56.675,2023,2023 -93,Traditional Business (1),For traditional business,"{'id': 2, 'name': 'Wingquistss', 'description': 'Seminar Wingquist april 19th', 'currency': 'k$', 'owner': {'id': 1, 'username': 'admin', 'email': None, 'full_name': None, 'disabled': False, 'scopes': 'admin'}, 'datetime_created': datetime.datetime(2023, 6, 9, 14, 11, 18, 247000), 'my_access_right': 4, 'project': None, 'subproject': {'name': 'Unnamed sub-project', 'application_sid': 'MOD.CVS', 'native_project_id': 2, 'id': 2, 'owner_id': 1, 'project_id': None, 'datetime_created': datetime.datetime(2023, 8, 3, 10, 42, 58, 131000)}}",2023-10-20 11:16:06.782,2023,2023 diff --git a/vds.csv b/vds.csv deleted file mode 100644 index 0f3fb347..00000000 --- a/vds.csv +++ /dev/null @@ -1,6 +0,0 @@ -id,name,unit,project_id -2,Design Similarity,0-1,2 
-3,Volume,Liter,2 -4,Field of View,%,2 -5,Assembly RMS,N/A,2 -6,Volume > Reserved Volume,liter,2 From 557419852d9c57f5d19c7ba1f23c5f029fa4266a Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 4 Dec 2023 21:26:17 +0100 Subject: [PATCH 189/210] search for user with full_name empty string --- sedbackend/apps/core/users/storage.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sedbackend/apps/core/users/storage.py b/sedbackend/apps/core/users/storage.py index 3398b553..cc52b982 100644 --- a/sedbackend/apps/core/users/storage.py +++ b/sedbackend/apps/core/users/storage.py @@ -199,7 +199,7 @@ def db_search_users(connection: PooledMySQLConnection, username_search_str: str, if len(username_search_str) == 0: username_search_str = "." if len(full_name_search_str) == 0: - full_name_search_stmnt = '(`full_name` rlike ? OR `full_name` IS NULL)' + full_name_search_stmnt = '(`full_name` rlike ? OR `full_name` IS NULL OR `full_name` = "")' full_name_search_str = "." stmnt = MySQLStatementBuilder(connection) From 200e653dfbdfb156985a8bfdfeed05d48944335e Mon Sep 17 00:00:00 2001 From: Ziidy Date: Tue, 5 Dec 2023 20:04:41 +0100 Subject: [PATCH 190/210] Works! will check unecessary logdebugs/code before merging --- .../apps/cvs/simulation/implementation.py | 12 +++---- sedbackend/apps/cvs/simulation/models.py | 1 + sedbackend/apps/cvs/simulation/router.py | 12 +++---- sedbackend/apps/cvs/simulation/storage.py | 35 +++++++++---------- sql/V231116_cvs.sql | 1 + 5 files changed, 31 insertions(+), 30 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 3be7c493..63f85c66 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -56,6 +56,8 @@ def run_simulation( is_multiprocessing, ) con.commit() + logger.debug("abs3") + logger.debug(result) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( @@ -225,23 +227,20 @@ def get_simulations(project_id: int) -> List[models.SimulationFetch]: try: with get_connection() as con: result = storage.get_simulation_files(con, project_id) + con.commit() return result except project_exceptions.CVSProjectNotFoundException: raise HTTPException( status_code=status.HTTP_404_NOT_FOUND, detail=f"Could not find project" ) - except Exception as e: - logger.debug(e) - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Could not send simulations", - ) def remove_simulation_files(project_id: int, user_id: int) -> bool: try: with get_connection() as con: + logger.debug('queww') result = storage.delete_all_simulation_files(con, project_id, user_id) + logger.debug(result) con.commit() return result except project_exceptions.CVSProjectNotFoundException: @@ -268,6 +267,7 @@ def get_simulation_file_content(user_id: int, file_id) -> SimulationResult: ) logger.debug('Successfully retrieved simulation file content') logger.debug(result) + con.commit() return result except project_exceptions.CVSProjectNotFoundException: raise HTTPException( diff --git a/sedbackend/apps/cvs/simulation/models.py b/sedbackend/apps/cvs/simulation/models.py index 65166258..b387a630 100644 --- a/sedbackend/apps/cvs/simulation/models.py +++ b/sedbackend/apps/cvs/simulation/models.py @@ -76,6 +76,7 @@ class SimulationFetch(BaseModel): project_id: int file: int insert_timestamp: str + vs_x_ds: str diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 
3dd35c1f..f64f2fd7 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -77,12 +77,12 @@ async def get_simulations(native_project_id: int) -> List[models.SimulationFetch return implementation.get_simulations(native_project_id) -#@router.delete( - # '/project/{native_project_id}/simulation/all', - # summary='Remove all simulation files', - # response_model= bool, - #dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] -#) +@router.delete( + '/project/{native_project_id}/simulation/all', + summary='Remove all simulation files', + response_model= bool, + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) async def remove_simulation_files(native_project_id: int, user: User = Depends(get_current_active_user)) -> bool: return implementation.remove_simulation_files(native_project_id, user.id) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index e67d86ee..5b0ae151 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -78,8 +78,8 @@ 'runs'] CVS_SIMULATION_FILES_TABLE = 'cvs_simulation_files' -CVS_SIMULATION_FILES_COLUMNSS = ['project_id', 'file'] -CVS_SIMULATION_FILES_COLUMNS = ['project_id', 'file', 'insert_timestamp'] +CVS_SIMULATION_FILES_COLUMNSS = ['project_id', 'file','vs_x_ds'] +CVS_SIMULATION_FILES_COLUMNS = ['project_id', 'file', 'insert_timestamp', 'vs_x_ds'] def csv_from_dataframe(dataframe) -> UploadFile: dataframe = pd.DataFrame(dataframe) @@ -95,13 +95,13 @@ def csv_from_dataframe(dataframe) -> UploadFile: return upload_file -def save_simulation(db_connection: PooledMySQLConnection, project_id: int, simulation: SimulationResult,user_id: int) -> bool: +def save_simulation(db_connection: PooledMySQLConnection, project_id: int, simulation: SimulationResult,user_id: int, vs_x_ds: str) -> bool: upload_file = csv_from_dataframe(simulation) logger.debug(f'upload_files: {upload_file.read}') - return save_simulation_file(db_connection, project_id, upload_file, user_id) + return save_simulation_file(db_connection, project_id, upload_file, user_id, vs_x_ds) def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, - file: UploadFile, user_id) -> bool: + file: UploadFile, user_id, vs_x_ds: str) -> bool: subproject = core_project_storage.db_get_subproject_native(db_connection, CVS_APP_SID, project_id) model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) @@ -120,18 +120,20 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, insert_statement = MySQLStatementBuilder(db_connection) insert_statement.insert(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNSS) \ - .set_values([project_id, stored_file.id]) \ + .set_values([project_id, stored_file.id, vs_x_ds]) \ .execute(fetch_type=FetchType.FETCH_NONE) return True -def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int) -> [models.SimulationFetch]: +def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int) -> List[models.SimulationFetch]: select_statement = MySQLStatementBuilder(db_connection) file_res = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ .where('project_id = %s', [project_id]) \ .order_by(['file'], Sort.DESCENDING) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) for row in file_res: - row['insert_timestamp'] = 
str(row['insert_timestamp']) + logger.debug(type( row['insert_timestamp'])) + row['insert_timestamp'] = row['insert_timestamp'].strftime("%Y-%m-%d") + logger.debug(type( row['insert_timestamp'])) logger.debug(row['insert_timestamp']) return file_res @@ -152,14 +154,8 @@ def delete_simulation_file(db_connection: PooledMySQLConnection, project_id: int def delete_all_simulation_files(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> bool: files = get_simulation_files(db_connection, project_id) - for file in files: file_storage.db_delete_file(db_connection, file['file'],user_id) - - delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement.delete(CVS_SIMULATION_FILES_TABLE) \ - .where('project_id = %s', [project_id]) \ - .execute(return_affected_rows=True) return True @@ -179,6 +175,8 @@ def get_simulation_content_with_max_file_id(db_connection: PooledMySQLConnection .order_by(['file'], Sort.DESCENDING) \ .limit(1) \ .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) + + max_file_id_subquery['insert_timestamp'] = max_file_id_subquery['insert_timestamp'].strftime("%Y-%m-%d") return max_file_id_subquery @@ -339,10 +337,11 @@ def run_simulation( ) sim_result.runs.append(sim_run_res) - save_simulation(db_connection, project_id,sim_result, user_id) - sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) - - + vs_x_ds = str(len(sim_result.vcss)) + 'x' + str(len(sim_result.designs)) + save_simulation(db_connection, project_id,sim_result, user_id, vs_x_ds) + sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) + logger.debug("abs1") + logger.debug("abs2") return sim_file_info diff --git a/sql/V231116_cvs.sql b/sql/V231116_cvs.sql index 1a41a4fd..70da6c7e 100644 --- a/sql/V231116_cvs.sql +++ b/sql/V231116_cvs.sql @@ -3,6 +3,7 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_simulation_files` `project_id` INT UNSIGNED NOT NULL, `file` INT UNSIGNED NOT NULL, `insert_timestamp` DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3), + `vs_x_ds` TEXT NOT NULL, PRIMARY KEY (`file`), FOREIGN KEY (`project_id`) REFERENCES `seddb`.`cvs_projects`(`id`) From 14447cd8e6edb7bbe3f7731b20ce2be588d82686 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Sat, 9 Dec 2023 20:09:56 +0100 Subject: [PATCH 191/210] last commit before merge --- sedbackend/apps/core/db.py | 4 ++-- sedbackend/apps/cvs/simulation/storage.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index daa1d89a..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'localhost' +host = 'core-db' database = 'seddb' -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 5b0ae151..9ba298d0 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -110,9 +110,10 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, tmp_file = f.read() mime = magic.from_buffer(tmp_file) logger.debug(f'Mime: {mime}') - if mime != "JSON data" and "ASCII text" not in mime: + if mime != "JSON text data" and "ASCII text" not in mime: raise life_cycle_exceptions.InvalidFileTypeException f.seek(0) + logger.debug('hello') csv_file = pd.read_json(f) logger.debug(f'File content: 
{model_file}') f.seek(0) From 6976c7290c2b96a1b4a560848a73cc7aa39b24b9 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Sun, 10 Dec 2023 14:34:40 +0100 Subject: [PATCH 192/210] removed unecessary logger.debug --- sedbackend/apps/cvs/simulation/implementation.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 63f85c66..985e511b 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -56,7 +56,6 @@ def run_simulation( is_multiprocessing, ) con.commit() - logger.debug("abs3") logger.debug(result) return result except auth_ex.UnauthorizedOperationException: @@ -238,7 +237,6 @@ def get_simulations(project_id: int) -> List[models.SimulationFetch]: def remove_simulation_files(project_id: int, user_id: int) -> bool: try: with get_connection() as con: - logger.debug('queww') result = storage.delete_all_simulation_files(con, project_id, user_id) logger.debug(result) con.commit() From eb708cd8d2ad5b5309434bcb060b15e92e9dfcda Mon Sep 17 00:00:00 2001 From: Ziidy Date: Mon, 18 Dec 2023 14:53:32 +0100 Subject: [PATCH 193/210] fixed tests --- sedbackend/apps/core/db.py | 6 +++-- .../apps/core/projects/implementation.py | 2 +- sedbackend/apps/cvs/simulation/storage.py | 6 ----- tests/apps/cvs/simulation/test_simulation.py | 27 +++++++++++++++---- 4 files changed, 27 insertions(+), 14 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..20dba37e 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,9 +10,11 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -host = 'core-db' +#host = 'core-db' +host = 'localhost' database = 'seddb' -port = 3306 +#port = 3306 +port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/core/projects/implementation.py b/sedbackend/apps/core/projects/implementation.py index 84c679c2..b0aef035 100644 --- a/sedbackend/apps/core/projects/implementation.py +++ b/sedbackend/apps/core/projects/implementation.py @@ -1,4 +1,4 @@ -from typing import Optional, List, Union +from typing import Optional, List, Union, Dict from fastapi import HTTPException, status diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 9ba298d0..9678b710 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -113,7 +113,6 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, if mime != "JSON text data" and "ASCII text" not in mime: raise life_cycle_exceptions.InvalidFileTypeException f.seek(0) - logger.debug('hello') csv_file = pd.read_json(f) logger.debug(f'File content: {model_file}') f.seek(0) @@ -132,10 +131,7 @@ def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int) .order_by(['file'], Sort.DESCENDING) \ .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) for row in file_res: - logger.debug(type( row['insert_timestamp'])) row['insert_timestamp'] = row['insert_timestamp'].strftime("%Y-%m-%d") - logger.debug(type( row['insert_timestamp'])) - logger.debug(row['insert_timestamp']) return file_res def get_simulation_file_path(db_connection: PooledMySQLConnection, file_id, user_id) -> StoredFilePath: @@ -341,8 +337,6 @@ def run_simulation( vs_x_ds = str(len(sim_result.vcss)) + 'x' + str(len(sim_result.designs)) save_simulation(db_connection, 
project_id,sim_result, user_id, vs_x_ds) sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) - logger.debug("abs1") - logger.debug("abs2") return sim_file_info diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 438ef4b2..4104845d 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -14,7 +14,7 @@ def test_run_single_simulation(client, std_headers, std_user): settings.monte_carlo = False # Act - res = client.post( + saveSim = client.post( f"/api/cvs/project/{project.id}/simulation/run", headers=std_headers, json={ @@ -23,9 +23,22 @@ def test_run_single_simulation(client, std_headers, std_user): "design_group_ids": [design_group.id], }, ) - + allSimId = client.get( + f"/api/cvs/project/{project.id}/simulation/all", + headers=std_headers + ) + res = client.get( + f"/api/cvs/project/{project.id}/simulation/file/{allSimId.json()[-1]['file']}", + headers=std_headers + ) # Assert + + assert saveSim.status_code == 200 + assert allSimId.status_code == 200 assert res.status_code == 200 + assert res.json()['designs'] == design + assert allSimId.json()[-1]['project_id'] == project.id + # Should probably assert some other stuff about the output to ensure that it is correct. @@ -33,6 +46,7 @@ def test_run_single_simulation(client, std_headers, std_user): tu.delete_design_group(project.id, design_group.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) + assert res.status_code == 200 def test_run_sim_invalid_design_group(client, std_headers, std_user): @@ -313,7 +327,7 @@ def test_run_single_simulation_no_values(client, std_headers, std_user): settings.monte_carlo = False # Act - res = client.post( + saveSim = client.post( f"/api/cvs/project/{project.id}/simulation/run", headers=std_headers, json={ @@ -322,8 +336,11 @@ def test_run_single_simulation_no_values(client, std_headers, std_user): "design_group_ids": [design_group.id], }, ) - print(res.json()) - + res = client.get( + f"/api/cvs/project/{project.id}/simulation/file/{saveSim.json()['file']}", + headers=std_headers + ) + # Assert assert res.status_code == 200 assert res.json()["runs"][0]["max_NPVs"][-1] == 0 From 4a473ab24b2d4c29c243c809e59bc5b04f852471 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Mon, 18 Dec 2023 14:59:58 +0100 Subject: [PATCH 194/210] forgot to change ports --- sedbackend/apps/core/db.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 20dba37e..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -10,11 +10,9 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') -#host = 'core-db' -host = 'localhost' +host = 'core-db' database = 'seddb' -#port = 3306 -port = 3001 +port = 3306 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From 5c269ae2d274560c18bd2d962101ecd175778703 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 18 Dec 2023 15:48:24 +0100 Subject: [PATCH 195/210] return right vd and ef name or undefined on deletion --- .../apps/cvs/link_design_lifecycle/storage.py | 91 ++++++++++-- .../test_connect_vcs_design.py | 139 ++++++++++++++---- 2 files changed, 193 insertions(+), 37 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 46ecce87..97b671bf 100644 --- 
a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -43,6 +43,7 @@ CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] CVS_EXTERNAL_FACTORS_TABLE = 'cvs_market_inputs' +CVS_EXTERNAL_FACTORS_COLUMNS = ['id', 'name', 'unit'] CVS_STAKEHOLDER_NEEDS_TABLE = 'cvs_stakeholder_needs' CVS_VCS_ROWS_TABLE = 'cvs_vcs_rows' CVS_VCS_NEED_DRIVERS_TABLE = 'cvs_vcs_need_drivers' @@ -490,25 +491,93 @@ def get_all_formulas( for ef in all_used_efs if ef['vcs_row'] == row.id and ef['design_group'] == r['design_group'] ] - formulas.append(populate_formula(r)) + formulas.append(populate_formula_row(db_connection, r)) return formulas -def populate_formula(db_result) -> models.FormulaRowGet: +def populate_formula( + db_connection: PooledMySQLConnection, + text: str = '', + latex: str = '', + comment: str = '', +) -> models.Formula: + used_value_drivers = set() + used_external_factors = set() + # find all value drivers and external factors + vd_pattern = r'\{vd:(?P\d+),"([^"]+)"\}' + vd_matches = re.findall(vd_pattern, text) + for vd_id, _ in vd_matches: + used_value_drivers.add(vd_id) + ef_pattern = r'\{ef:(?P\d+),"([^"]+)"\}' + ef_matches = re.findall(ef_pattern, text) + for ef_id, _ in ef_matches: + used_external_factors.add(ef_id) + + # fetch value drivers and external factors + vd_names = {} + ef_names = {} + if len(used_value_drivers): + select_statement = MySQLStatementBuilder(db_connection) + value_drivers = ( + select_statement.select(CVS_VALUE_DRIVERS_TABLE, CVS_VALUE_DRIVERS_COLUMNS) + .where("id IN (" + ",".join(["%s" for _ in range(len(used_value_drivers))]) + ")", used_value_drivers) + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + ) + for vd in value_drivers: + vd_names[str(vd['id'])] = f"{vd['name']} [{vd['unit'] if vd['unit'] else 'N/A'}]" + if len(used_external_factors): + select_statement = MySQLStatementBuilder(db_connection) + external_factors = ( + select_statement.select( + CVS_EXTERNAL_FACTORS_TABLE, CVS_EXTERNAL_FACTORS_COLUMNS + ) + .where("id IN (" + ",".join(["%s" for _ in range(len(used_external_factors))]) + ")", used_external_factors) + .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) + ) + for ef in external_factors: + ef_names[str(ef['id'])] = f"{ef['name']} [{ef['unit'] if ef['unit'] else 'N/A'}]" + + + # replace value driver and external factors names in text + for vd in used_value_drivers: + vd_replace_pattern = r'\{vd:' + vd + r',"([^"]+)"\}' + vd_name = vd_names[vd] if vd in vd_names else 'UNDEFINED [N/A]' + text = re.sub(vd_replace_pattern, '{vd:' + vd + ',"' + vd_name + '"}', text) + vd_latex_pattern = r'\\class{vd}{\\identifier{vd:' + vd + r'}{\\text{([^"]+)}}}' + latex_new = f'\+class{{vd}}{{\+identifier{{vd:{str(vd)}}}{{\+text{{{str(vd_name)}}}}}}}' + latex = re.sub(vd_latex_pattern, latex_new, latex) + for ef in used_external_factors: + ef_replace_pattern = r'\{ef:' + ef + r',"([^"]+)"\}' + ef_name = ef_names[ef] if ef in ef_names else 'UNDEFINED [N/A]' + text = re.sub(ef_replace_pattern, '{ef:' + ef + ',"' + ef_name + '"}', text) + ef_latex_pattern = r'\\class{ef}{\\identifier{ef:' + ef + r'}{\\text{([^"]+)}}}' + latex_new = f'\+class{{ef}}{{\+identifier{{ef:{str(ef)}}}{{\+text{{{str(ef_name)}}}}}}}' + latex = re.sub(ef_latex_pattern, latex_new, latex) + + return models.Formula(text=text, latex=latex, comment=comment) + + +def populate_formula_row(db_connection: PooledMySQLConnection, db_result) -> models.FormulaRowGet: return models.FormulaRowGet( 
vcs_row_id=db_result['vcs_row'], design_group_id=db_result['design_group'], - time=models.Formula( - text=db_result['time'] or '', latex=db_result['time_latex'] or '', comment=db_result['time_comment'] - ), + time=populate_formula( + db_connection, + text=db_result['time'], + latex=db_result['time_latex'], + comment=db_result['time_comment']), time_unit=db_result['time_unit'], - cost=models.Formula( - text=db_result['cost'] or '', latex=db_result['cost_latex'] or '', comment=db_result['cost_comment'] - ), - revenue=models.Formula( - text=db_result['revenue'] or '', latex=db_result['revenue_latex'] or '', comment=db_result['revenue_comment'] - ), + cost=populate_formula( + db_connection, + text=db_result['cost'], + latex=db_result['cost_latex'], + comment=db_result['cost_comment']), + revenue=populate_formula( + db_connection, + text=db_result['revenue'], + latex=db_result['revenue_latex'], + comment=db_result['revenue_comment']), rate=db_result['rate'], row_value_drivers=[ vcs_storage.populate_value_driver(valueDriver) diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index c664fa23..54dd7163 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -1,6 +1,15 @@ import tests.testutils as testutils import tests.apps.cvs.testutils as tu import sedbackend.apps.core.users.implementation as impl_users +from sedbackend.apps.cvs.vcs import implementation as impl_vcs, models as vcs_model +from sedbackend.apps.cvs.link_design_lifecycle import ( + implementation as impl_connect, + models as connect_model, +) +from sedbackend.apps.cvs.market_input import ( + implementation as impl_market_input, + models as market_input_model, +) def test_create_formulas(client, std_headers, std_user): @@ -17,32 +26,11 @@ def test_create_formulas(client, std_headers, std_user): external_factor = tu.seed_random_external_factor(project.id) # Act - - time = ( - '2+{vd:' - + str(value_driver.id) - + ',"' - + str(value_driver.name) - + '"}+{vd:' - + str(value_driver.id) - + ',"' - + str(value_driver.name) - + '"}' - ) - time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}' - cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' - cost_latex = f'2+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' - revenue = ( - '20+{vd:' - + str(value_driver.id) - + ',"' - + str(value_driver.name) - + '"}+{ef:' - + str(external_factor.id) - + ',"' - + str(external_factor.name) - + '"}' - ) + time = f"2+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}" + time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}' + cost = f"2+{{ef:{str(external_factor.id)},\"{str(external_factor.name)} [{str(external_factor.unit)}]\"}}" + cost_latex = f'2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + revenue = 
f"20+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}+{{ef:{str(external_factor.id)},\"{str(external_factor.name)} [{str(external_factor.unit)}]\"}}" revenue_latex = f'20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' time_comment = testutils.random_str(10, 200) cost_comment = testutils.random_str(10, 200) @@ -79,6 +67,16 @@ def test_create_formulas(client, std_headers, std_user): assert res.status_code == 200 assert res_get.json()[0]['used_value_drivers'][0]['id'] == value_driver.id assert res_get.json()[0]['used_external_factors'][0]['id'] == external_factor.id + assert res_get.json()[0]['time']['text'] == time + assert res_get.json()[0]['time']['latex'] == time_latex + assert res_get.json()[0]['time']['comment'] == time_comment + assert res_get.json()[0]['cost']['text'] == cost + assert res_get.json()[0]['cost']['latex'] == cost_latex + assert res_get.json()[0]['cost']['comment'] == cost_comment + assert res_get.json()[0]['revenue']['text'] == revenue + assert res_get.json()[0]['revenue']['latex'] == revenue_latex + assert res_get.json()[0]['revenue']['comment'] == revenue_comment + assert res_get.json()[0]['rate'] == rate # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -673,3 +671,92 @@ def test_get_vcs_dg_pairs_invalid_project(client, std_headers, std_user): # Cleanup tu.delete_project_by_id(project.id, current_user.id) tu.delete_vd_from_user(current_user.id) + + +def test_get_all_formulas_name_change(client, std_headers, std_user): + # Setup + current_user = impl_users.impl_get_user_with_username(std_user.username) + project = tu.seed_random_project(current_user.id) + + vcs = tu.seed_random_vcs(project.id, current_user.id) + vcs_rows = tu.seed_vcs_table_rows(current_user.id, project.id, vcs.id, 1) + design_group = tu.seed_random_design_group(project.id) + value_driver = tu.seed_random_value_driver(current_user.id, project.id) + value_driver2 = tu.seed_random_value_driver(current_user.id, project.id) + external_factor = tu.seed_random_external_factor(project.id) + + time = ( + '2+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}+{vd:' + + str(value_driver.id) + + ',"' + + str(value_driver.name) + + '"}' + ) + time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}' + cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' + cost_latex = f'2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + revenue = ( + '20+{vd:' + + str(value_driver2.id) + + ',"' + + str(value_driver2.name) + + '"}+{ef:' + + str(external_factor.id) + + ',"' + + str(external_factor.name) + + '"}' + ) + revenue_latex = f'20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver2.id)}}}{{\\text{{{str(value_driver2.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + + rate = tu.random_rate_choice() + + time_unit = tu.random_time_unit() + + # Act + formula_post = connect_model.FormulaRowPost( + vcs_row_id=vcs_rows[0].id, + time=connect_model.Formula(text=time, latex=time_latex, comment=""), + time_unit=time_unit, + cost=connect_model.Formula(text=cost, latex=cost_latex, comment=""), + 
revenue=connect_model.Formula(text=revenue, latex=revenue_latex, comment=""), + rate=rate, + ) + + impl_connect.edit_formulas(project.id, vcs.id, design_group.id, [formula_post]) + + impl_vcs.edit_value_driver( + value_driver.id, + vcs_model.ValueDriverPut(name="new VD name", unit="VD unit"), + current_user.id, + ) + impl_market_input.update_external_factor( + project.id, + market_input_model.ExternalFactor( + id=external_factor.id, name="new EF name", unit="EF unit" + ), + ) + impl_vcs.delete_value_driver(project.id, value_driver2.id) + + res = client.get( + f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + headers=std_headers, + ) + + # Assert + assert res.status_code == 200 + assert 'new VD name [VD unit]' in res.json()[0]['time']['text'] + assert 'new VD name [VD unit]' in res.json()[0]['time']['latex'] + assert 'new EF name [EF unit]' in res.json()[0]['cost']['text'] + assert 'new EF name [EF unit]' in res.json()[0]['cost']['latex'] + assert 'UNDEFINED [N/A]' in res.json()[0]['revenue']['text'] + assert 'UNDEFINED [N/A]' in res.json()[0]['revenue']['latex'] + + # Cleanup + tu.delete_design_group(project.id, design_group.id) + tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) + tu.delete_project_by_id(project.id, current_user.id) + tu.delete_vd_from_user(current_user.id) From 122604d5983d82e7745929f80960177fb96a1bb9 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 22 Dec 2023 15:10:06 +0100 Subject: [PATCH 196/210] removed logger and unused code --- sedbackend/apps/cvs/simulation/implementation.py | 3 --- sedbackend/apps/cvs/simulation/storage.py | 10 +--------- 2 files changed, 1 insertion(+), 12 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 985e511b..95a86827 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -56,7 +56,6 @@ def run_simulation( is_multiprocessing, ) con.commit() - logger.debug(result) return result except auth_ex.UnauthorizedOperationException: raise HTTPException( @@ -238,7 +237,6 @@ def remove_simulation_files(project_id: int, user_id: int) -> bool: try: with get_connection() as con: result = storage.delete_all_simulation_files(con, project_id, user_id) - logger.debug(result) con.commit() return result except project_exceptions.CVSProjectNotFoundException: @@ -264,7 +262,6 @@ def get_simulation_file_content(user_id: int, file_id) -> SimulationResult: detail=f"Could not find simulation file", ) logger.debug('Successfully retrieved simulation file content') - logger.debug(result) con.commit() return result except project_exceptions.CVSProjectNotFoundException: diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 9678b710..c2c9d151 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -1,11 +1,6 @@ import re import sys from math import isnan -import csv - -import requests - -import io import magic import os import tempfile @@ -14,7 +9,6 @@ from mysql.connector.pooling import PooledMySQLConnection import pandas as pd -import numpy as np from mysql.connector import Error from fastapi.logger import logger @@ -40,7 +34,7 @@ from sedbackend.apps.cvs.life_cycle import exceptions as life_cycle_exceptions, storage as life_cycle_storage from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_exceptions from 
sedbackend.apps.core.projects import storage as core_project_storage -from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_ex +from sedbackend.apps.core.files import models as file_models, storage as file_storage from sedbackend.apps.core.files.models import StoredFilePath @@ -109,11 +103,9 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, f.seek(0) tmp_file = f.read() mime = magic.from_buffer(tmp_file) - logger.debug(f'Mime: {mime}') if mime != "JSON text data" and "ASCII text" not in mime: raise life_cycle_exceptions.InvalidFileTypeException f.seek(0) - csv_file = pd.read_json(f) logger.debug(f'File content: {model_file}') f.seek(0) stored_file = file_storage.db_save_file(db_connection, model_file) From f581df10dc35d4ffa3a95436c4c169768afd4397 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sat, 23 Dec 2023 11:56:51 +0100 Subject: [PATCH 197/210] update vd and ef name or remove will update formula --- .../apps/cvs/link_design_lifecycle/storage.py | 274 ++++++++++-------- .../test_connect_vcs_design.py | 112 +++---- 2 files changed, 213 insertions(+), 173 deletions(-) diff --git a/sedbackend/apps/cvs/link_design_lifecycle/storage.py b/sedbackend/apps/cvs/link_design_lifecycle/storage.py index 97b671bf..53786b18 100644 --- a/sedbackend/apps/cvs/link_design_lifecycle/storage.py +++ b/sedbackend/apps/cvs/link_design_lifecycle/storage.py @@ -10,43 +10,43 @@ from sedbackend.apps.cvs.link_design_lifecycle import models, exceptions from mysqlsb import FetchType, MySQLStatementBuilder -CVS_FORMULAS_TABLE = 'cvs_design_mi_formulas' +CVS_FORMULAS_TABLE = "cvs_design_mi_formulas" CVS_FORMULAS_COLUMNS = [ - 'project', - 'vcs_row', - 'design_group', - 'time', - 'time_latex', - 'time_comment', - 'time_unit', - 'cost', - 'cost_latex', - 'cost_comment', - 'revenue', - 'revenue_latex', - 'revenue_comment', - 'rate', + "project", + "vcs_row", + "design_group", + "time", + "time_latex", + "time_comment", + "time_unit", + "cost", + "cost_latex", + "cost_comment", + "revenue", + "revenue_latex", + "revenue_comment", + "rate", ] -CVS_VALUE_DRIVERS_TABLE = 'cvs_value_drivers' -CVS_VALUE_DRIVERS_COLUMNS = ['id', 'user', 'name', 'unit'] +CVS_VALUE_DRIVERS_TABLE = "cvs_value_drivers" +CVS_VALUE_DRIVERS_COLUMNS = ["id", "user", "name", "unit"] -CVS_FORMULAS_VALUE_DRIVERS_TABLE = 'cvs_formulas_value_drivers' +CVS_FORMULAS_VALUE_DRIVERS_TABLE = "cvs_formulas_value_drivers" CVS_FORMULAS_VALUE_DRIVERS_COLUMNS = [ - 'vcs_row', - 'design_group', - 'value_driver', - 'project', + "vcs_row", + "design_group", + "value_driver", + "project", ] -CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = 'cvs_formulas_external_factors' -CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ['vcs_row', 'design_group', 'external_factor'] +CVS_FORMULAS_EXTERNAL_FACTORS_TABLE = "cvs_formulas_external_factors" +CVS_FORMULAS_EXTERNAL_FACTORS_COLUMNS = ["vcs_row", "design_group", "external_factor"] -CVS_EXTERNAL_FACTORS_TABLE = 'cvs_market_inputs' -CVS_EXTERNAL_FACTORS_COLUMNS = ['id', 'name', 'unit'] -CVS_STAKEHOLDER_NEEDS_TABLE = 'cvs_stakeholder_needs' -CVS_VCS_ROWS_TABLE = 'cvs_vcs_rows' -CVS_VCS_NEED_DRIVERS_TABLE = 'cvs_vcs_need_drivers' +CVS_EXTERNAL_FACTORS_TABLE = "cvs_market_inputs" +CVS_EXTERNAL_FACTORS_COLUMNS = ["id", "name", "unit"] +CVS_STAKEHOLDER_NEEDS_TABLE = "cvs_stakeholder_needs" +CVS_VCS_ROWS_TABLE = "cvs_vcs_rows" +CVS_VCS_NEED_DRIVERS_TABLE = "cvs_vcs_need_drivers" def create_formulas( @@ -56,7 +56,7 @@ def create_formulas( design_group_id: int, formula_row: 
models.FormulaRowPost, ): - logger.debug(f'Creating formulas') + logger.debug(f"Creating formulas") value_driver_ids, external_factor_ids = find_vd_and_ef( [ @@ -89,7 +89,7 @@ def create_formulas( table=CVS_FORMULAS_TABLE, columns=CVS_FORMULAS_COLUMNS ).set_values(values=values).execute(fetch_type=FetchType.FETCH_NONE) except Exception as e: - logger.error(f'Error while inserting formulas: {e}') + logger.error(f"Error while inserting formulas: {e}") raise exceptions.FormulasFailedUpdateException if value_driver_ids: @@ -126,7 +126,7 @@ def edit_formulas( design_group_id: int, formula_row: models.FormulaRowPost, ): - logger.debug(f'Editing formulas') + logger.debug(f"Editing formulas") value_driver_ids, external_factor_ids = find_vd_and_ef( [ @@ -137,7 +137,7 @@ def edit_formulas( ) columns = CVS_FORMULAS_COLUMNS[3:] - set_statement = ', '.join([col + ' = %s' for col in columns]) + set_statement = ", ".join([col + " = %s" for col in columns]) values = [ formula_row.time.text, @@ -155,11 +155,11 @@ def edit_formulas( # Update formula row update_statement = MySQLStatementBuilder(db_connection) - _, rows = ( + _, _ = ( update_statement.update( table=CVS_FORMULAS_TABLE, set_statement=set_statement, values=values ) - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) + .where("vcs_row = %s and design_group = %s", [vcs_row_id, design_group_id]) .execute(return_affected_rows=True) ) @@ -182,18 +182,18 @@ def add_value_driver_formulas( # Add value driver to formulas try: prepared_list = [] - insert_statement = f'INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver, project) VALUES' + insert_statement = f"INSERT INTO {CVS_FORMULAS_VALUE_DRIVERS_TABLE} (vcs_row, design_group, value_driver, project) VALUES" for value_driver_id in value_drivers: - insert_statement += f'(%s, %s, %s, %s),' + insert_statement += f"(%s, %s, %s, %s)," prepared_list += [vcs_row_id, design_group_id, value_driver_id, project_id] insert_statement = insert_statement[:-1] insert_statement += ( - ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + " ON DUPLICATE KEY UPDATE vcs_row = vcs_row" # On duplicate do nothing ) with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement, prepared_list) except Exception as e: - logger.error(f'Error while inserting value drivers: {e}') + logger.error(f"Error while inserting value drivers: {e}") raise exceptions.FormulasFailedUpdateException @@ -232,15 +232,15 @@ def update_value_driver_formulas( ) delete_value_drivers = [ - value_driver['value_driver'] + value_driver["value_driver"] for value_driver in value_driver_res - if value_driver['value_driver'] not in value_drivers + if value_driver["value_driver"] not in value_drivers ] add_value_drivers = [ value_driver_id for value_driver_id in value_drivers if value_driver_id - not in [value_driver['value_driver'] for value_driver in value_driver_res] + not in [value_driver["value_driver"] for value_driver in value_driver_res] ] if len(add_value_drivers): @@ -261,18 +261,18 @@ def add_external_factor_formulas( ): try: prepared_list = [] - insert_statement = f'INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, external_factor) VALUES' + insert_statement = f"INSERT INTO {CVS_FORMULAS_EXTERNAL_FACTORS_TABLE} (vcs_row, design_group, external_factor) VALUES" for external_factor_id in external_factors: - insert_statement += f'(%s, %s, %s),' + insert_statement += f"(%s, %s, %s)," prepared_list += [vcs_row_id, design_group_id, 
external_factor_id] insert_statement = insert_statement[:-1] insert_statement += ( - ' ON DUPLICATE KEY UPDATE vcs_row = vcs_row' # On duplicate do nothing + " ON DUPLICATE KEY UPDATE vcs_row = vcs_row" # On duplicate do nothing ) with db_connection.cursor(prepared=True) as cursor: cursor.execute(insert_statement, prepared_list) except Exception as e: - logger.error(f'Error while inserting external factors: {e}') + logger.error(f"Error while inserting external factors: {e}") raise exceptions.FormulasFailedUpdateException @@ -283,7 +283,7 @@ def delete_external_factor_formulas( external_factors: List[int], ): delete_statement = MySQLStatementBuilder(db_connection) - _, rows = ( + _, _ = ( delete_statement.delete(CVS_FORMULAS_EXTERNAL_FACTORS_TABLE) .where( f'vcs_row = %s and design_group = %s and external_factor in ({",".join(["%s" for _ in range(len(external_factors))])})', @@ -310,16 +310,16 @@ def update_external_factor_formulas( ) delete_external_factors = [ - external_factor['external_factor'] + external_factor["external_factor"] for external_factor in external_factor_res - if external_factor['external_factor'] not in external_factors + if external_factor["external_factor"] not in external_factors ] add_external_factors = [ external_factor_id for external_factor_id in external_factors if external_factor_id not in [ - external_factor['external_factor'] + external_factor["external_factor"] for external_factor in external_factor_res ] ] @@ -357,12 +357,12 @@ def update_formulas( count = ( count_statement.count(CVS_FORMULAS_TABLE) .where( - 'vcs_row = %s and design_group = %s', + "vcs_row = %s and design_group = %s", [formula_row.vcs_row_id, design_group_id], ) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) ) - count = count['count'] + count = count["count"] if count == 0: create_formulas( @@ -392,7 +392,7 @@ def get_all_formulas( vcs_id: int, design_group_id: int, ) -> List[models.FormulaRowGet]: - logger.debug(f'Fetching all formulas with vcs_id={vcs_id}') + logger.debug(f"Fetching all formulas with vcs_id={vcs_id}") get_design_group( db_connection, project_id, design_group_id @@ -404,8 +404,8 @@ def get_all_formulas( select_statement = MySQLStatementBuilder(db_connection) res = ( select_statement.select(CVS_FORMULAS_TABLE, CVS_FORMULAS_COLUMNS) - .inner_join('cvs_vcs_rows', 'vcs_row = cvs_vcs_rows.id') - .where('vcs = %s and design_group = %s', [vcs_id, design_group_id]) + .inner_join("cvs_vcs_rows", "vcs_row = cvs_vcs_rows.id") + .where("vcs = %s and design_group = %s", [vcs_id, design_group_id]) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) ) @@ -422,7 +422,7 @@ def get_all_formulas( ) prepared_list = [] for r in res: - prepared_list += [r['vcs_row'], r['design_group']] + prepared_list += [r["vcs_row"], r["design_group"]] with db_connection.cursor(prepared=True) as cursor: cursor.execute( @@ -446,7 +446,7 @@ def get_all_formulas( if vcs_rows: with db_connection.cursor(prepared=True) as cursor: - logger.debug(f'Running') + logger.debug(f"Running") cursor.execute( f"SELECT {CVS_VALUE_DRIVERS_TABLE}.id, {CVS_VALUE_DRIVERS_TABLE}.name, {CVS_VALUE_DRIVERS_TABLE}.unit, {CVS_VALUE_DRIVERS_TABLE}.project, {CVS_VCS_ROWS_TABLE}.id AS vcs_row FROM {CVS_VCS_ROWS_TABLE} " f"INNER JOIN {CVS_STAKEHOLDER_NEEDS_TABLE} ON {CVS_STAKEHOLDER_NEEDS_TABLE}.vcs_row = {CVS_VCS_ROWS_TABLE}.id " @@ -458,38 +458,38 @@ def get_all_formulas( all_row_vds = [ dict(zip(cursor.column_names, row)) for row in cursor.fetchall() ] - logger.debug(f'All row vds: {all_row_vds}') + logger.debug(f"All 
row vds: {all_row_vds}") formulas = [] for row in vcs_rows: - row_res = [r for r in res if r['vcs_row'] == row.id] + row_res = [r for r in res if r["vcs_row"] == row.id] r = {} if row_res: r = row_res[0] else: - r['vcs_row'] = row.id - r['design_group'] = design_group_id - r['time'] = '' - r['time_latex'] = '' - r['time_comment'] = '' - r['cost'] = '' - r['cost_latex'] = '' - r['cost_comment'] = '' - r['revenue'] = '' - r['revenue_latex'] = '' - r['revenue_comment'] = '' - r['time_unit'] = TimeFormat.YEAR - r['rate'] = Rate.PRODUCT - r['row_value_drivers'] = [vd for vd in all_row_vds if vd['vcs_row'] == row.id] - r['used_value_drivers'] = [ + r["vcs_row"] = row.id + r["design_group"] = design_group_id + r["time"] = "" + r["time_latex"] = "" + r["time_comment"] = "" + r["cost"] = "" + r["cost_latex"] = "" + r["cost_comment"] = "" + r["revenue"] = "" + r["revenue_latex"] = "" + r["revenue_comment"] = "" + r["time_unit"] = TimeFormat.YEAR + r["rate"] = Rate.PRODUCT + r["row_value_drivers"] = [vd for vd in all_row_vds if vd["vcs_row"] == row.id] + r["used_value_drivers"] = [ vd for vd in all_used_vds - if vd['vcs_row'] == row.id and vd['design_group'] == r['design_group'] + if vd["vcs_row"] == row.id and vd["design_group"] == r["design_group"] ] - r['used_external_factors'] = [ + r["used_external_factors"] = [ ef for ef in all_used_efs - if ef['vcs_row'] == row.id and ef['design_group'] == r['design_group'] + if ef["vcs_row"] == row.id and ef["design_group"] == r["design_group"] ] formulas.append(populate_formula_row(db_connection, r)) @@ -498,9 +498,9 @@ def get_all_formulas( def populate_formula( db_connection: PooledMySQLConnection, - text: str = '', - latex: str = '', - comment: str = '', + text: str = "", + latex: str = "", + comment: str = "", ) -> models.Formula: used_value_drivers = set() used_external_factors = set() @@ -521,81 +521,121 @@ def populate_formula( select_statement = MySQLStatementBuilder(db_connection) value_drivers = ( select_statement.select(CVS_VALUE_DRIVERS_TABLE, CVS_VALUE_DRIVERS_COLUMNS) - .where("id IN (" + ",".join(["%s" for _ in range(len(used_value_drivers))]) + ")", used_value_drivers) + .where( + "id IN (" + + ",".join(["%s" for _ in range(len(used_value_drivers))]) + + ")", + used_value_drivers, + ) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) ) for vd in value_drivers: - vd_names[str(vd['id'])] = f"{vd['name']} [{vd['unit'] if vd['unit'] else 'N/A'}]" + vd_names[ + str(vd["id"]) + ] = f"{vd['name']} [{vd['unit'] if vd['unit'] else 'N/A'}]" if len(used_external_factors): select_statement = MySQLStatementBuilder(db_connection) external_factors = ( select_statement.select( CVS_EXTERNAL_FACTORS_TABLE, CVS_EXTERNAL_FACTORS_COLUMNS ) - .where("id IN (" + ",".join(["%s" for _ in range(len(used_external_factors))]) + ")", used_external_factors) + .where( + "id IN (" + + ",".join(["%s" for _ in range(len(used_external_factors))]) + + ")", + used_external_factors, + ) .execute(fetch_type=FetchType.FETCH_ALL, dictionary=True) ) for ef in external_factors: - ef_names[str(ef['id'])] = f"{ef['name']} [{ef['unit'] if ef['unit'] else 'N/A'}]" - + ef_names[ + str(ef["id"]) + ] = f"{ef['name']} [{ef['unit'] if ef['unit'] else 'N/A'}]" # replace value driver and external factors names in text for vd in used_value_drivers: - vd_replace_pattern = r'\{vd:' + vd + r',"([^"]+)"\}' - vd_name = vd_names[vd] if vd in vd_names else 'UNDEFINED [N/A]' - text = re.sub(vd_replace_pattern, '{vd:' + vd + ',"' + vd_name + '"}', text) - vd_latex_pattern = 
r'\\class{vd}{\\identifier{vd:' + vd + r'}{\\text{([^"]+)}}}' - latex_new = f'\+class{{vd}}{{\+identifier{{vd:{str(vd)}}}{{\+text{{{str(vd_name)}}}}}}}' + vd_replace_pattern = r"\{vd:" + vd + r',"(.*?)"\}' + vd_name = vd_names[vd] if vd in vd_names else "UNDEFINED [N/A]" + text = re.sub(vd_replace_pattern, "{vd:" + vd + ',"' + vd_name + '"}', text) + vd_latex_pattern = r"\\class{vd}{\\identifier{vd:" + vd + r"}{\\text{(.*?)}}}" + latex_new = ( + re.escape("\\class") + + "{vd}{" + + re.escape("\\identifier") + + "{vd:" + + str(vd) + + "}{" + + re.escape("\\text") + + "{" + + str(vd_name) + + "}}}" + ) latex = re.sub(vd_latex_pattern, latex_new, latex) for ef in used_external_factors: - ef_replace_pattern = r'\{ef:' + ef + r',"([^"]+)"\}' - ef_name = ef_names[ef] if ef in ef_names else 'UNDEFINED [N/A]' - text = re.sub(ef_replace_pattern, '{ef:' + ef + ',"' + ef_name + '"}', text) - ef_latex_pattern = r'\\class{ef}{\\identifier{ef:' + ef + r'}{\\text{([^"]+)}}}' - latex_new = f'\+class{{ef}}{{\+identifier{{ef:{str(ef)}}}{{\+text{{{str(ef_name)}}}}}}}' + ef_replace_pattern = r"\{ef:" + ef + r',"(.*?)"\}' + ef_name = ef_names[ef] if ef in ef_names else "UNDEFINED [N/A]" + text = re.sub(ef_replace_pattern, "{ef:" + ef + ',"' + ef_name + '"}', text) + ef_latex_pattern = r"\\class{ef}{\\identifier{ef:" + ef + r"}{\\text{(.*?)}}}" + latex_new = ( + re.escape("\\class") + + "{ef}{" + + re.escape("\\identifier") + + "{ef:" + + str(ef) + + "}{" + + re.escape("\\text") + + "{" + + str(ef_name) + + "}}}" + ) latex = re.sub(ef_latex_pattern, latex_new, latex) return models.Formula(text=text, latex=latex, comment=comment) -def populate_formula_row(db_connection: PooledMySQLConnection, db_result) -> models.FormulaRowGet: +def populate_formula_row( + db_connection: PooledMySQLConnection, db_result +) -> models.FormulaRowGet: return models.FormulaRowGet( - vcs_row_id=db_result['vcs_row'], - design_group_id=db_result['design_group'], + vcs_row_id=db_result["vcs_row"], + design_group_id=db_result["design_group"], time=populate_formula( db_connection, - text=db_result['time'], - latex=db_result['time_latex'], - comment=db_result['time_comment']), - time_unit=db_result['time_unit'], + text=db_result["time"], + latex=db_result["time_latex"], + comment=db_result["time_comment"], + ), + time_unit=db_result["time_unit"], cost=populate_formula( db_connection, - text=db_result['cost'], - latex=db_result['cost_latex'], - comment=db_result['cost_comment']), + text=db_result["cost"], + latex=db_result["cost_latex"], + comment=db_result["cost_comment"], + ), revenue=populate_formula( db_connection, - text=db_result['revenue'], - latex=db_result['revenue_latex'], - comment=db_result['revenue_comment']), - rate=db_result['rate'], + text=db_result["revenue"], + latex=db_result["revenue_latex"], + comment=db_result["revenue_comment"], + ), + rate=db_result["rate"], row_value_drivers=[ vcs_storage.populate_value_driver(valueDriver) - for valueDriver in db_result['row_value_drivers'] + for valueDriver in db_result["row_value_drivers"] ] - if db_result['row_value_drivers'] is not None + if db_result["row_value_drivers"] is not None else [], used_value_drivers=[ vcs_storage.populate_value_driver(valueDriver) - for valueDriver in db_result['used_value_drivers'] + for valueDriver in db_result["used_value_drivers"] ] - if db_result['used_value_drivers'] is not None + if db_result["used_value_drivers"] is not None else [], used_external_factors=[ populate_external_factor(externalFactor) - for externalFactor in 
db_result['used_external_factors'] + for externalFactor in db_result["used_external_factors"] ] - if db_result['used_external_factors'] is not None + if db_result["used_external_factors"] is not None else [], ) @@ -606,7 +646,7 @@ def delete_formulas( vcs_row_id: int, design_group_id: int, ) -> bool: - logger.debug(f'Deleting formulas with vcs_row_id: {vcs_row_id}') + logger.debug(f"Deleting formulas with vcs_row_id: {vcs_row_id}") get_design_group( db_connection, project_id, design_group_id @@ -616,7 +656,7 @@ def delete_formulas( delete_statement = MySQLStatementBuilder(db_connection) _, rows = ( delete_statement.delete(CVS_FORMULAS_TABLE) - .where('vcs_row = %s and design_group = %s', [vcs_row_id, design_group_id]) + .where("vcs_row = %s and design_group = %s", [vcs_row_id, design_group_id]) .execute(return_affected_rows=True) ) diff --git a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py index 54dd7163..90e7e38a 100644 --- a/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py +++ b/tests/apps/cvs/connect_design_vcs/test_connect_vcs_design.py @@ -26,12 +26,12 @@ def test_create_formulas(client, std_headers, std_user): external_factor = tu.seed_random_external_factor(project.id) # Act - time = f"2+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}" - time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}' - cost = f"2+{{ef:{str(external_factor.id)},\"{str(external_factor.name)} [{str(external_factor.unit)}]\"}}" - cost_latex = f'2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' - revenue = f"20+{{vd:{str(value_driver.id)},\"{str(value_driver.name)} [{str(value_driver.unit)}]\"}}+{{ef:{str(external_factor.id)},\"{str(external_factor.name)} [{str(external_factor.unit)}]\"}}" - revenue_latex = f'20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + time = f'2+{{vd:{str(value_driver.id)},"{str(value_driver.name)} [{str(value_driver.unit)}]"}}+5+{{vd:{str(value_driver.id)},"{str(value_driver.name)} [{str(value_driver.unit)}]"}}+2' + time_latex = f"2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}+5+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} [{str(value_driver.unit)}]}}}}}}+2" + cost = f'2+{{ef:{str(external_factor.id)},"{str(external_factor.name)} [{str(external_factor.unit)}]"}}' + cost_latex = f"2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)} [{str(external_factor.unit)}]}}}}}}" + revenue = f'20+{{vd:{str(value_driver.id)},"{str(value_driver.name)} [{str(value_driver.unit)}]"}}+{{ef:{str(external_factor.id)},"{str(external_factor.name)} [{str(external_factor.unit)}]"}}' + revenue_latex = f"20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)} 
[{str(value_driver.unit)}]}}}}}}+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)} [{str(external_factor.unit)}]}}}}}}" time_comment = testutils.random_str(10, 200) cost_comment = testutils.random_str(10, 200) revenue_comment = None @@ -40,7 +40,7 @@ def test_create_formulas(client, std_headers, std_user): time_unit = tu.random_time_unit() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas", headers=std_headers, json=[ { @@ -59,24 +59,24 @@ def test_create_formulas(client, std_headers, std_user): ) res_get = client.get( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all", headers=std_headers, ) # Assert assert res.status_code == 200 - assert res_get.json()[0]['used_value_drivers'][0]['id'] == value_driver.id - assert res_get.json()[0]['used_external_factors'][0]['id'] == external_factor.id - assert res_get.json()[0]['time']['text'] == time - assert res_get.json()[0]['time']['latex'] == time_latex - assert res_get.json()[0]['time']['comment'] == time_comment - assert res_get.json()[0]['cost']['text'] == cost - assert res_get.json()[0]['cost']['latex'] == cost_latex - assert res_get.json()[0]['cost']['comment'] == cost_comment - assert res_get.json()[0]['revenue']['text'] == revenue - assert res_get.json()[0]['revenue']['latex'] == revenue_latex - assert res_get.json()[0]['revenue']['comment'] == revenue_comment - assert res_get.json()[0]['rate'] == rate + assert res_get.json()[0]["used_value_drivers"][0]["id"] == value_driver.id + assert res_get.json()[0]["used_external_factors"][0]["id"] == external_factor.id + assert res_get.json()[0]["time"]["text"] == time + assert res_get.json()[0]["time"]["latex"] == time_latex + assert res_get.json()[0]["time"]["comment"] == time_comment + assert res_get.json()[0]["cost"]["text"] == cost + assert res_get.json()[0]["cost"]["latex"] == cost_latex + assert res_get.json()[0]["cost"]["comment"] == cost_comment + assert res_get.json()[0]["revenue"]["text"] == revenue + assert res_get.json()[0]["revenue"]["latex"] == revenue_latex + assert res_get.json()[0]["revenue"]["comment"] == revenue_comment + assert res_get.json()[0]["rate"] == rate # Cleanup tu.delete_design_group(project.id, design_group.id) @@ -104,7 +104,7 @@ def test_create_formulas_no_optional(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas", headers=std_headers, json=[ { @@ -143,7 +143,7 @@ def test_get_all_formulas(client, std_headers, std_user): ) res = client.get( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all", headers=std_headers, ) @@ -175,7 +175,7 @@ def test_get_all_formulas_invalid_project(client, std_headers, std_user): tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) res = client.get( - f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all", headers=std_headers, 
) @@ -201,7 +201,7 @@ def test_get_all_formulas_invalid_vcs(client, std_headers, std_user): tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) res = client.get( - f'/api/cvs/project/{project.id}/vcs/{invalid_vcs_id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{invalid_vcs_id}/design-group/{design_group.id}/formulas/all", headers=std_headers, ) @@ -227,7 +227,7 @@ def get_all_formulas_invalid_design_group(client, std_headers, std_user): tu.seed_random_formulas(project.id, vcs.id, design_group.id, current_user.id) res = client.get( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{invalid_dg_id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{invalid_dg_id}/formulas/all", headers=std_headers, ) @@ -257,7 +257,7 @@ def test_edit_formulas(client, std_headers, std_user): # Act time = ( - '2+{vd:' + "2+{vd:" + str(value_driver.id) + ',"' + str(value_driver.name) @@ -267,9 +267,9 @@ def test_edit_formulas(client, std_headers, std_user): + str(value_driver.name) + '"}' ) - cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' + cost = "2+{ef:" + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' revenue = ( - '20+{vd:' + "20+{vd:" + str(value_driver.id) + ',"' + str(value_driver.name) @@ -284,7 +284,7 @@ def test_edit_formulas(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas", headers=std_headers, json=[ { @@ -299,14 +299,14 @@ def test_edit_formulas(client, std_headers, std_user): ) res_get = client.get( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all", headers=std_headers, ) # Assert assert res.status_code == 200 - assert res_get.json()[0]['used_value_drivers'][0]['id'] == value_driver.id - assert res_get.json()[0]['used_external_factors'][0]['id'] == external_factor.id + assert res_get.json()[0]["used_value_drivers"][0]["id"] == value_driver.id + assert res_get.json()[0]["used_external_factors"][0]["id"] == external_factor.id # Cleanup tu.delete_project_by_id(project.id, current_user.id) @@ -333,7 +333,7 @@ def test_edit_formulas_no_optional(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{formulas[0].design_group_id}/formulas", headers=std_headers, json=[ { @@ -383,7 +383,7 @@ def test_edit_formulas_invalid_dg(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{dg_invalid_id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{dg_invalid_id}/formulas", headers=std_headers, json=[ { @@ -427,7 +427,7 @@ def test_edit_formulas_invalid_vcs_row(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas", headers=std_headers, json=[ { @@ -472,7 +472,7 @@ def 
test_edit_formulas_invalid_project(client, std_headers, std_user): rate = tu.random_rate_choice() res = client.put( - f'/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas', + f"/api/cvs/project/{invalid_proj_id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas", headers=std_headers, json=[ { @@ -510,7 +510,7 @@ def test_delete_formulas(client, std_headers, std_user): ) res = client.delete( - f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', + f"/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas", headers=std_headers, ) @@ -539,8 +539,8 @@ def test_delete_formulas_invalid_project(client, std_headers, std_user): ) res = client.delete( - f'/api/cvs/project/{invalid_proj_id}/vcs-row/{formulas[0].vcs_row_id}/design-group/' - f'{formulas[0].design_group_id}/formulas', + f"/api/cvs/project/{invalid_proj_id}/vcs-row/{formulas[0].vcs_row_id}/design-group/" + f"{formulas[0].design_group_id}/formulas", headers=std_headers, ) @@ -567,7 +567,7 @@ def test_delete_formulas_invalid_vcs_row(client, std_headers, std_user): invalid_vcs_row_id = formulas[0].vcs_row_id + 1 res = client.delete( - f'/api/cvs/project/{project.id}/vcs-row/{invalid_vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas', + f"/api/cvs/project/{project.id}/vcs-row/{invalid_vcs_row_id}/design-group/{formulas[0].design_group_id}/formulas", headers=std_headers, ) @@ -594,7 +594,7 @@ def test_delete_formulas_invalid_design_group(client, std_headers, std_user): ) res = client.delete( - f'/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{invalid_dg_id}/formulas', + f"/api/cvs/project/{project.id}/vcs-row/{formulas[0].vcs_row_id}/design-group/{invalid_dg_id}/formulas", headers=std_headers, ) @@ -624,7 +624,7 @@ def test_get_vcs_dg_pairs(client, std_headers, std_user): # Act res = client.get( - f'/api/cvs/project/{project.id}/vcs/design/formula-pairs', headers=std_headers + f"/api/cvs/project/{project.id}/vcs/design/formula-pairs", headers=std_headers ) # Assert @@ -661,7 +661,7 @@ def test_get_vcs_dg_pairs_invalid_project(client, std_headers, std_user): # Act res = client.get( - f'/api/cvs/project/{invalid_proj_id}/vcs/design/formula-pairs', + f"/api/cvs/project/{invalid_proj_id}/vcs/design/formula-pairs", headers=std_headers, ) @@ -686,7 +686,7 @@ def test_get_all_formulas_name_change(client, std_headers, std_user): external_factor = tu.seed_random_external_factor(project.id) time = ( - '2+{vd:' + "2+{vd:" + str(value_driver.id) + ',"' + str(value_driver.name) @@ -696,11 +696,11 @@ def test_get_all_formulas_name_change(client, std_headers, std_user): + str(value_driver.name) + '"}' ) - time_latex = f'2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}' - cost = '2+{ef:' + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' - cost_latex = f'2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + time_latex = f"2+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}+\\class{{vd}}{{\\identifier{{vd:{str(value_driver.id)}}}{{\\text{{{str(value_driver.name)}}}}}}}" + cost = "2+{ef:" + str(external_factor.id) + ',"' + str(external_factor.name) + '"}' + cost_latex = 
f"2+\\class{{ef}}{{\\identifier{{ef:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}" revenue = ( - '20+{vd:' + "20+{vd:" + str(value_driver2.id) + ',"' + str(value_driver2.name) @@ -710,7 +710,7 @@ def test_get_all_formulas_name_change(client, std_headers, std_user): + str(external_factor.name) + '"}' ) - revenue_latex = f'20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver2.id)}}}{{\\text{{{str(value_driver2.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}' + revenue_latex = f"20+\\class{{vd}}{{\\identifier{{vd:{str(value_driver2.id)}}}{{\\text{{{str(value_driver2.name)}}}}}}}+\\class{{ef}}{{\\identifier{{vd:{str(external_factor.id)}}}{{\\text{{{str(external_factor.name)}}}}}}}" rate = tu.random_rate_choice() @@ -742,18 +742,18 @@ def test_get_all_formulas_name_change(client, std_headers, std_user): impl_vcs.delete_value_driver(project.id, value_driver2.id) res = client.get( - f'/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all', + f"/api/cvs/project/{project.id}/vcs/{vcs.id}/design-group/{design_group.id}/formulas/all", headers=std_headers, ) # Assert assert res.status_code == 200 - assert 'new VD name [VD unit]' in res.json()[0]['time']['text'] - assert 'new VD name [VD unit]' in res.json()[0]['time']['latex'] - assert 'new EF name [EF unit]' in res.json()[0]['cost']['text'] - assert 'new EF name [EF unit]' in res.json()[0]['cost']['latex'] - assert 'UNDEFINED [N/A]' in res.json()[0]['revenue']['text'] - assert 'UNDEFINED [N/A]' in res.json()[0]['revenue']['latex'] + assert "new VD name [VD unit]" in res.json()[0]["time"]["text"] + assert "new VD name [VD unit]" in res.json()[0]["time"]["latex"] + assert "new EF name [EF unit]" in res.json()[0]["cost"]["text"] + assert "new EF name [EF unit]" in res.json()[0]["cost"]["latex"] + assert "UNDEFINED [N/A]" in res.json()[0]["revenue"]["text"] + assert "UNDEFINED [N/A]" in res.json()[0]["revenue"]["latex"] # Cleanup tu.delete_design_group(project.id, design_group.id) From 47a219d3232104a268483d8427c85f8f1bfeea4d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 12 Jan 2024 11:24:02 +0100 Subject: [PATCH 198/210] upgrade desim-tool to 0.4.5 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 01e37e6f..59c91879 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.2 +desim-tool==0.4.5 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From f2a87358cf24693f90cb4101c30ea44a262a1a1e Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Fri, 12 Jan 2024 11:24:31 +0100 Subject: [PATCH 199/210] upgrade desim-tool to 0.4.5 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 01e37e6f..59c91879 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.2 +desim-tool==0.4.5 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From ba2778494cc2d173340dbd239c6ebfa81ed3dfae Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 14 Jan 2024 15:02:53 +0100 Subject: [PATCH 200/210] upgrade desim-tool to 0.4.6 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 59c91879..0ab291dc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.5 +desim-tool==0.4.6 
fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From 325492f5f2661c9e7c00740ea01c0ae20a2743d4 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 14 Jan 2024 15:03:07 +0100 Subject: [PATCH 201/210] upgrade desim-tool to 0.4.6 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 59c91879..0ab291dc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.5 +desim-tool==0.4.6 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From f26e3085491c5dff8c8cc2c2ae85002e637be11e Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 14 Jan 2024 15:31:27 +0100 Subject: [PATCH 202/210] upgrade desim-tool to 0.4.7 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0ab291dc..cd643c8e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.6 +desim-tool==0.4.7 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From f623c1bc60a0628e16e912efd680f9fe14381697 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Sun, 14 Jan 2024 15:31:43 +0100 Subject: [PATCH 203/210] upgrade desim-tool to 0.4.7 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 0ab291dc..cd643c8e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.6 +desim-tool==0.4.7 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From fa186d65ba712b9946ffb4076348e629623d7e40 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Wed, 17 Jan 2024 15:37:26 +0100 Subject: [PATCH 204/210] commented out API call for editsimsetting, made it so that runsim saves the settings aswell --- sedbackend/apps/core/db.py | 2 ++ sedbackend/apps/cvs/simulation/implementation.py | 3 ++- sedbackend/apps/cvs/simulation/router.py | 3 ++- sedbackend/apps/cvs/simulation/storage.py | 3 ++- 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index e08c2a6c..5fcccb61 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -11,8 +11,10 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') host = 'core-db' +#host = 'localhost' database = 'seddb' port = 3306 +#port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 985e511b..4812c5ad 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -299,7 +299,7 @@ def remove_simulation_file(project_id: int, user_id, file_id) -> bool: ) - +""" def edit_sim_settings( project_id: int, sim_settings: models.EditSimSettings, user_id: int ) -> bool: @@ -327,3 +327,4 @@ def edit_sim_settings( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Could not update simulation settings", ) +""" \ No newline at end of file diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index f64f2fd7..97e64a12 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -85,7 +85,7 @@ async def get_simulations(native_project_id: int) -> List[models.SimulationFetch ) async def remove_simulation_files(native_project_id: int, user: User = Depends(get_current_active_user)) -> bool: return 
implementation.remove_simulation_files(native_project_id, user.id) - +""" @router.put( '/project/{native_project_id}/simulation/settings', summary='Create or update simulation settings', @@ -95,6 +95,7 @@ async def remove_simulation_files(native_project_id: int, user: User = Depends(g async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings, user: User = Depends(get_current_active_user)) -> bool: return implementation.edit_sim_settings(native_project_id, sim_settings, user.id) +""" @router.get( '/project/{native_project_id}/simulation/file/{file_id}', diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 9678b710..cddf3e85 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -334,7 +334,8 @@ def run_simulation( ) sim_result.runs.append(sim_run_res) - vs_x_ds = str(len(sim_result.vcss)) + 'x' + str(len(sim_result.designs)) + vs_x_ds = str(len(sim_result.vcss)) + 'x' + str(len(sim_result.designs)) + edit_simulation_settings(db_connection, project_id, sim_settings, user_id) save_simulation(db_connection, project_id,sim_result, user_id, vs_x_ds) sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) return sim_file_info From 2f86b8cc37cbc5ae934392481b642b4929c90965 Mon Sep 17 00:00:00 2001 From: Ziidy Date: Thu, 18 Jan 2024 11:56:09 +0100 Subject: [PATCH 205/210] uncommmented the API-call so that tests still run (since the editsimsettings in storage is still used) --- sedbackend/apps/cvs/simulation/implementation.py | 3 +-- sedbackend/apps/cvs/simulation/router.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 4812c5ad..985e511b 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -299,7 +299,7 @@ def remove_simulation_file(project_id: int, user_id, file_id) -> bool: ) -""" + def edit_sim_settings( project_id: int, sim_settings: models.EditSimSettings, user_id: int ) -> bool: @@ -327,4 +327,3 @@ def edit_sim_settings( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Could not update simulation settings", ) -""" \ No newline at end of file diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 97e64a12..8e68e851 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -85,7 +85,7 @@ async def get_simulations(native_project_id: int) -> List[models.SimulationFetch ) async def remove_simulation_files(native_project_id: int, user: User = Depends(get_current_active_user)) -> bool: return implementation.remove_simulation_files(native_project_id, user.id) -""" + @router.put( '/project/{native_project_id}/simulation/settings', summary='Create or update simulation settings', @@ -95,7 +95,7 @@ async def remove_simulation_files(native_project_id: int, user: User = Depends(g async def put_sim_settings(native_project_id: int, sim_settings: models.EditSimSettings, user: User = Depends(get_current_active_user)) -> bool: return implementation.edit_sim_settings(native_project_id, sim_settings, user.id) -""" + @router.get( '/project/{native_project_id}/simulation/file/{file_id}', From 78cb5cb0c973c8dc26ed49d809242c601c473ac0 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 25 Jan 2024 19:13:49 +0100 Subject: [PATCH 206/210] removed commented code 
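The removed lines are the commented-out local-development override (#host = 'localhost',
#port = 3001) added earlier in this series. A minimal sketch of an alternative, assuming
plain os.getenv and hypothetical SEDDB_HOST / SEDDB_PORT variable names, that reads the
override from the environment so db.py never needs hand-editing for local runs:

    import os

    # Hypothetical environment override; defaults match the in-container values.
    host = os.getenv('SEDDB_HOST', 'core-db')      # e.g. 'localhost' for local development
    port = int(os.getenv('SEDDB_PORT', '3306'))    # e.g. 3001 when tunnelling to a dev database
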
--- sedbackend/apps/core/db.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sedbackend/apps/core/db.py b/sedbackend/apps/core/db.py index 5fcccb61..e08c2a6c 100644 --- a/sedbackend/apps/core/db.py +++ b/sedbackend/apps/core/db.py @@ -11,10 +11,8 @@ user = 'rw' password = Environment.get_variable('MYSQL_PWD_RW') host = 'core-db' -#host = 'localhost' database = 'seddb' port = 3306 -#port = 3001 try: connection_pool = mysql.connector.pooling.MySQLConnectionPool( From df7afcc41eef79d5badeba865959cf3de7a8514d Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Thu, 14 Mar 2024 21:52:13 +0100 Subject: [PATCH 207/210] New process names and test fixes (#137) * updated sql file with new processes and names * fixed formula test * fixed failing simulation tests * inshallah fixed * fixed failing dsm tests --------- Co-authored-by: = <=> --- sedbackend/apps/cvs/simulation/storage.py | 199 +++++++++++++------ sql/VZ_inserts_cvs.sql | 25 ++- tests/apps/cvs/life_cycle/files/input.csv | 4 +- tests/apps/cvs/life_cycle/test_dsm_files.py | 12 +- tests/apps/cvs/simulation/test_simulation.py | 1 - tests/apps/cvs/testutils.py | 6 +- tests/apps/cvs/vcs/test_subprocesses.py | 2 +- 7 files changed, 166 insertions(+), 83 deletions(-) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 20dd52a2..81cd23fa 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -21,7 +21,7 @@ from typing import List from sedbackend.apps.cvs.design.storage import get_all_designs -from mysqlsb import FetchType, MySQLStatementBuilder,Sort +from mysqlsb import FetchType, MySQLStatementBuilder, Sort from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.cvs.simulation.models import SimulationResult @@ -31,8 +31,15 @@ from sedbackend.apps.cvs.simulation import models import sedbackend.apps.cvs.simulation.exceptions as e from sedbackend.apps.cvs.vcs import storage as vcs_storage -from sedbackend.apps.cvs.life_cycle import exceptions as life_cycle_exceptions, storage as life_cycle_storage -from sedbackend.apps.core.files import models as file_models, storage as file_storage, exceptions as file_exceptions +from sedbackend.apps.cvs.life_cycle import ( + exceptions as life_cycle_exceptions, + storage as life_cycle_storage, +) +from sedbackend.apps.core.files import ( + models as file_models, + storage as file_storage, + exceptions as file_exceptions, +) from sedbackend.apps.core.projects import storage as core_project_storage from sedbackend.apps.core.files import models as file_models, storage as file_storage from sedbackend.apps.core.files.models import StoredFilePath @@ -64,23 +71,35 @@ "minutes": TimeFormat.MINUTES, } ) -MAX_FILE_SIZE = 100 * 10 ** 6 # 100MB +MAX_FILE_SIZE = 100 * 10**6 # 100MB SIM_SETTINGS_TABLE = "cvs_simulation_settings" -SIM_SETTINGS_COLUMNS = ['project', 'time_unit', 'flow_process', 'flow_start_time', 'flow_time', - 'interarrival_time', 'start_time', 'end_time', 'discount_rate', 'non_tech_add', 'monte_carlo', - 'runs'] +SIM_SETTINGS_COLUMNS = [ + "project", + "time_unit", + "flow_process", + "flow_start_time", + "flow_time", + "interarrival_time", + "start_time", + "end_time", + "discount_rate", + "non_tech_add", + "monte_carlo", + "runs", +] + +CVS_SIMULATION_FILES_TABLE = "cvs_simulation_files" +CVS_SIMULATION_FILES_COLUMNSS = ["project_id", "file", "vs_x_ds"] +CVS_SIMULATION_FILES_COLUMNS = ["project_id", "file", "insert_timestamp", "vs_x_ds"] -CVS_SIMULATION_FILES_TABLE = 
'cvs_simulation_files' -CVS_SIMULATION_FILES_COLUMNSS = ['project_id', 'file','vs_x_ds'] -CVS_SIMULATION_FILES_COLUMNS = ['project_id', 'file', 'insert_timestamp', 'vs_x_ds'] def csv_from_dataframe(dataframe) -> UploadFile: dataframe = pd.DataFrame(dataframe) fd, path = tempfile.mkstemp() try: with open(path, "w+") as csv_file: - dataframe.to_json(csv_file,orient='columns') + dataframe.to_json(csv_file, orient="columns") finally: csv_file = open(path, "r+b") upload_file = UploadFile(filename=csv_file.name + ".json", file=csv_file) @@ -89,16 +108,35 @@ def csv_from_dataframe(dataframe) -> UploadFile: return upload_file -def save_simulation(db_connection: PooledMySQLConnection, project_id: int, simulation: SimulationResult,user_id: int, vs_x_ds: str) -> bool: + +def save_simulation( + db_connection: PooledMySQLConnection, + project_id: int, + simulation: SimulationResult, + user_id: int, + vs_x_ds: str, +) -> bool: upload_file = csv_from_dataframe(simulation) - logger.debug(f'upload_files: {upload_file.read}') - return save_simulation_file(db_connection, project_id, upload_file, user_id, vs_x_ds) - -def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, - file: UploadFile, user_id, vs_x_ds: str) -> bool: - subproject = core_project_storage.db_get_subproject_native(db_connection, CVS_APP_SID, project_id) - model_file = file_models.StoredFilePost.import_fastapi_file(file, user_id, subproject.id) - + logger.debug(f"upload_files: {upload_file.read}") + return save_simulation_file( + db_connection, project_id, upload_file, user_id, vs_x_ds + ) + + +def save_simulation_file( + db_connection: PooledMySQLConnection, + project_id: int, + file: UploadFile, + user_id, + vs_x_ds: str, +) -> bool: + subproject = core_project_storage.db_get_subproject_native( + db_connection, CVS_APP_SID, project_id + ) + model_file = file_models.StoredFilePost.import_fastapi_file( + file, user_id, subproject.id + ) + with model_file.file_object as f: f.seek(0) tmp_file = f.read() @@ -106,69 +144,96 @@ def save_simulation_file(db_connection: PooledMySQLConnection, project_id: int, if mime != "JSON text data" and "ASCII text" not in mime: raise life_cycle_exceptions.InvalidFileTypeException f.seek(0) - logger.debug(f'File content: {model_file}') + logger.debug(f"File content: {model_file}") f.seek(0) stored_file = file_storage.db_save_file(db_connection, model_file) - + insert_statement = MySQLStatementBuilder(db_connection) - insert_statement.insert(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNSS) \ - .set_values([project_id, stored_file.id, vs_x_ds]) \ - .execute(fetch_type=FetchType.FETCH_NONE) + insert_statement.insert( + CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNSS + ).set_values([project_id, stored_file.id, vs_x_ds]).execute( + fetch_type=FetchType.FETCH_NONE + ) return True -def get_simulation_files(db_connection: PooledMySQLConnection, project_id: int) -> List[models.SimulationFetch]: + +def get_simulation_files( + db_connection: PooledMySQLConnection, project_id: int +) -> List[models.SimulationFetch]: select_statement = MySQLStatementBuilder(db_connection) - file_res = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ - .where('project_id = %s', [project_id]) \ - .order_by(['file'], Sort.DESCENDING) \ + file_res = ( + select_statement.select( + CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS + ) + .where("project_id = %s", [project_id]) + .order_by(["file"], Sort.DESCENDING) .execute(fetch_type=FetchType.FETCH_ALL, 
dictionary=True) + ) for row in file_res: - row['insert_timestamp'] = row['insert_timestamp'].strftime("%Y-%m-%d") + row["insert_timestamp"] = row["insert_timestamp"].strftime("%Y-%m-%d") return file_res -def get_simulation_file_path(db_connection: PooledMySQLConnection, file_id, user_id) -> StoredFilePath: + +def get_simulation_file_path( + db_connection: PooledMySQLConnection, file_id, user_id +) -> StoredFilePath: return file_storage.db_get_file_path(db_connection, file_id, user_id) -def delete_simulation_file(db_connection: PooledMySQLConnection, project_id: int, file_id, user_id: int) -> bool: +def delete_simulation_file( + db_connection: PooledMySQLConnection, project_id: int, file_id, user_id: int +) -> bool: if file_id is None: - file_storage.db_delete_file(db_connection, file_id, user_id) + file_storage.db_delete_file(db_connection, file_id, user_id) delete_statement = MySQLStatementBuilder(db_connection) - _, rows = delete_statement.delete(CVS_SIMULATION_FILES_TABLE) \ - .where('file = %s', [file_id] ) \ + _, rows = ( + delete_statement.delete(CVS_SIMULATION_FILES_TABLE) + .where("file = %s", [file_id]) .execute(return_affected_rows=True) + ) return True -def delete_all_simulation_files(db_connection: PooledMySQLConnection, project_id: int, user_id: int) -> bool: +def delete_all_simulation_files( + db_connection: PooledMySQLConnection, project_id: int, user_id: int +) -> bool: files = get_simulation_files(db_connection, project_id) for file in files: - file_storage.db_delete_file(db_connection, file['file'],user_id) + file_storage.db_delete_file(db_connection, file["file"], user_id) return True -def get_file_content(db_connection: PooledMySQLConnection, user_id, file_id) -> SimulationResult: +def get_file_content( + db_connection: PooledMySQLConnection, user_id, file_id +) -> SimulationResult: path = get_simulation_file_path(db_connection, file_id, user_id).path - with open(path, newline='') as f: - data = pd.read_json(f, orient='columns') + with open(path, newline="") as f: + data = pd.read_json(f, orient="columns") designs, vcss, vds, run = data[1] - return SimulationResult(designs = designs, vcss = vcss,vds = vds,runs = run) + return SimulationResult(designs=designs, vcss=vcss, vds=vds, runs=run) -def get_simulation_content_with_max_file_id(db_connection: PooledMySQLConnection, project_id: int) -> models.SimulationFetch: +def get_simulation_content_with_max_file_id( + db_connection: PooledMySQLConnection, project_id: int +) -> models.SimulationFetch: select_statement = MySQLStatementBuilder(db_connection) - max_file_id_subquery = select_statement.select(CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS) \ - .where('project_id = %s', [project_id]) \ - .order_by(['file'], Sort.DESCENDING) \ - .limit(1) \ + max_file_id_subquery = ( + select_statement.select( + CVS_SIMULATION_FILES_TABLE, CVS_SIMULATION_FILES_COLUMNS + ) + .where("project_id = %s", [project_id]) + .order_by(["file"], Sort.DESCENDING) + .limit(1) .execute(fetch_type=FetchType.FETCH_ONE, dictionary=True) - - max_file_id_subquery['insert_timestamp'] = max_file_id_subquery['insert_timestamp'].strftime("%Y-%m-%d") + ) - return max_file_id_subquery + max_file_id_subquery["insert_timestamp"] = max_file_id_subquery[ + "insert_timestamp" + ].strftime("%Y-%m-%d") + return max_file_id_subquery def run_simulation( @@ -313,9 +378,11 @@ def run_simulation( sim_run_res = models.Simulation( time=results.timesteps[-1], - mean_NPV=results.normalize_npv() - if normalized_npv - else results.mean_npv(), + mean_NPV=( + 
results.normalize_npv() + if normalized_npv + else results.mean_npv() + ), max_NPVs=results.all_max_npv(), mean_payback_time=results.mean_npv_payback_time(), all_npvs=results.npvs, @@ -326,12 +393,11 @@ def run_simulation( ) sim_result.runs.append(sim_run_res) - vs_x_ds = str(len(sim_result.vcss)) + 'x' + str(len(sim_result.designs)) - edit_simulation_settings(db_connection, project_id, sim_settings, user_id) - save_simulation(db_connection, project_id,sim_result, user_id, vs_x_ds) - sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) + vs_x_ds = str(len(sim_result.vcss)) + "x" + str(len(sim_result.designs)) + edit_simulation_settings(db_connection, project_id, sim_settings, user_id) + save_simulation(db_connection, project_id, sim_result, user_id, vs_x_ds) + sim_file_info = get_simulation_content_with_max_file_id(db_connection, project_id) return sim_file_info - def populate_processes( @@ -382,7 +448,9 @@ def populate_processes( parser.evaluate(expr.replace_all("time", time, revenue_formula)), row["iso_name"], non_tech_add, - TIME_FORMAT_DICT.get(row["time_unit"].lower() if row["time_unit"] else "year"), + TIME_FORMAT_DICT.get( + row["time_unit"].lower() if row["time_unit"] else "year" + ), ) except Exception as exc: logger.debug(f"{exc.__class__}, {exc}") @@ -407,7 +475,9 @@ def populate_processes( parser.evaluate(expr.replace_all("time", time, revenue_formula)), sub_name, non_tech_add, - TIME_FORMAT_DICT.get(row["time_unit"].lower() if row["time_unit"] else "year"), + TIME_FORMAT_DICT.get( + row["time_unit"].lower() if row["time_unit"] else "year" + ), ) except Exception as exc: logger.debug(f"{exc.__class__}, {exc}") @@ -512,18 +582,25 @@ def edit_simulation_settings( for vcs in vcss: rows = vcs_storage.get_vcs_table(db_connection, project_id, vcs.id) for row in rows: + logger.debug( + f"Row: {('iso' + row.iso_process.name) if row.iso_process else ('sub' + row.subprocess.name)}" + ) if ( row.iso_process is not None and row.iso_process.name == sim_settings.flow_process ) or ( row.subprocess is not None - and f"{row.subprocess.name} ({row.subprocess.parent_process.name})" - == sim_settings.flow_process + and ( + f"{row.subprocess.name} ({row.subprocess.parent_process.name})" + == sim_settings.flow_process + or row.subprocess.name == sim_settings.flow_process + ) ): flow_process_exists = True break if not flow_process_exists: + logger.debug(f"Flow process {sim_settings.flow_process} not found") raise e.FlowProcessNotFoundException if count == 1: diff --git a/sql/VZ_inserts_cvs.sql b/sql/VZ_inserts_cvs.sql index e3f554bc..73ed490b 100644 --- a/sql/VZ_inserts_cvs.sql +++ b/sql/VZ_inserts_cvs.sql @@ -6,18 +6,23 @@ INSERT INTO cvs_iso_processes values (4, 'Infrastructure management', 'Organizat INSERT INTO cvs_iso_processes values (5, 'Project portfolio management', 'Organizational project-enabling processes'); INSERT INTO cvs_iso_processes values (6, 'Human resource management', 'Organizational project-enabling processes'); INSERT INTO cvs_iso_processes values (7, 'Quality management', 'Organizational project-enabling processes'); +INSERT INTO cvs_iso_processes values (26, 'Knowledge management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (8, 'Project planning', 'Project processes'); -INSERT INTO cvs_iso_processes values (9, 'Project assessment and control', 'Project processes'); -INSERT INTO cvs_iso_processes values (10, 'Decision management', 'Project processes'); -INSERT INTO cvs_iso_processes values (11, 'Risk 
management', 'Project processes'); -INSERT INTO cvs_iso_processes values (12, 'Configuration management', 'Project processes'); -INSERT INTO cvs_iso_processes values (13, 'Information management', 'Project processes'); -INSERT INTO cvs_iso_processes values (14, 'Measurement', 'Project processes'); +INSERT INTO cvs_iso_processes values (8, 'Project planning', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (9, 'Project assessment and control', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (10, 'Decision management', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (11, 'Risk management', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (12, 'Configuration management', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (13, 'Information management', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (14, 'Measurement', 'Technical Management processes'); +INSERT INTO cvs_iso_processes values (27, 'Quality Assurance', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (15, 'Stakeholder requirements definition', 'Technical processes'); -INSERT INTO cvs_iso_processes values (16, 'Requirements analysis', 'Technical processes'); -INSERT INTO cvs_iso_processes values (17, 'Architectural design', 'Technical processes'); +INSERT INTO cvs_iso_processes values (28, 'Business or mission analysis', 'Technical processes'); +INSERT INTO cvs_iso_processes values (15, 'Stakeholder needs and requirements definition', 'Technical processes'); +INSERT INTO cvs_iso_processes values (16, 'System requirements definition', 'Technical processes'); +INSERT INTO cvs_iso_processes values (17, 'System architecture definition', 'Technical processes'); +INSERT INTO cvs_iso_processes values (29, 'Design definition', 'Technical processes'); +INSERT INTO cvs_iso_processes values (30, 'System analysis', 'Technical processes'); INSERT INTO cvs_iso_processes values (18, 'Implementation', 'Technical processes'); INSERT INTO cvs_iso_processes values (19, 'Integration', 'Technical processes'); INSERT INTO cvs_iso_processes values (20, 'Verification', 'Technical processes'); diff --git a/tests/apps/cvs/life_cycle/files/input.csv b/tests/apps/cvs/life_cycle/files/input.csv index 183d0ecc..14c9f456 100644 --- a/tests/apps/cvs/life_cycle/files/input.csv +++ b/tests/apps/cvs/life_cycle/files/input.csv @@ -1,5 +1,5 @@ -Processes,Start,Architectural design,Verification,End +Processes,Start,Implementation,Verification,End Start,X,1,0,0 -Architectural design,0,X,1,0 +Implementation,0,X,1,0 Verification,0,0,X,1 End,0,0,0,X \ No newline at end of file diff --git a/tests/apps/cvs/life_cycle/test_dsm_files.py b/tests/apps/cvs/life_cycle/test_dsm_files.py index 139d5ebf..745bef36 100644 --- a/tests/apps/cvs/life_cycle/test_dsm_files.py +++ b/tests/apps/cvs/life_cycle/test_dsm_files.py @@ -11,7 +11,7 @@ stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, stakeholder_expectations=tu.tu.random_str(5, 50), - iso_process=17, + iso_process=18, subprocess=None ), tu.vcs_model.VcsRowPost( index=1, @@ -104,7 +104,7 @@ def test_upload_invalid_dsm_file(client, std_headers, std_user): stakeholder=tu.tu.random_str(5, 50), stakeholder_needs=None, stakeholder_expectations=tu.tu.random_str(5, 50), - iso_process=17, + iso_process=18, subprocess=None ) row2 = tu.vcs_model.VcsRowPost( @@ -208,9 +208,9 @@ def test_save_dsm(client, std_headers, std_user): rows = [std_rows[0], std_rows[1]] 
tu.create_vcs_table(project.id, vcs.id, rows) - dsm = [["Processes", "Start", "Architectural design", "Verification", "End"], + dsm = [["Processes", "Start", "Implementation", "Verification", "End"], ["Start", "X", "1", "0", "0"], - ["Architectural design", "0", "X", "1", "0"], + ["Implementation", "0", "X", "1", "0"], ["Verification", "0", "0", "X", "1"], ["End", "0", "0", "0", "X"]] @@ -244,9 +244,9 @@ def test_apply_dsm_to_all(client, std_headers, std_user): tu.create_vcs_table(project.id, vcss[1].id, rows) tu.create_vcs_table(project.id, vcss[2].id, rows_alt) - dsm = [["Processes", "Start", "Architectural design", "Verification", "End"], + dsm = [["Processes", "Start", "Implementation", "Verification", "End"], ["Start", "X", "1", "0", "0"], - ["Architectural design", "0", "X", "1", "0"], + ["Implementation", "0", "X", "1", "0"], ["Verification", "0", "0", "X", "1"], ["End", "0", "0", "0", "X"]] diff --git a/tests/apps/cvs/simulation/test_simulation.py b/tests/apps/cvs/simulation/test_simulation.py index 4104845d..561dab48 100644 --- a/tests/apps/cvs/simulation/test_simulation.py +++ b/tests/apps/cvs/simulation/test_simulation.py @@ -46,7 +46,6 @@ def test_run_single_simulation(client, std_headers, std_user): tu.delete_design_group(project.id, design_group.id) tu.delete_VCS_with_ids(current_user.id, project.id, [vcs.id]) tu.delete_project_by_id(project.id, current_user.id) - assert res.status_code == 200 def test_run_sim_invalid_design_group(client, std_headers, std_user): diff --git a/tests/apps/cvs/testutils.py b/tests/apps/cvs/testutils.py index 5ba1243f..834a7ddb 100644 --- a/tests/apps/cvs/testutils.py +++ b/tests/apps/cvs/testutils.py @@ -1,6 +1,8 @@ from typing import List, Tuple, Optional import random +from fastapi.logger import logger + from sedbackend.apps.core.files import implementation as impl_files import sedbackend.apps.cvs.simulation.implementation as sim_impl import sedbackend.apps.cvs.simulation.models as sim_model @@ -16,7 +18,7 @@ import sedbackend.apps.cvs.project.models import sedbackend.apps.cvs.vcs.implementation as vcs_impl import sedbackend.apps.cvs.vcs.models as vcs_model -from sedbackend.apps.cvs.link_design_lifecycle.models import FormulaRowGet, TimeFormat, Rate +from sedbackend.apps.cvs.link_design_lifecycle.models import TimeFormat, Rate from sedbackend.apps.cvs.market_input import models as market_input_model, implementation as market_input_impl import tests.testutils as tu @@ -99,7 +101,7 @@ def random_value_driver_post(user_id: int, project_id: int, name: str = None, un if name is None: name = tu.random_str(5, 50) if unit is None: - unit = tu.random_str(0, 10) + unit = tu.random_str(1, 10) return sedbackend.apps.cvs.vcs.models.ValueDriverPost( name=name, diff --git a/tests/apps/cvs/vcs/test_subprocesses.py b/tests/apps/cvs/vcs/test_subprocesses.py index d95c6ce5..89fb6402 100644 --- a/tests/apps/cvs/vcs/test_subprocesses.py +++ b/tests/apps/cvs/vcs/test_subprocesses.py @@ -54,7 +54,7 @@ def test_get_subprocess_not_found(client, std_headers, std_user): project = tu.seed_random_project(current_user.id) tu.seed_random_subprocesses(project.id, 1) # Act - res = client.get(f'/api/cvs/project/{project.id}/subprocess/999', headers=std_headers) + res = client.get(f'/api/cvs/project/{project.id}/subprocess/99999999', headers=std_headers) # Assert assert res.status_code == 404 # 404 Not Found # Cleanup From 57acb3990f9471b2b9de12162f206be6c4a90628 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Tue, 11 Jun 2024 14:34:04 +0200 Subject: [PATCH 208/210] desim-tool 
0.4.8 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index cd643c8e..6dfb9c57 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ bcrypt==4.0.1 -desim-tool==0.4.7 +desim-tool==0.4.8 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 From 87f30f86c32cb77da99eeacd5875edebc55c2cf6 Mon Sep 17 00:00:00 2001 From: Jyborn Date: Mon, 7 Oct 2024 16:18:09 +0200 Subject: [PATCH 209/210] Surrogate model (#142) * updated sql file with new processes and names * fixed formula test * surrogate model integration first commit * get sim data * data formatting * model working * fixed failing simulation tests * fixed failing dsm tests * replace instead of insert * removed constraint from vcs_rows * added contstraint back * non numeric vds * sql fix + optimizer works --------- Co-authored-by: = <=> Co-authored-by: Oscar Bennet Co-authored-by: Oscar Bennet --- requirements.txt | 4 +- .../apps/cvs/simulation/implementation.py | 14 ++ sedbackend/apps/cvs/simulation/router.py | 11 +- sedbackend/apps/cvs/simulation/storage.py | 161 ++++++++++++++++++ sql/V230925_cvs.sql | 22 +-- sql/VZ_inserts_cvs.sql | 60 +++---- test | 38 +++++ test.pub | 1 + 8 files changed, 259 insertions(+), 52 deletions(-) create mode 100644 test create mode 100644 test.pub diff --git a/requirements.txt b/requirements.txt index 6dfb9c57..b148bb7f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,7 @@ desim-tool==0.4.8 fastapi==0.95.1 mvmlib==0.5.9 mysql-connector-python==8.0.33 +numpy==1.26.4 pandas==2.0.0 passlib==1.7.4 pyparsing==3.0.9 @@ -15,4 +16,5 @@ mysql-statement-builder==0.* python-magic==0.4.27 pytest==7.3.1 httpx==0.24.0 -plusminus==0.7.0 \ No newline at end of file +plusminus==0.7.0 +smt diff --git a/sedbackend/apps/cvs/simulation/implementation.py b/sedbackend/apps/cvs/simulation/implementation.py index 95a86827..2e83f2cf 100644 --- a/sedbackend/apps/cvs/simulation/implementation.py +++ b/sedbackend/apps/cvs/simulation/implementation.py @@ -324,3 +324,17 @@ def edit_sim_settings( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Could not update simulation settings", ) + + +def get_surrogate_model(user_id: int, file_id: int): + try: + with get_connection() as con: + result = storage.get_surrogate_model(con, user_id, file_id) + con.commit() + return result + except Exception as e: + logger.exception(e) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail=f"Could not get surrogate model for simulation", + ) \ No newline at end of file diff --git a/sedbackend/apps/cvs/simulation/router.py b/sedbackend/apps/cvs/simulation/router.py index 8e68e851..048ff6ea 100644 --- a/sedbackend/apps/cvs/simulation/router.py +++ b/sedbackend/apps/cvs/simulation/router.py @@ -3,6 +3,7 @@ from sedbackend.apps.core.authentication.utils import get_current_active_user from sedbackend.apps.core.projects.dependencies import SubProjectAccessChecker from sedbackend.apps.core.projects.models import AccessLevel +from sedbackend.apps.cvs.design.models import ValueDriverDesignValue from sedbackend.apps.cvs.project.router import CVS_APP_SID from sedbackend.apps.core.users.models import User from sedbackend.apps.cvs.simulation import implementation, models @@ -113,4 +114,12 @@ async def get_simulation_file_content(native_project_id,file_id: int, user: User dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] ) async def 
get_simulation_file_content(native_project_id,file_id: int, user: User = Depends(get_current_active_user)) -> bool: - return implementation.remove_simulation_file(native_project_id, user.id, file_id) \ No newline at end of file + return implementation.remove_simulation_file(native_project_id, user.id, file_id) + +@router.get( + '/project/{native_project_id}/simulation/surrogate', + summary='Get surrogate model for simulation', + dependencies=[Depends(SubProjectAccessChecker(AccessLevel.list_can_read(), CVS_APP_SID))] +) +async def get_surrogate_model(file_id: int, user: User = Depends(get_current_active_user)) -> models.List[ValueDriverDesignValue]: + return implementation.get_surrogate_model(user.id, file_id) diff --git a/sedbackend/apps/cvs/simulation/storage.py b/sedbackend/apps/cvs/simulation/storage.py index 81cd23fa..1d7c08ff 100644 --- a/sedbackend/apps/cvs/simulation/storage.py +++ b/sedbackend/apps/cvs/simulation/storage.py @@ -19,6 +19,8 @@ from desim.simulation import Process from typing import List + +from sedbackend.apps.cvs.design.models import DesignPut, Design, ValueDriverDesignValue from sedbackend.apps.cvs.design.storage import get_all_designs from mysqlsb import FetchType, MySQLStatementBuilder, Sort @@ -848,3 +850,162 @@ def populate_sim_settings(db_result) -> models.SimSettings: monte_carlo=db_result["monte_carlo"], runs=db_result["runs"], ) + + +# Should be moved to another repo probably +from smt.surrogate_models import KRG +from sklearn.metrics import mean_squared_error, r2_score +from smt.utils.misc import compute_rms_error +import numpy as np +import pandas as pd +from sklearn.model_selection import train_test_split +from scipy.optimize import minimize +from smt.sampling_methods import LHS + + +def is_numeric(value): + try: + float(value) + return True + except ValueError: + return False + + +def get_surrogate_model(db_connection: PooledMySQLConnection, user_id, file_id): + simres = get_file_content(db_connection, user_id, file_id) + designs = [] + vd_values = [] + non_numeric_values = {} + unique_vd_ids = set(); + + # Collect vd_values and design names + for design in simres.designs: + for vd_value in design.vd_design_values: + unique_vd_ids.add(vd_value.vd_id) + if not is_numeric(vd_value.value): + if vd_value.vd_id not in non_numeric_values: + non_numeric_values[vd_value.vd_id] = [] + if vd_value.value not in non_numeric_values[vd_value.vd_id]: + non_numeric_values[vd_value.vd_id].append(vd_value.value) + vd_values.append([(vd_value.vd_id, vd_value.value) for vd_value in design.vd_design_values]) + designs.append(design.name) + + spv_values = [run.surplus_value_end_result for run in simres.runs] + + vd_mapping_dict = {vd_id: {val: idx for idx, val in enumerate(values)} for vd_id, values in + non_numeric_values.items()} + reverse_vd_mapping_dict = {vd_id: {idx: val for val, idx in values.items()} for vd_id, values in + vd_mapping_dict.items()} + logger.debug("vd_values") + logger.debug(vd_values) + logger.debug("vd ids") + logger.debug(unique_vd_ids) + def translate_values(vd_list, mapping_dict): + translated_list = [] + for vd_id, val in vd_list: + if vd_id in mapping_dict and val in mapping_dict[vd_id]: + translated_list.append(mapping_dict[vd_id][val]) + else: + translated_list.append(float(val)) + return translated_list + + translated_vd_values = [translate_values(vd_list, vd_mapping_dict) for vd_list in vd_values] + + formatted_data = {} + for design, vd, spv in zip(designs, translated_vd_values, spv_values): + formatted_data[design] = {'vds': vd, 'spv': 
spv} + + logger.debug("formatted_data:") + logger.debug(formatted_data) + + logger.debug("VD_MAPPING_DICT:") + logger.debug(vd_mapping_dict) + logger.debug(reverse_vd_mapping_dict) + + design_points = [] + spv_values = [] + + for design_name, design_data in formatted_data.items(): + design_points.append(design_data['vds']) + spv_values.append(design_data['spv']) + + # This part almost exactly like jupyter notebook model part relies on the data format being the same as + # the jupyter notebook + design_points = np.array(design_points) + spv_values = np.array(spv_values) + + X_train, X_test, y_train, y_test = train_test_split(design_points, spv_values, test_size=0.2, random_state=42) + kriging_model = KRG(print_prediction=False) + kriging_model.set_training_values(X_train, y_train) + kriging_model.train() + + # Create design space + ndim = design_points.shape[1] + ndoe = int(10 * ndim) + + # Construction of the DOE + vd_bounds = [(np.min(design_points[:, i]), np.max(design_points[:, i])) for i in range(design_points.shape[1])] + sampling = LHS(xlimits=np.array(vd_bounds), criterion='ese', random_state=1) + xt = sampling(ndoe) + yt = kriging_model.predict_values(xt) + + # Combine sampled design points and actual data + combined_design_points = np.concatenate([xt, X_train]) + combined_spv_values = np.concatenate([yt.flatten(), y_train]) + + # Train new kriging model on the combined dataset + kriging_model_combined = KRG(print_prediction=False) + kriging_model_combined.set_training_values(combined_design_points, combined_spv_values) + kriging_model_combined.train() + + def objective_function(_vd_values): + _predicted_spv = kriging_model_combined.predict_values(np.array([_vd_values]))[0][0] + return -_predicted_spv + + #optimize_bounds = [(np.min(combined_design_points[:, i]), np.inf) for i in range(combined_design_points.shape[1])] + + + #logger.debug("COMBINED DESIGN POINTS") + #logger.debug(combined_design_points) + + default_upper_bound = np.inf + optimize_bounds = [] + + # Iterate over each index in the combined_design_points shape + for i, vd in enumerate(simres.designs[0].vd_design_values): + if vd.vd_id in vd_mapping_dict: + # Use custom bounds if specified in VD_MAPPING_DICT + bounds = (0, len(vd_mapping_dict[vd.vd_id]) - 1) # Assuming indices are 0, 1, 2, ... 
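+            # Note: categorical (non-numeric) value drivers are optimized over integer
+            # indices into their category list; reverse_vd_mapping_dict maps the optimum
+            # back to the original label after minimize() returns.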
+ optimize_bounds.append(bounds) + else: + # Use default bounds otherwise + optimize_bounds.append((np.min(combined_design_points[:, i]), default_upper_bound)) + + logger.debug("OPTIMZE BOUNDS") + logger.debug(optimize_bounds) + + init_guess = np.mean(combined_design_points, axis=0) + result = minimize(objective_function, x0=init_guess, bounds=optimize_bounds, method='Nelder-Mead', tol=1e-4) + + optimal_vd_values = result.x + logger.debug("OPTIMAL VD VALUES") + logger.debug(optimal_vd_values) + + logger.debug(simres.designs[0].vd_design_values) + translated_optimal_vd_values = [] + for i, vd in enumerate(simres.designs[0].vd_design_values): + logger.debug(vd) + if vd.vd_id in non_numeric_values: + translated_optimal_vd_values.append(reverse_vd_mapping_dict[vd.vd_id][int(round(optimal_vd_values[i]))]) + else: + translated_optimal_vd_values.append(optimal_vd_values[i]) + + predicted_spv = kriging_model_combined.predict_values(np.array([result.x]))[0][0] + + # Print the translated result + logger.debug(translated_optimal_vd_values) + logger.debug(predicted_spv) + + return [ValueDriverDesignValue(vd_id=vd.vd_id, value=translated_optimal_vd_values[i]) for i, vd in enumerate(simres.designs[0].vd_design_values)] + + diff --git a/sql/V230925_cvs.sql b/sql/V230925_cvs.sql index c936f7e2..a8a2dfee 100644 --- a/sql/V230925_cvs.sql +++ b/sql/V230925_cvs.sql @@ -32,16 +32,7 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_external_factors` `vcs_row` INT UNSIGNED NOT NULL, `design_group` INT UNSIGNED NOT NULL, `external_factor` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_row`, `design_group`, `external_factor`), - FOREIGN KEY (`vcs_row`) - REFERENCES `seddb`.`cvs_design_mi_formulas` (`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY (`design_group`) - REFERENCES `seddb`.`cvs_design_mi_formulas` (`design_group`) - ON DELETE CASCADE, - FOREIGN KEY (`external_factor`) - REFERENCES `seddb`.`cvs_market_inputs` (`id`) - ON DELETE CASCADE + PRIMARY KEY (`vcs_row`, `design_group`, `external_factor`) ); @@ -51,16 +42,7 @@ CREATE TABLE IF NOT EXISTS `seddb`.`cvs_formulas_value_drivers` `design_group` INT UNSIGNED NOT NULL, `value_driver` INT UNSIGNED NOT NULL, `project` INT UNSIGNED NOT NULL, - PRIMARY KEY (`vcs_row`, `design_group`, `value_driver`), - FOREIGN KEY (`vcs_row`) - REFERENCES `seddb`.`cvs_design_mi_formulas` (`vcs_row`) - ON DELETE CASCADE, - FOREIGN KEY (`design_group`) - REFERENCES `seddb`.`cvs_design_mi_formulas` (`design_group`) - ON DELETE CASCADE, - FOREIGN KEY (`value_driver`) - REFERENCES `seddb`.`cvs_value_drivers` (`id`) - ON DELETE CASCADE + PRIMARY KEY (`vcs_row`, `design_group`, `value_driver`) ); ALTER TABLE `seddb`.`cvs_design_mi_formulas` diff --git a/sql/VZ_inserts_cvs.sql b/sql/VZ_inserts_cvs.sql index 73ed490b..30befdaa 100644 --- a/sql/VZ_inserts_cvs.sql +++ b/sql/VZ_inserts_cvs.sql @@ -1,33 +1,33 @@ -INSERT INTO cvs_iso_processes values (1, 'Acquisition', 'Agreement Processes'); -INSERT INTO cvs_iso_processes values (2, 'Supply', 'Agreement Processes'); +REPLACE INTO cvs_iso_processes values (1, 'Acquisition', 'Agreement Processes'); +REPLACE INTO cvs_iso_processes values (2, 'Supply', 'Agreement Processes'); -INSERT INTO cvs_iso_processes values (3, 'Life-cycle model management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (4, 'Infrastructure management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (5, 'Project portfolio management', 'Organizational project-enabling processes'); -INSERT INTO 
cvs_iso_processes values (6, 'Human resource management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (7, 'Quality management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (26, 'Knowledge management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (3, 'Life-cycle model management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (4, 'Infrastructure management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (5, 'Project portfolio management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (6, 'Human resource management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (7, 'Quality management', 'Organizational project-enabling processes'); +REPLACE INTO cvs_iso_processes values (26, 'Knowledge management', 'Organizational project-enabling processes'); -INSERT INTO cvs_iso_processes values (8, 'Project planning', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (9, 'Project assessment and control', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (10, 'Decision management', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (11, 'Risk management', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (12, 'Configuration management', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (13, 'Information management', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (14, 'Measurement', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (27, 'Quality Assurance', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (8, 'Project planning', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (9, 'Project assessment and control', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (10, 'Decision management', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (11, 'Risk management', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (12, 'Configuration management', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (13, 'Information management', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (14, 'Measurement', 'Technical Management processes'); +REPLACE INTO cvs_iso_processes values (27, 'Quality Assurance', 'Technical Management processes'); -INSERT INTO cvs_iso_processes values (28, 'Business or mission analysis', 'Technical processes'); -INSERT INTO cvs_iso_processes values (15, 'Stakeholder needs and requirements definition', 'Technical processes'); -INSERT INTO cvs_iso_processes values (16, 'System requirements definition', 'Technical processes'); -INSERT INTO cvs_iso_processes values (17, 'System architecture definition', 'Technical processes'); -INSERT INTO cvs_iso_processes values (29, 'Design definition', 'Technical processes'); -INSERT INTO cvs_iso_processes values (30, 'System analysis', 'Technical processes'); -INSERT INTO cvs_iso_processes values (18, 'Implementation', 'Technical processes'); -INSERT INTO cvs_iso_processes values (19, 'Integration', 'Technical processes'); -INSERT INTO cvs_iso_processes values (20, 'Verification', 'Technical processes'); -INSERT INTO cvs_iso_processes values (21, 'Transition', 
'Technical processes'); -INSERT INTO cvs_iso_processes values (22, 'Validation', 'Technical processes'); -INSERT INTO cvs_iso_processes values (23, 'Operation', 'Technical processes'); -INSERT INTO cvs_iso_processes values (24, 'Maintenance', 'Technical processes'); -INSERT INTO cvs_iso_processes values (25, 'Disposal', 'Technical processes'); \ No newline at end of file +REPLACE INTO cvs_iso_processes values (28, 'Business or mission analysis', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (15, 'Stakeholder needs and requirements definition', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (16, 'System requirements definition', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (17, 'System architecture definition', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (29, 'Design definition', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (30, 'System analysis', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (18, 'Implementation', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (19, 'Integration', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (20, 'Verification', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (21, 'Transition', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (22, 'Validation', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (23, 'Operation', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (24, 'Maintenance', 'Technical processes'); +REPLACE INTO cvs_iso_processes values (25, 'Disposal', 'Technical processes'); \ No newline at end of file diff --git a/test b/test new file mode 100644 index 00000000..80ef332d --- /dev/null +++ b/test @@ -0,0 +1,38 @@ +-----BEGIN OPENSSH PRIVATE KEY----- +b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn +NhAAAAAwEAAQAAAYEAvjFrtkr+ebMDjOeV+0PSDM/FUKsI8jKoIuE50njv3aLz2O4Qg3rB +he3aMg6Bymof8ZhTJmhWHMT0G7zZBs3kHpYuv2MO8Z/nmjBrFXAnR8xczOi56yZGGptBCZ +TspJdKYIZpKHuS8f8GvZ/y/tPXwtOw5ZHXnZ5tqfbJb48Wpqr1BYIQy0b46F0KMWU2fOWP +2Nf4tS2BlYfmvQucpJ/ulk2xeVsbz+IO5mxdrZ5rtWonPZ7ADNH81Guz69dwEOwBcHTiIK +14O1QD0EgrQcjGTXP9J5CIl9uVFW7E9Q4W2S+7rf6t3G+Vv7j9whL1lypKqe6KaSP85bRl +es9cmKYNoyZJGSdjbQqPiCMXmywOB1/iC+Tgz26Ja+6C1P2epWmFpObGk5/tPc9LJrLE/Z +Ims/lItijc8OvAQmLi6zbyzYzdmnkS7IJ0ycP1gj3iuSiCrew2RcARBqnf78FbjdTnJsnc +UzFTPdZWCj74XGctXp8WK7/nqNlql4/xAhosgzBZAAAFkHljwaZ5Y8GmAAAAB3NzaC1yc2 +EAAAGBAL4xa7ZK/nmzA4znlftD0gzPxVCrCPIyqCLhOdJ4792i89juEIN6wYXt2jIOgcpq +H/GYUyZoVhzE9Bu82QbN5B6WLr9jDvGf55owaxVwJ0fMXMzouesmRhqbQQmU7KSXSmCGaS +h7kvH/Br2f8v7T18LTsOWR152eban2yW+PFqaq9QWCEMtG+OhdCjFlNnzlj9jX+LUtgZWH +5r0LnKSf7pZNsXlbG8/iDuZsXa2ea7VqJz2ewAzR/NRrs+vXcBDsAXB04iCteDtUA9BIK0 +HIxk1z/SeQiJfblRVuxPUOFtkvu63+rdxvlb+4/cIS9ZcqSqnuimkj/OW0ZXrPXJimDaMm +SRknY20Kj4gjF5ssDgdf4gvk4M9uiWvugtT9nqVphaTmxpOf7T3PSyayxP2SJrP5SLYo3P +DrwEJi4us28s2M3Zp5EuyCdMnD9YI94rkogq3sNkXAEQap3+/BW43U5ybJ3FMxUz3WVgo+ ++FxnLV6fFiu/56jZapeP8QIaLIMwWQAAAAMBAAEAAAGBALWkAVjB2rNj0ho5+rVTmkH+FF +XsYsjctFNLgquOMJcdz4D4K/FFxZkhBegUjsRAAm37qj3eG4+yUehDvYmQDTNg/xhthJIi +w1AwubRpvjoF5QMVqKn5ja5PZBfAkRjIHJJ/C+NL0ZUomMl0/t98dm1tO16koKW57sYKGW +tf3qQiVYuDIoLhL0rLwLZYEWfK3kycRrzRzxZ+ClG1y042J+iIHtzltHQfU1ehWv47dGZ4 +ES36dTRJQAunKNgukqq2mSvROWf6eIhk+SudKaSV8LhXx+O9d56zt48DvcCX5oBokuF27V +v5gR5/7OYmsC3FUOFmNgnJaxrqTJl2e3ogpPkxbKI8tJMVUkrl6l41zFPF2xN+Eedw4ezJ +yL2D1NsrlzUiBPr1iW6FwlpruTiac39PZGxatPV9MnD8AdStBxcEhwwdPTGG3EYQ6gqZVV +O3hE7k0aWYsS4z22UIQmcTjwAiFYNGRosX8PprksmK7t7QZ0NL/myCznmGWif/B726uQAA 
+AMB0q/sIm4PhT871uDDctDFR1/GC0ectB5IT/GTdP1NgR65KLpDZ/E8lYMqz+ixfBrwxlY +8DmxMKmF0pKRev16K31GTOPm3VW9enhRBSLA+ocDdP434tKBgO+QvwrC2Pxpq9FvYNm6uI +/w78ufCuv9JM80c8dYEudxACIhTqDj2p9yoPfdW947ztHAsxyyruUTCl7CN9B7eEyqutVq +TnythCLPYgujrOVzlr38ODhMedsrpy1oyukcQ1EmQbrnaKNS0AAADBAN1qJmVzUz4k82EJ +sX6XoTTE3B/4C6M3etZ6M2IqdHB02k7z36PhLiZ+DN/erJeYIWK40515QSPOXSipFW9fDZ +6x8Ti2rcLz5SYsbONlI7sbmV5HSqbu0LQ9V/UNLWCVTnOL1p/KpCYuy+2ZGq3AUyK2Ib8+ +3cuXyAv2hecw049pjKTHhFeFQYfpMoqyoyKQS//7uteAE9hvWVIrho88h92yVaHkT+MLp7 +MkE+V2kXteUxybRFxZdDhl1NDlcuKuOwAAAMEA2+bK6K9jth25UiuQ5axdzPD+xdqWP4id +rK30L1qLuV3AX0F0SK0LFqMNlnLrdKPgfu3UHQ3dqb6r2C+qPundHwnexzLlhQINz/ZNQA +rdugc3D4czI31JVC5NX6WAfw/ntffK5nbs0bSqclQp20C5q3ntlM/t2h5Q2DOhU3Exo82l +AC3wpsn8E+8hQRIAYBpJhE4920AAjiuQ3Jtr4aMrdw9v9l4HWWQTgwGLkMpfdiG+A1/0LL +LEBWhhF8Cf0s57AAAAFWFsZXhhQERFU0tUT1AtRzc5NVFQMQECAwQF +-----END OPENSSH PRIVATE KEY----- diff --git a/test.pub b/test.pub new file mode 100644 index 00000000..66fc857d --- /dev/null +++ b/test.pub @@ -0,0 +1 @@ +ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC+MWu2Sv55swOM55X7Q9IMz8VQqwjyMqgi4TnSeO/dovPY7hCDesGF7doyDoHKah/xmFMmaFYcxPQbvNkGzeQeli6/Yw7xn+eaMGsVcCdHzFzM6LnrJkYam0EJlOykl0pghmkoe5Lx/wa9n/L+09fC07Dlkdednm2p9slvjxamqvUFghDLRvjoXQoxZTZ85Y/Y1/i1LYGVh+a9C5ykn+6WTbF5WxvP4g7mbF2tnmu1aic9nsAM0fzUa7Pr13AQ7AFwdOIgrXg7VAPQSCtByMZNc/0nkIiX25UVbsT1DhbZL7ut/q3cb5W/uP3CEvWXKkqp7oppI/zltGV6z1yYpg2jJkkZJ2NtCo+IIxebLA4HX+IL5ODPbolr7oLU/Z6laYWk5saTn+09z0smssT9kiaz+Ui2KNzw68BCYuLrNvLNjN2aeRLsgnTJw/WCPeK5KIKt7DZFwBEGqd/vwVuN1OcmydxTMVM91lYKPvhcZy1enxYrv+eo2WqXj/ECGiyDMFk= alexa@DESKTOP-G795QP1 From c4843521c1fa6a2672f207769c662220beba8f68 Mon Sep 17 00:00:00 2001 From: Oscar Bennet Date: Mon, 7 Oct 2024 16:33:04 +0200 Subject: [PATCH 210/210] docker-compose is outdated changed to docker compose (#143) --- .github/workflows/main.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 19eb5346..f37d4e34 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -21,18 +21,18 @@ jobs: - name: Clear environment run: | - docker-compose down --volumes + docker compose down --volumes - name: Build environment run: | - docker-compose build + docker compose build - name: Run environment run: | - docker-compose up -d + docker compose up -d # Run tests inside docker environment - name: Run tests run: | docker exec backend-api /etc/scripts/wait-for-it.sh -t 30 core-db:3306 -- echo "Database online" - docker exec backend-api pytest \ No newline at end of file + docker exec backend-api pytest
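For reference, the surrogate pipeline that the storage.py patch above adds (kriging fit, LHS densification, bounded Nelder-Mead search) can be exercised on its own. The sketch below uses synthetic design points and surplus values; all names and numbers in it are illustrative assumptions rather than data from the patches, and it assumes smt and SciPy >= 1.7 (needed for bounded Nelder-Mead) are installed.

import numpy as np
from scipy.optimize import minimize
from smt.surrogate_models import KRG
from smt.sampling_methods import LHS

# Toy data: two value drivers, six designs, made-up surplus values (illustrative only).
design_points = np.array([[1.0, 0.0], [2.0, 1.0], [3.0, 0.0],
                          [4.0, 2.0], [5.0, 1.0], [6.0, 2.0]])
spv_values = np.array([1.2, 2.3, 2.9, 4.1, 4.8, 6.0])

# 1) Fit a kriging surrogate on the observed designs.
model = KRG(print_prediction=False)
model.set_training_values(design_points, spv_values)
model.train()

# 2) Densify with Latin Hypercube samples inside the observed bounds,
#    labelled by the first surrogate's own predictions.
bounds = np.array([[design_points[:, i].min(), design_points[:, i].max()]
                   for i in range(design_points.shape[1])])
sampling = LHS(xlimits=bounds, criterion='ese', random_state=1)
xt = sampling(20)
yt = model.predict_values(xt)

# 3) Refit on the combined (sampled + real) data set.
combined = KRG(print_prediction=False)
combined.set_training_values(np.vstack([xt, design_points]),
                             np.concatenate([yt.flatten(), spv_values]))
combined.train()

# 4) Maximise the predicted surplus value by minimising its negative.
def objective(x):
    return -combined.predict_values(np.array([x]))[0][0]

result = minimize(objective, x0=design_points.mean(axis=0),
                  bounds=bounds, method='Nelder-Mead', tol=1e-4)
print("best design (per surrogate):", result.x, "predicted SPV:", -result.fun)

Note that the LHS step labels the extra points with the first surrogate's predictions before refitting; this densifies the training set but adds no new information from real simulation runs, which is worth keeping in mind when interpreting the reported optimum.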