From 0d227a02c7f8862d2463dfc0bc68152e381efb37 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Mon, 10 Jun 2024 07:27:58 +0200 Subject: [PATCH 01/15] Added input validation for API requests. Refactored some methods, added docstrings. Solved marshmallow-warnings in BreCal.schemas.model. Added unit tests, now totaling 215. Added proposals to refactor all SQL queries into an object at BreCal.database.sql_queries.SQLQuery for better standardization. Created a helper-object to handle Emails, which prepares the notification-feature. --- src/server/BreCal/__init__.py | 6 +- src/server/BreCal/api/shipcalls.py | 51 +- src/server/BreCal/api/ships.py | 38 +- src/server/BreCal/api/times.py | 35 +- src/server/BreCal/api/user.py | 6 + src/server/BreCal/database/enums.py | 38 +- src/server/BreCal/database/sql_handler.py | 111 ++- src/server/BreCal/database/sql_queries.py | 418 ++++++++++ src/server/BreCal/database/sql_utils.py | 14 + src/server/BreCal/database/update_database.py | 2 +- src/server/BreCal/impl/berths.py | 3 + src/server/BreCal/impl/history.py | 3 + src/server/BreCal/impl/login.py | 5 +- src/server/BreCal/impl/notifications.py | 3 + src/server/BreCal/impl/participant.py | 3 + src/server/BreCal/impl/shipcalls.py | 66 +- src/server/BreCal/impl/ships.py | 9 + src/server/BreCal/impl/times.py | 3 + src/server/BreCal/impl/user.py | 10 + src/server/BreCal/local_db.py | 8 +- src/server/BreCal/schemas/model.py | 335 ++++++-- src/server/BreCal/services/auth_guard.py | 1 + src/server/BreCal/services/email_handling.py | 174 +++++ src/server/BreCal/services/jwt_handler.py | 30 + .../BreCal/services/schedule_routines.py | 18 +- src/server/BreCal/stubs/notification.py | 2 - src/server/BreCal/stubs/participant.py | 5 + src/server/BreCal/stubs/ship.py | 24 + src/server/BreCal/stubs/shipcall.py | 136 +++- src/server/BreCal/stubs/times_full.py | 113 ++- src/server/BreCal/stubs/user.py | 9 + .../BreCal/validators/input_validation.py | 122 ++- .../validators/input_validation_ship.py | 140 ++++ .../validators/input_validation_shipcall.py | 398 ++++++++++ .../validators/input_validation_times.py | 402 ++++++++++ .../validators/input_validation_utils.py | 182 +++++ src/server/BreCal/validators/time_logic.py | 33 + .../validators/validation_base_utils.py | 20 + .../validators/validation_rule_functions.py | 81 +- .../BreCal/validators/validation_rules.py | 117 +-- src/server/tests/database/__init__.py | 0 src/server/tests/database/test_sql_queries.py | 527 +++++++++++++ src/server/tests/schemas/test_model.py | 80 ++ src/server/tests/test_create_app.py | 5 +- .../validators/test_input_validation_ship.py | 226 ++++++ .../test_input_validation_shipcall.py | 737 ++++++++++++++++++ .../validators/test_input_validation_times.py | 398 ++++++++++ .../test_validation_rule_functions.py | 154 +++- 48 files changed, 5049 insertions(+), 252 deletions(-) create mode 100644 src/server/BreCal/database/sql_queries.py create mode 100644 src/server/BreCal/database/sql_utils.py create mode 100644 src/server/BreCal/services/email_handling.py create mode 100644 src/server/BreCal/validators/input_validation_ship.py create mode 100644 src/server/BreCal/validators/input_validation_shipcall.py create mode 100644 src/server/BreCal/validators/input_validation_times.py create mode 100644 src/server/BreCal/validators/input_validation_utils.py create mode 100644 src/server/BreCal/validators/validation_base_utils.py create mode 100644 src/server/tests/database/__init__.py create mode 100644 src/server/tests/database/test_sql_queries.py create mode 
100644 src/server/tests/schemas/test_model.py create mode 100644 src/server/tests/validators/test_input_validation_ship.py create mode 100644 src/server/tests/validators/test_input_validation_shipcall.py create mode 100644 src/server/tests/validators/test_input_validation_times.py diff --git a/src/server/BreCal/__init__.py b/src/server/BreCal/__init__.py index eb832f5..263203e 100644 --- a/src/server/BreCal/__init__.py +++ b/src/server/BreCal/__init__.py @@ -34,7 +34,7 @@ from BreCal.stubs.df_times import get_df_times from BreCal.services.schedule_routines import setup_schedule, run_schedule_permanently_in_background -def create_app(test_config=None): +def create_app(test_config=None, instance_path=None): app = Flask(__name__, instance_relative_config=True) app.config.from_mapping( @@ -45,7 +45,11 @@ def create_app(test_config=None): else: app.config.from_mapping(test_config) + if instance_path is not None: + app.instance_path = instance_path + try: + import os print(f'Instance path = {app.instance_path}') os.makedirs(app.instance_path) except OSError: diff --git a/src/server/BreCal/api/shipcalls.py b/src/server/BreCal/api/shipcalls.py index 396dc93..68a927e 100644 --- a/src/server/BreCal/api/shipcalls.py +++ b/src/server/BreCal/api/shipcalls.py @@ -1,12 +1,17 @@ from flask import Blueprint, request from webargs.flaskparser import parser -from marshmallow import Schema, fields +from marshmallow import Schema, fields, ValidationError from ..schemas import model from .. import impl -from ..services.auth_guard import auth_guard +from ..services.auth_guard import auth_guard, check_jwt +from BreCal.validators.input_validation import validate_posted_shipcall_data, check_if_user_is_bsmd_type +from BreCal.validators.input_validation_shipcall import InputValidationShipcall +from BreCal.database.sql_handler import execute_sql_query_standalone import logging import json +import traceback +import werkzeug bp = Blueprint('shipcalls', __name__) @@ -14,7 +19,16 @@ bp = Blueprint('shipcalls', __name__) @auth_guard() # no restriction by role def GetShipcalls(): if 'Authorization' in request.headers: - token = request.headers.get('Authorization') + token = request.headers.get('Authorization') # see impl/login to see the token encoding, which is a JWT token. + + """ + from BreCal.services.jwt_handler import decode_jwt + jwt = token.split('Bearer ')[1] # string key + payload = decode_jwt(jwt) # dictionary, which includes 'id' (user id) and 'participant_id' + + # oneline: + payload = decode_jwt(request.headers.get("Authorization").split("Bearer ")[-1]) + """ options = {} options["participant_id"] = request.args.get("participant_id") options["past_days"] = request.args.get("past_days", default=1, type=int) @@ -31,8 +45,21 @@ def PostShipcalls(): try: content = request.get_json(force=True) loadedModel = model.ShipcallSchema().load(data=content, many=False, partial=True) + + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # validate the posted shipcall data & the user's authority + InputValidationShipcall.evaluate_post_data(user_data, loadedModel, content) + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. 
\nValid Data: {ex.valid_data}"}), 400 + except Exception as ex: logging.error(ex) + logging.error(traceback.format_exc()) print(ex) return json.dumps("bad format"), 400 @@ -45,8 +72,24 @@ def PutShipcalls(): try: content = request.get_json(force=True) - logging.info(content) loadedModel = model.ShipcallSchema().load(data=content, many=False, partial=True) + + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # validate the PUT shipcall data and the user's authority + InputValidationShipcall.evaluate_put_data(user_data, loadedModel, content) + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400 + + except werkzeug.exceptions.Forbidden as ex: + logging.error(ex) + print(ex) + return json.dumps({"message":ex.description}), 403 + except Exception as ex: logging.error(ex) print(ex) diff --git a/src/server/BreCal/api/ships.py b/src/server/BreCal/api/ships.py index e31147e..96bceb2 100644 --- a/src/server/BreCal/api/ships.py +++ b/src/server/BreCal/api/ships.py @@ -1,11 +1,14 @@ from flask import Blueprint, request from .. import impl -from ..services.auth_guard import auth_guard -from marshmallow import EXCLUDE +from ..services.auth_guard import auth_guard, check_jwt +from marshmallow import EXCLUDE, ValidationError from ..schemas import model import json import logging +from BreCal.validators.input_validation import check_if_user_is_bsmd_type +from BreCal.validators.input_validation_ship import InputValidationShip + bp = Blueprint('ships', __name__) @bp.route('/ships', methods=['get']) @@ -24,8 +27,21 @@ def GetShips(): def PostShip(): try: + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # check, whether the user belongs to a participant, which is of type ParticipantType.BSMD + # as ParticipantType is an IntFlag, a user belonging to multiple groups is properly evaluated. + is_bsmd = check_if_user_is_bsmd_type(user_data) + if not is_bsmd: + raise ValidationError(f"current user does not belong to BSMD. Cannot post shipcalls. 
Found user data: {user_data}") + content = request.get_json(force=True) loadedModel = model.ShipSchema().load(data=content, many=False, partial=True) + + # validate the request data & user permissions + InputValidationShip.evaluate_post_data(user_data, loadedModel, content) + except Exception as ex: logging.error(ex) print(ex) @@ -39,8 +55,15 @@ def PostShip(): def PutShip(): try: + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + content = request.get_json(force=True) - loadedModel = model.ShipSchema().load(data=content, many=False, partial=True) + loadedModel = model.ShipSchema().load(data=content, many=False, partial=True, unknown=EXCLUDE) + + # validate the request data & user permissions + InputValidationShip.evaluate_put_data(user_data, loadedModel, content) + except Exception as ex: logging.error(ex) print(ex) @@ -53,13 +76,20 @@ def PutShip(): @auth_guard() # no restriction by role def DeleteShip(): - # TODO check if I am allowed to delete this thing by deriving the participant from the bearer token try: + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + ship_id = request.args.get("id") + if 'id' in request.args: options = {} options["id"] = request.args.get("id") else: return json.dumps("no id provided"), 400 + + # validate the request data & user permissions + InputValidationShip.evaluate_delete_data(user_data, ship_id) + except Exception as ex: logging.error(ex) print(ex) diff --git a/src/server/BreCal/api/times.py b/src/server/BreCal/api/times.py index 2c90397..e1b3786 100644 --- a/src/server/BreCal/api/times.py +++ b/src/server/BreCal/api/times.py @@ -1,9 +1,11 @@ from flask import Blueprint, request from ..schemas import model from .. import impl -from ..services.auth_guard import auth_guard +from ..services.auth_guard import auth_guard, check_jwt import json import logging +from marshmallow import ValidationError +from BreCal.validators.input_validation_times import InputValidationTimes bp = Blueprint('times', __name__) @@ -30,6 +32,17 @@ def PostTimes(): # body = parser.parse(schema, request, location='json') loadedModel = model.TimesSchema().load(data=content, many=False, partial=True) + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # validate the request + InputValidationTimes.evaluate_post_data(user_data, loadedModel, content) + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps(f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"), 400 + except Exception as ex: logging.error(ex) print(ex) @@ -46,6 +59,17 @@ def PutTimes(): content = request.get_json(force=True) loadedModel = model.TimesSchema().load(data=content, many=False, partial=True) + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # validate the request + InputValidationTimes.evaluate_put_data(user_data, loadedModel, content) + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps(f"bad format. \nError Messages: {ex.messages}. 
\nValid Data: {ex.valid_data}"), 400 + except Exception as ex: logging.error(ex) print(ex) @@ -58,11 +82,16 @@ def PutTimes(): @auth_guard() # no restriction by role def DeleteTimes(): - # TODO check if I am allowd to delete this thing by deriving the participant from the bearer token - if 'id' in request.args: options = {} options["id"] = request.args.get("id") + + # read the user data from the JWT token (set when login is performed) + user_data = check_jwt() + + # validate the request + InputValidationTimes.evaluate_delete_data(user_data, times_id = request.args.get("id")) + return impl.times.DeleteTimes(options) else: logging.warning("Times delete missing id argument") diff --git a/src/server/BreCal/api/user.py b/src/server/BreCal/api/user.py index bbd5b4b..2c3c1a0 100644 --- a/src/server/BreCal/api/user.py +++ b/src/server/BreCal/api/user.py @@ -4,6 +4,7 @@ from .. import impl from ..services.auth_guard import auth_guard import json import logging +from marshmallow import ValidationError bp = Blueprint('user', __name__) @@ -14,6 +15,11 @@ def PutUser(): try: content = request.get_json(force=True) loadedModel = model.UserSchema().load(data=content, many=False, partial=True) + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps(f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"), 400 except Exception as ex: logging.error(ex) diff --git a/src/server/BreCal/database/enums.py b/src/server/BreCal/database/enums.py index 3092fd8..a3c7de1 100644 --- a/src/server/BreCal/database/enums.py +++ b/src/server/BreCal/database/enums.py @@ -1,8 +1,8 @@ -from enum import Enum, IntFlag +from enum import IntEnum, Enum, IntFlag class ParticipantType(IntFlag): """determines the type of a participant""" - NONE = 0 + undefined = 0 BSMD = 1 TERMINAL = 2 PILOT = 4 @@ -11,12 +11,17 @@ class ParticipantType(IntFlag): PORT_ADMINISTRATION = 32 TUG = 64 -class ShipcallType(Enum): +class ShipcallType(IntEnum): """determines the type of a shipcall, as this changes the applicable validation rules""" + undefined = 0 INCOMING = 1 OUTGOING = 2 SHIFTING = 3 + @classmethod + def _missing_(cls, value): + return cls.undefined + class ParticipantwiseTimeDelta(): """stores the time delta for every participant, which triggers the validation rules in the rule set '0001'""" AGENCY = 1200.0 # 20 h * 60 min/h = 1200 min @@ -26,7 +31,9 @@ class ParticipantwiseTimeDelta(): TUG = 960.0 # 16 h * 60 min/h = 960 min TERMINAL = 960.0 # 16 h * 60 min/h = 960 min -class StatusFlags(Enum): + NOTIFICATION = 10.0 # after n minutes, an evaluation may rise a notification + +class StatusFlags(IntEnum): """ these enumerators ensure that each traffic light validation rule state corresponds to a value, which will be used in the ValidationRules object to identify the necessity of notifications. @@ -36,3 +43,26 @@ class StatusFlags(Enum): YELLOW = 2 RED = 3 +class PierSide(IntEnum): + """These enumerators determine the pier side of a shipcall.""" + PORTSIDE = 0 # Port/Backbord + STARBOARD_SIDE = 1 # Starboard / Steuerbord + +class NotificationType(IntFlag): + """determines the method by which a notification is distributed to users. Flagging allows selecting multiple notification types.""" + UNDEFINED = 0 + EMAIL = 1 + POPUP = 2 + MESSENGER = 4 + +class ParticipantFlag(IntFlag): + """ + | 1 | If this flag is set on a shipcall record with participant type Agency (8), + all participants of type BSMD (1) may edit the record. 
+ """ + undefined = 0 + BSMD = 1 + + @classmethod + def _missing_(cls, value): + return cls.undefined diff --git a/src/server/BreCal/database/sql_handler.py b/src/server/BreCal/database/sql_handler.py index 59497e3..2d85175 100644 --- a/src/server/BreCal/database/sql_handler.py +++ b/src/server/BreCal/database/sql_handler.py @@ -1,9 +1,11 @@ import numpy as np import pandas as pd +import pydapper import datetime import typing -from BreCal.schemas.model import Shipcall, Ship, Participant, Berth, User, Times +from BreCal.schemas.model import Shipcall, Ship, Participant, Berth, User, Times, ShipcallParticipantMap from BreCal.database.enums import ParticipantType +from BreCal.local_db import getPoolConnection def pandas_series_to_data_model(): return @@ -19,8 +21,81 @@ def set_participant_type(x, participant_df)->int: participant_type = participant_df.loc[participant_id, "type"] return participant_type +def get_synchronous_shipcall_times_standalone(query_time:pd.Timestamp, all_df_times:pd.DataFrame, delta_threshold=900)->int: + """ + This function counts all entries in {all_df_times}, which have the same timestamp as {query_time}. + It does so by: + 1.) selecting all eta_berth & etd_berth entries + 2.) measuring the timedelta towards {query_time} + 3.) converting the timedelta to total absolute seconds (positive or negative time differences do not matter) + 4.) applying a {delta_threshold} to identify, whether two times are too closely together + 5.) counting the times, where the timedelta is below the threshold + returns: counts + """ + assert isinstance(query_time,pd.Timestamp) + # get a timedelta for each valid (not Null) time entry + time_deltas_eta = [(query_time.to_pydatetime()-time_.to_pydatetime()) for time_ in all_df_times.loc[:,"eta_berth"] if not pd.isnull(time_)] + time_deltas_etd = [(query_time.to_pydatetime()-time_.to_pydatetime()) for time_ in all_df_times.loc[:,"etd_berth"] if not pd.isnull(time_)] + + # consider both, eta and etd times + time_deltas = time_deltas_eta + time_deltas_etd + + # convert the timedelta to absolute total seconds + time_deltas = [abs(delta.total_seconds()) for delta in time_deltas] + + # consider only those time deltas, which are <= the determined threshold + # create a list of booleans + time_deltas_filtered = [delta <= delta_threshold for delta in time_deltas] + + # booleans can be added/counted in Python by using sum() + counts = sum(time_deltas_filtered) # int + return counts + +def execute_sql_query_standalone(query, param={}, pooledConnection=None, model=None, command_type="query"): + """ + execute an arbitrary query with a set of parameters, return the output and convert it to a list. + when the pooled connection is rebuilt, it will be closed at the end of the function. 
+ """ + rebuild_pooled_connection = pooledConnection is None + + if rebuild_pooled_connection: + pooledConnection = getPoolConnection() + + commands = pydapper.using(pooledConnection) + # participant_query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?"; + + # creates a generator + try: + if command_type=="query": + if model is None: + schemas = commands.query(query, model=dict, param=param, buffered=False) + else: + schemas = commands.query(query, model=model, param=param, buffered=False) + + # creates a list of results from the generator + schemas = [schema for schema in schemas] + + elif command_type=="execute": + schemas = commands.execute(query, param=param) + elif command_type=="single": + sentinel = object() + + # pulls a *single* row from the query. Typically, these queries require an ID within the param dictionary. + # when providing a model, such as model.Shipcall, the dataset is immediately translated into a data model. + schemas = commands.query_single_or_default(query, sentinel, param=param) if model is None else commands.query_single_or_default(query, sentinel, param=param, model=model) + if schemas is sentinel: + raise Exception("no such record") + + else: + raise ValueError(command_type) + + finally: # if needed, ensure that the pooled connection is closed. + if rebuild_pooled_connection: + pooledConnection.close() + return schemas + class SQLHandler(): """ An object that reads SQL queries from the sql_connection and stores it in pandas DataFrames. The object can read all available tables @@ -36,6 +111,15 @@ class SQLHandler(): if read_all: self.read_all(self.all_schemas) + def execute_sql_query(self, sql_connection, query, param): + """ + this method is best used in combination with a python context-manager, such as: + with mysql.connector.connect(**mysql_connection_data) as sql_connection: + schema = sql_handler.execute_sql_query(sql_connection, query) + """ + schemas = execute_sql_query_standalone(query, param, pooledConnection=sql_connection) + return schemas + def get_all_schemas_from_mysql(self): with self.sql_connection.cursor(buffered=True) as cursor: cursor.execute("SHOW TABLES") @@ -50,7 +134,8 @@ class SQLHandler(): 'ship'->BreCal.schemas.model.Ship object """ self.str_to_model_dict = { - "shipcall":Shipcall, "ship":Ship, "participant":Participant, "berth":Berth, "user":User, "times":Times + "shipcall":Shipcall, "ship":Ship, "participant":Participant, "berth":Berth, "user":User, "times":Times, + "shipcall_participant_map":ShipcallParticipantMap } return @@ -70,12 +155,16 @@ class SQLHandler(): data = [{k:v for k,v in zip(column_names, dat)} for dat in data] # 4.) build a dataframe from the respective data models (which ensures the correct data type) + df = self.build_df_from_data_and_name(data, table_name) + return df + + def build_df_from_data_and_name(self, data, table_name): data_model = self.str_to_model_dict.get(table_name) if data_model is not None: - df = pd.DataFrame([data_model(**dat) for dat in data]) + df = pd.DataFrame([data_model(**dat) for dat in data], columns=list(data_model.__annotations__.keys())) else: df = pd.DataFrame([dat for dat in data]) - return df + return df def mysql_to_df(self, query, table_name): """provide an arbitrary sql query that should be read from a mysql server {sql_connection}. returns a pandas DataFrame with the obtained data""" @@ -94,11 +183,7 @@ class SQLHandler(): # 4.) 
build a dataframe from the respective data models (which ensures the correct data type) data_model = self.str_to_model_dict.get(table_name) - if data_model is not None: - df = pd.DataFrame([data_model(**dat) for dat in data]) - else: - df = pd.DataFrame([dat for dat in data]) - + df = self.build_df_from_data_and_name(data, table_name) if 'id' in df.columns: df = df.set_index('id', inplace=False) # avoid inplace updates, so the raw sql remains unchanged return df @@ -332,6 +417,10 @@ class SQLHandler(): def get_unique_ship_counts(self, all_df_times:pd.DataFrame, times_agency:pd.DataFrame, query:str, rounding:str="min", maximum_threshold=3): """given a dataframe of all agency times, get all unique ship counts, their values (datetime) and the string tags. returns a tuple (values,unique,counts)""" + # #deprecated! + import warnings + warnings.warn(f"SQLHandler.get_unique_ship_counts is deprecated. Instead, please use SQLHandler.count_synchronous_shipcall_times") + # optional: rounding if rounding is not None: all_df_times.loc[:, query] = pd.to_datetime(all_df_times.loc[:, query]).dt.round(rounding) # e.g., 'min' --- # correcting the error: 'AttributeError: Can only use .dt accessor with datetimelike values' @@ -347,3 +436,7 @@ class SQLHandler(): # get unique entries and counts counts = len(values) # unique, counts = np.unique(values, return_counts=True) return counts # (values, unique, counts) + + def count_synchronous_shipcall_times(self, query_time:pd.Timestamp, all_df_times:pd.DataFrame, delta_threshold=900)->int: + """count all times entries, which are too close to the query_time. The {delta_threshold} determines the threshold. returns counts (int)""" + return get_synchronous_shipcall_times_standalone(query_time, all_df_times, delta_threshold) diff --git a/src/server/BreCal/database/sql_queries.py b/src/server/BreCal/database/sql_queries.py new file mode 100644 index 0000000..51a3dea --- /dev/null +++ b/src/server/BreCal/database/sql_queries.py @@ -0,0 +1,418 @@ +import logging + + +def create_sql_query_shipcall_get(options:dict)->str: + """ + creates an SQL query, which selects all shipcalls from the mysql database. + the agency eta times are used to order the entries. + + args: + options : dict. A dictionary, which must contains the 'past_days' key (int). Determines the range + by which shipcalls are filtered. 
+ """ + query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " + + "flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " + + "tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " + + "evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " + + "FROM shipcall s " + + "LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " + + "WHERE " + + "(type = 1 AND " + + "((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " + + "(eta >= DATE(NOW() - INTERVAL %d DAY)))) OR " + + "((type = 2 OR type = 3) AND " + + "((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " + + "(etd >= DATE(NOW() - INTERVAL %d DAY)))) " + + "ORDER BY eta") % (options["past_days"], options["past_days"], options["past_days"], options["past_days"]) + + return query + + +def create_sql_query_shipcall_post(schemaModel:dict)->str: + query = "INSERT INTO shipcall (" + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "participants": + continue + if key == "created": + continue + if key == "modified": + continue + if key == "evaluation": + continue + if key == "evaluation_message": + continue + if key == "type_value": + continue + if key == "evaluation_value": + continue + if isNotFirst: + query += "," + isNotFirst = True + query += key + query += ") VALUES (" + isNotFirst = False + for key in schemaModel.keys(): + param_key = key + if key == "id": + continue + if key == "participants": + continue + if key == "created": + continue + if key == "modified": + continue + if key == "evaluation": + continue + if key == "evaluation_message": + continue + if key == "type": + param_key = "type_value" + if key == "type_value": + continue + if key == "evaluation": + param_key = "evaluation_value" + if key == "evaluation_value": + continue + if isNotFirst: + query += "," + isNotFirst = True + query += "?" + param_key + "?" + query += ")" + return query + +def create_sql_query_shipcall_put(schemaModel:dict)->str: + query = "UPDATE shipcall SET " + isNotFirst = False + for key in schemaModel.keys(): + param_key = key + if key == "id": + continue + if key == "participants": + continue + if key == "created": + continue + if key == "modified": + continue + if key == "evaluation": + continue + if key == "evaluation_message": + continue + if key == "type": + param_key = "type_value" + if key == "type_value": + continue + if key == "evaluation": + param_key = "evaluation_value" + if key == "evaluation_value": + continue + if isNotFirst: + query += ", " + isNotFirst = True + query += key + " = ?" + param_key + "? " + + query += "WHERE id = ?id?" 
+ return query + + + def create_sql_query_history_post()->str: + query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 1)" + return query + + def create_sql_query_history_put()->str: + query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 2)" + return query + + def create_sql_query_user_put(schemaModel:dict): + query = "UPDATE user SET " + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "old_password": + continue + if key == "new_password": + continue + if isNotFirst: + query += ", " + isNotFirst = True + query += key + " = ?" + key + "? " + + query += "WHERE id = ?id?" + return query + + def create_sql_query_ship_post(schemaModel:dict): + query = "INSERT INTO ship (" + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "created": + continue + if key == "modified": + continue + if isNotFirst: + query += "," + isNotFirst = True + query += key + query += ") VALUES (" + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "created": + continue + if key == "modified": + continue + if isNotFirst: + query += "," + isNotFirst = True + query += "?" + key + "?" + query += ")" + return query + + def create_sql_query_ship_put(schemaModel:dict): + query = "UPDATE ship SET " + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "created": + continue + if key == "modified": + continue + if isNotFirst: + query += ", " + isNotFirst = True + query += key + " = ?" + key + "? " + + query += "WHERE id = ?id?" + return query + + + + class SQLQuery(): + """ + This class provides quick access to different SQL query functions, which create default queries for the BreCal package. + Each method is callable without initializing the SQLQuery object. + + Example: + SQLQuery.get_berth() + """ + def __init__(self) -> None: + pass + + @staticmethod + def get_berth()->str: + query = "SELECT id, name, `lock`, owner_id, authority_id, created, modified, deleted FROM berth WHERE deleted = 0 ORDER BY name" + return query + + @staticmethod + def get_history()->str: + query = "SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?" + return query + + @staticmethod + def get_user()->str: + query = "SELECT id, participant_id, first_name, last_name, user_name, user_email, user_phone, password_hash, " +\ + "api_key, notify_email, notify_whatsapp, notify_signal, notify_popup, created, modified FROM user " +\ + "WHERE user_name = ?username? OR user_email = ?username?" + return query + + @staticmethod + def get_notifications()->str: + query = "SELECT id, shipcall_id, level, type, message, created, modified FROM notification " + \ + "WHERE shipcall_id = ?scid?" + return query + + @staticmethod + def get_participant_by_user_id()->str: + query = "SELECT p.id as id, p.name as name, p.street as street, p.postal_code as postal_code, p.city as city, p.type as type, p.flags as flags, p.created as created, p.modified as modified, p.deleted as deleted FROM participant p INNER JOIN user u WHERE u.participant_id = p.id and u.id = ?userid?"
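+        # resolves the participant record that the given user belongs to (user.participant_id -> participant.id)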
+ return query + + @staticmethod + def get_participants()->str: + query = "SELECT id, name, street, postal_code, city, type, flags, created, modified, deleted FROM participant p ORDER BY p.name" + return query + + @staticmethod + def get_shipcalls(options:dict={'past_days':3})->str: + # a pytest proves this method to be identical to create_sql_query_shipcall_get(options) + assert 'past_days' in list(options.keys()), f"there must be a key 'past_days' in the options, which determines, how recent the returned list of shipcalls shall be." # part of a pytest.raises + past_days = options['past_days'] + + query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " + \ + "flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " + \ + "tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " + \ + "evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " + \ + "FROM shipcall s " + \ + "LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " + \ + "WHERE " + \ + "(type = 1 AND " + \ + f"((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL {past_days} DAY)) OR " + \ + f"(eta >= DATE(NOW() - INTERVAL {past_days} DAY)))) OR " + \ + "((type = 2 OR type = 3) AND " + \ + f"((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL {past_days} DAY)) OR " + \ + f"(etd >= DATE(NOW() - INTERVAL {past_days} DAY)))) " + \ + "ORDER BY eta") + return query + + @staticmethod + def get_ships()->str: + query = "SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship ORDER BY name" + return query + + @staticmethod + def get_times()->str: + query = "SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " + \ + "zone_entry, zone_entry_fixed, operations_start, operations_end, remarks, shipcall_id, participant_id, " + \ + "berth_id, berth_info, pier_side, participant_type, created, modified, ata, atd, eta_interval_end, etd_interval_end FROM times " + \ + "WHERE times.shipcall_id = ?scid?" + return query + + @staticmethod + def get_user_by_id(): + query = "SELECT * FROM user where id = ?id?" + return query + + @staticmethod + def get_user_put(schemaModel:dict): + # a pytest proves this method to be identical to create_sql_query_user_put(schemaModel) + prefix = "UPDATE user SET " + suffix = "WHERE id = ?id?" + center = [f"{key} = ?{key}? " for key in schemaModel.keys() if key not in ["id", "old_password", "new_password"]] + + query = prefix + ", ".join(center) + suffix + return query + + @staticmethod + def get_update_user_password()->str: + query = "UPDATE user SET password_hash = ?password_hash? WHERE id = ?id?" + return query + + @staticmethod + def get_participants()->str: + query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?" 
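+        # returns one (participant_id, type) row per participant assigned to the given shipcall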
+ return query + + @staticmethod + def get_shipcall_post(schemaModel:dict)->str: + # a pytest proves this method to be identical to create_sql_query_shipcall_post(schemaModel) + + param_keys = {key:key for key in schemaModel.keys()} + param_keys["type"] = "type_value" + param_keys["evaluation"] = "evaluation_value" + + prefix = "INSERT INTO shipcall (" + bridge = ") VALUES (" + suffix = ")" + + stage1 = ",".join([key for key in schemaModel.keys() if not key in ["id","participants","created","modified","evaluation","evaluation_message","type_value","evaluation_value"]]) + stage2 = ",".join([f"?{param_keys.get(key)}?" for key in schemaModel.keys() if not key in ["id","participants","created","modified","evaluation","evaluation_message","type_value","evaluation_value"]]) + + query = prefix+stage1+bridge+stage2+suffix + return query + + @staticmethod + def get_last_insert_id()->str: + query = "select last_insert_id()" + return query + + @staticmethod + def get_shipcall_post_last_insert_id()->str: + """alias function. May be deleted soon""" + query = SQLQuery.get_last_insert_id() + return query + + @staticmethod + def get_shipcall_post_update_shipcall_participant_map()->str: + query = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)" + return query + + @staticmethod + def create_sql_query_history_post()->str: + query = create_sql_query_history_post() + return query + + @staticmethod + def get_shipcall_by_id()->str: + query = "SELECT * FROM shipcall where id = ?id?" + return query + + @staticmethod + def get_shipcall_put(schemaModel:dict)->str: + # a pytest proves this method to be identical to create_sql_query_shipcall_put(schemaModel) + param_keys = {key:key for key in schemaModel.keys()} + param_keys["type"] = "type_value" + param_keys["evaluation"] = "evaluation_value" + + prefix = "UPDATE shipcall SET " + suffix = "WHERE id = ?id?" + body = ", ".join([f"{key} = ?{param_keys.get(key)}? " for key in schemaModel.keys() if key not in ["id", "participants", "created", "modified", "evaluation", "evaluation_message", "type_value", "evaluation_value"]]) + + query = prefix + body + suffix + return query + + @staticmethod + def get_shipcall_participant_map_by_shipcall_id()->str: + query = "SELECT id, participant_id, type FROM shipcall_participant_map where shipcall_id = ?id?" + return query + + @staticmethod + def get_shipcall_participant_map_delete_by_id()->str: + query = "DELETE FROM shipcall_participant_map WHERE id = ?existing_id?" + return query + + @staticmethod + def create_sql_query_history_put()->str: + query = create_sql_query_history_put() + return query + + @staticmethod + def get_ship_post(schemaModel:dict)->str: + # a pytest proves this method to be identical to create_sql_query_ship_post(schemaModel) + prefix = "INSERT INTO ship (" + suffix = ")" + bridge = ") VALUES (" + + stage1 = ",".join([key for key in schemaModel.keys() if not key in ["id", "created", "modified"]]) + stage2 = ",".join([f"?{key}?" for key in schemaModel.keys() if not key in ["id", "created", "modified"]]) + + query = prefix + stage1 + bridge + stage2 + suffix + return query + + @staticmethod + def get_ship_put(schemaModel:dict)->str: + # a pytest proves this method to be identical to create_sql_query_ship_put(schemaModel) + prefix = "UPDATE ship SET " + suffix = "WHERE id = ?id?" + body = ", ".join([f"{key} = ?{key}? 
" for key in schemaModel.keys() if not key in ["id","created","modified"]]) + + query = prefix + body + suffix + return query + + @staticmethod + def get_ship_delete_by_id()->str: + query = "UPDATE ship SET deleted = 1 WHERE id = ?id?" + return query + + @staticmethod + def get_notification_post()->str: + raise NotImplementedError() + # #TODO: this query is wrong and just a proxy for a POST request + query = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)" + return query + + @staticmethod + def get_shipcall_put_notification_state()->str: + raise NotImplementedError() + # #TODO: use evaluation_notifications_sent here and consider only the shipcall_id + # #TODO: query + query = ... + return query + + diff --git a/src/server/BreCal/database/sql_utils.py b/src/server/BreCal/database/sql_utils.py new file mode 100644 index 0000000..5780ace --- /dev/null +++ b/src/server/BreCal/database/sql_utils.py @@ -0,0 +1,14 @@ +from BreCal.database.sql_handler import execute_sql_query_standalone +import datetime + +def get_user_data_for_id(user_id:int, expiration_time:int=90): + """debugging function, which is useful to pull user_data from the database, which may be used to create stub data and unit tests""" + query = "SELECT * FROM user where id = ?id?" + pdata = execute_sql_query_standalone(query=query, param={"id":user_id}) + pdata = pdata[0] if len(pdata)>0 else None + assert pdata is not None, f"could not find user with id {user_id}" + + user_data = {k:v for k,v in pdata.items() if k in ['id','participant_id','first_name','last_name','user_name','user_phone','user_email']} + user_data["exp"] = (datetime.datetime.now()+datetime.timedelta(minutes=expiration_time)).timestamp() + return user_data + diff --git a/src/server/BreCal/database/update_database.py b/src/server/BreCal/database/update_database.py index 0e5e5ac..395f725 100644 --- a/src/server/BreCal/database/update_database.py +++ b/src/server/BreCal/database/update_database.py @@ -34,6 +34,7 @@ def update_shipcall_in_mysql_database(sql_connection, shipcall:Shipcall, relevan def build_mysql_query_to_update_shipcall(shipcall, relevant_keys:list): """builds a mysql query, which updates the shipcall table. In particular, the provided shipcall will be updated for each key in {relevant_keys}""" + # #TODO: refactor into SQLQuery schemaModel = shipcall.__dict__ # prepare prefix and suffix. Then build the body of the query @@ -68,7 +69,6 @@ def evaluate_shipcall_state(mysql_connector_instance, shipcall_id:int=None, debu with mysql.connector.connect(**mysql_connection_data) as mysql_connector_instance: evaluate_shipcall_state(mysql_connector_instance) returns None - """ sql_handler = SQLHandler(sql_connection=mysql_connector_instance, read_all=True) vr = ValidationRules(sql_handler) diff --git a/src/server/BreCal/impl/berths.py b/src/server/BreCal/impl/berths.py index 9087856..0ed2ca9 100644 --- a/src/server/BreCal/impl/berths.py +++ b/src/server/BreCal/impl/berths.py @@ -4,6 +4,7 @@ import pydapper from ..schemas import model from .. 
import local_db +from BreCal.database.sql_queries import SQLQuery def GetBerths(token): """ @@ -13,6 +14,8 @@ def GetBerths(token): try: pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_berth() + # data = commands.query(query, model=model.Berth) data = commands.query("SELECT id, name, `lock`, owner_id, authority_id, created, modified, deleted FROM berth WHERE deleted = 0 ORDER BY name", model=model.Berth) return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'} diff --git a/src/server/BreCal/impl/history.py b/src/server/BreCal/impl/history.py index f408001..53f159c 100644 --- a/src/server/BreCal/impl/history.py +++ b/src/server/BreCal/impl/history.py @@ -7,6 +7,7 @@ from ..schemas import model from ..schemas.model import History from .. import local_db +from BreCal.database.sql_queries import SQLQuery def GetHistory(options): @@ -20,6 +21,8 @@ def GetHistory(options): commands = pydapper.using(pooledConnection) if "shipcall_id" in options and options["shipcall_id"]: + # query = SQLQuery.get_history() + # data = commands.query(query, model=History.from_query_row, param={"shipcallid" : options["shipcall_id"]}) data = commands.query("SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?", model=History.from_query_row, param={"shipcallid" : options["shipcall_id"]}) diff --git a/src/server/BreCal/impl/login.py b/src/server/BreCal/impl/login.py index 21b46a6..66e709b 100644 --- a/src/server/BreCal/impl/login.py +++ b/src/server/BreCal/impl/login.py @@ -6,6 +6,7 @@ import bcrypt from ..schemas import model from .. import local_db from ..services import jwt_handler +from BreCal.database.sql_queries import SQLQuery def GetUser(options): @@ -14,11 +15,13 @@ def GetUser(options): hash = bcrypt.hashpw(options["password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8') pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_user() + # data = commands.query(query, model=model.User, param={"username" : options["username"]}) data = commands.query("SELECT id, participant_id, first_name, last_name, user_name, user_email, user_phone, password_hash, " + "api_key, notify_email, notify_whatsapp, notify_signal, notify_popup, created, modified FROM user " + "WHERE user_name = ?username? OR user_email = ?username?", model=model.User, param={"username" : options["username"]}) - # print(data) + if len(data) == 1: if bcrypt.checkpw(options["password"].encode("utf-8"), bytes(data[0].password_hash, "utf-8")): result = { diff --git a/src/server/BreCal/impl/notifications.py b/src/server/BreCal/impl/notifications.py index 67619ab..2f0014b 100644 --- a/src/server/BreCal/impl/notifications.py +++ b/src/server/BreCal/impl/notifications.py @@ -4,6 +4,7 @@ import pydapper from ..schemas import model from .. 
import local_db +from BreCal.database.sql_queries import SQLQuery def GetNotifications(options): """ @@ -16,6 +17,8 @@ def GetNotifications(options): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_notifications() + # data = commands.query(query, model=model.Notification.from_query_row, param={"scid" : options["shipcall_id"]}) data = commands.query("SELECT id, shipcall_id, level, type, message, created, modified FROM notification " + "WHERE shipcall_id = ?scid?", model=model.Notification.from_query_row, param={"scid" : options["shipcall_id"]}) pooledConnection.close() diff --git a/src/server/BreCal/impl/participant.py b/src/server/BreCal/impl/participant.py index aad0def..7f9cc48 100644 --- a/src/server/BreCal/impl/participant.py +++ b/src/server/BreCal/impl/participant.py @@ -4,6 +4,7 @@ import pydapper from ..schemas import model from .. import local_db +from BreCal.database.sql_queries import SQLQuery def GetParticipant(options): """ @@ -16,8 +17,10 @@ def GetParticipant(options): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) if "user_id" in options and options["user_id"]: + # query = SQLQuery.get_participant_by_user_id() data = commands.query("SELECT p.id as id, p.name as name, p.street as street, p.postal_code as postal_code, p.city as city, p.type as type, p.flags as flags, p.created as created, p.modified as modified, p.deleted as deleted FROM participant p INNER JOIN user u WHERE u.participant_id = p.id and u.id = ?userid?", model=model.Participant, param={"userid" : options["user_id"]}) else: + # query = SQLQuery.get_participants() data = commands.query("SELECT id, name, street, postal_code, city, type, flags, created, modified, deleted FROM participant p ORDER BY p.name", model=model.Participant) return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'} diff --git a/src/server/BreCal/impl/shipcalls.py b/src/server/BreCal/impl/shipcalls.py index 59f9311..83385cf 100644 --- a/src/server/BreCal/impl/shipcalls.py +++ b/src/server/BreCal/impl/shipcalls.py @@ -8,6 +8,8 @@ from .. 
import local_db from ..services.auth_guard import check_jwt from BreCal.database.update_database import evaluate_shipcall_state +from BreCal.database.sql_queries import create_sql_query_shipcall_get, create_sql_query_shipcall_post, create_sql_query_shipcall_put, create_sql_query_history_post, create_sql_query_history_put, SQLQuery +from marshmallow import Schema, fields, ValidationError def GetShipcalls(options): """ @@ -18,23 +20,14 @@ def GetShipcalls(options): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) - query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " + - "flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " + - "tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " + - "evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " + - "FROM shipcall s " + - "LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " + - "WHERE " + - "(type = 1 AND " + - "((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " + - "(eta >= DATE(NOW() - INTERVAL %d DAY)))) OR " + - "((type = 2 OR type = 3) AND " + - "((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " + - "(etd >= DATE(NOW() - INTERVAL %d DAY)))) " + - "ORDER BY eta") % (options["past_days"], options["past_days"], options["past_days"], options["past_days"]) + # query = SQLQuery.get_shipcalls(options) + query = create_sql_query_shipcall_get(options) data = commands.query(query, model=model.Shipcall.from_query_row, buffered=True) for shipcall in data: + # participant_query = SQLQuery.get_participants() + # participants = commands.query(participant_query, model=dict, param={"shipcall_id" : shipcall.id}, buffered=False) + # for record in participants: participant_query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?"; for record in commands.query(participant_query, model=dict, param={"shipcall_id" : shipcall.id}, buffered=False): # model.Participant_Assignment = model.Participant_Assignment() @@ -58,18 +51,29 @@ def GetShipcalls(options): def PostShipcalls(schemaModel): """ + This function *executes* a post-request for shipcalls. The function is accessible as part of an API route. + + The common sequence is: + a) issue a request to the Flask API + b) BreCal.api.shipcalls.PostShipcalls, to verify the incoming request (which includes an authentification guard) + c) BreCal.impl.shipcalls.PostShipcalls, to execute the incoming request :param schemaModel: The deserialized dict of the request + e.g., + { + 'ship_id': 1, 'type': 1, 'eta': datetime.datetime(2023, 7, 23, 7, 18, 19), + 'voyage': '43B', 'tug_required': False, 'pilot_required': True, 'flags': 0, + 'pier_side': False, 'bunkering': True, 'recommended_tugs': 2, 'type_value': 1, 'evaluation_value': 0} + } """ - # TODO: Validate the upload data - # This creates a *new* entry try: pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_shipcall_post(schemaModel) # create_sql_query_shipcall_post(schemaModel) query = "INSERT INTO shipcall (" isNotFirst = False for key in schemaModel.keys(): @@ -122,12 +126,15 @@ def PostShipcalls(schemaModel): isNotFirst = True query += "?" + param_key + "?" 
query += ")" - commands.execute(query, schemaModel) + + # lquery = SQLQuery.get_shipcall_post_last_insert_id() + # new_id = commands.execute_scalar(lquery) new_id = commands.execute_scalar("select last_insert_id()") # add participant assignments if we have a list of participants if 'participants' in schemaModel: + # pquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map() pquery = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)" for participant_assignment in schemaModel["participants"]: commands.execute(pquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}) @@ -138,10 +145,16 @@ def PostShipcalls(schemaModel): # save history data # TODO: set ETA properly user_data = check_jwt() + # query = SQLQuery.create_sql_query_history_post() query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 1)" commands.execute(query, {"scid" : new_id, "pid" : user_data["participant_id"], "uid" : user_data["id"]}) return json.dumps({"id" : new_id}), 201, {'Content-Type': 'application/json; charset=utf-8'} + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400 except Exception as ex: logging.error(traceback.format_exc()) @@ -168,14 +181,19 @@ def PutShipcalls(schemaModel): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + user_data = check_jwt() + # test if object to update is found sentinel = object() + # query = SQLQuery.get_shipcall_by_id() + # theshipcall = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}) theshipcall = commands.query_single_or_default("SELECT * FROM shipcall where id = ?id?", sentinel, param={"id" : schemaModel["id"]}) if theshipcall is sentinel: pooledConnection.close() return json.dumps("no such record"), 404, {'Content-Type': 'application/json; charset=utf-8'} + # query = SQLQuery.get_shipcall_put(schemaModel) query = "UPDATE shipcall SET " isNotFirst = False for key in schemaModel.keys(): @@ -206,8 +224,10 @@ def PutShipcalls(schemaModel): query += key + " = ?" + param_key + "? " query += "WHERE id = ?id?" + affected_rows = commands.execute(query, param=schemaModel) + # pquery = SQLQuery.get_shipcall_participant_map_by_shipcall_id() pquery = "SELECT id, participant_id, type FROM shipcall_participant_map where shipcall_id = ?id?" pdata = commands.query(pquery,param={"id" : schemaModel["id"]}) # existing list of assignments @@ -220,6 +240,7 @@ def PutShipcalls(schemaModel): found_participant = True break if not found_participant: + # nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map() nquery = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)" commands.execute(nquery, param={"shipcall_id" : schemaModel["id"], "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}) @@ -231,19 +252,22 @@ def PutShipcalls(schemaModel): found_participant = True break; if not found_participant: + # dquery = SQLQuery.get_shipcall_participant_map_delete_by_id() dquery = "DELETE FROM shipcall_participant_map WHERE id = ?existing_id?" 
commands.execute(dquery, param={"existing_id" : elem["id"]}) - # apply 'Traffic Light' evaluation to obtain 'GREEN', 'YELLOW' or 'RED' evaluation state. The function internally updates the mysql database - # evaluate_shipcall_state(mysql_connector_instance=pooledConnection, shipcall_id=schemaModel["id"]) # schemaModel["id"] refers to the shipcall id - # save history data # TODO: set ETA properly - user_data = check_jwt() + # query = SQLQuery.create_sql_query_history_put() query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 2)" commands.execute(query, {"scid" : schemaModel["id"], "pid" : user_data["participant_id"], "uid" : user_data["id"]}) return json.dumps({"id" : schemaModel["id"]}), 200 + + except ValidationError as ex: + logging.error(ex) + print(ex) + return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400 except Exception as ex: logging.error(traceback.format_exc()) diff --git a/src/server/BreCal/impl/ships.py b/src/server/BreCal/impl/ships.py index b0b0b94..0733071 100644 --- a/src/server/BreCal/impl/ships.py +++ b/src/server/BreCal/impl/ships.py @@ -4,6 +4,7 @@ import pydapper from ..schemas import model from .. import local_db +from BreCal.database.sql_queries import SQLQuery def GetShips(token): """ @@ -14,6 +15,8 @@ def GetShips(token): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_ships() + # data = commands.query(query, model=model.Ship) data = commands.query("SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship ORDER BY name", model=model.Ship) return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'} @@ -46,6 +49,7 @@ def PostShip(schemaModel): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.create_sql_query_ship_post(schemaModel) query = "INSERT INTO ship (" isNotFirst = False for key in schemaModel.keys(): @@ -75,6 +79,8 @@ def PostShip(schemaModel): query += ")" commands.execute(query, schemaModel) + # nquery = SQLQuery.get_last_insert_id() + # new_id = commands.execute_scalar(nquery) new_id = commands.execute_scalar("select last_insert_id()") pooledConnection.close() @@ -100,6 +106,7 @@ def PutShip(schemaModel): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.create_sql_query_ship_put(schemaModel) query = "UPDATE ship SET " isNotFirst = False for key in schemaModel.keys(): @@ -140,6 +147,8 @@ def DeleteShip(options): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_ship_delete_by_id() + # affected_rows = commands.execute(query, param={"id" : options["id"]}) affected_rows = commands.execute("UPDATE ship SET deleted = 1 WHERE id = ?id?", param={"id" : options["id"]}) pooledConnection.close() diff --git a/src/server/BreCal/impl/times.py b/src/server/BreCal/impl/times.py index 1f16574..8c9f0e8 100644 --- a/src/server/BreCal/impl/times.py +++ b/src/server/BreCal/impl/times.py @@ -6,6 +6,7 @@ import pydapper from ..schemas import model from .. 
import local_db from ..services.auth_guard import check_jwt +from BreCal.database.sql_queries import SQLQuery from BreCal.database.update_database import evaluate_shipcall_state @@ -20,6 +21,8 @@ def GetTimes(options): pooledConnection = local_db.getPoolConnection() commands = pydapper.using(pooledConnection) + # query = SQLQuery.get_times() + # data = commands.query(query, model=model.Times, param={"scid" : options["shipcall_id"]}) data = commands.query("SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " + "zone_entry, zone_entry_fixed, operations_start, operations_end, remarks, shipcall_id, participant_id, " + "berth_id, berth_info, pier_side, participant_type, created, modified, ata, atd, eta_interval_end, etd_interval_end FROM times " + diff --git a/src/server/BreCal/impl/user.py b/src/server/BreCal/impl/user.py index cd79f89..733998c 100644 --- a/src/server/BreCal/impl/user.py +++ b/src/server/BreCal/impl/user.py @@ -5,6 +5,7 @@ import bcrypt from ..schemas import model from .. import local_db +from BreCal.database.sql_queries import SQLQuery, create_sql_query_user_put def PutUser(schemaModel): """ @@ -21,13 +22,21 @@ def PutUser(schemaModel): # test if object to update is found sentinel = object() + # query = SQLQuery.get_user_by_id() + # theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) theuser = commands.query_single_or_default("SELECT * FROM user where id = ?id?", sentinel, param={"id" : schemaModel["id"]}, model=model.User) if theuser is sentinel: pooledConnection.close() + # #TODO: result = {"message":"no such record"} -> json.dumps return json.dumps("no such record"), 404, {'Content-Type': 'application/json; charset=utf-8'} # see if we need to update public fields + # #TODO_determine: this filter blocks Put-Requests, which update the 'notify_email', 'notify_whatsapp', 'notify_signal', 'notify_popup' fields + # should this be refactored? + # Also, what about the 'user_name'? + # 'participant_id' would also not trigger an update in isolation if "first_name" in schemaModel or "last_name" in schemaModel or "user_phone" in schemaModel or "user_email" in schemaModel: + # query = SQLQuery.get_user_put(schemaModel) query = "UPDATE user SET " isNotFirst = False for key in schemaModel.keys(): @@ -49,6 +58,7 @@ def PutUser(schemaModel): if "old_password" in schemaModel and schemaModel["old_password"] and "new_password" in schemaModel and schemaModel["new_password"]: if bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")): # old pw matches password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8') + # query = SQLQuery.get_update_user_password() query = "UPDATE user SET password_hash = ?password_hash? WHERE id = ?id?" 
commands.execute(query, param={"password_hash" : password_hash, "id" : schemaModel["id"]}) else: diff --git a/src/server/BreCal/local_db.py b/src/server/BreCal/local_db.py index 85dff68..330bc13 100644 --- a/src/server/BreCal/local_db.py +++ b/src/server/BreCal/local_db.py @@ -7,16 +7,16 @@ import sys config_path = None -def initPool(instancePath): +def initPool(instancePath, connection_filename="connection_data_devel.json"): try: global config_path if(config_path == None): - config_path = os.path.join(instancePath,'../../../secure/connection_data_devel.json'); + config_path = os.path.join(instancePath,f'../../../secure/{connection_filename}') #connection_data_devel.json'); print (config_path) if not os.path.exists(config_path): - print ('cannot find ' + config_path) + print ('cannot find ' + os.path.abspath(config_path)) print("instance path", instancePath) sys.exit(1) @@ -39,4 +39,4 @@ def getPoolConnection(): global config_path f = open(config_path); connection_data = json.load(f) - return mysql.connector.connect(**connection_data) \ No newline at end of file + return mysql.connector.connect(**connection_data) diff --git a/src/server/BreCal/schemas/model.py b/src/server/BreCal/schemas/model.py index bf157aa..92232e2 100644 --- a/src/server/BreCal/schemas/model.py +++ b/src/server/BreCal/schemas/model.py @@ -1,5 +1,5 @@ from dataclasses import field, dataclass -from marshmallow import Schema, fields, post_load, INCLUDE, ValidationError +from marshmallow import Schema, fields, INCLUDE, ValidationError, validate, validates, post_load from marshmallow.fields import Field from marshmallow_enum import EnumField from enum import IntEnum @@ -9,6 +9,11 @@ from typing import List import json import datetime +from BreCal.validators.time_logic import validate_time_is_in_not_too_distant_future +from BreCal.validators.validation_base_utils import check_if_string_has_special_characters +from BreCal.database.enums import ParticipantType, ParticipantFlag + +# from BreCal. ... import check_if_user_is_bsmd_type def obj_dict(obj): if isinstance(obj, datetime.datetime): @@ -49,10 +54,20 @@ class EvaluationType(IntEnum): def _missing_(cls, value): return cls.undefined -class NotificationType(IntEnum): +class NotificationType(IntEnum): + """ + Any user has the attributes + 'notify_email' -> NotificationType.email + 'notify_popup' -> NotificationType.push + 'notify_whatsapp' -> undeclared + 'notify_signal' -> undeclared + """ undefined = 0 email = 1 push = 2 + # whatsapp = 3 + # signal = 4 + @classmethod def _missing_(cls, value): return cls.undefined @@ -104,7 +119,7 @@ class History: return self(id, participant_id, shipcall_id, timestamp, eta, ObjectType(type), OperationType(operation)) class Error(Schema): - message = fields.String(required=True) + message = fields.String(metadata={'required':True}) class GetVerifyInlineResp(Schema): @@ -112,11 +127,17 @@ class GetVerifyInlineResp(Schema): @dataclass class Notification: + """ + Base data class for any notification. 
+ + Description: + 'An entry corresponds to an alarm given by a violated rule during times update' + """ id: int - shipcall_id: int - level: int - type: NotificationType - message: str + shipcall_id: int # 'shipcall record that caused the notification' + level: int # 'severity of the notification' + type: NotificationType # 'type of the notification' + message: str # 'individual message' created: datetime modified: datetime @@ -142,68 +163,99 @@ class Participant(Schema): street: str postal_code: str city: str - type: int + type: int # fields.Enum(ParticipantType ...) flags: int created: datetime modified: datetime deleted: bool + @validates("type") + def validate_type(self, value): + # e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7 + max_int = sum([int(val) for val in list(ParticipantType._value2member_map_.values())]) + min_int = 0 + + valid_type = 0 <= value < max_int + if not valid_type: + raise ValidationError(f"the provided integer is not supported for default behaviour of the ParticipantType IntFlag. Your choice: {value}. Supported values are: 0 <= value {max_int}") + + + @validates("flags") + def validate_flags(self, value): + # e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7 + max_int = sum([int(val) for val in list(ParticipantFlag._value2member_map_.values())]) + min_int = 0 + + valid_type = 0 <= value < max_int + if not valid_type: + raise ValidationError(f"the provided integer is not supported for default behaviour of the ParticipantFlag IntFlag. Your choice: {value}. Supported values are: 0 <= value {max_int}") + + class ParticipantList(Participant): pass class ParticipantAssignmentSchema(Schema): - participant_id = fields.Int() - type = fields.Int() + participant_id = fields.Integer() + type = fields.Integer() class ShipcallSchema(Schema): def __init__(self): super().__init__(unknown=None) pass - id = fields.Int() - ship_id = fields.Int() - type = fields.Enum(ShipcallType, required=True) - eta = fields.DateTime(Required = False, allow_none=True) - voyage = fields.Str(allow_none=True, metadata={'Required':False}) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. 
Additional metadata: {'Required': False} - etd = fields.DateTime(Required = False, allow_none=True) - arrival_berth_id = fields.Int(Required = False, allow_none=True) - departure_berth_id = fields.Int(Required = False, allow_none=True) - tug_required = fields.Bool(Required = False, allow_none=True) - pilot_required = fields.Bool(Required = False, allow_none=True) - flags = fields.Int(Required = False, allow_none=True) - pier_side = fields.Bool(Required = False, allow_none=True) - bunkering = fields.Bool(Required = False, allow_none=True) - replenishing_terminal = fields.Bool(Required = False, allow_none=True) - replenishing_lock = fields.Bool(Required = False, allow_none=True) - draft = fields.Float(Required = False, allow_none=True) - tidal_window_from = fields.DateTime(Required = False, allow_none=True) - tidal_window_to = fields.DateTime(Required = False, allow_none=True) - rain_sensitive_cargo = fields.Bool(Required = False, allow_none=True) - recommended_tugs = fields.Int(Required = False, allow_none=True) - anchored = fields.Bool(Required = False, allow_none=True) - moored_lock = fields.Bool(Required = False, allow_none=True) - canceled = fields.Bool(Required = False, allow_none=True) - evaluation = fields.Enum(EvaluationType, required=False, allow_none=True, default=EvaluationType.undefined) + id = fields.Integer(metadata={'required':True}) + ship_id = fields.Integer(metadata={'required':True}) + #type = fields.Enum(ShipcallType, default=ShipcallType.undefined) # type = fields.Integer() # make enum: shipcall type. add validator + type = fields.Integer(metadata={'required':True}) # make enum: shipcall type. add validator # type = fields.Enum(ShipcallType, default=ShipcallType.undefined) # type = fields.Integer() # make enum: shipcall type. add validator + eta = fields.DateTime(metadata={'required':False}, allow_none=True) + voyage = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=16)]) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. 
Additional metadata: {'Required': False} + etd = fields.DateTime(metadata={'required':False}, allow_none=True) + arrival_berth_id = fields.Integer(metadata={'required':False}, allow_none=True) + departure_berth_id = fields.Integer(metadata={'required':False}, allow_none=True) + tug_required = fields.Bool(metadata={'required':False}, allow_none=True) + pilot_required = fields.Bool(metadata={'required':False}, allow_none=True) + flags = fields.Integer(metadata={'required':False}, allow_none=True) + pier_side = fields.Bool(metadata={'required':False}, allow_none=True) + bunkering = fields.Bool(metadata={'required':False}, allow_none=True) + replenishing_terminal = fields.Bool(metadata={'required':False}, allow_none=True) + replenishing_lock = fields.Bool(metadata={'required':False}, allow_none=True) + draft = fields.Float(metadata={'required':False}, allow_none=True, validate=[validate.Range(min=0, max=20, min_inclusive=False, max_inclusive=True)]) + tidal_window_from = fields.DateTime(metadata={'required':False}, allow_none=True) + tidal_window_to = fields.DateTime(metadata={'required':False}, allow_none=True) + rain_sensitive_cargo = fields.Bool(metadata={'required':False}, allow_none=True) + recommended_tugs = fields.Integer(metadata={'required':False}, allow_none=True, validate=[validate.Range(min=0, max=10, min_inclusive=True, max_inclusive=True)]) + anchored = fields.Bool(metadata={'required':False}, allow_none=True) + moored_lock = fields.Bool(metadata={'required':False}, allow_none=True) + canceled = fields.Bool(metadata={'required':False}, allow_none=True) + evaluation = fields.Enum(EvaluationType, metadata={'required':False}, allow_none=True, default=EvaluationType.undefined) evaluation_message = fields.Str(allow_none=True, metadata={'Required':False}) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. 
Additional metadata: {'Required': False} - evaluation_time = fields.DateTime(Required = False, allow_none=True) - evaluation_notifications_sent = fields.Bool(Required = False, allow_none=True) - time_ref_point = fields.Int(Required = False, allow_none=True) + evaluation_time = fields.DateTime(metadata={'required':False}, allow_none=True) + evaluation_notifications_sent = fields.Bool(metadata={'required':False}, allow_none=True) + time_ref_point = fields.Integer(metadata={'required':False}, allow_none=True) participants = fields.List(fields.Nested(ParticipantAssignmentSchema)) - created = fields.DateTime(Required = False, allow_none=True) - modified = fields.DateTime(Required = False, allow_none=True) + created = fields.DateTime(metadata={'required':False}, allow_none=True) + modified = fields.DateTime(metadata={'required':False}, allow_none=True) @post_load def make_shipcall(self, data, **kwargs): if 'type' in data: - data['type_value'] = data['type'].value + data['type_value'] = int(data['type']) else: - data['type_value'] = ShipcallType.undefined + data['type_value'] = int(ShipcallType.undefined) if 'evaluation' in data: if data['evaluation']: - data['evaluation_value'] = data['evaluation'].value + data['evaluation_value'] = int(data['evaluation']) else: - data['evaluation_value'] = EvaluationType.undefined + data['evaluation_value'] = int(EvaluationType.undefined) return data + + @validates("type") + def validate_type(self, value): + valid_shipcall_type = int(value) in [item.value for item in ShipcallType] + + if not valid_shipcall_type: + raise ValidationError(f"the provided type is not a valid shipcall type.") + @dataclass class Participant_Assignment: @@ -215,6 +267,9 @@ class Participant_Assignment: participant_id: int type: int # a variant would be to use the IntFlag type (with appropriate serialization) + def to_json(self): + return self.__dict__ + @dataclass class Shipcall: @@ -230,7 +285,7 @@ class Shipcall: tug_required: bool pilot_required: bool flags: int - pier_side: bool + pier_side: bool # enumerator object in database/enum/PierSide bunkering: bool replenishing_terminal: bool replenishing_lock: bool @@ -297,35 +352,91 @@ class ShipcallId(Schema): # this is the way! 
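# Hedged usage sketch (assumptions: the Flask 'request' object and the error-handling style used
# elsewhere in this patch; not part of the schema module itself). Loading a payload through the
# schema below runs every @validates hook:
#   try:
#       loaded = TimesSchema().load(data=request.get_json(force=True), many=False, partial=True)
#   except ValidationError as ex:
#       return json.dumps({"message": f"bad format. Error Messages: {ex.messages}"}), 400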
+ class TimesSchema(Schema): def __init__(self): super().__init__(unknown=None) pass - id = fields.Int(Required=False) - eta_berth = fields.DateTime(Required = False, allow_none=True) - eta_berth_fixed = fields.Bool(Required = False, allow_none=True) - etd_berth = fields.DateTime(Required = False, allow_none=True) - etd_berth_fixed = fields.Bool(Required = False, allow_none=True) - lock_time = fields.DateTime(Required = False, allow_none=True) - lock_time_fixed = fields.Bool(Required = False, allow_none=True) - zone_entry = fields.DateTime(Required = False, allow_none=True) - zone_entry_fixed = fields.Bool(Required = False, allow_none=True) - operations_start = fields.DateTime(Required = False, allow_none=True) - operations_end = fields.DateTime(Required = False, allow_none=True) - remarks = fields.String(Required = False, allow_none=True) - participant_id = fields.Int(Required = True) - berth_id = fields.Int(Required = False, allow_none = True) - berth_info = fields.String(Required = False, allow_none=True) - pier_side = fields.Bool(Required = False, allow_none = True) - shipcall_id = fields.Int(Required = True) - participant_type = fields.Int(Required = False, allow_none=True) - ata = fields.DateTime(Required = False, allow_none=True) - atd = fields.DateTime(Required = False, allow_none=True) - eta_interval_end = fields.DateTime(Required = False, allow_none=True) - etd_interval_end = fields.DateTime(Required = False, allow_none=True) - created = fields.DateTime(Required = False, allow_none=True) - modified = fields.DateTime(Required = False, allow_none=True) + id = fields.Integer(metadata={'required':False}) + eta_berth = fields.DateTime(metadata={'required':False}, allow_none=True) + eta_berth_fixed = fields.Bool(metadata={'required':False}, allow_none=True) + etd_berth = fields.DateTime(metadata={'required':False}, allow_none=True) + etd_berth_fixed = fields.Bool(metadata={'required':False}, allow_none=True) + lock_time = fields.DateTime(metadata={'required':False}, allow_none=True) + lock_time_fixed = fields.Bool(metadata={'required':False}, allow_none=True) + zone_entry = fields.DateTime(metadata={'required':False}, allow_none=True) + zone_entry_fixed = fields.Bool(metadata={'required':False}, allow_none=True) + operations_start = fields.DateTime(metadata={'required':False}, allow_none=True) + operations_end = fields.DateTime(metadata={'required':False}, allow_none=True) + remarks = fields.String(metadata={'required':False}, allow_none=True, validate=[validate.Length(max=512)]) + participant_id = fields.Integer(metadata={'required':True}) + berth_id = fields.Integer(metadata={'required':False}, allow_none = True) + berth_info = fields.String(metadata={'required':False}, allow_none=True, validate=[validate.Length(max=512)]) + pier_side = fields.Bool(metadata={'required':False}, allow_none = True) + shipcall_id = fields.Integer(metadata={'required':True}) + participant_type = fields.Integer(Required = False, allow_none=True)# TODO: could become Enum. 
fields.Enum(ParticipantType, metadata={'required':False}, allow_none=True, default=ParticipantType.undefined) #fields.Integer(metadata={'required':False}, allow_none=True) + ata = fields.DateTime(metadata={'required':False}, allow_none=True) + atd = fields.DateTime(metadata={'required':False}, allow_none=True) + eta_interval_end = fields.DateTime(metadata={'required':False}, allow_none=True) + etd_interval_end = fields.DateTime(metadata={'required':False}, allow_none=True) + created = fields.DateTime(metadata={'required':False}, allow_none=True) + modified = fields.DateTime(metadata={'required':False}, allow_none=True) + + @validates("participant_type") + def validate_participant_type(self, value): + # #TODO: it may also make sense to block multi-assignments, whereas a value could be BSMD+AGENCY + # while the validation fails when one of those multi-assignments is BSMD, it passes in cases, + # such as AGENCY+PILOT + + # a participant type should not be .BSMD + if not isinstance(value, ParticipantType): + value = ParticipantType(value) + + if ParticipantType.BSMD in value: + raise ValidationError(f"the participant_type must not be .BSMD") + + @validates("eta_berth") + def validate_eta_berth(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. + valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return + + @validates("etd_berth") + def validate_etd_berth(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. + valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return + + @validates("lock_time") + def validate_lock_time(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. + valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return + + @validates("zone_entry") + def validate_zone_entry(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. + valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return + + @validates("operations_start") + def validate_operations_start(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. + valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return + + @validates("operations_end") + def validate_operations_end(self, value): + # violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future + # when 'value' is 'None', a ValidationError is not issued. 
+ valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12) + return # deserialize PUT object target @@ -333,13 +444,26 @@ class UserSchema(Schema): def __init__(self): super().__init__(unknown=None) pass - id = fields.Int(required=True) - first_name = fields.Str(allow_none=True, metadata={'Required':False}) - last_name = fields.Str(allow_none=True, metadata={'Required':False}) - user_phone = fields.Str(allow_none=True, metadata={'Required':False}) - user_email = fields.Str(allow_none=True, metadata={'Required':False}) - old_password = fields.Str(allow_none=True, metadata={'Required':False}) - new_password = fields.Str(allow_none=True, metadata={'Required':False}) + id = fields.Integer(metadata={'required':True}) + first_name = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)]) + last_name = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)]) + user_phone = fields.String(allow_none=True, metadata={'Required':False}) + user_email = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)]) + old_password = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=128)]) + new_password = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(min=6, max=128)]) + # #TODO: the user schema does not (yet) include the 'notify_' fields + + @validates("user_phone") + def validate_user_phone(self, value): + valid_characters = list(map(str,range(0,10)))+["+", " "] + if not all([v in valid_characters for v in value]): + raise ValidationError(f"one of the phone number values is not valid.") + + @validates("user_email") + def validate_user_email(self, value): + if not "@" in value: + raise ValidationError(f"invalid email address") + @dataclass class Times: @@ -380,10 +504,10 @@ class User: user_phone: str password_hash: str api_key: str - notify_email: bool - notify_whatsapp: bool - notify_signal: bool - notify_popup: bool + notify_email: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? + notify_whatsapp: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? + notify_signal: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? + notify_popup: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? 
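    # Hedged sketch for the TODO above (an assumption, not part of this patch): the four notify_*
    # booleans could be folded into a single IntFlag column, e.g.
    #   class NotificationChannel(IntFlag):
    #       EMAIL = 1
    #       WHATSAPP = 2
    #       SIGNAL = 4
    #       POPUP = 8
    # so that "email + popup" is stored as 9 and tested with `NotificationChannel.EMAIL in channels`.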
created: datetime modified: datetime @@ -409,13 +533,13 @@ class ShipSchema(Schema): super().__init__(unknown=None) pass - id = fields.Int(Required=False) + id = fields.Int(metadata={'required':False}) name = fields.String(allow_none=False, metadata={'Required':True}) imo = fields.Int(allow_none=False, metadata={'Required':True}) callsign = fields.String(allow_none=True, metadata={'Required':False}) participant_id = fields.Int(allow_none=True, metadata={'Required':False}) - length = fields.Float(allow_none=True, metadata={'Required':False}) - width = fields.Float(allow_none=True, metadata={'Required':False}) + length = fields.Float(allow_none=True, metadata={'Required':False}, validate=[validate.Range(min=0, max=1000, min_inclusive=False, max_inclusive=False)]) + width = fields.Float(allow_none=True, metadata={'Required':False}, validate=[validate.Range(min=0, max=100, min_inclusive=False, max_inclusive=False)]) is_tug = fields.Bool(allow_none=True, metadata={'Required':False}, default=False) bollard_pull = fields.Int(allow_none=True, metadata={'Required':False}) eni = fields.Int(allow_none=True, metadata={'Required':False}) @@ -423,6 +547,34 @@ class ShipSchema(Schema): modified = fields.DateTime(allow_none=True, metadata={'Required':False}) deleted = fields.Bool(allow_none=True, metadata={'Required':False}, default=False) + @validates("name") + def validate_name(self, value): + character_length = len(str(value)) + if character_length>=64: + raise ValidationError(f"'name' argument should have at max. 63 characters") + + if check_if_string_has_special_characters(value): + raise ValidationError(f"'name' argument should not have special characters.") + return + + @validates("imo") + def validate_imo(self, value): + imo_length = len(str(value)) + if imo_length != 7: + raise ValidationError(f"'imo' should be a 7-digit number") + return + + @validates("callsign") + def validate_callsign(self, value): + if value is not None: + callsign_length = len(str(value)) + if callsign_length>8: + raise ValidationError(f"'callsign' argument should not have more than 8 characters") + + if check_if_string_has_special_characters(value): + raise ValidationError(f"'callsign' argument should not have special characters.") + return + class TimesId(Schema): pass @@ -442,3 +594,22 @@ class Shipcalls(Shipcall): class TimesList(Times): pass + +@dataclass +class ShipcallParticipantMap: + id: int + shipcall_id: int + participant_id: int + type : ShipcallType + created: datetime + modified: datetime + + def to_json(self): + return { + "id": self.id, + "shipcall_id": self.shipcall_id, + "participant_id": self.participant_id, + "type": self.type.name, + "created": self.created.isoformat() if self.created else "", + "modified": self.modified.isoformat() if self.modified else "", + } diff --git a/src/server/BreCal/services/auth_guard.py b/src/server/BreCal/services/auth_guard.py index 9c5824d..9bd27c6 100644 --- a/src/server/BreCal/services/auth_guard.py +++ b/src/server/BreCal/services/auth_guard.py @@ -9,6 +9,7 @@ def check_jwt(): if not token: raise Exception('Missing access token') jwt = token.split('Bearer ')[1] + try: return decode_jwt(jwt) except Exception as e: diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py new file mode 100644 index 0000000..e06021d --- /dev/null +++ b/src/server/BreCal/services/email_handling.py @@ -0,0 +1,174 @@ +import os +import typing +import smtplib +from getpass import getpass +from email.message import EmailMessage +import mimetypes + 
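# Hedged usage sketch of the handler defined below (server name, port and addresses are placeholders):
#   handler = EmailHandler("smtp.example.com", 465, "sender@example.com")
#   handler.login()                                    # interactive getpass() prompt
#   msg = handler.create_email("Subject line", "Message body")
#   feedback, msg = handler.send_email(msg, ["recipient@example.com"], debug=True)  # debug=True returns without sending
#   handler.close()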
+import email +# from email.mime.base import MIMEBase +# from email.mime.image import MIMEImage +# from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart +from email.mime.application import MIMEApplication + + +class EmailHandler(): + """ + Creates an EmailHandler, which is capable of connecting to a mail server at a respective port, + as well as logging into a specific user's mail address. + Upon creating messages, these can be sent via this handler. + + Options: + mail_server: address of the server, such as 'smtp.gmail.com' or 'w01d5503.kasserver.com + mail_port: + 25 - SMTP Port, to send emails + 110 - POP3 Port, to receive emails + 143 - IMAP Port, to receive from IMAP + 465 - SSL Port of SMTP + 587 - alternative SMTP Port + 993 - SSL/TLS-Port of IMAP + 995 - SSL/TLS-Port of POP3 + mail_address: a specific user's Email address, which will be used to send Emails. Example: "my_user@gmail.com" + """ + def __init__(self, mail_server:str, mail_port:int, mail_address:str): + self.mail_server = mail_server + self.mail_port = mail_port + self.mail_address = mail_address + + self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, SMTP + + def check_state(self): + """check, whether the server login took place and is open.""" + try: + (status_code, status_msg) = self.server.noop() + return status_code==250 # 250: b'2.0.0 Ok' + except smtplib.SMTPServerDisconnected: + return False + + def check_connection(self): + """check, whether the server object is connected to the server. If not, connect it. """ + try: + self.server.ehlo() + except smtplib.SMTPServerDisconnected: + self.server.connect(self.mail_server, self.mail_port) + return + + def check_login(self)->bool: + """check, whether the server object is logged in as a user""" + user = self.server.__dict__.get("user",None) + return user is not None + + def login(self, interactive:bool=True): + """ + login on the determined mail server's mail address. By default, this function opens an interactive window to + type the password without echoing (printing '*******' instead of readable characters). + + returns (status_code, status_msg) + """ + self.check_connection() + if interactive: + (status_code, status_msg) = self.server.login(self.mail_address, password=getpass()) + else: + # fernet + password file + raise NotImplementedError() + return (status_code, status_msg) # should be: (235, b'2.7.0 Authentication successful') + + def create_email(self, subject:str, message_body:str)->EmailMessage: + """ + Create an EmailMessage object, which contains the Email's header ("Subject"), content ("Message Body") and the sender's address ("From"). + The EmailMessage object does not contain the recipients yet, as these will be defined upon sending the Email. + """ + msg = EmailMessage() + msg["Subject"] = subject + msg["From"] = self.mail_address + #msg["To"] = email_tgts # will be defined in self.send_email + msg.set_content(message_body) + return msg + + def build_recipients(self, email_tgts:list[str]): + """ + email formatting does not support lists. Instead, items are joined into a comma-space-separated string. 
+ Example: + [mail1@mail.com, mail2@mail.com] becomes + 'mail1@mail.com, mail2@mail.com' + """ + return ', '.join(email_tgts) + + def open_mime_application(self, path:str)->MIMEApplication: + """open a local file, read the bytes into a MIMEApplication object, which is built with the proper subtype (based on the file extension)""" + with open(path, 'rb') as file: + attachment = MIMEApplication(file.read(), _subtype=mimetypes.MimeTypes().guess_type(path)) + + attachment.add_header('Content-Disposition','attachment',filename=str(os.path.basename(path))) + return attachment + + def attach_file(self, path:str, msg:email.mime.multipart.MIMEMultipart)->None: + """ + attach a file to the message. This function opens the file, reads its bytes, defines the mime type by the + path extension. The filename is appended as the header. + + mimetypes: # https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types + """ + attachment = self.open_mime_application(path) + msg.attach(attachment) + return + + def send_email(self, msg:EmailMessage, email_tgts:list[str], cc_tgts:typing.Optional[list[str]]=None, bcc_tgts:typing.Optional[list[str]]=None, debug:bool=False)->typing.Union[dict,EmailMessage]: + """ + send a prepared email message to recipients (email_tgts), copy (cc_tgts) and blind copy (bcc_tgts). + Returns a dictionary of feedback, which is commonly empty and the EmailMessage. + + When failing, this function returns an SMTP error instead of returning the default outputs. + """ + # Set the Recipients + msg["To"] = self.build_recipients(email_tgts) + + # optionally, add CC and BCC (copy and blind-copy) + if cc_tgts is not None: + msg["Cc"] = self.build_recipients(cc_tgts) + if bcc_tgts is not None: + msg["Bcc"] = self.build_recipients(bcc_tgts) + + # when debugging, do not send the Email, but return the EmailMessage. + if debug: + return {}, msg + + assert self.check_login(), f"currently not logged in. Cannot send an Email. Make sure to properly use self.login first. " + # send the prepared EmailMessage via the server. + feedback = self.server.send_message(msg) + return feedback, msg + + def translate_mail_to_multipart(self, msg:EmailMessage): + """EmailMessage does not support HTML and attachments. Hence, one can convert an EmailMessage object.""" + if msg.is_multipart(): + return msg + + # create a novel MIMEMultipart email + msg_new = MIMEMultipart("mixed") + headers = list((k, v) for (k, v) in msg.items() if k not in ("Content-Type", "Content-Transfer-Encoding")) + + # add the headers of msg to the new message + for k,v in headers: + msg_new[k] = v + + # delete the headers from msg + for k,v in headers: + del msg[k] + + # attach the remainder of the msg, such as the body, to the MIMEMultipart + msg_new.attach(msg) + return msg_new + + def print_email_attachments(self, msg:MIMEMultipart)->list[str]: + """return a list of lines of an Email, which contain 'filename=' as a list. 
""" + return [line_ for line_ in msg.as_string().split("\n") if "filename=" in line_] + + def close(self): + self.server.__dict__.pop("user",None) + self.server.__dict__.pop("password",None) + + # quit the server connection (internally uses .close) + self.server.quit() + return + diff --git a/src/server/BreCal/services/jwt_handler.py b/src/server/BreCal/services/jwt_handler.py index 66c0f43..d1e7ad6 100644 --- a/src/server/BreCal/services/jwt_handler.py +++ b/src/server/BreCal/services/jwt_handler.py @@ -7,11 +7,41 @@ def create_api_key(): return secrets.token_urlsafe(16) def generate_jwt(payload, lifetime=None): + """ + creates an encoded token, which is based on the 'SECRET_KEY' environment variable. The environment variable + is set when the .wsgi application is started or can theoretically be set on system-level. + + args: + payload: + json-dictionary with key:value pairs. + + lifetime: + When a 'lifetime' (integer) is provided, the payload will be extended by an expiration key 'exp', which is + valid for the next {lifetime} minutes. + + returns: token, a JWT-encoded string + """ if lifetime: payload['exp'] = (datetime.datetime.now() + datetime.timedelta(minutes=lifetime)).timestamp() return jwt.encode(payload, os.environ.get('SECRET_KEY'), algorithm="HS256") def decode_jwt(token): + """ + this function reverts the {generate_jwt} function. An encoded JWT token is decoded into a JSON dictionary. + The function is commonly used to decode a login-token and obtain a 'user_data' variable, which is a dictionary. + + Example of 'user_data': + { + 'id': 1, + 'participant_id': 1, + 'first_name': 'Firstname', + 'last_name': 'Lastname', + 'user_name': 'xUsername01', + 'user_phone': '+01 123 456 7890', + 'user_email': 'firstname.lastname@internet.com', + 'exp': 1716881626.056438 # expiration timestamp + } + """ return jwt.decode(token, os.environ.get('SECRET_KEY'), algorithms=["HS256"]) diff --git a/src/server/BreCal/services/schedule_routines.py b/src/server/BreCal/services/schedule_routines.py index 542b213..a36a1fc 100644 --- a/src/server/BreCal/services/schedule_routines.py +++ b/src/server/BreCal/services/schedule_routines.py @@ -3,6 +3,7 @@ import pydapper from BreCal.schemas import model from BreCal.local_db import getPoolConnection from BreCal.database.update_database import evaluate_shipcall_state +from BreCal.database.sql_queries import create_sql_query_shipcall_get import threading import schedule @@ -26,16 +27,8 @@ def UpdateShipcalls(options:dict = {'past_days':2}): pooledConnection = getPoolConnection() commands = pydapper.using(pooledConnection) - query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " - "flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, tidal_window_to, rain_sensitive_cargo, recommended_tugs, " - "anchored, moored_lock, canceled, evaluation, evaluation_message, evaluation_notifications_sent, evaluation_time, s.created as created, s.modified as modified, time_ref_point FROM shipcall s " + - "LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " - "WHERE " - "(type = 1 AND (COALESCE(t.eta_berth, eta) >= DATE(NOW() - INTERVAL %d DAY))) OR " - "((type = 2 OR type = 3) AND (COALESCE(t.etd_berth, etd) >= DATE(NOW() - INTERVAL %d DAY)))" - "ORDER BY s.id") % (options["past_days"], options["past_days"]) - - # obtain data from the MYSQL database + # obtain data from the MYSQL database (uses 'options' to filter the resulting data by 
the ETA, considering those entries of 'past_days'-range) + query = create_sql_query_shipcall_get(options) data = commands.query(query, model=model.Shipcall) # get the shipcall ids, which are of interest @@ -57,6 +50,11 @@ def add_function_to_schedule__update_shipcalls(interval_in_minutes:int, options: schedule.every(interval_in_minutes).minutes.do(UpdateShipcalls, **kwargs_) return +def add_function_to_schedule__send_notifications(vr, interval_in_minutes:int=10): + schedule.every(interval_in_minutes).minutes.do(vr.notifier.send_notifications) + return + + def setup_schedule(update_shipcalls_interval_in_minutes:int=60): logging.getLogger('schedule').setLevel(logging.INFO); # set the logging level of the schedule module to INFO diff --git a/src/server/BreCal/stubs/notification.py b/src/server/BreCal/stubs/notification.py index 63df90f..8971a76 100644 --- a/src/server/BreCal/stubs/notification.py +++ b/src/server/BreCal/stubs/notification.py @@ -7,7 +7,6 @@ def get_notification_simple(): """creates a default notification, where 'created' is now, and modified is now+10 seconds""" notification_id = generate_uuid1_int() # uid? times_id = generate_uuid1_int() # uid? - acknowledged = False level = 10 type = 0 message = "hello world" @@ -17,7 +16,6 @@ def get_notification_simple(): notification = Notification( notification_id, times_id, - acknowledged, level, type, message, diff --git a/src/server/BreCal/stubs/participant.py b/src/server/BreCal/stubs/participant.py index 38f303c..1a4aef3 100644 --- a/src/server/BreCal/stubs/participant.py +++ b/src/server/BreCal/stubs/participant.py @@ -30,3 +30,8 @@ def get_participant_simple(): deleted ) return participant + + +def get_stub_list_of_valid_participants(): + participants = [{'participant_id': 2, 'type': 4}, {'participant_id': 3, 'type': 1}, {'participant_id': 4, 'type': 2}, {'participant_id': 5, 'type': 8}] + return participants diff --git a/src/server/BreCal/stubs/ship.py b/src/server/BreCal/stubs/ship.py index 4ab288e..77e6e45 100644 --- a/src/server/BreCal/stubs/ship.py +++ b/src/server/BreCal/stubs/ship.py @@ -36,3 +36,27 @@ def get_ship_simple(): ) return ship +def get_stub_valid_ship(): + post_data = { + 'name': 'BOTHNIABORG', + 'imo': 9267728, + 'callsign': "PBIO", + 'participant_id': None, + 'length': 153.05, + 'width': 21.8, + 'is_tug': 0, + 'bollard_pull': None, + 'eni': None, + 'created': '2023-10-04 11:52:32', + 'modified': None, + 'deleted': 0 + } + return post_data + +def get_stub_valid_ship_loaded_model(post_data=None): + from BreCal.schemas import model + if post_data is None: + post_data = get_stub_valid_ship() + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + return loadedModel + \ No newline at end of file diff --git a/src/server/BreCal/stubs/shipcall.py b/src/server/BreCal/stubs/shipcall.py index e86d379..299fdbe 100644 --- a/src/server/BreCal/stubs/shipcall.py +++ b/src/server/BreCal/stubs/shipcall.py @@ -3,25 +3,30 @@ from BreCal.stubs import generate_uuid1_int from BreCal.schemas.model import Shipcall from dataclasses import field +import json +import datetime +from BreCal.schemas.model import ShipcallType +from BreCal.stubs.participant import get_stub_list_of_valid_participants + def get_shipcall_simple(): # only used for the stub base_time = datetime.datetime.now() - shipcall_id = generate_uuid1_int() - ship_id = generate_uuid1_int() + shipcall_id = 124 # generate_uuid1_int() + ship_id = 5 # generate_uuid1_int() eta = base_time+datetime.timedelta(hours=3, minutes=12) role_type = 1 voyage = 
"987654321" etd = base_time+datetime.timedelta(hours=6, minutes=12) # should never be before eta - arrival_berth_id = generate_uuid1_int() - departure_berth_id = generate_uuid1_int() + arrival_berth_id = 140 #generate_uuid1_int() + departure_berth_id = 140 #generate_uuid1_int() tug_required = False pilot_required = False - flags = 0 # #TODO_shipcall_flags. What is meant here? What should be tested? + flags = 0 # #TODO_shipcall_flags. What is meant here? What should be tested? pier_side = False # whether a ship will be fixated on the pier side. en: pier side, de: Anlegestelle. From 'BremenCalling_Datenmodell.xlsx': gedreht/ungedreht bunkering = False # #TODO_bunkering_unclear replenishing_terminal = False # en: replenishing terminal, de: Nachfüll-Liegeplatz @@ -37,11 +42,13 @@ def get_shipcall_simple(): recommended_tugs = 2 # assert 0= time_now: + raise ValidationError(f"'eta' must be in the future. Incorrect datetime provided.") + elif int(type_)==int(ShipcallType.departure): + etd = loadedModel.get("etd") + if (content.get("etd", None) is None): + raise ValidationError(f"providing 'etd' is mandatory. Missing key!") + if content.get("departure_berth_id", None) is None: + raise ValidationError(f"providing 'departure_berth_id' is mandatory. Missing key!") + if not etd >= time_now: + raise ValidationError(f"'etd' must be in the future. Incorrect datetime provided.") + elif int(type_)==int(ShipcallType.shifting): + eta = loadedModel.get("eta") + etd = loadedModel.get("etd") + # * arrival_berth_id / departure_berth_id (depending on type, see above) + if (content.get("eta", None) is None) or (content.get("etd", None) is None): + raise ValidationError(f"providing 'eta' and 'etd' is mandatory. Missing one of those keys!") + if (content.get("arrival_berth_id", None) is None) or (content.get("departure_berth_id", None) is None): + raise ValidationError(f"providing 'arrival_berth_id' & 'departure_berth_id' is mandatory. Missing key!") + if (not eta >= time_now) or (not etd >= time_now) or (not eta >= etd): + raise ValidationError(f"'eta' and 'etd' must be in the future. Incorrect datetime provided.") + + tidal_window_from = loadedModel.get("tidal_window_from", None) + tidal_window_to = loadedModel.get("tidal_window_to", None) + if tidal_window_to is not None: + if not tidal_window_to >= time_now: + raise ValidationError(f"'tidal_window_to' must be in the future. Incorrect datetime provided.") + + if tidal_window_from is not None: + if not tidal_window_from >= time_now: + raise ValidationError(f"'tidal_window_from' must be in the future. 
Incorrect datetime provided.") + + # #TODO: assert tidal_window_from > tidal_window_to + + # #TODO: len of participants > 0, if agency + # * assigned participant for agency + return + class InputValidation(): def __init__(self): @@ -162,4 +280,4 @@ class ParticipantValidation(DataclassValidation): ] ] return all_rules - + diff --git a/src/server/BreCal/validators/input_validation_ship.py b/src/server/BreCal/validators/input_validation_ship.py new file mode 100644 index 0000000..eab2480 --- /dev/null +++ b/src/server/BreCal/validators/input_validation_ship.py @@ -0,0 +1,140 @@ +import typing +import json +import datetime +from abc import ABC, abstractmethod +from marshmallow import ValidationError +from string import ascii_letters, digits + +from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType +from BreCal.impl.participant import GetParticipant +from BreCal.impl.ships import GetShips +from BreCal.impl.berths import GetBerths + +from BreCal.database.enums import ParticipantType, ParticipantFlag +from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data +from BreCal.database.sql_handler import execute_sql_query_standalone +from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag +from BreCal.validators.validation_base_utils import check_if_string_has_special_characters +import werkzeug + +class InputValidationShip(): + """ + This class combines a complex set of individual input validation functions into a joint object. + It uses static methods, so the object does not need to be instantiated, but functions can be called immediately. + + Example: + InputValidationShip.evaluate(user_data, loadedModel, content) + + When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues. + """ + def __init__(self) -> None: + pass + + @staticmethod + def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict): + # 1.) Only users of type BSMD are allowed to POST + InputValidationShip.check_user_is_bsmd_type(user_data) + + # 2.) The ship IMOs are used as matching keys. They must be unique in the database. + InputValidationShip.check_ship_imo_already_exists(loadedModel) + + # 3.) Check for reasonable Values (see BreCal.schemas.model.ShipSchema) + InputValidationShip.optionally_evaluate_bollard_pull_value(content) + return + + @staticmethod + def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict): + # 1.) Only users of type BSMD are allowed to PUT + InputValidationShip.check_user_is_bsmd_type(user_data) + + # 2.) The IMO number field may not be changed + InputValidationShip.put_content_may_not_contain_imo_number(content) + + # 3.) Check for reasonable Values (see BreCal.schemas.model.ShipSchema) + InputValidationShip.optionally_evaluate_bollard_pull_value(content) + + # 4.) ID field is mandatory + InputValidationShip.content_contains_ship_id(content) + return + + @staticmethod + def evaluate_delete_data(user_data:dict, ship_id:int): + # 1.) Only users of type BSMD are allowed to PUT + InputValidationShip.check_user_is_bsmd_type(user_data) + + # 2.) 
The dataset entry may not be deleted already + InputValidationShip.check_if_entry_is_already_deleted(ship_id) + return + + @staticmethod + def optionally_evaluate_bollard_pull_value(content:dict): + bollard_pull = content.get("bollard_pull",None) + is_tug = content.get("is_tug", None) + + if bollard_pull is not None: + if not is_tug: + raise ValidationError(f"'bollard_pull' is only allowed, when a ship is a tug ('is_tug').") + + if (not (0 < bollard_pull < 500)) & (is_tug): + raise ValidationError(f"when a ship is a tug, the bollard pull must be 0 < value < 500. ") + + @staticmethod + def check_user_is_bsmd_type(user_data:dict): + is_bsmd = check_if_user_is_bsmd_type(user_data) + if not is_bsmd: + raise ValidationError(f"current user does not belong to BSMD. Cannot post, put or delete ships. Found user data: {user_data}") + + @staticmethod + def check_ship_imo_already_exists(loadedModel:dict): + # get the ships, convert them to a list of JSON dictionaries + response, status_code, header = GetShips(token=None) + ships = json.loads(response) + + # extract only the 'imo' values + ship_imos = [ship.get("imo") for ship in ships] + + # check, if the imo in the POST-request already exists in the list + imo_already_exists = loadedModel.get("imo") in ship_imos + if imo_already_exists: + raise ValidationError(f"the provided ship IMO {loadedModel.get('imo')} already exists. A ship may only be added, if there is no other ship with the same IMO number.") + return + + @staticmethod + def put_content_may_not_contain_imo_number(content:dict): + put_data_ship_imo = content.get("imo",None) + if put_data_ship_imo is not None: + raise ValidationError(f"The IMO number field may not be changed since it serves the purpose of a primary (matching) key.") + return + + @staticmethod + def content_contains_ship_id(content:dict): + put_data_ship_id = content.get('id',None) + if put_data_ship_id is None: + raise ValidationError(f"The id field is required.") + return + + @staticmethod + def check_if_entry_is_already_deleted(ship_id:int): + """ + When calling a delete request for ships, the dataset may not be deleted already. This method + makes sure, that the request contains and ID, has a matching entry in the database, and the + database entry may not have a deletion state already. + """ + if ship_id is None: + raise ValidationError(f"The ship_id must be provided.") + + response, status_code, header = GetShips(token=None) + ships = json.loads(response) + existing_database_entries = [ship for ship in ships if ship.get("id")==ship_id] + if len(existing_database_entries)==0: + raise ValidationError(f"Could not find a ship with the specified ID. 
Selected: {ship_id}") + + existing_database_entry = existing_database_entries[0] + + deletion_state = existing_database_entry.get("deleted",None) + if deletion_state: + raise ValidationError(f"The selected ship entry is already deleted.") + return + + + diff --git a/src/server/BreCal/validators/input_validation_shipcall.py b/src/server/BreCal/validators/input_validation_shipcall.py new file mode 100644 index 0000000..0b8ad3b --- /dev/null +++ b/src/server/BreCal/validators/input_validation_shipcall.py @@ -0,0 +1,398 @@ +import typing +import json +import datetime +from abc import ABC, abstractmethod +from marshmallow import ValidationError +from string import ascii_letters, digits + +from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType +from BreCal.impl.participant import GetParticipant +from BreCal.impl.ships import GetShips +from BreCal.impl.berths import GetBerths + +from BreCal.database.enums import ParticipantType, ParticipantFlag +from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data +from BreCal.database.sql_handler import execute_sql_query_standalone +from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag +from BreCal.validators.validation_base_utils import check_if_string_has_special_characters +import werkzeug + +class InputValidationShipcall(): + """ + This class combines a complex set of individual input validation functions into a joint object. + It uses static methods, so the object does not need to be instantiated, but functions can be called immediately. + + Example: + InputValidationShipcall.evaluate(user_data, loadedModel, content) + + When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues. + + """ + def __init__(self) -> None: + pass + + @staticmethod + def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict): + """ + this function combines multiple validation functions to verify data, which is sent to the API as a shipcall's POST-request + + checks: + 1. permission: only participants that belong to the BSMD group are allowed to POST shipcalls + 2. reference checks: all refered objects within the Shipcall must exist + 3. existance of required fields + 4. 
reasonable values: validates the values within the Shipcall + """ + # check for permission (only BSMD-type participants) + InputValidationShipcall.check_user_is_bsmd_type(user_data) + + # check references (referred IDs must exist) + InputValidationShipcall.check_referenced_ids(loadedModel) + + # POST-request only: check the existance of required fields based on the ShipcallType + InputValidationShipcall.check_required_fields_exist_based_on_type(loadedModel, content) + + # POST-request only: check the existance of a participant list, when the user is of type agency + InputValidationShipcall.check_participant_list_not_empty_when_user_is_agency(loadedModel) + + # check for reasonable values in the shipcall fields + InputValidationShipcall.check_shipcall_values(loadedModel, content, forbidden_keys=["canceled", "evaluation", "evaluation_message"]) + return + + @staticmethod + def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict): + """ + this function combines multiple validation functions to verify data, which is sent to the API as a shipcall's PUT-request + + checks: + 1. whether the user belongs to participant group type BSMD + 2. users of the agency may edit the shipcall, when the shipcall-participant-map entry lists them + 3. existance of required fields + 4. all value-rules of the POST evaluation + 5. a canceled shipcall may not be changed + """ + # check for permission (only BSMD-type participants) + # #TODO: are both, bsmd and agency, user types accepted? + InputValidationShipcall.check_user_is_bsmd_type(user_data) + + # check, whether an agency is listed in the shipcall-participant-map + InputValidationShipcall.check_agency_in_shipcall_participant_map(user_data, loadedModel, content) + + # the ID field is required, all missing fields will be ignored in the update + InputValidationShipcall.check_required_fields_of_put_request(content) + + # check for reasonable values in the shipcall fields and checks for forbidden keys. + InputValidationShipcall.check_shipcall_values(loadedModel, content, forbidden_keys=["evaluation", "evaluation_message"]) + + # a canceled shipcall cannot be selected + # Note: 'canceled' is allowed in PUT-requests, if it is not already set (which is checked by InputValidationShipcall.check_shipcall_is_cancel) + InputValidationShipcall.check_shipcall_is_canceled(loadedModel, content) + return + + @staticmethod + def check_shipcall_values(loadedModel:dict, content:dict, forbidden_keys:list=["canceled", "evaluation", "evaluation_message"]): + """ + individually checks each value provided in the loadedModel/content. + This function validates, whether the values are reasonable. + + Also, some data may not be set in a POST-request. + """ + # Note: BreCal.schemas.model.ShipcallSchema has an internal validation, which the marshmallow library provides. This is used + # to verify values individually, when the schema is loaded with data. + # This function focuses on more complex input validation, which may require more sophisticated methods + + # loadedModel fills missing values, sometimes using optional values. Hence, the 'content'-variable is prefered for some of these verifications + # voyage shall not contain special characters + voyage_str_is_invalid = check_if_string_has_special_characters(text=content.get("voyage","")) + if voyage_str_is_invalid: + raise ValidationError(f"there are invalid characters in the 'voyage'-string. Please use only digits and ASCII letters. Allowed: {ascii_letters+digits}. 
Found: {content.get('voyage')}") + + # the 'flags' integer must be valid + flags_value = content.get("flags", 0) + if check_if_int_is_valid_flag(flags_value, enum_object=ParticipantFlag): + raise ValidationError(f"incorrect value provided for 'flags'. Must be a valid combination of the flags.") + + # time values must use future-dates + InputValidationShipcall.check_times_are_in_future(loadedModel, content) + + # some arguments must not be provided + InputValidationShipcall.check_forbidden_arguments(content, forbidden_keys=forbidden_keys) + return + + @staticmethod + def check_agency_in_shipcall_participant_map(user_data:dict, loadedModel:dict, content:dict, spm_shipcall_data:typing.Optional[list]=None): + """ + When the request is issued by a user of type 'AGENCY', there must be special caution. Agency users cannot self-assign as participants + of a shipcall. Further, when no AGENCY is assigned to the shipcall, a PUT-request is not feasible. In those cases, the + BSMD must first assign an agency, before a PUT-request can assign further participants. + + Upon violation, this method issues 'Forbidden'-Exceptions with HTTP status code 403. There are four reasons for violations: + a) an agency tries to self-assign for a shipcall + b) there is no assigned agency for the current shipcall + c) an agency is assigned, but the current agency-user belongs to a different participant_id + d) the user must be of ParticipantType BSMD or AGENCY + + args: + spm_shipcall_data: + a list of entries obtained from the ShipcallParticipantMap. These are deserialized dictionaries. + e.g., [{'participant_id': 136, 'type': 8}, ] + """ + if spm_shipcall_data is None: + # read the ShipcallParticipantMap entry of the current shipcall_id. This is used within the input validation of a PUT request + spm_shipcall_data = execute_sql_query_standalone( + query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?", + param={"shipcall_id":loadedModel["id"]}, + pooledConnection=None + ) + + # which role should be set by the PUT request? If the agency is about to be set, an error will be created + # read the user data from the JWT token (set when login is performed) + user_type = get_participant_type_from_user_data(user_data) # decode JWT -> get 'type' value + + # select the matching entries from the ShipcallParticipantMap + agency_entries = [spm_entry for spm_entry in spm_shipcall_data if int(spm_entry.get("type"))==int(ParticipantType.AGENCY)] # find all entries of type AGENCY (there should be at max. 1) + + # when the request stems from an AGENCY user, and the user wants to PUT an AGENCY role, the request should fail + # boolean: check, whether any of the assigned participants is of type AGENCY + types = [participant.get("type") for participant in loadedModel["participants"]] # readout the participants from the loadedModel, which shall be assigned by the PUT request + any_type_is_agency = any([int(type_) == int(ParticipantType.AGENCY) for type_ in types]) # check, whether *any* of the participants is an agency + + if not (int(user_type) in [int(ParticipantType.AGENCY), int(ParticipantType.BSMD)]): + # user not AGENCY or BSMD + raise werkzeug.exceptions.Forbidden(f"PUT Requests for shipcalls can only be issued by AGENCY or BSMD users.") # Forbidden: 403 + + if (int(user_type) == int(ParticipantType.AGENCY)) & (any_type_is_agency): + # self-assignment: agency sets agency participant + raise werkzeug.exceptions.Forbidden(f"An agency cannot self-register for a shipcall. 
The request is issued by an agency-user and tries to assign an AGENCY as the participant of the shipcall.") # Forbidden: 403 + + if len(agency_entries)>0: + # agency participant exists: participant id must be the same as shipcall participant map entry + matching_spm_entry = [spm_entry for spm_entry in spm_shipcall_data if (spm_entry.get("participant_id")==user_data["id"]) & (int(spm_entry.get("type"))==int(ParticipantType.AGENCY))] + + if len(matching_spm_entry)==0: + # An AGENCY was found, but a different participant_id is assigned to that AGENCY + raise werkzeug.exceptions.Forbidden(f"A different participant_id is assigned as the AGENCY of this shipcall. Provided ID: {user_data.get('id')}, Assigned ShipcallParticipantMap: {agency_entries}") # Forbidden: 403 + else: + # a matching agency was found: no violation + return + + else: + # agency participant does not exist: there is no assigned agency role for the shipcall {shipcall_id} + raise werkzeug.exceptions.Forbidden(f"There is no assigned agency for this shipcall. Shipcall ID: {loadedModel['id']}") # Forbidden: 403 + return + + @staticmethod + def check_user_is_bsmd_type(user_data): + """ + check, whether the user belongs to a participant, which is of type ParticipantType.BSMD + as ParticipantType is an IntFlag, a user belonging to multiple groups is properly evaluated. + """ + is_bsmd = check_if_user_is_bsmd_type(user_data) + if not is_bsmd: + raise ValidationError(f"current user does not belong to BSMD. Cannot post or put shipcalls. Found user data: {user_data}") + return + + @staticmethod + def check_referenced_ids(loadedModel): + """ + check, whether the referenced entries exist (e.g., when a Ship ID is referenced, but does not exist, the validation fails) + """ + # get all IDs from the loadedModel + ship_id = loadedModel.get("ship_id", None) + arrival_berth_id = loadedModel.get("arrival_berth_id", None) + departure_berth_id = loadedModel.get("departure_berth_id", None) + participants = loadedModel.get("participants",[]) + + valid_ship_id = check_if_ship_id_is_valid(ship_id=ship_id) + if not valid_ship_id: + raise ValidationError(f"provided an invalid ship id, which is not found in the database: {ship_id}") + + valid_arrival_berth_id = check_if_berth_id_is_valid(berth_id=arrival_berth_id) + if not valid_arrival_berth_id: + raise ValidationError(f"provided an invalid arrival berth id, which is not found in the database: {arrival_berth_id}") + + valid_departure_berth_id = check_if_berth_id_is_valid(berth_id=departure_berth_id) + if not valid_departure_berth_id: + raise ValidationError(f"provided an invalid departure berth id, which is not found in the database: {departure_berth_id}") + + valid_participant_ids = check_if_participant_ids_are_valid(participants=participants) + if not valid_participant_ids: + raise ValidationError(f"one of the provided participant ids is invalid. Could not find one of these in the database: {participants}") + + valid_participant_types = check_if_participant_ids_and_types_are_valid(participants=participants) + if not valid_participant_types: + raise ValidationError(f"every participant id and type should be listed only once. Found multiple entries for one of the participants.") + + @staticmethod + def check_forbidden_arguments(content:dict, forbidden_keys=["canceled", "evaluation", "evaluation_message"]): + """ + a post-request must not contain the arguments 'canceled', 'evaluation', 'evaluation_message'. 
+        a put-request must not contain the arguments 'evaluation', 'evaluation_message'
+
+        """
+        # the following keys should not be set in a POST-request.
+        for forbidden_key in forbidden_keys:
+            value = content.get(forbidden_key, None)
+            if value is not None:
+                raise ValidationError(f"'{forbidden_key}' may not be set on POST. Found: {value}")
+        return
+
+    @staticmethod
+    def check_required_fields_exist_based_on_type(loadedModel:dict, content:dict):
+        """
+        depending on the ShipcallType, some fields are *required* in a POST-request
+        """
+        type_ = loadedModel.get("type", int(ShipcallType.undefined))
+        ship_id = content.get("ship_id", None)
+        eta = content.get("eta", None)
+        etd = content.get("etd", None)
+        arrival_berth_id = content.get("arrival_berth_id", None)
+        departure_berth_id = content.get("departure_berth_id", None)
+
+        if ship_id is None:
+            raise ValidationError(f"providing 'ship_id' is mandatory. Missing key!")
+
+        if int(type_)==int(ShipcallType.undefined):
+            raise ValidationError(f"providing 'type' is mandatory. Missing key!")
+
+        # arrival: arrival_berth_id & eta must exist
+        elif int(type_)==int(ShipcallType.arrival):
+            if eta is None:
+                raise ValidationError(f"providing 'eta' is mandatory. Missing key!")
+
+            if arrival_berth_id is None:
+                raise ValidationError(f"providing 'arrival_berth_id' is mandatory. Missing key!")
+
+        # departure: departure_berth_id and etd must exist
+        elif int(type_)==int(ShipcallType.departure):
+            if etd is None:
+                raise ValidationError(f"providing 'etd' is mandatory. Missing key!")
+
+            if departure_berth_id is None:
+                raise ValidationError(f"providing 'departure_berth_id' is mandatory. Missing key!")
+
+        # shifting: arrival_berth_id, departure_berth_id, eta and etd must exist
+        elif int(type_)==int(ShipcallType.shifting):
+            if (eta is None) or (etd is None):
+                raise ValidationError(f"providing 'eta' and 'etd' is mandatory. Missing one of those keys!")
+            if (arrival_berth_id is None) or (departure_berth_id is None):
+                raise ValidationError(f"providing 'arrival_berth_id' & 'departure_berth_id' is mandatory. Missing key!")
+
+        else:
+            raise ValidationError(f"incorrect 'type' provided!")
+        return
+
+    @staticmethod
+    def check_times_are_in_future(loadedModel:dict, content:dict):
+        """
+        Dates should be in the future. Depending on the ShipcallType, specific values are checked.
+        Performs datetime checks on the loadedModel (datetime.datetime objects).
+        """
+        # obtain the current datetime to check, whether the provided values are in the future
+        time_now = datetime.datetime.now()
+
+        type_ = loadedModel.get("type", int(ShipcallType.undefined))
+        eta = loadedModel.get("eta")
+        etd = loadedModel.get("etd")
+        tidal_window_from = loadedModel.get("tidal_window_from", None)
+        tidal_window_to = loadedModel.get("tidal_window_to", None)
+
+        # Estimated arrival or departure times
+        InputValidationShipcall.check_times_in_future_based_on_type(type_, time_now, eta, etd)
+
+        # Tidal Window
+        InputValidationShipcall.check_tidal_window_in_future(time_now, tidal_window_from, tidal_window_to)
+        return
+
+    @staticmethod
+    def check_times_in_future_based_on_type(type_, time_now, eta, etd):
+        """
+        checks, whether the ETA & ETD times are in the future.
+        based on the type, this function checks:
+            arrival: eta
+            departure: etd
+            shifting: eta & etd
+        """
+        if int(type_)==int(ShipcallType.undefined):
+            raise ValidationError(f"providing 'type' is mandatory. Missing key!")
+        elif int(type_)==int(ShipcallType.arrival):
+            if not eta > time_now:
+                raise ValidationError(f"'eta' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETA: {eta}.")
+        elif int(type_)==int(ShipcallType.departure):
+            if not etd > time_now:
+                raise ValidationError(f"'etd' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETD: {etd}.")
+        elif int(type_)==int(ShipcallType.shifting):
+            if (not eta > time_now) or (not etd > time_now):
+                raise ValidationError(f"'eta' and 'etd' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETA: {eta}. ETD: {etd}")
+            if (not etd > eta):
+                raise ValidationError(f"'etd' must be larger than 'eta'. The ship cannot depart before it has arrived. Found: ETA {eta}, ETD: {etd}")
+        return
+
+    @staticmethod
+    def check_tidal_window_in_future(time_now, tidal_window_from, tidal_window_to):
+        if tidal_window_to is not None:
+            if not tidal_window_to >= time_now:
+                raise ValidationError(f"'tidal_window_to' must be in the future. Incorrect datetime provided.")
+
+        if tidal_window_from is not None:
+            if not tidal_window_from >= time_now:
+                raise ValidationError(f"'tidal_window_from' must be in the future. Incorrect datetime provided.")
+
+        if (tidal_window_to is not None) and (tidal_window_from is not None):
+            if tidal_window_to < tidal_window_from:
+                raise ValidationError(f"'tidal_window_to' must take place after 'tidal_window_from'. Incorrect datetime provided. Found 'tidal_window_to': {tidal_window_to}, 'tidal_window_from': {tidal_window_from}.")
+        return
+
+    @staticmethod
+    def check_participant_list_not_empty_when_user_is_agency(loadedModel):
+        """
+        For each POST request, one of the participants in the list must be assigned as a ParticipantType.AGENCY
+        """
+        participants = loadedModel.get("participants", [])
+        is_agency_participant = [ParticipantType.AGENCY in ParticipantType(participant.get("type")) for participant in participants]
+
+        if not any(is_agency_participant):
+            raise ValidationError(f"One of the assigned participants *must* be of type 'ParticipantType.AGENCY'. Found list of participants: {participants}")
+        return
+
+    @staticmethod
+    def check_shipcall_is_canceled(loadedModel, content):
+        # read the shipcall_id from the PUT data
+        shipcall_id = loadedModel.get("id")
+
+        # get all shipcalls in the database
+        shipcalls = get_shipcall_id_dictionary()
+
+        # search for the matching shipcall in the database
+        shipcall = shipcalls.get(shipcall_id,{})
+
+        # if the *existing* shipcall in the database is canceled, it may not be changed
+        if shipcall.get("canceled", False):
+            raise ValidationError(f"The shipcall with id '{shipcall_id}' is canceled. 
A canceled shipcall may not be changed.") + return + + @staticmethod + def check_required_fields_of_put_request(content:dict): + shipcall_id = content.get("id", None) + if shipcall_id is None: + raise ValidationError(f"A PUT request requires an 'id' to refer to.") + + + +""" +# copy +def validate_posted_shipcall_data(user_data:dict, loadedModel:dict, content:dict): + ##### Section 1: check user_data ##### + # DONE: refactored + + ##### Section 2: check loadedModel ##### + # DONE: refactored + + ##### Section 3: check content ##### + # DONE: refactored + + + ##### Section 4: check loadedModel & content ##### + # DONE: refactored ET and BERTH ID existance check + # DONE: refactored 'time in future' checks + return +""" \ No newline at end of file diff --git a/src/server/BreCal/validators/input_validation_times.py b/src/server/BreCal/validators/input_validation_times.py new file mode 100644 index 0000000..6b8ab6a --- /dev/null +++ b/src/server/BreCal/validators/input_validation_times.py @@ -0,0 +1,402 @@ +import typing +import json +import datetime +from abc import ABC, abstractmethod +from marshmallow import ValidationError +from string import ascii_letters, digits + +from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType, Times +from BreCal.impl.participant import GetParticipant +from BreCal.impl.ships import GetShips +from BreCal.impl.berths import GetBerths +from BreCal.impl.times import GetTimes + +from BreCal.database.enums import ParticipantType, ParticipantFlag +from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, check_if_shipcall_id_is_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data, get_participant_id_dictionary, check_if_participant_id_is_valid_standalone +from BreCal.database.sql_handler import execute_sql_query_standalone +from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag, check_if_string_has_special_characters +import werkzeug + +def build_post_data_type_dependent_required_fields_dict()->dict[ShipcallType,dict[ParticipantType,typing.Optional[list[str]]]]: + """ + The required fields of a POST-request depend on ShipcallType and ParticipantType. This function creates + a dictionary, which maps those types to a list of required fields. + + The participant types 'undefined' and 'bsmd' should not be used in POST-requests. They return 'None'. 
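+
+    Illustrative lookup (the values follow directly from the mapping built below):
+        required = build_post_data_type_dependent_required_fields_dict()
+        required[ShipcallType.arrival][ParticipantType.TERMINAL]    # -> ["operations_start"]
+        required[ShipcallType.shifting][ParticipantType.AGENCY]     # -> ["eta_berth", "etd_berth"]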
+ """ + post_data_type_dependent_required_fields_dict = { + ShipcallType.arrival:{ + ParticipantType.undefined:None, # should not be set in POST requests + ParticipantType.BSMD:None, # should not be set in POST requests + ParticipantType.TERMINAL:["operations_start"], + ParticipantType.AGENCY:["eta_berth"], + ParticipantType.MOORING:["eta_berth"], + ParticipantType.PILOT:["eta_berth"], + ParticipantType.PORT_ADMINISTRATION:["eta_berth"], + ParticipantType.TUG:["eta_berth"], + }, + ShipcallType.departure:{ + ParticipantType.undefined:None, # should not be set in POST requests + ParticipantType.BSMD:None, # should not be set in POST requests + ParticipantType.TERMINAL:["operations_end"], + ParticipantType.AGENCY:["etd_berth"], + ParticipantType.MOORING:["etd_berth"], + ParticipantType.PILOT:["etd_berth"], + ParticipantType.PORT_ADMINISTRATION:["etd_berth"], + ParticipantType.TUG:["etd_berth"], + }, + ShipcallType.shifting:{ + ParticipantType.undefined:None, # should not be set in POST requests + ParticipantType.BSMD:None, # should not be set in POST requests + ParticipantType.TERMINAL:["operations_start", "operations_end"], + ParticipantType.AGENCY:["eta_berth", "etd_berth"], + ParticipantType.MOORING:["eta_berth", "etd_berth"], + ParticipantType.PILOT:["eta_berth", "etd_berth"], + ParticipantType.PORT_ADMINISTRATION:["eta_berth", "etd_berth"], + ParticipantType.TUG:["eta_berth", "etd_berth"], + }, + } + return post_data_type_dependent_required_fields_dict + + + +class InputValidationTimes(): + """ + This class combines a complex set of individual input validation functions into a joint object. + It uses static methods, so the object does not need to be instantiated, but functions can be called immediately. + + Example: + InputValidationTimes.evaluate(user_data, loadedModel, content) + + When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues. + """ + def __init__(self) -> None: + pass + + @staticmethod + def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict): + # 0.) Check for the presence of required fields + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # 1.) datasets may only be created, if the current user fits the appropriate type in the ShipcallParticipantMap + InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content) + + # 2.) datasets may only be created, if the respective participant type did not already create one. + InputValidationTimes.check_if_entry_already_exists_for_participant_type(user_data, loadedModel, content) + + # 3.) only users who are *not* of type BSMD may post times datasets. + InputValidationTimes.check_user_is_not_bsmd_type(user_data) + + # 4.) Reference checking + InputValidationTimes.check_dataset_references(content) + + # 5.) Value checking + InputValidationTimes.check_dataset_values(user_data, loadedModel, content) + return + + @staticmethod + def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict): + # 1.) Check for the presence of required fields + InputValidationTimes.check_times_required_fields_put_data(content) + + # 2.) Only users of the same participant_id, which the times dataset refers to, can update the entry + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None) + + # 3.) Reference checking + InputValidationTimes.check_dataset_references(content) + + # 4.) 
Value checking + InputValidationTimes.check_dataset_values(user_data, loadedModel, content) + return + + @staticmethod + def evaluate_delete_data(user_data:dict, times_id:int): + # #TODO_determine: is times_id always an int or does the request.args call provide a string? + times_id = int(times_id) if not isinstance(times_id, int) else times_id + + # 1.) The dataset entry may not be deleted already + InputValidationTimes.check_if_entry_is_already_deleted(times_id) + + # 2.) Only users of the same participant_id, which the times dataset refers to, can delete the entry + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id) + return + + @staticmethod + def check_if_entry_is_already_deleted(times_id:int): + """ + When calling a delete request for times, the dataset may not be deleted already. This method + makes sure, that the request contains and ID, has a matching entry in the database. + When a times dataset is deleted, it is directly removed from the database. + + To identify deleted entries, query from the database and check, whether there is a match for the times id. + + """ + # perform an SQL query. Creates a pooled connection internally, queries the database, then closes the connection. + query = "SELECT shipcall_id FROM times WHERE id = ?id?" + pdata = execute_sql_query_standalone(query=query, param={"id":times_id}, pooledConnection=None) + + if len(pdata)==0: + raise ValidationError(f"The selected time entry is already deleted. ID: {times_id}") + return + + @staticmethod + def check_user_is_not_bsmd_type(user_data:dict): + """a new dataset may only be created by a user who is *not* belonging to participant group BSMD""" + is_bsmd = check_if_user_is_bsmd_type(user_data) + if is_bsmd: + raise ValidationError(f"current user belongs to BSMD. Cannot post 'times' datasets. Found user data: {user_data}") + return + + @staticmethod + def check_dataset_values(user_data:dict, loadedModel:dict, content:dict): + """ + this method validates POST and PUT data. Most of the dataset arguments are validated directly in the + BreCal.schemas.model.TimesSchema, using @validates. This is exclusive for 'simple' validation rules. + + This applies to: + "remarks" & "berth_info" + "eta_berth", "etd_berth", "lock_time", "zone_entry", "operations_start", "operations_end" + """ + # while InputValidationTimes.check_user_is_not_bsmd_type already validates a user, this method + # validates the times dataset. + + # ensure loadedModel["participant_type"] is of type ParticipantType + if not isinstance(loadedModel["participant_type"], ParticipantType): + loadedModel["participant_type"] = ParticipantType(loadedModel["participant_type"]) + + if ParticipantType.BSMD in loadedModel["participant_type"]: + raise ValidationError(f"current user belongs to BSMD. Cannot post times datasets. Found user data: {user_data}") + return + + @staticmethod + def check_dataset_references(content:dict): + """ + When IDs are referenced, they must exist in the database. This method individually validates the existance of referred + berth ID, participant IDs and shipcall ID. + + Note: whenever an ID is 'None', there is no exception, because a different method is supposed to capture non-existant mandatory fields. 
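+
+        Illustrative call (the IDs are hypothetical):
+            InputValidationTimes.check_dataset_references({"berth_id": 12, "participant_id": 136, "shipcall_id": 157})
+            # raises a marshmallow.ValidationError as soon as one of the referenced IDs is not found in the database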
+ """ + # extract the IDs + berth_id, participant_id, shipcall_id = content.get("berth_id"), content.get("participant_id"), content.get("shipcall_id") + + valid_berth_id_reference = check_if_berth_id_is_valid(berth_id) + if not valid_berth_id_reference: + raise ValidationError(f"The referenced berth_id '{berth_id}' does not exist in the database.") + + valid_shipcall_id_reference = check_if_shipcall_id_is_valid(shipcall_id) + if not valid_shipcall_id_reference: + raise ValidationError(f"The referenced shipcall_id '{shipcall_id}' does not exist in the database.") + + valid_participant_id_reference = check_if_participant_id_is_valid_standalone(participant_id) + if not valid_participant_id_reference: + raise ValidationError(f"The referenced participant_id '{participant_id}' does not exist in the database.") + + return + + @staticmethod + def check_times_required_fields_post_data(loadedModel:dict, content:dict): + """ + Depending on ShipcallType and ParticipantType, there is a rather complex set of required fields. + Independent of those types, any POST request for times should always include the default fields. + + The dependent and independent fields are validated by checking, whether the respective value in 'content' + is undefined (returns None). When any of these fields is undefined, a ValidationError is raised. + """ + participant_type = loadedModel["participant_type"] + shipcall_id = loadedModel["shipcall_id"] + + # build a dictionary of id:item pairs, so one can select the respective participant + # must look-up the shipcall_type based on the shipcall_id + shipcalls = get_shipcall_id_dictionary() + shipcall_type = ShipcallType[shipcalls.get(shipcall_id,{}).get("type",ShipcallType.undefined.name)] + + if (participant_type is None) or (int(shipcall_type) == int(ShipcallType.undefined)): + raise ValidationError(f"At least one of the required fields is missing. Missing: 'participant_type' or 'shipcall_type'") + + + # build a list of required fields based on shipcall and participant type, as well as type-independent fields + independent_required_fields = InputValidationTimes.get_post_data_type_independent_fields() + dependent_required_fields = InputValidationTimes.get_post_data_type_dependent_fields(shipcall_type, participant_type) + + required_fields = independent_required_fields + dependent_required_fields + + # generate a list of booleans, where each element shows, whether one of the required fields is missing. + missing_required_fields = [ + content.get(field,None) is None for field in required_fields + ] + + if any(missing_required_fields): + # create a tuple of (field_key, bool) to describe to a user, which one of the fields may be missing + verbosity_tuple = [(field, missing) for field, missing in zip(required_fields, missing_required_fields) if missing] + raise ValidationError(f"At least one of the required fields is missing. Missing: {verbosity_tuple}") + return + + @staticmethod + def check_times_required_fields_put_data(content:dict): + """in a PUT request, only the 'id' is a required field. All other fields are simply ignored, when they are not provided.""" + if content.get("id") is None: + raise ValidationError(f"A PUT-request requires an 'id' reference, which was not found.") + return + + @staticmethod + def get_post_data_type_independent_fields()->list[str]: + """ + Independent of the ShipcallType and ParticipantType, any POST request for times should always include the default fields. 
+ """ + independent_required_fields = [ + "shipcall_id", "participant_id", "participant_type" + ] + return independent_required_fields + + @staticmethod + def get_post_data_type_dependent_fields(shipcall_type:typing.Union[int, ShipcallType], participant_type:typing.Union[int, ParticipantType]): + """ + Depending on ShipcallType and ParticipantType, there is a rather complex set of required fields. + + Arriving shipcalls need arrival times (e.g., 'eta'), Departing shipcalls need departure times (e.g., 'etd') and + Shifting shipcalls need both times (e.g., 'eta' and 'etd'). + + Further, the ParticipantType determines the set of relevant times. In particular, the terminal uses + 'operations_start' and 'operations_end', while other users use 'eta_berth' or 'etd_berth'. + """ + # ensure that both, shipcall_type and participant_type, refer to the enumerators, as opposed to integers. + if not isinstance(shipcall_type, ShipcallType): + shipcall_type = ShipcallType(shipcall_type) + if not isinstance(participant_type, ParticipantType): + participant_type = ParticipantType(participant_type) + + # build a dictionary, which maps shipcall type and participant type to a list of fields + dependent_required_fields_dict = build_post_data_type_dependent_required_fields_dict() + + # select shipcall type & participant type + dependent_required_fields = dependent_required_fields_dict.get(shipcall_type,{}).get(participant_type,None) + return dependent_required_fields + + @staticmethod + def check_if_user_fits_shipcall_participant_map(user_data:dict, loadedModel:dict, content:dict, spm_shipcall_data=None): + """ + a new dataset may only be created, if the user belongs to the participant group (participant_id), + which is assigned to the shipcall within the ShipcallParticipantMap + + This method does not validate, what the POST-request contains, but it validates, whether the *user* is + authorized to send the request. + + options: + spm_shipcall_data: + data from the ShipcallParticipantMap, which refers to the respective shipcall ID. The SPM can be + an optional argument to allow for much easier unit testing. + """ + + # identify shipcall_id + shipcall_id = loadedModel["shipcall_id"] + + # identify user's participant_id & type (get all participants; then filter these for the {participant_id}) + participant_id = user_data["participant_id"] #participants = get_participant_id_dictionary() #participant_type = ParticipantType(participants.get(participant_id,{}).get("type")) + participant_type = ParticipantType(loadedModel["participant_type"]) if not isinstance(loadedModel["participant_type"],ParticipantType) else loadedModel["participant_type"] + + # get ShipcallParticipantMap for the shipcall_id + if spm_shipcall_data is None: + # read the ShipcallParticipantMap entry of the current shipcall_id. This is used within the input validation of a PUT request + # creates a list of {'participant_id: ..., 'type': ...} elements + spm_shipcall_data = execute_sql_query_standalone( + query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?", + param={"shipcall_id":shipcall_id}, + pooledConnection=None + ) + + # check, if participant_id is assigned to the ShipcallParticipantMap + matching_spm = [ + spm + for spm in spm_shipcall_data + if spm.get("participant_id")==participant_id + ] + + if not len(matching_spm)>0: + raise ValidationError(f'The participant group with id {participant_id} is not assigned to the shipcall. 
Found ShipcallParticipantMap: {spm_shipcall_data}') + + # check, if the assigned participant_id is assigned with the same role + matching_spm_element = matching_spm[0] + matching_spm_element_participant_type = ParticipantType(matching_spm_element.get("type")) + if not matching_spm_element_participant_type in participant_type: + raise ValidationError(f'The participant group with id {participant_id} is assigned to the shipcall in a different role. Request Role: {participant_type}, ShipcallParticipantMap Role Assignment: {matching_spm_element_participant_type}') + return + + @staticmethod + def check_if_entry_already_exists_for_participant_type(user_data:dict, loadedModel:dict, content:dict): + """determines, whether a dataset for the participant type is already present""" + # determine participant_type and shipcall_id from the loadedModel + participant_type = loadedModel["participant_type"] + if not isinstance(participant_type, ParticipantType): # ensure the correct data type + participant_type = ParticipantType(participant_type) + shipcall_id = loadedModel["shipcall_id"] + + # get all times entries of the shipcall_id from the database + times, status_code, headers = GetTimes(options={"shipcall_id":shipcall_id}) + times = json.loads(times) + + # check, if there is already a dataset for the participant type + participant_type_exists_already = any([ParticipantType(time_.get("participant_type",0)) in participant_type for time_ in times]) + if participant_type_exists_already: + raise ValidationError(f"A dataset for the participant type is already present. Participant Type: {participant_type}. Times Datasets: {times}") + return + + @staticmethod + def check_user_belongs_to_same_group_as_dataset_determines(user_data:dict, loadedModel:typing.Optional[dict]=None, times_id:typing.Optional[int]=None): + """ + This method checks, whether a user belongs to the same participant_id, as the dataset entry refers to. + It is used in, both, PUT requests and DELETE requests, but uses different arguments to determine the matching + time dataset entry. + + PUT: + loadedModel is unbundled to identify the matching times entry by the shipcall id + + DELETE: + times_id is used to directly identify the matching times entry + """ + assert not ((loadedModel is None) and (times_id is None)), f"must provide either loadedModel OR times_id. Both are 'None'" + assert (loadedModel is None) or (times_id is None), f"must provide either loadedModel OR times_id. Both are defined." + + # identify the user's participant id + user_participant_id = user_data["participant_id"] + + if loadedModel is not None: + shipcall_id = loadedModel["shipcall_id"] + participant_type = loadedModel["participant_type"] + + # get all times entries of the shipcall_id from the database as a list of {'participant_id':..., 'participant_type':...} elements + query = "SELECT participant_id, participant_type FROM times WHERE shipcall_id = ?shipcall_id?" + times = execute_sql_query_standalone(query=query, param={"shipcall_id":shipcall_id}, pooledConnection=None) + + # get the matching datasets, where the participant id is identical + time_datasets_of_participant_type = [time_ for time_ in times if time_.get("participant_type")==participant_type] + + # when there are no matching participants, raise a ValidationError + if not len(time_datasets_of_participant_type)>0: + raise ValidationError(f"Could not find a matching time dataset for the provided participant_type: {participant_type}. Found Time Datasets: {times}") + + # take the first match. 
There should always be only one match. + time_datasets_of_participant_type = time_datasets_of_participant_type[0] + participant_id_of_times_dataset = time_datasets_of_participant_type.get("participant_id") + + if times_id is not None: + # perform an SQL query. Creates a pooled connection internally, queries the database, then closes the connection. + query = "SELECT participant_id FROM times WHERE id = ?id?" + pdata = execute_sql_query_standalone(query=query, param={"id":times_id}, pooledConnection=None) + + # extracts the participant_id from the first matching entry, if applicable + if not len(pdata)>0: + # this case is usually covered by the InputValidationTimes.check_if_entry_is_already_deleted method already + raise ValidationError(f"Unknown times_id. Could not find a matching entry for ID: {times_id}") + else: + participant_id_of_times_dataset = pdata[0].get("participant_id") + + if user_participant_id != participant_id_of_times_dataset: + raise ValidationError(f"The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. User participant_id: {user_participant_id}; Dataset participant_id: {participant_id_of_times_dataset}") + return + + + + + + + diff --git a/src/server/BreCal/validators/input_validation_utils.py b/src/server/BreCal/validators/input_validation_utils.py new file mode 100644 index 0000000..325e3fb --- /dev/null +++ b/src/server/BreCal/validators/input_validation_utils.py @@ -0,0 +1,182 @@ +import logging +import json +from collections import Counter + +from BreCal.impl.participant import GetParticipant +from BreCal.impl.ships import GetShips +from BreCal.impl.berths import GetBerths +from BreCal.impl.shipcalls import GetShipcalls + +from BreCal.database.enums import ParticipantType + +def get_participant_id_dictionary(): + """ + get a dictionary of all participants, where the key is the participant's id, and the value is a dictionary + of common participant data (not a data model). 
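+
+    Illustrative return shape (IDs and values are hypothetical):
+        {136: {"id": 136, "type": 8, ...}, 137: {"id": 137, "type": 2, ...}}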
+ """ + # get all participants + response,status_code,header = GetParticipant(options={}) + + # build a dictionary of id:item pairs, so one can select the respective participant + participants = json.loads(response) + participants = {items.get("id"):items for items in participants} + return participants + +def get_berth_id_dictionary(): + # get all berths + response,status_code,header = GetBerths(token=None) + + # build a dictionary of id:item pairs, so one can select the respective participant + berths = json.loads(response) + berths = {items.get("id"):items for items in berths} + return berths + +def get_ship_id_dictionary(): + # get all ships + response,status_code,header = GetShips(token=None) + + # build a dictionary of id:item pairs, so one can select the respective participant + ships = json.loads(response) + ships = {items.get("id"):items for items in ships} + return ships + +def get_shipcall_id_dictionary(): + # get all ships + response,status_code,header = GetShipcalls(options={'past_days':30000}) + + # build a dictionary of id:item pairs, so one can select the respective participant + shipcalls = json.loads(response) + shipcalls = {items.get("id"):items for items in shipcalls} + return shipcalls + + +def get_participant_type_from_user_data(user_data:dict)->ParticipantType: + # user_data = decode token + participant_id = user_data.get("participant_id") + + # build a dictionary of id:item pairs, so one can select the respective participant + participants = get_participant_id_dictionary() + participant = participants.get(participant_id,{}) + participant_type = ParticipantType(participant.get("type",0)) + return participant_type + +def check_if_user_is_bsmd_type(user_data:dict)->bool: + """ + given a dictionary of user data, determine the respective participant id and read, whether + that participant is a .BSMD-type + + Note: ParticipantType is an IntFlag. + Hence, ParticipantType(1) is ParticipantType.BSMD, + and ParticipantType(7) is [ParticipantType.BSMD, ParticipantType.TERMINAL, ParticipantType.PILOT] + + both would return 'True' + + returns: boolean. Whether the participant id is a .BSMD type element + """ + # use the decoded JWT token and extract the participant type + participant_type = get_participant_type_from_user_data(user_data) + + # boolean check: is the participant of type .BSMD? + is_bsmd = ParticipantType.BSMD in participant_type + return is_bsmd + + +def check_if_ship_id_is_valid(ship_id): + """check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a shipcall POST-request, does not have to include all IDs at once""" + if ship_id is None: + return True + + # build a dictionary of id:item pairs, so one can select the respective participant + ships = get_ship_id_dictionary() + + # boolean check + ship_id_is_valid = ship_id in list(ships.keys()) + return ship_id_is_valid + +def check_if_berth_id_is_valid(berth_id): + """check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a shipcall POST-request, does not have to include all IDs at once""" + if berth_id is None: + return True + + # build a dictionary of id:item pairs, so one can select the respective participant + berths = get_berth_id_dictionary() + + # boolean check + berth_id_is_valid = berth_id in list(berths.keys()) + return berth_id_is_valid + +def check_if_shipcall_id_is_valid(shipcall_id:int): + """check, whether the provided ID is valid. If it is 'None', it will be considered valid. 
This is because a request may not have to include all IDs at once"""
+    if shipcall_id is None:
+        return True
+
+    # build a dictionary of id:item pairs, so one can select the respective participant
+    shipcalls = get_shipcall_id_dictionary()
+
+    # boolean check
+    shipcall_id_is_valid = shipcall_id in list(shipcalls.keys())
+    return shipcall_id_is_valid
+
+def check_if_participant_id_is_valid_standalone(participant_id:int):
+    """check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is because a request may not have to include all IDs at once"""
+    if participant_id is None:
+        return True
+
+    # build a dictionary of id:item pairs, so one can select the respective participant
+    participants = get_participant_id_dictionary()
+
+    # boolean check
+    participant_id_is_valid = participant_id in list(participants.keys())
+    return participant_id_is_valid
+
+def check_if_participant_id_is_valid(participant:dict):
+    """
+    check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is because a shipcall POST-request does not have to include all IDs at once
+
+    Following the common BreCal.schemas.model.ParticipantAssignmentSchema, a participant dictionary contains the keys:
+        'participant_id' : int
+        'type' : ParticipantType
+    """
+    # #TODO1: Daniel Schick: 'types may only appear once and must not include type "BSMD"'
+    participant_id = participant.get("participant_id", None)
+    participant_id_is_valid = check_if_participant_id_is_valid_standalone(participant_id)
+    return participant_id_is_valid
+
+def check_if_participant_ids_are_valid(participants:list[dict]):
+    """
+    check, whether every element of the provided participant list references an existing participant id
+
+    args:
+        participants (list of participant-elements)
+            Following the common BreCal.schemas.model.ParticipantAssignmentSchema, a participant dictionary contains the keys:
+                'participant_id' : int
+                'type' : ParticipantType
+    """
+    # a missing list (None) -> invalid
+    if participants is None:
+        return False
+
+    # check each participant id individually
+    valid_participant_ids = [check_if_participant_id_is_valid(participant) for participant in participants]
+
+    # boolean check, whether all participant ids are valid
+    return all(valid_participant_ids)
+
+def check_if_participant_ids_and_types_are_valid(participants:list[dict[str,int]]):
+    # creates a Counter object, which counts the number of unique elements
+    # key of counter: type (or participant_id), value of counter: number of listings in 'participants'
+    # e.g., {1: 4, 2: 1, 8: 1} (type 1 occurs 4 times in this example)
+    counter_type = Counter([participant.get("type") for participant in participants])
+    counter_id = Counter([participant.get("participant_id") for participant in participants])
+
+    # obtains the maximum count from the counter's values
+    max_count_type = max(list(counter_type.values())) if len(list(counter_type.values()))>0 else 0
+    max_count_ids = max(list(counter_id.values())) if len(list(counter_id.values()))>0 else 0
+
+    # when 0 or 1 count for the participant ids or types, return true. Return false, when there is more than one entry.
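+    # e.g., [{"participant_id": 1, "type": 2}, {"participant_id": 1, "type": 2}] -> False (hypothetical values; duplicated id and type)
+    # e.g., [{"participant_id": 1, "type": 2}, {"participant_id": 3, "type": 8}] -> True (each id and type occurs only once)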
+ return max_count_type <= 1 and max_count_ids <= 1 + + + + + + diff --git a/src/server/BreCal/validators/time_logic.py b/src/server/BreCal/validators/time_logic.py index 3c096a1..8ebad56 100644 --- a/src/server/BreCal/validators/time_logic.py +++ b/src/server/BreCal/validators/time_logic.py @@ -2,6 +2,8 @@ import datetime import numpy as np import pandas as pd +from marshmallow import ValidationError + def validate_time_exceeds_threshold(value:datetime.datetime, seconds:int=60, minutes:int=60, hours:int=24, days:int=30, months:int=12)->bool: """returns a boolean when the input value is very distant in the future. The parameters provide the threshold""" # time difference in seconds. Positive: in the future, Negative: in the past @@ -10,6 +12,37 @@ def validate_time_exceeds_threshold(value:datetime.datetime, seconds:int=60, min threshold = seconds*minutes*hours*days*months return time_>=threshold +def validate_time_is_in_future(value:datetime.datetime): + """returns a boolean when the input value is in the future.""" + current_time = datetime.datetime.now() + return value >= current_time + +def validate_time_is_in_not_too_distant_future(raise_validation_error:bool, value:datetime.datetime, seconds:int=60, minutes:int=60, hours:int=24, days:int=30, months:int=12)->bool: + """ + combines two boolean operations. Returns True when both conditions are met. + a) value is in the future + b) value is not too distant (e.g., at max. 1 year in the future) + + When the value is 'None', the validation will be skipped. A ValidationError is never issued, but the method returns 'False'. + + options: + raise_validation_error: boolean. If set to True, this method issues a marshmallow.ValidationError, when the conditions fail. + """ + if value is None: + return False + + is_in_future = validate_time_is_in_future(value) + is_too_distant = validate_time_exceeds_threshold(value, seconds, minutes, hours, days, months) + + if raise_validation_error: + if not is_in_future: + raise ValidationError(f"The provided value must be in the future. Current Time: {datetime.datetime.now()}, Value: {value}") + + if is_too_distant: + raise ValidationError(f"The provided value is in the too distant future and exceeds a threshold for 'reasonable' entries. Found: {value}") + + return is_in_future & (not is_too_distant) + class TimeLogic(): def __init__(self): return diff --git a/src/server/BreCal/validators/validation_base_utils.py b/src/server/BreCal/validators/validation_base_utils.py new file mode 100644 index 0000000..82ced1f --- /dev/null +++ b/src/server/BreCal/validators/validation_base_utils.py @@ -0,0 +1,20 @@ +from string import ascii_letters, digits + + +def check_if_string_has_special_characters(text:str): + """ + check, whether there are any characters within the provided string, which are not found in the ascii letters or digits + ascii_letters: abcd (...) and ABCD (...) + digits: 0123 (...) 
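+
+    Illustrative results:
+        check_if_string_has_special_characters("Bremen2024")    # -> False (only ascii letters and digits)
+        check_if_string_has_special_characters("Bremen 2024!")  # -> True (space and '!' are neither ascii letters nor digits)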
+ + Source: https://stackoverflow.com/questions/57062794/is-there-a-way-to-check-if-a-string-contains-special-characters + User: https://stackoverflow.com/users/10035985/andrej-kesely + returns bool + """ + return bool(set(text).difference(ascii_letters + digits)) + + +def check_if_int_is_valid_flag(value, enum_object): + # e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7 + max_int = sum([int(val) for val in list(enum_object._value2member_map_.values())]) + return 0 < value <= max_int \ No newline at end of file diff --git a/src/server/BreCal/validators/validation_rule_functions.py b/src/server/BreCal/validators/validation_rule_functions.py index 98fff6e..24bbc62 100644 --- a/src/server/BreCal/validators/validation_rule_functions.py +++ b/src/server/BreCal/validators/validation_rule_functions.py @@ -38,14 +38,16 @@ error_message_dict = { "validation_rule_fct_etd_time_not_in_tidal_window":"The tidal window does not fit to the agency's estimated time of departure (ETD) {Rule #0004B}", # 0005 A+B - "validation_rule_fct_too_many_identical_eta_times":"There are more than three ships with the same planned time of arrival (ETA) {Rule #0005A}", - "validation_rule_fct_too_many_identical_etd_times":"There are more than three ships with the same planned time of departure (ETD) {Rule #0005B}", + "validation_rule_fct_too_many_identical_eta_times":"More than three shipcalls are planned at the same time as the defined ETA {Rule #0005A}", + "validation_rule_fct_too_many_identical_etd_times":"More than three shipcalls are planned at the same time as the defined ETD {Rule #0005B}", # 0006 A+B "validation_rule_fct_agency_and_terminal_berth_id_disagreement":"Agency and Terminal are planning with different berths (the berth_id deviates). {Rule #0006A}", "validation_rule_fct_agency_and_terminal_pier_side_disagreement":"Agency and Terminal are planning with different pier sides (the pier_side deviates). {Rule #0006B}", } + + class ValidationRuleBaseFunctions(): """ Base object with individual functions, which the {ValidationRuleFunctions}-child refers to. @@ -71,6 +73,18 @@ class ValidationRuleBaseFunctions(): def get_no_violation_default_output(self): """return the default output of a validation function with no validation: a tuple of (GREEN state, None)""" return (StatusFlags.GREEN, None) + + def check_if_header_exists(self, df_times:pd.DataFrame, participant_type:ParticipantType)->bool: + """ + Given a pandas DataFrame, which contains times entries for a specific shipcall id, + this function checks, whether one of the times entries belongs to the requested ParticipantType. + + returns bool + """ + # empty DataFrames form a special case, as they might miss the 'participant_type' column. 
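+        # illustrative: for participant_type=ParticipantType.AGENCY and a df_times whose 'participant_type' column only holds TERMINAL rows, this returns False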
+ if len(df_times)==0: + return False + return participant_type in df_times.loc[:,"participant_type"].values def check_time_delta_violation_query_time_to_now(self, query_time:pd.Timestamp, key_time:pd.Timestamp, threshold:float)->bool: """ @@ -144,7 +158,6 @@ class ValidationRuleBaseFunctions(): return violation_state df_times = df_times.loc[df_times["participant_type"].isin(participant_types),:] - agency_time = [time_ for time_ in agency_times.loc[:,query].tolist() if isinstance(time_, pd.Timestamp)] # for the given query, e.g., 'eta_berth', sample all times from the pandas DataFrame # exclude missing entries and consider only pd.Timestamp entries (which ignores pd.NaT/null entries) @@ -172,6 +185,7 @@ class ValidationRuleBaseFunctions(): violation_state = any(time_difference_exceeds_threshold) # this (previous) solution compares times to the reference (agency) time and checks if the difference is greater than 15 minutes + # agency_time = [time_ for time_ in agency_times.loc[:,query].tolist() if isinstance(time_, pd.Timestamp)] # violation_state = ((np.max(estimated_times) - agency_time[0]) > pd.Timedelta("15min")) or ((agency_time[0] - np.min(estimated_times)) > pd.Timedelta("15min")) # this solution to the rule compares all times to each other. When there is a total difference of more than 15 minutes, a violation occurs @@ -762,10 +776,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): return self.get_no_violation_default_output() # check, if the header is filled in (agency & terminal) - if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1: return self.get_no_violation_default_output() # rule not applicable - if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL): + #if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1: return self.get_no_violation_default_output() # rule not applicable # get agency & terminal times @@ -805,10 +821,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): return self.get_no_violation_default_output() # check, if the header is filled in (agency & terminal) - if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): return self.get_no_violation_default_output() # rule not applicable - if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL): return self.get_no_violation_default_output() # rule not applicable # get agency & terminal times @@ -845,7 +863,8 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): return self.get_no_violation_default_output() # check, if the header is filled in (agency) - if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1: + # if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1: + if not self.check_if_header_exists(df_times, 
participant_type=ParticipantType.AGENCY): return self.get_no_violation_default_output() times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value) @@ -876,7 +895,8 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): return self.get_no_violation_default_output() # check, if the header is filled in (agency) - if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1: + # if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): return self.get_no_violation_default_output() times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value) @@ -898,16 +918,19 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): """ Code: #0005-A Type: Global Rule - Description: this validation rule checks, whether there are too many shipcalls with identical ETA times. + Description: this validation rule checks, whether there are too many shipcalls with identical times to the query ETA. """ - times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value] # check, if the header is filled in (agency) - if len(times_agency) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): # if len(times_agency) != 1: return self.get_no_violation_default_output() - # when ANY of the unique values exceeds the threshold, a violation is observed - query = "eta_berth" - violation_state = self.check_unique_shipcall_counts(query, times_agency=times_agency, rounding=rounding, maximum_threshold=maximum_threshold, all_times_agency=all_times_agency) + # get the agency's query time + times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value] + query_time = times_agency.iloc[0].eta_berth + + # count the number of times, where a times entry is very close to the query time (uses an internal threshold, such as 15 minutes) + counts = self.sql_handler.count_synchronous_shipcall_times(query_time, all_df_times=all_times_agency) + violation_state = counts > maximum_threshold if violation_state: validation_name = "validation_rule_fct_too_many_identical_eta_times" @@ -919,16 +942,19 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): """ Code: #0005-B Type: Global Rule - Description: this validation rule checks, whether there are too many shipcalls with identical ETD times. + Description: this validation rule checks, whether there are too many shipcalls with identical times to the query ETD. 
""" - times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value] # check, if the header is filled in (agency) - if len(times_agency) != 1: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): #if len(times_agency) != 1: return self.get_no_violation_default_output() - # when ANY of the unique values exceeds the threshold, a violation is observed - query = "etd_berth" - violation_state = self.check_unique_shipcall_counts(query, times_agency=times_agency, rounding=rounding, maximum_threshold=maximum_threshold, all_times_agency=all_times_agency) + # get the agency's query time + times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value] + query_time = times_agency.iloc[0].etd_berth + + # count the number of times, where a times entry is very close to the query time (uses an internal threshold, such as 15 minutes) + counts = self.sql_handler.count_synchronous_shipcall_times(query_time, all_df_times=all_times_agency) + violation_state = counts > maximum_threshold if violation_state: validation_name = "validation_rule_fct_too_many_identical_etd_times" @@ -943,10 +969,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): Description: This validation rule checks, whether agency and terminal agree with their designated berth place by checking berth_id. """ # check, if the header is filled in (agency & terminal) - if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): return self.get_no_violation_default_output() # rule not applicable - if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL): return self.get_no_violation_default_output() # rule not applicable times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value) @@ -979,13 +1007,14 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions): Description: This validation rule checks, whether agency and terminal agree with their designated pier side by checking pier_side. 
""" # check, if the header is filled in (agency & terminal) - if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): return self.get_no_violation_default_output() # rule not applicable - if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0: + # if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0: + if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL): return self.get_no_violation_default_output() # rule not applicable - times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value) times_terminal = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.TERMINAL.value) # when one of the two values is null, the state is GREEN diff --git a/src/server/BreCal/validators/validation_rules.py b/src/server/BreCal/validators/validation_rules.py index 405b0b6..03ffc1f 100644 --- a/src/server/BreCal/validators/validation_rules.py +++ b/src/server/BreCal/validators/validation_rules.py @@ -3,6 +3,7 @@ import logging import re import numpy as np import pandas as pd +import datetime from BreCal.database.enums import StatusFlags from BreCal.validators.validation_rule_functions import ValidationRuleFunctions from BreCal.schemas.model import Shipcall @@ -17,10 +18,6 @@ class ValidationRules(ValidationRuleFunctions): """ def __init__(self, sql_handler): # use the entire data that is provided for this query (e.g., json input) super().__init__(sql_handler) - - self.validation_state = self.determine_validation_state() - # currently flagged: notification_state initially was based on using one ValidationRules object for each query. This is deprecated. - # self.notification_state = self.determine_notification_state() # (state:str, should_notify:bool) return def evaluate(self, shipcall): @@ -31,9 +28,9 @@ class ValidationRules(ValidationRuleFunctions): returns: (evaluation_state, violations) """ # prepare df_times, which every validation rule tends to use - df_times = self.sql_handler.df_dict.get('times', pd.DataFrame()) # -> pd.DataFrame + all_df_times = self.sql_handler.df_dict.get('times', pd.DataFrame()) # -> pd.DataFrame - if len(df_times)==0: + if len(all_df_times)==0: return (StatusFlags.GREEN.value, []) spm = self.sql_handler.df_dict["shipcall_participant_map"] @@ -74,16 +71,26 @@ class ValidationRules(ValidationRuleFunctions): return evaluation_state, violations def evaluate_shipcalls(self, shipcall_df:pd.DataFrame)->pd.DataFrame: - """apply 'evaluate_shipcall_from_df' to each individual shipcall in {shipcall_df}. Returns shipcall_df ('evaluation' and 'evaluation_message' are updated)""" - results = shipcall_df.apply(lambda x: self.evaluate_shipcall_from_df(x), axis=1).values + """apply 'evaluate_shipcall_from_df' to each individual shipcall in {shipcall_df}. 
Returns shipcall_df ('evaluation', 'evaluation_message', 'evaluation_time' and 'evaluation_notifications_sent' are updated)""" + evaluation_states_old = [state_old for state_old in shipcall_df.loc[:,"evaluation"]] + evaluation_states_old = [state_old if not pd.isna(state_old) else 0 for state_old in evaluation_states_old] + results = shipcall_df.apply(lambda x: self.evaluate_shipcall_from_df(x), axis=1).values # returns tuple (state, message) - # unbundle individual results. evaluation_state becomes an integer, violation - evaluation_state = [StatusFlags(res[0]).value for res in results] + # unbundle individual results. evaluation_states becomes an integer, violation + evaluation_states_new = [StatusFlags(res[0]).value for res in results] violations = [",\r\n".join(res[1]) if len(res[1])>0 else None for res in results] violations = [self.concise_evaluation_message_if_too_long(violation) for violation in violations] - shipcall_df.loc[:,"evaluation"] = evaluation_state + # build the list of evaluation times ('now', as isoformat) + #evaluation_time = self.get_notification_times(evaluation_states_new) + + # build the list of 'evaluation_notifications_sent'. The value is 'False', when a notification should be created + #evaluation_notifications_sent = self.get_notification_states(evaluation_states_old, evaluation_states_new) + + shipcall_df.loc[:,"evaluation"] = evaluation_states_new shipcall_df.loc[:,"evaluation_message"] = violations + #shipcall_df.loc[:,"evaluation_time"] = evaluation_time + #shipcall_df.loc[:,"evaluation_notifications_sent"] = evaluation_notifications_sent return shipcall_df def concise_evaluation_message_if_too_long(self, violation): @@ -100,36 +107,27 @@ class ValidationRules(ValidationRuleFunctions): # e.g.: Evaluation message too long. Violated Rules: ['Rule #0001C', 'Rule #0001H', 'Rule #0001F', 'Rule #0001G', 'Rule #0001L', 'Rule #0001M', 'Rule #0001J', 'Rule #0001K'] violation = f"Evaluation message too long. Violated Rules: {concise}" return violation - - def determine_validation_state(self) -> str: + + def undefined_method(self) -> str: + """this function should apply the ValidationRules to the respective .shipcall, in regards to .times""" + return (StatusFlags.GREEN, False) # (state:str, should_notify:bool) + + def determine_notification_state(self, state_old, state_new): """ - this method determines the validation state of a shipcall. The state is either ['green', 'yellow', 'red'] and signals, - whether an entry causes issues within the workflow of users. - - returns: validation_state_new (str) + this method determines state changes in the notification state. When the state increases, a user is notified about it. + state order: (NONE = GREEN < YELLOW < RED) """ - (validation_state_new, description) = self.undefined_method() - # should there also be notifications for critical validation states? In principle, the traffic light itself provides that notification. - self.validation_state = validation_state_new - return validation_state_new + # identify a state increase + should_notify = self.identify_notification_state_change(state_old=state_old, state_new=state_new) - def determine_notification_state(self) -> (str, bool): + # when a state increases, a notification must be sent. 
Thereby, the field should be set to False ({evaluation_notifications_sent}) + evaluation_notifications_sent = False if bool(should_notify) else None + return evaluation_notifications_sent + + def identify_notification_state_change(self, state_old, state_new) -> bool: """ - this method determines state changes in the notification state. When the state is changed to yellow or red, - a user is notified about it. The only exception for this rule is when the state was yellow or red before, - as the user has then already been notified. - - returns: notification_state_new (str), should_notify (bool) - """ - (state_new, description) = self.undefined_method() # determine the successor - should_notify = self.identify_notification_state_change(state_new) - self.notification_state = state_new # overwrite the predecessor - return state_new, should_notify - - def identify_notification_state_change(self, state_new) -> bool: - """ - determines, whether the observed state change should trigger a notification. - internally, this function maps a color string to an integer and determines, if the successor state is more severe than the predecessor. + determines, whether the observed state change should trigger a notification. + internally, this function maps StatusFlags to an integer and determines, if the successor state is more severe than the predecessor. state changes trigger a notification in the following cases: green -> yellow @@ -138,15 +136,46 @@ class ValidationRules(ValidationRuleFunctions): (none -> yellow) or (none -> red) due to the values in the enumeration objects, the states are mapped to provide this function. - green=1, yellow=2, red=3, none=1. Hence, critical changes can be observed by simply checking with "greater than". + green=1, yellow=2, red=3, none=1. Hence, critical changes can be observed by simply checking with "greater than". returns bool, whether a notification should be triggered """ # state_old is always considered at least 'Green' (1) - state_old = max(copy.copy(self.notification_state) if "notification_state" in list(self.__dict__.keys()) else StatusFlags.NONE, StatusFlags.GREEN.value) - return state_new.value > state_old.value + if state_old is None: + state_old = StatusFlags.NONE.value + state_old = max(int(state_old), StatusFlags.GREEN.value) + return int(state_new) > int(state_old) + + def get_notification_times(self, evaluation_states_new)->list[datetime.datetime]: + """# build the list of evaluation times ('now', as isoformat)""" + evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))] + return evaluation_times + + def get_notification_states(self, evaluation_states_old, evaluation_states_new)->list[bool]: + """# build the list of 'evaluation_notifications_sent'. The value is 'False', when a notification should be created""" + evaluation_notifications_sent = [self.determine_notification_state(state_old=int(state_old), state_new=int(state_new)) for state_old, state_new in zip(evaluation_states_old, evaluation_states_new)] + return evaluation_notifications_sent - def undefined_method(self) -> str: - """this function should apply the ValidationRules to the respective .shipcall, in regards to .times""" - # #TODO_traffic_state - return (StatusFlags.GREEN, False) # (state:str, should_notify:bool) + +def inspect_shipcall_evaluation(vr, sql_handler, shipcall_id): + """ + # debug only! + + a simple debugging function, which serves in inspecting an evaluation function for a single shipcall id. 
It returns the result and all related data. + returns: result, shipcall_df (filtered by shipcall id), shipcall, spm (shipcall participant map, filtered by shipcall id), times_df (filtered by shipcall id) + """ + shipcall_df = sql_handler.df_dict.get("shipcall").loc[shipcall_id:shipcall_id,:] + + shipcall = Shipcall(**{**{"id":shipcall_id},**sql_handler.df_dict.get("shipcall").loc[shipcall_id].to_dict()}) + result = vr.evaluate(shipcall=shipcall) + notification_state = vr.identify_notification_state_change(state_old=int(shipcall.evaluation), state_new=int(result[0])) + print(f"Previous state: {int(shipcall.evaluation)}, New State: {result[0]}, Notification State: {notification_state}") + + times_df = sql_handler.df_dict.get("times") + times_df = times_df.loc[times_df["shipcall_id"]==shipcall_id] + + + spm = sql_handler.df_dict["shipcall_participant_map"] + spm = spm.loc[spm["shipcall_id"]==shipcall_id] + + return result, shipcall_df, shipcall, spm, times_df diff --git a/src/server/tests/database/__init__.py b/src/server/tests/database/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/server/tests/database/test_sql_queries.py b/src/server/tests/database/test_sql_queries.py new file mode 100644 index 0000000..88e546d --- /dev/null +++ b/src/server/tests/database/test_sql_queries.py @@ -0,0 +1,527 @@ +import pytest + +import os +import bcrypt +import pydapper +from BreCal import local_db +from BreCal.database.sql_handler import execute_sql_query_standalone +from BreCal.database.sql_queries import SQLQuery +from BreCal.schemas import model +from BreCal.stubs.user import get_user_simple + +instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance") +local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json") + +def test_sql_query_every_call_returns_str(): + assert isinstance(SQLQuery.get_berth(), str) + return + +def test_sql_query_get_berths(): + schemas = execute_sql_query_standalone(query=SQLQuery.get_berth(), param={}) + berths = [model.Berth(**schema) for schema in schemas] + assert all([isinstance(berth, model.Berth) for berth in berths]), f"one of the returned schemas is not a Berth object" + return + +def test_sql_query_get_history(): + options = {"shipcall_id":157} + history = execute_sql_query_standalone(query=SQLQuery.get_history(), param={"shipcallid" : options["shipcall_id"]}, model=model.History.from_query_row) + assert all([isinstance(hist,model.History) for hist in history]) + return + +def test_sql_query_get_user(): + options = {"username":"maxm"} + users = execute_sql_query_standalone(query=SQLQuery.get_user(), param={"username" : options["username"]}, model=model.User) + assert all([isinstance(user,model.User) for user in users]) + assert users[0].user_name==options["username"] + return + +def test_sql_get_notifications(): + import mysql.connector + + # unfortunately, there currently is *no* notification in the database. 
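All of the query tests in this module follow one access pattern: a static SQLQuery method returns the raw SQL string, and execute_sql_query_standalone runs it against the pooled connection, either returning plain row dictionaries or mapping each row onto a schema class through the model argument. A minimal sketch of both variants, assuming the connection pool has already been initialised via local_db.initPool as at the top of this module:

    from BreCal.database.sql_queries import SQLQuery
    from BreCal.database.sql_handler import execute_sql_query_standalone
    from BreCal.schemas import model

    # variant 1: fetch plain dictionaries and build the schema objects by hand
    rows = execute_sql_query_standalone(query=SQLQuery.get_berth(), param={})
    berths = [model.Berth(**row) for row in rows]

    # variant 2: let the executor map every row onto a schema class directly
    users = execute_sql_query_standalone(query=SQLQuery.get_user(),
                                         param={"username": "maxm"},
                                         model=model.User)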
+ with pytest.raises(mysql.connector.errors.ProgrammingError, match="Unknown column 'shipcall_id' in 'field list'"): + options = {"shipcall_id":417} + notifications = execute_sql_query_standalone(query=SQLQuery.get_notifications(), param={"scid" : options["shipcall_id"]}, model=model.Notification.from_query_row) + assert all([isinstance(notification,model.Notification) for notification in notifications]) + return + +def test_sql_get_participants(): + participants = execute_sql_query_standalone(query=SQLQuery.get_participants(), param={}, model=model.Participant) + assert all([isinstance(participant,model.Participant) for participant in participants]) + return + +def test_sql_get_participants(): + options = {"user_id":29} + query = SQLQuery.get_participant_by_user_id() + participants = execute_sql_query_standalone(query=query, param={"userid" : options["user_id"]}, model=model.Participant) + assert all([isinstance(participant,model.Participant) for participant in participants]) + assert len(participants)==1, f"there should only be one match for the respective user id" + assert participants[0].id == 136, f"user 29 belongs to participant_id 136" + return + +def test_sql_get_shipcalls(): + from BreCal.database.sql_queries import create_sql_query_shipcall_get + + # different styles for the same outcome + options = {"past_days":3000} + query = create_sql_query_shipcall_get(options) + shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row) + assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls]) + + query = SQLQuery.get_shipcalls() # defaults to 'past_days'=3 + shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row) + assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls]) + + query = SQLQuery.get_shipcalls({'past_days':3000}) + shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row) + assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls]) + + # fails: options must contain 'past_days' key + with pytest.raises(AssertionError, match="there must be a key 'past_days' in the options, which determines"): + query = SQLQuery.get_shipcalls(options={}) + shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row) + return + +def test_sql_get_ships(): + ships = execute_sql_query_standalone(query=SQLQuery.get_ships(), model=model.Ship) + assert all([isinstance(ship, model.Ship) for ship in ships]) + return + +def test_sql_get_times(): + options = {'shipcall_id':153} + times = execute_sql_query_standalone(query=SQLQuery.get_times(), model=model.Times, param={"scid" : options["shipcall_id"]}) + assert all([isinstance(time_,model.Times) for time_ in times]) + assert times[0].shipcall_id==options["shipcall_id"] + return + +def test_sql_get_user_by_id(): + # success: id 29 exists + schemaModel = get_user_simple().__dict__ + schemaModel["id"] = 29 + + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + + if theuser is sentinel: + pooledConnection.close() + + theuser.id == schemaModel["id"] + + # fails: id 292212 does not exist (returns default, which is the sentinel object in this case) + schemaModel = get_user_simple().__dict__ + schemaModel["id"] 
= 292212 + + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + + if theuser is sentinel: + pooledConnection.close() + + assert theuser is sentinel + return + +def test_sql_get_user_put(): + """the PUT query for a user must be built based on a schemaModel, as the available data is dynamic and the query must be adaptive.""" + schemaModel = get_user_simple().__dict__ + schemaModel["id"] = 29 + + query = "UPDATE user SET " + isNotFirst = False + for key in schemaModel.keys(): + if key == "id": + continue + if key == "old_password": + continue + if key == "new_password": + continue + if isNotFirst: + query += ", " + isNotFirst = True + query += key + " = ?" + key + "? " + + query += "WHERE id = ?id?" + + assert SQLQuery.get_user_put(schemaModel) == query + return + +def test_sql_user_put_set_lastname_check_unset_lastname_check(): + """ + Simply put, this method updates the last_name of user 29 to "Metz", verifies the change, + and then proceeds to set it back to "Mustermann", and verifies the change. + """ + # 1.) SET the last_name of user_id 29 to 'Metz' by a PUT command + schemaModel = get_user_simple().__dict__ + schemaModel["id"] = 29 + schemaModel["last_name"] = "Metz" # -> "Metz" -> "Mustermann" + schemaModel = {k:v for k,v in schemaModel.items() if k in ["id", "last_name"]} + + query = SQLQuery.get_user_put(schemaModel) + affected_rows = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + assert affected_rows==1, f"at least one row should be affected by this call." + + # 2.) GET the user_id 29 and verify the last_name is 'Metz' + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + pooledConnection.close() + assert not theuser is sentinel, f"failed GET user query" + assert theuser.last_name=="Metz", f"PUT command has been unsuccessful." + + # 3.) SET the last_name of user_id 29 to 'Mustermann' by a PUT command + schemaModel = theuser.__dict__ + schemaModel["last_name"] = "Mustermann" + schemaModel = {k:v for k,v in schemaModel.items() if k in ["id", "last_name"]} + + query = SQLQuery.get_user_put(schemaModel) + affected_rows = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + assert affected_rows==1, f"at least one row should be affected by this call." + + # 4.) GET the user_id 29 and verify the last_name is 'Mustermann' + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + pooledConnection.close() + assert not theuser is sentinel, f"failed GET user query" + assert theuser.last_name=="Mustermann", f"PUT command has been unsuccessful." + return + +def test_sql_user_update_password(): + """This test updates the default password of user 29 from 'Start1234' to 'Start4321' and afterwards sets it back to 'Start1234'.""" + # #TODO: this test very openly displays the password of 'maxm'. 
It makes sense to create a stub user in the database, which can be + # used for these tests, so an account without any importance or valuable assigned role is insecure instead. + + # Set. Update the password of user 29 from 'Start1234' to 'Start4321' + schemaModel = get_user_simple().__dict__ + schemaModel["id"] = 29 + schemaModel["old_password"] = "Start1234" + schemaModel["new_password"] = "Start4321" + + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + pooledConnection.close() + assert not theuser is sentinel, f"failed GET user query" + + assert bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")), f"old password does not match to the database entry" + + password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8') + query = SQLQuery.get_update_user_password() + affected_rows = execute_sql_query_standalone(query=query, param={"password_hash" : password_hash, "id" : schemaModel["id"]}, command_type="execute") + assert affected_rows == 1 + + # 2.) Revert. Set password back to the default (from 'Start4321' to 'Start1234') + schemaModel = get_user_simple().__dict__ + schemaModel["id"] = 29 + schemaModel["old_password"] = "Start4321" + schemaModel["new_password"] = "Start1234" + + sentinel = object() + query = SQLQuery.get_user_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User) + pooledConnection.close() + assert not theuser is sentinel, f"failed GET user query" + + assert bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")), f"old password does not match to the database entry" + + password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8') + query = SQLQuery.get_update_user_password() + affected_rows = execute_sql_query_standalone(query=query, param={"password_hash" : password_hash, "id" : schemaModel["id"]}, command_type="execute") + assert affected_rows == 1 + return + +def test_sql_get_participants_of_shipcall_id(): + shipcall_id = 389 + query = SQLQuery.get_participants() + participants = execute_sql_query_standalone(query=query, model=dict, param={"shipcall_id" : shipcall_id}) + assert all([part.get("participant_id") is not None for part in participants]) + assert all([part.get("type") is not None for part in participants]) + + # try to convert every participant into a model.Participant_Assignment + participants = [ + model.Participant_Assignment(part["participant_id"], part["type"]) + for part in participants + ] + assert all([isinstance(part,model.Participant_Assignment) for part in participants]) + return + +def test_sql_get_all_shipcalls_and_assign_participants(): + """ + this test reproduces the SQL query within BreCal.impl.shipcalls to make sure, that the + query first returns all shipcalls, and then assigns all participants of the respective shipcall to it. 
+ """ + # get all shipcalls + options = {'past_days':30000} + query = SQLQuery.get_shipcalls(options) + shipcalls = execute_sql_query_standalone(query=query, model=model.Shipcall.from_query_row) + assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls]) + + # for every shipcall, assign all of its participants to it + for shipcall in shipcalls: + participant_query = SQLQuery.get_participants() + participants = execute_sql_query_standalone(query=participant_query, model=dict, param={"shipcall_id" : shipcall.id}) + + for record in participants: + pa = model.Participant_Assignment(record["participant_id"], record["type"]) + shipcall.participants.append(pa) + + assert any([ + any([isinstance(participant, model.Participant_Assignment) for participant in shipcall.participants]) + for shipcall in shipcalls + ]), f"at least one of the shipcalls should have an assigned model.Participant_Assignment" + return + +def test_sqlquery_get_shipcal_post_identical_to_create_sql_query_shipcall_post(): + from BreCal.database.sql_queries import create_sql_query_shipcall_post + from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model + + post_data = get_stub_valid_shipcall_shifting() + schemaModel = get_stub_valid_ship_loaded_model(post_data) + query1 = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise) + query2 = create_sql_query_shipcall_post(schemaModel) + + assert query1==query2 + return + +def test_sql_post_shipcall(): + """issues a post-request with stub data and adds it to the database.""" + from BreCal.database.sql_queries import create_sql_query_shipcall_post + from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model + + pooledConnection = local_db.getPoolConnection() + try: + commands = pydapper.using(pooledConnection) + + post_data = get_stub_valid_shipcall_shifting() + post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters). + schemaModel = get_stub_valid_ship_loaded_model(post_data) + query = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise) + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute", pooledConnection=pooledConnection) + assert schemas==1, f"unsuccessful query execution. Query: {query}" + + # within the same pooledConnection, ask for the last inserted id + query = SQLQuery.get_shipcall_post_last_insert_id() + new_id = commands.execute_scalar(query) + assert new_id > 0, f"the new id should be unlike 0.." 
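The id lookup above only works because it runs on the same pooled connection as the INSERT: MySQL scopes LAST_INSERT_ID() per connection, so a freshly drawn connection would not return the id of the row that was just created. Assuming get_shipcall_post_last_insert_id() wraps SELECT LAST_INSERT_ID(), the pattern condenses to the following sketch:

    import pydapper
    from BreCal import local_db
    from BreCal.database.sql_queries import SQLQuery
    from BreCal.database.sql_handler import execute_sql_query_standalone
    from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model

    schemaModel = get_stub_valid_ship_loaded_model(get_stub_valid_shipcall_shifting())
    pooledConnection = local_db.getPoolConnection()
    try:
        commands = pydapper.using(pooledConnection)
        # the INSERT and the id lookup must share this one connection
        execute_sql_query_standalone(query=SQLQuery.get_shipcall_post(schemaModel),
                                     param=schemaModel,
                                     command_type="execute",
                                     pooledConnection=pooledConnection)
        new_id = commands.execute_scalar(SQLQuery.get_shipcall_post_last_insert_id())
    finally:
        pooledConnection.close()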
+ + # add participant assignments if we have a list of participants + if 'participants' in schemaModel: + pquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map() + for participant_assignment in schemaModel["participants"]: + schemas = execute_sql_query_standalone(query=pquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute", pooledConnection=pooledConnection) + + + from BreCal.stubs.user import get_user_simple + # assign an artificial user with id 29 (maxm) & participant type 136 + user_data = get_user_simple().__dict__ + user_data["id"] = 29 + user_data["participant_id"] = 136 + + # POST in the history + query = SQLQuery.create_sql_query_history_post() + schemas = execute_sql_query_standalone(query=query, param={"scid" : new_id, "pid" : user_data["participant_id"], "uid" : user_data["id"]}, command_type="execute", pooledConnection=pooledConnection) + assert schemas == 1, f"unsuccessful history POST" + + + finally: + pooledConnection.close() + return + +def test_sql_create_history_post_matches_legacy_function(): + from BreCal.database.sql_queries import create_sql_query_history_post + + query_refactored = SQLQuery.create_sql_query_history_post() + query_legacy = create_sql_query_history_post() + assert isinstance(query_refactored,str) + assert query_refactored==query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version" + return + +def test_sql_get_shipcall_by_id(): + schemaModel = {"id":63} + + sentinel = object() + query = SQLQuery.get_shipcall_by_id() + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + theshipcall = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.Shipcall) + pooledConnection.close() + assert not theshipcall is sentinel, f"failed GET user query" + assert theshipcall.id==schemaModel["id"] + return + +def test_sql_get_shipcall_by_id_short_version(): + schemaModel = {"id":63} + + # when model is defined, returns the data model + query = SQLQuery.get_shipcall_by_id() + schemas = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, model=model.Shipcall, command_type="single") + + assert schemas.id==schemaModel["id"] + assert isinstance(schemas, model.Shipcall) + + # when model = None, returns a dictionary + query = SQLQuery.get_shipcall_by_id() + schemas = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="single") + assert isinstance(schemas, dict) + assert schemas.get("id")==schemaModel["id"] + return + +def test_sql_get_shipcall_put_refactored_equals_extended_version(): + from BreCal.database.sql_queries import create_sql_query_shipcall_put + from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model + + post_data = get_stub_valid_shipcall_shifting() + post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters). + schemaModel = get_stub_valid_ship_loaded_model(post_data) + + legacy_query = create_sql_query_shipcall_put(schemaModel) + refactored_query = SQLQuery.get_shipcall_put(schemaModel) + + assert refactored_query == legacy_query, f"version conflict. the refactored query must precisely match the legacy query!" 
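Several of the lookup tests above (get_user_by_id, get_shipcall_by_id) pass a fresh object() as the default to pydapper's query_single_or_default. The point of the sentinel is that it can never collide with a legitimate return value, not even None, so an identity check cleanly separates "no row found" from "row found". A compact sketch of the pattern, reusing the shipcall id 63 from the test above:

    import pydapper
    from BreCal import local_db
    from BreCal.database.sql_queries import SQLQuery
    from BreCal.schemas import model

    sentinel = object()  # unique marker that no query result can ever equal
    pooledConnection = local_db.getPoolConnection()
    try:
        commands = pydapper.using(pooledConnection)
        shipcall = commands.query_single_or_default(SQLQuery.get_shipcall_by_id(),
                                                    sentinel,
                                                    param={"id": 63},
                                                    model=model.Shipcall)
    finally:
        pooledConnection.close()

    if shipcall is sentinel:
        ...  # handle the "not found" case explicitly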
+ return + +def test_sql_get_shipcall_participant_map_by_shipcall_id(): + schemaModel = {"id":152} + + query = SQLQuery.get_shipcall_participant_map_by_shipcall_id() + pdata = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="query") # existing list of assignments + + assert len(pdata)==4, f"there should be four assigned participants for the shipcall with id {schemaModel.get('id')}" + return + +def test_sql__get_shipcall__get_spm__optionally_update_shipcall(): + schemaModel = {'id': 152} + query = SQLQuery.get_shipcall_by_id() + shipcall = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="single", model=model.Shipcall) + + query = SQLQuery.get_shipcall_participant_map_by_shipcall_id() + pdata = execute_sql_query_standalone(query=query, param={"id" : shipcall.id}, command_type="query") # existing list of assignments + + assert len(pdata)==4, f"there should be four assigned participants for the shipcall with id {shipcall.id}" + + for participant_assignment in shipcall.participants: + found_participant = False + for elem in pdata: + if elem["participant_id"] == participant_assignment["participant_id"] and elem["type"] == participant_assignment["type"]: + found_participant = True + break + if not found_participant: + nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map() + ndata = execute_sql_query_standalone(query=nquery, param={"shipcall_id" : shipcall.id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute") # existing list of assignments + return + +def test_sql__shipcall_post__get_last_insert_id__get_spm__update_participants__verify_changes(): + """ + this combinatorial test: + 1.) creates a novel shipcall + 2.) obtains the ID of the just-created shipcall + 3.) reads the participant map for that ID and verifies, that there are no participants listed + 4.) iteratively updates the participant map of the ID (using proxy data) + 5.) verifies the update + """ + from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model + pooledConnection = local_db.getPoolConnection() + commands = pydapper.using(pooledConnection) + try: + + # 1.) create shipcall + post_data = get_stub_valid_shipcall_shifting() + post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters). + schemaModel = get_stub_valid_ship_loaded_model(post_data) + query = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise) + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute", pooledConnection=pooledConnection) + assert schemas==1, f"unsuccessful query execution. Query: {query}" + + # 2.) obtain the ID of the novel shipcall + # within the same pooledConnection, ask for the last inserted id + query = SQLQuery.get_shipcall_post_last_insert_id() + new_id = commands.execute_scalar(query) + assert new_id > 0, f"the new id should be unlike 0.." + + # 3.) read the ShipcallParticipantMap for the novel id + query = SQLQuery.get_shipcall_participant_map_by_shipcall_id() + pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments + assert len(pdata)==0, f"as the POST query does not include participants in this case, the novel id should not have assigned participants." 
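The POST tests in this module deliberately tag the rows they create with voyage = 'pytestRS71' but never remove them, so repeated runs accumulate shipcalls in the database. A session-scoped teardown could use that tag to clean up; this is only a suggestion, and both the table name and the raw DELETE statement are assumptions, since SQLQuery exposes no shipcall DELETE in this patch:

    import pytest
    from BreCal.database.sql_handler import execute_sql_query_standalone

    @pytest.fixture(scope="session", autouse=True)
    def cleanup_pytest_shipcalls():
        yield
        # assumed table/column names; dependent shipcall_participant_map and history rows
        # would need to be removed first if foreign keys are enforced
        execute_sql_query_standalone(query="DELETE FROM shipcall WHERE voyage = ?voyage?",
                                     param={"voyage": "pytestRS71"},
                                     command_type="execute")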
+ + ### proxy data ### + # loop across passed participant ids, creating entries for those not present in pdata + schemaModel = {'id': new_id, "participants":[{'id': 128, 'participant_id': 2, 'type': 4}, {'id': 129, 'participant_id': 3, 'type': 1}, {'id': 130, 'participant_id': 4, 'type': 2}, {'id': 131, 'participant_id': 6, 'type': 8}]} + + # 4.) assign the participants + for participant_assignment in schemaModel["participants"]: + found_participant = False + for elem in pdata: + if elem["participant_id"] == participant_assignment["participant_id"] and elem["type"] == participant_assignment["type"]: + found_participant = True + break + if not found_participant: + nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map() + ndata = execute_sql_query_standalone(query=nquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute") # existing list of assignments + + # 5.) verify the update (5 participants, including the false one) + query = SQLQuery.get_shipcall_participant_map_by_shipcall_id() + pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments + assert len(pdata)==5, f"due to the PUT, there shall now be five participants, as defined in schemaModel." + + # 6.) delete the incorrect participant (last entry in the list in this case) + dquery = SQLQuery.get_shipcall_participant_map_delete_by_id() + ddata = execute_sql_query_standalone(query=dquery, param={"existing_id" : pdata[-1].get("id")}, command_type="execute") + + # 7.) verify the update (now 4 participants) + query = SQLQuery.get_shipcall_participant_map_by_shipcall_id() + pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments + assert len(pdata)==4, f"due to the PUT, there shall now be five participants, as defined in schemaModel." 
+ + finally: + pooledConnection.close() + return + +def test_sql_query_get_shipcalls_is_identical_to_legacy_query(): + from BreCal.database.sql_queries import create_sql_query_shipcall_get + + options = {'past_days':7} + query_refactored = SQLQuery.get_shipcalls(options) + query_legacy = create_sql_query_shipcall_get(options) + + assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version" + return + +def test_sql_query_post_ship_is_identical_to_legacy_query(): + from BreCal.database.sql_queries import SQLQuery, create_sql_query_ship_post, create_sql_query_ship_put + from BreCal.stubs.ship import get_stub_valid_ship_loaded_model + + schemaModel = get_stub_valid_ship_loaded_model() + + query_refactored = SQLQuery.get_ship_post(schemaModel) + query_legacy = create_sql_query_ship_post(schemaModel) + + assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version" + return + +def test_sql_query_put_ship_is_identical_to_legacy_query(): + from BreCal.database.sql_queries import SQLQuery, create_sql_query_ship_post, create_sql_query_ship_put + from BreCal.stubs.ship import get_stub_valid_ship_loaded_model + + schemaModel = get_stub_valid_ship_loaded_model() + query_refactored = SQLQuery.get_ship_put(schemaModel) + query_legacy = create_sql_query_ship_put(schemaModel) + + assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version" + return + + + +#schemas = execute_sql_query_standalone(query=SQLQuery.get_berth(), param={}) diff --git a/src/server/tests/schemas/test_model.py b/src/server/tests/schemas/test_model.py new file mode 100644 index 0000000..8944f45 --- /dev/null +++ b/src/server/tests/schemas/test_model.py @@ -0,0 +1,80 @@ +from marshmallow import ValidationError +import pytest +from BreCal.schemas.model import ShipcallSchema + + +@pytest.fixture(scope="function") # function: destroy fixture at the end of each test +def prepare_shipcall_content(): + import datetime + from BreCal.stubs.shipcall import get_shipcall_simple + shipcall_stub = get_shipcall_simple() + content = shipcall_stub.__dict__ + content["participants"] = [] + content = {k:v.isoformat() if isinstance(v, datetime.datetime) else v for k,v in content.items()} + return locals() + +def test_shipcall_input_validation_draft(prepare_shipcall_content): + content = prepare_shipcall_content["content"] + content["draft"] = 24.11 + + schemaModel = ShipcallSchema() + with pytest.raises(ValidationError, match="Must be greater than 0 and less than or equal to 20."): + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return + +def test_shipcall_input_validation_voyage(prepare_shipcall_content): + content = prepare_shipcall_content["content"] + content["voyage"] = "".join(list(map(str,list(range(0,24))))) # 38 characters + + schemaModel = ShipcallSchema() + with pytest.raises(ValidationError, match="Longer than maximum length "): + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return + + +@pytest.fixture(scope="function") # function: destroy fixture at the end of each test +def prepare_user_content(): + import datetime + from BreCal.stubs.user import get_user_simple + from BreCal.schemas.model import UserSchema + schemaModel = UserSchema() + + user_stub = get_user_simple() + content = user_stub.__dict__ + content = {k:v.isoformat() if isinstance(v, datetime.datetime) 
else v for k,v in content.items()} + content = {k:v for k,v in content.items() if k in list(schemaModel.fields.keys())} + content["old_password"] = "myfavoritedog123" + content["new_password"] = "SecuRepassW0rd!" + return locals() + + +def test_input_validation_berth_phone_number_is_valid(prepare_user_content): + content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"] + content["user_phone"] = "+49123 45678912" # whitespace and + are valid + + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return + +def test_input_validation_berth_phone_number_is_invalid(prepare_user_content): + content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"] + content["user_phone"] = "+49123 45678912!" # ! is invalid + + with pytest.raises(ValidationError, match="one of the phone number values is not valid."): + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return + +def test_input_validation_new_password_too_short(prepare_user_content): + content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"] + content["new_password"] = "1234" # must have between 6 and 128 characters + + with pytest.raises(ValidationError, match="Length must be between 6 and 128."): + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return + +def test_input_validation_user_email_invalid(prepare_user_content): + content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"] + content["user_email"] = "userbrecal.com" # forgot @ -> invalid + + with pytest.raises(ValidationError, match="invalid email address"): + loadedModel = schemaModel.load(data=content, many=False, partial=True) + return diff --git a/src/server/tests/test_create_app.py b/src/server/tests/test_create_app.py index 652ae3d..706e872 100644 --- a/src/server/tests/test_create_app.py +++ b/src/server/tests/test_create_app.py @@ -8,13 +8,14 @@ def test_create_app(): import sys from BreCal import get_project_root - project_root = get_project_root("brecal") + project_root = os.path.join(os.path.expanduser("~"), "brecal") lib_location = os.path.join(project_root, "src", "server") sys.path.append(lib_location) from BreCal import create_app os.chdir(os.path.join(lib_location,"BreCal")) # set the current directory to ~/brecal/src/server/BreCal, so the config is found - application = create_app() + instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance") + application = create_app(test_config=None, instance_path=instance_path) return if __name__=="__main__": diff --git a/src/server/tests/validators/test_input_validation_ship.py b/src/server/tests/validators/test_input_validation_ship.py new file mode 100644 index 0000000..b1de16e --- /dev/null +++ b/src/server/tests/validators/test_input_validation_ship.py @@ -0,0 +1,226 @@ +import pytest + +import os +import jwt +import json +import requests +import datetime +import werkzeug +import re +from marshmallow import ValidationError + +from BreCal import local_db +from BreCal.schemas import model + +from BreCal.impl.ships import GetShips + +from BreCal.schemas.model import Participant_Assignment, EvaluationType, ShipcallType +from BreCal.stubs.ship import get_stub_valid_ship, get_stub_valid_ship_loaded_model +from BreCal.validators.input_validation import validation_error_default_asserts +from BreCal.schemas.model import ParticipantType +from BreCal.validators.input_validation_ship import 
InputValidationShip + +instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance") +local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json") + +@pytest.fixture(scope="session") +def get_stub_token(): + """ + performs a login to the user 'maxm' and returns the respective url and the token. The token will be used in + further requests in the following format (example of post-request): + requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + """ + port = 9013 + url = f"http://127.0.0.1:{port}" + + # set the JWT key + os.environ['SECRET_KEY'] = 'zdiTz8P3jXOc7jztIQAoelK4zztyuCpJ' + + try: + response = requests.post(f"{url}/login", json=jwt.decode("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6Im1heG0iLCJwYXNzd29yZCI6IlN0YXJ0MTIzNCJ9.uIrbz3g-IwwTLz6C1zXELRGtAtRJ_myYJ4J4x0ozjAI", key=os.environ.get("SECRET_KEY"), algorithms=["HS256"])) + except requests.ConnectionError as err: + raise AssertionError(f"could not establish a connection to the default url. Did you start an instance of the local database at port {port}? Looking for a connection to {url}") + user = response.json() + token = user.get("token") + return locals() + +def test_input_validation_ship_fails_when_length_is_incorrect(): + with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 1000.")): + post_data = get_stub_valid_ship() + post_data["length"] = 0 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 1000.")): + post_data = get_stub_valid_ship() + post_data["length"] = 1000 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + # success + post_data = get_stub_valid_ship() + post_data["length"] = 123 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_ship_fails_when_width_is_incorrect(): + with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 100.")): + post_data = get_stub_valid_ship() + post_data["width"] = 0 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 100.")): + post_data = get_stub_valid_ship() + post_data["width"] = 100 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + # success + post_data = get_stub_valid_ship() + post_data["width"] = 12 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_ship_fails_when_name_is_incorrect(): + with pytest.raises(ValidationError, match=re.escape("'name' argument should have at max. 
63 characters")): + post_data = get_stub_valid_ship() + post_data["name"] = "0123456789012345678901234567890123456789012345678901234567890123" + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("'name' argument should not have special characters.")): + post_data = get_stub_valid_ship() + post_data["name"] = '👽' + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + post_data = get_stub_valid_ship() + post_data["name"] = "012345678901234567890123456789012345678901234567890123456789012" + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_ship_fails_when_callsign_is_incorrect(): + with pytest.raises(ValidationError, match=re.escape("'callsign' argument should not have more than 8 characters")): + post_data = get_stub_valid_ship() + post_data["callsign"] = "123456789" + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("'callsign' argument should not have special characters.")): + post_data = get_stub_valid_ship() + post_data["callsign"] = '👽' + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + # success + post_data = get_stub_valid_ship() + post_data["callsign"] = 'PBIO' + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + # success + post_data = get_stub_valid_ship() + post_data["callsign"] = None + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + return + +def test_input_validation_ship_fails_when_imo_is_incorrect(): + # imo must have exactly 7 digits and can't be None + with pytest.raises(ValidationError, match=re.escape("'imo' should be a 7-digit number")): + post_data = get_stub_valid_ship() + post_data["imo"] = 123456 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("'imo' should be a 7-digit number")): + post_data = get_stub_valid_ship() + post_data["imo"] = 12345678 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + with pytest.raises(ValidationError, match=re.escape("Field may not be null.")): + post_data = get_stub_valid_ship() + post_data["imo"] = None + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + + # success + post_data = get_stub_valid_ship() + post_data["imo"] = 1234567 + loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_ship_fails_when_bollard_pull_and_tug_values_are_set(): + ivs = InputValidationShip() + + with pytest.raises(ValidationError, match=re.escape("'bollard_pull' is only allowed, when a ship is a tug ('is_tug').")): + content = {'is_tug':0, 'bollard_pull':230} + ivs.optionally_evaluate_bollard_pull_value(content) + + with pytest.raises(ValidationError, match=re.escape("'bollard_pull' is only allowed, when a ship is a tug ('is_tug').")): + content = {'is_tug':None, 'bollard_pull':230} + ivs.optionally_evaluate_bollard_pull_value(content) + + content = {'is_tug':0, 'bollard_pull':None} + ivs.optionally_evaluate_bollard_pull_value(content) + + content = {'is_tug':1, 'bollard_pull':None} + ivs.optionally_evaluate_bollard_pull_value(content) + + content = {'is_tug':1, 'bollard_pull':125} + ivs.optionally_evaluate_bollard_pull_value(content) + + with pytest.raises(ValidationError, 
match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")): + content = {'is_tug':1, 'bollard_pull':-1} + ivs.optionally_evaluate_bollard_pull_value(content) + + with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")): + content = {'is_tug':1, 'bollard_pull':0} + ivs.optionally_evaluate_bollard_pull_value(content) + + with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")): + content = {'is_tug':1, 'bollard_pull':500} + ivs.optionally_evaluate_bollard_pull_value(content) + + with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")): + content = {'is_tug':1, 'bollard_pull':501} + ivs.optionally_evaluate_bollard_pull_value(content) + return + +def test_input_validation_ship_post_request_fails_when_ship_imo_already_exists(): + # get the ships, convert them to a list of JSON dictionaries + response, status_code, header = GetShips(token=None) + ships = json.loads(response) + + # extract only the 'imo' values + ship_imos = [ship.get("imo") for ship in ships] + + post_data = get_stub_valid_ship() + post_data["imo"] = ship_imos[-1] # assign one of the IMOs, which already exist + loadedModel = get_stub_valid_ship_loaded_model(post_data) + content = post_data + + with pytest.raises(ValidationError, match="the provided ship IMO 9186687 already exists. A ship may only be added, if there is no other ship with the same IMO number."): + InputValidationShip.check_ship_imo_already_exists(loadedModel) + return + + + +def test_input_validation_ship_put_request_fails_when_ship_imo_should_be_changed(): + # get the ships, convert them to a list of JSON dictionaries + response, status_code, header = GetShips(token=None) + ships = json.loads(response) + selected_ship = ships[-1] # select one of the ships; in this case the last one. 
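The re.escape calls in the tests above are needed because pytest.raises interprets match= as a regular expression that is searched in the exception message; the validation messages contain regex metacharacters such as periods and parentheses, so they are escaped to force a literal match. The idiom, taken from the width check above:

    import re
    import pytest
    from marshmallow import ValidationError
    from BreCal.schemas import model
    from BreCal.stubs.ship import get_stub_valid_ship

    post_data = get_stub_valid_ship()
    post_data["width"] = 0
    # match= is a regex pattern, so the literal message is escaped before matching
    with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 100.")):
        model.ShipSchema().load(data=post_data, many=False, partial=True)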
+ + put_data = get_stub_valid_ship() + put_data["imo"] = selected_ship.get("imo")+1 # assign one of the IMOs, which already exist + + loadedModel = get_stub_valid_ship_loaded_model(put_data) + content = put_data + + with pytest.raises(ValidationError, match=re.escape("The IMO number field may not be changed since it serves the purpose of a primary (matching) key.")): + InputValidationShip.put_content_may_not_contain_imo_number(content) + return + + +def test_input_validation_ship_put_request_fails_when_ship_id_is_missing(): + put_data = get_stub_valid_ship() + put_data.pop("id",None) # make sure there is no ID within the put data for this test + + loadedModel = get_stub_valid_ship_loaded_model(put_data) + content = put_data + + with pytest.raises(ValidationError, match="The id field is required."): + InputValidationShip.content_contains_ship_id(content) + return diff --git a/src/server/tests/validators/test_input_validation_shipcall.py b/src/server/tests/validators/test_input_validation_shipcall.py new file mode 100644 index 0000000..925466c --- /dev/null +++ b/src/server/tests/validators/test_input_validation_shipcall.py @@ -0,0 +1,737 @@ +import pytest + +import os +import jwt +import json +import requests +import datetime +import werkzeug +from marshmallow import ValidationError + +from BreCal import local_db + +from BreCal.schemas.model import Participant_Assignment, EvaluationType, ShipcallType +from BreCal.stubs.shipcall import create_postman_stub_shipcall, get_stub_valid_shipcall_arrival, get_stub_valid_shipcall_departure, get_stub_valid_shipcall_shifting, get_stub_shipcall_arrival_invalid_missing_eta, get_stub_shipcall_shifting_invalid_missing_eta, get_stub_shipcall_shifting_invalid_missing_etd, get_stub_shipcall_arrival_invalid_missing_type, get_stub_shipcall_departure_invalid_missing_etd +from BreCal.stubs.participant import get_stub_list_of_valid_participants +from BreCal.validators.input_validation import validation_error_default_asserts +from BreCal.schemas.model import ParticipantType +from BreCal.validators.input_validation_shipcall import InputValidationShipcall + +instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance") +local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json") + +@pytest.fixture(scope="session") +def get_stub_token(): + """ + performs a login to the user 'maxm' and returns the respective url and the token. The token will be used in + further requests in the following format (example of post-request): + requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + """ + port = 9013 + url = f"http://127.0.0.1:{port}" + + # set the JWT key + os.environ['SECRET_KEY'] = 'zdiTz8P3jXOc7jztIQAoelK4zztyuCpJ' + + try: + response = requests.post(f"{url}/login", json=jwt.decode("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6Im1heG0iLCJwYXNzd29yZCI6IlN0YXJ0MTIzNCJ9.uIrbz3g-IwwTLz6C1zXELRGtAtRJ_myYJ4J4x0ozjAI", key=os.environ.get("SECRET_KEY"), algorithms=["HS256"])) + except requests.ConnectionError as err: + raise AssertionError(f"could not establish a connection to the default url. Did you start an instance of the local database at port {port}? 
Looking for a connection to {url}") + user = response.json() + token = user.get("token") + return locals() + +@pytest.fixture(scope="session") +def get_shipcall_id_after_stub_post_request(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + assert response.status_code==201 + shipcall_id = response.json().get("id") + assert shipcall_id is not None + return locals() + +def test_shipcall_post_request_fails_when_ship_id_is_invalid(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["ship_id"] = 1234562 + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + + with pytest.raises(ValidationError, match=f"provided an invalid ship id"): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + return + +def test_shipcall_post_request_fails_when_arrival_berth_id_is_invalid(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["arrival_berth_id"] = 1234562 + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + + with pytest.raises(ValidationError, match=f"provided an invalid arrival berth id"): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + return + +def test_shipcall_post_request_fails_when_departure_berth_id_is_invalid(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["departure_berth_id"] = 1234562 + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + + with pytest.raises(ValidationError, match=f"provided an invalid departure berth id"): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + return + +def test_shipcall_post_request_fails_when_participant_ids_are_invalid(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["participants"] = [Participant_Assignment(1234562,4).to_json()] # identical to: [{'participant_id': 1234562, 'type': 4}] + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + + with pytest.raises(ValidationError, match=f"one of the provided participant ids is invalid"): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + return + +def 
test_shipcall_post_request_fails_when_forbidden_keys_are_set(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + for forbidden_key, forbidden_value in zip(["canceled", "evaluation", "evaluation_message"], [1, EvaluationType.red.name, "random error message"]): + post_data = original_post_data.copy() + post_data[forbidden_key] = forbidden_value + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + assert response.status_code==400 + with pytest.raises(ValidationError, match=f"may not be set on POST. "): + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + return + +def test_shipcall_post_request_fails_when_draft_is_out_of_range(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["draft"] = 0 + + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + with pytest.raises(ValidationError, match=f"Must be greater than 0 and less than or equal to "): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["draft"] = 21 + + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + with pytest.raises(ValidationError, match=f"Must be greater than 0 and less than or equal to "): + assert response.status_code==400 + raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + + post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data["draft"] = 20 + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code==201, f"the request should accept 20.0 as a valid 'draft' value" + return + +def test_shipcall_post_request_fails_when_recommended_tugs_is_out_of_range(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data = original_post_data.copy() + post_data["recommended_tugs"] = 10 + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code == 201 + + post_data = original_post_data.copy() + post_data["recommended_tugs"] = 0 + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code == 201 + + post_data = original_post_data.copy() + post_data["recommended_tugs"] = 11 + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match=f"Must be greater than or equal to 0 and less than or equal to"): + assert response.status_code==400 + raise 
ValidationError(response.json()) # because the response does not raise a ValidationError, we artifically create it to check the pytest.raises outcome + + +def test_shipcall_post_request_fails_when_voyage_string_is_invalid(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + # Accept + post_data = original_post_data.copy() + post_data["voyage"] = "abcdefghijklmnop" + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code==201 + + # Fail: too long string + post_data = original_post_data.copy() + post_data["voyage"] = "abcdefghijklmnopq" + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Longer than maximum length 16"): + assert response.status_code==400 + raise ValidationError(response.json()) + + # Fail: special characters + post_data = original_post_data.copy() + post_data["voyage"] = '👽' + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Please use only digits and ASCII letters."): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_arrival_and_not_in_future(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + # accept + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.arrival + post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code == 201 + + # error + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.arrival + post_data["eta"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="must be in the future. Incorrect datetime provided"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_departure_and_not_in_future(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_departure() # create_postman_stub_shipcall() + + # accept + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code == 201 + + # error + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data["etd"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="must be in the future. 
Incorrect datetime provided"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_shifting_and_not_in_future(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_departure() # create_postman_stub_shipcall() + + # accept + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + assert response.status_code == 201 + + # error + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + post_data["etd"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="must be in the future. Incorrect datetime provided"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_arrival_and_missing_eta(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data = original_post_data.copy() + post_data.pop("eta", None) + post_data["type"] = ShipcallType.arrival + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Missing key!"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_departure_and_missing_etd(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data = original_post_data.copy() + post_data.pop("etd", None) + post_data["type"] = ShipcallType.departure + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Missing key!"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_shifting_and_missing_eta(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data.pop("eta", None) + post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Missing key!"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + +def test_shipcall_post_request_fails_when_type_shifting_and_missing_etd(get_stub_token): + url, token = get_stub_token["url"], 
get_stub_token["token"] + + original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall() + + post_data = original_post_data.copy() + post_data["type"] = ShipcallType.departure + post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat() + post_data.pop("etd", None) + response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data) + + with pytest.raises(ValidationError, match="Missing key!"): + assert response.status_code==400 + raise ValidationError(response.json()) + return + + + +def test_shipcall_post_invalid_tidal_window_to_smaller_than_tidal_window_from(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_shifting() + post_data["tidal_window_to"] = (datetime.datetime.fromisoformat(post_data["tidal_window_from"])-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'tidal_window_to\' must take place after \'tidal_window_from\'" in response.json().get("message","") + return + +def test_shipcall_post_invalid_tidal_windows_must_be_in_future(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_shifting() + post_data["tidal_window_from"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'tidal_window_from\' must be in the future. " in response.json().get("message","") + + post_data = get_stub_valid_shipcall_shifting() + post_data["tidal_window_to"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'tidal_window_to\' must be in the future. " in response.json().get("message","") + return + +def test_shipcall_post_invalid_canceled_must_not_be_set(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + stubs = [("canceled", 1), ("evaluation", "green"), ("evaluation_message", "this is an error message")] + + for key, value in stubs: + post_data = get_stub_valid_shipcall_shifting() + post_data[key] = value + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert f"\'{key}\' may not be set on POST. Found:" in response.json().get("message","") + return + +def test_shipcall_post_invalid_participant_type_listed_multiple_times(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + participants = get_stub_list_of_valid_participants() + + # create double entry for 'id' and 'type'. Either of the two should raise an exception. 
+ participants.append(participants[0]) + + post_data = get_stub_valid_shipcall_shifting() + post_data["participants"] = participants + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert f"every participant id and type should be listed only once. Found multiple entries for one of the participants." in response.json().get("message","") + return + +def test_shipcall_post_invalid_participants_missing_agency(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = {} + response = requests.get( + f"{url}/participants", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + participants = response.json() + participant_id_dict = {item.get("id"):{"participant_id":item.get("id"), "type":item.get("type")} for item in response.json()} + + # e.g., [{'participant_id': 2, 'type': 4}, {'participant_id': 3, 'type': 1}, {'participant_id': 4, 'type': 2}] + participants = [participant_id_dict[2], participant_id_dict[3], participant_id_dict[4]] + + post_data = get_stub_valid_shipcall_shifting() + post_data["participants"] = participants + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "One of the assigned participants *must* be of type \'ParticipantType.AGENCY\'" in response.json().get("message","") + return + +def test_shipcall_post_invalid_etd_smaller_than_eta(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_shifting() + post_data["etd"] = (datetime.datetime.fromisoformat(post_data["eta"])-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'etd\' must be larger than \'eta\'. " in response.json().get("message","") + return + +def test_shipcall_post_invalid_eta_and_etd_must_be_in_future(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_shifting() + post_data["etd"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'eta\' and \'etd\' must be in the future. " in response.json().get("message","") + + post_data = get_stub_valid_shipcall_shifting() + post_data["eta"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat() + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "\'eta\' and \'etd\' must be in the future. " in response.json().get("message","") + return + +def test_shipcall_post_request_missing_mandatory_keys(get_stub_token): # fixture: some sort of local API start in the background + """ + creates a valid shipcall entry and modifies it, by dropping one of the mandatory keys. This test ensures, + that each mandatory key raises a ValidationError, when the key is missing. 
+ """ + url = get_stub_token.get("url") + token = get_stub_token.get("token") + + post_data = get_stub_shipcall_arrival_invalid_missing_eta() + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'eta\' is mandatory." in response.json().get("message","") + + post_data = get_stub_shipcall_departure_invalid_missing_etd() + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'etd\' is mandatory." in response.json().get("message","") + + post_data = get_stub_shipcall_shifting_invalid_missing_eta() + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'eta\' and \'etd\' is mandatory." in response.json().get("message","") + + post_data = get_stub_shipcall_shifting_invalid_missing_etd() + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'eta\' and \'etd\' is mandatory." in response.json().get("message","") + + # the following keys all share the same logic and will be tested in sequence + for KEY in ["eta", "arrival_berth_id", "type", "ship_id"]: + + # artificially remove the KEY from a valid shipcall entry + post_data = get_stub_valid_shipcall_arrival() + post_data.pop(KEY,None) + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert f"providing \'{KEY}\' is mandatory." in response.json().get("message","") + + # BERTH ID (arrival or departure, based on type) + post_data = get_stub_valid_shipcall_arrival() + post_data.pop("arrival_berth_id",None) + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'arrival_berth_id\' is mandatory." in response.json().get("message","") + + + post_data = get_stub_valid_shipcall_departure() + post_data.pop("departure_berth_id",None) + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'departure_berth_id\' is mandatory." in response.json().get("message","") + + + post_data = get_stub_valid_shipcall_shifting() + post_data.pop("arrival_berth_id",None) + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'arrival_berth_id\' & \'departure_berth_id\' is mandatory." in response.json().get("message","") + + + post_data = get_stub_valid_shipcall_shifting() + post_data.pop("departure_berth_id",None) + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "providing \'arrival_berth_id\' & \'departure_berth_id\' is mandatory." 
in response.json().get("message","") + + return + + +def test_shipcall_post_invalid_agency_missing_participant_list(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_shifting() + + # keep all participants, but drop the agency + post_data["participants"] = [ + participant for participant in post_data.get("participants") + if not int(participant.get("type")) == int(ParticipantType.AGENCY) + ] + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + assert "One of the assigned participants *must* be of type \'ParticipantType.AGENCY\'" in response.json().get("message","") + return + +def test_shipcall_post_type_is_wrong(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + post_data = get_stub_valid_shipcall_arrival() + + # type 1 should be successful (201) + post_data["type"] = 1 + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + assert response.status_code == 201 + + # type 51 should not be successful (400 BAD REQUEST) + post_data["type"] = 51 + + response = requests.post( + f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data + ) + validation_error_default_asserts(response) + return + +def test_shipcall_put_request_fails_when_different_participant_id_is_assigned(get_shipcall_id_after_stub_post_request): + url, token, shipcall_id = get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["shipcall_id"] + + post_data = get_stub_valid_shipcall_arrival() + post_data["id"] = shipcall_id + + user_data = {'id':6, 'participant_id':1} + loadedModel = post_data + content = post_data + spm_shipcall_data = [{'participant_id': 6, 'type': 4}, + {'participant_id': 3, 'type': 1}, + {'participant_id': 4, 'type': 2}, + {'participant_id': 5, 'type': 8}] + + + # agency with different participant id is assigned + ivs = InputValidationShipcall() + with pytest.raises(werkzeug.exceptions.Forbidden, match=f"A different participant_id is assigned as the AGENCY of this shipcall. 
"): + ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data) + return + + +def test_shipcall_put_request_success(get_shipcall_id_after_stub_post_request): + url, token, shipcall_id = get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["shipcall_id"] + + post_data = get_stub_valid_shipcall_arrival() + post_data["id"] = shipcall_id + + # success happens, when shipcall data is valid, the user is authorized and the assigned spm shipcall data is suitable + user_data = {'id':6, 'participant_id':1} + loadedModel = post_data + content = post_data + spm_shipcall_data = [{'participant_id': 6, 'type': 8}, + {'participant_id': 3, 'type': 1}, + {'participant_id': 4, 'type': 2}, + {'participant_id': 5, 'type': 4}] + + + # success + ivs = InputValidationShipcall() + ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data) + return + +def test_shipcall_put_request_fails_when_no_agency_is_assigned(get_shipcall_id_after_stub_post_request): + url, token, shipcall_id = get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["shipcall_id"] + + post_data = get_stub_valid_shipcall_arrival() + post_data["id"] = shipcall_id + + user_data = {'id':6, 'participant_id':1} + loadedModel = post_data + content = post_data + spm_shipcall_data = [ + {'participant_id': 3, 'type': 1}, + {'participant_id': 4, 'type': 2}, + {'participant_id': 5, 'type': 4}] + + + # no agency assigned + ivs = InputValidationShipcall() + with pytest.raises(werkzeug.exceptions.Forbidden, match=f"There is no assigned agency for this shipcall."): + ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data) + return + +def test_shipcall_put_request_fails_when_user_is_not_authorized(get_shipcall_id_after_stub_post_request): + url, token, shipcall_id = get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["shipcall_id"] + + post_data = get_stub_valid_shipcall_arrival() + post_data["id"] = shipcall_id + + # user '1' is artificially set as participant 2, which has ParticipantType 4 (pilot), and is not authorized as an agency + user_data = {'id':1, 'participant_id':2} + loadedModel = post_data + content = post_data + spm_shipcall_data = [ + {'participant_id': 2, 'type': 8}, + {'participant_id': 3, 'type': 1}, + {'participant_id': 4, 'type': 2}, + {'participant_id': 5, 'type': 4}] + + + # current user is not authorized + ivs = InputValidationShipcall() + with pytest.raises(werkzeug.exceptions.Forbidden, match=f"PUT Requests for shipcalls can only be issued by AGENCY or BSMD users."): + ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data) + return + +def test_shipcall_put_request_fails_when_user_tries_self_assignment(get_shipcall_id_after_stub_post_request): + url, token, shipcall_id = get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["shipcall_id"] + + post_data = get_stub_valid_shipcall_arrival() + post_data["id"] = shipcall_id + + user_data = {'id':1, 'participant_id':6} + loadedModel = post_data + content = post_data + spm_shipcall_data = [{'participant_id': 6, 'type': 8}, + {'participant_id': 3, 'type': 1}, + {'participant_id': 4, 'type': 2}, + 
{'participant_id': 5, 'type': 4}] + + + # self-assignment. User is participant 6, and wants to assign participant 6. + ivs = InputValidationShipcall() + with pytest.raises(werkzeug.exceptions.Forbidden, match=f"An agency cannot self-register for a shipcall. The request is issued by an agency-user and tries to assign an AGENCY as the participant of the shipcall."): + ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data) + return + +def test_shipcall_put_request_fails_input_validation_shipcall_when_shipcall_is_canceled(get_stub_token): + url, token = get_stub_token["url"], get_stub_token["token"] + + # get all shipcalls and grab shipcall with ID 4 + # #TODO: there must be a better way to accomplish this easily... + response = requests.get(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, params={"past_days":30000}) + assert response.status_code==200 + assert isinstance(response.json(), list) + shipcalls = response.json() + + shipcall_id = 4 + sh4 = [sh for sh in shipcalls if sh.get("id")==shipcall_id][0] + put_data = {k:v for k,v in sh4.items() if k in ["eta", "type", "ship_id", "arrival_berth_id", "participants"]} + put_data["id"] = shipcall_id + + loadedModel = put_data + content = put_data + + # a canceled shipcall cannot be selected + with pytest.raises(ValidationError, match="The shipcall with id 'shipcall_id' is canceled. A canceled shipcall may not be changed."): + InputValidationShipcall.check_shipcall_is_canceled(loadedModel, content) + return diff --git a/src/server/tests/validators/test_input_validation_times.py b/src/server/tests/validators/test_input_validation_times.py new file mode 100644 index 0000000..98a2689 --- /dev/null +++ b/src/server/tests/validators/test_input_validation_times.py @@ -0,0 +1,398 @@ +import pytest + +import os +import random +import datetime +from marshmallow import ValidationError + +from BreCal import local_db +from BreCal.schemas import model + +from BreCal.schemas.model import ParticipantType +from BreCal.validators.input_validation_times import InputValidationTimes + +from BreCal.stubs.times_full import get_valid_stub_times, get_valid_stub_for_pytests + +instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance") +local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json") + + +def test_input_validation_times_fails_when_berth_info_exceeds_length_limit(): + # success + post_data = get_valid_stub_times() + post_data["berth_info"] = "a"*512 # 512 characters + model.TimesSchema().load(data=post_data, many=False, partial=True) + + post_data["berth_info"] = "" # 0 characters + model.TimesSchema().load(data=post_data, many=False, partial=True) + + # failure + with pytest.raises(ValidationError, match="Longer than maximum length 512."): + post_data["berth_info"] = "a"*513 # 513 characters + model.TimesSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_times_fails_when_remarks_exceeds_length_limit(): + # success + post_data = get_valid_stub_times() + post_data["remarks"] = "a"*512 # 512 characters + model.TimesSchema().load(data=post_data, many=False, partial=True) + + post_data["remarks"] = "" # 0 characters + model.TimesSchema().load(data=post_data, many=False, partial=True) + + # failure + with pytest.raises(ValidationError, match="Longer than maximum length 512."): + post_data["remarks"] = "a"*513 # 513 characters + 
model.TimesSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_times_fails_when_participant_type_is_bsmd(): + # BSMD -> Failure + post_data = get_valid_stub_times() + post_data["participant_type"] = int(ParticipantType.BSMD) + with pytest.raises(ValidationError, match="the participant_type must not be .BSMD"): + model.TimesSchema().load(data=post_data, many=False, partial=True) + + # IntFlag property: BSMD & AGENCY -> Failure + post_data = get_valid_stub_times() + post_data["participant_type"] = int(ParticipantType(ParticipantType.BSMD+ParticipantType.AGENCY)) + with pytest.raises(ValidationError, match="the participant_type must not be .BSMD"): + model.TimesSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_times_fails_when_time_key_is_not_reasonable(): + """ + every time key (e.g., 'eta_berth' or 'zone_entry') must be reasonable. The validation expects + these values to be 'in the future' (larger than datetime.datetime.now()) and not 'in the too distant future' + (e.g., more than one year from now.) + """ + for time_key in ["eta_berth", "etd_berth", "lock_time", "zone_entry", "operations_start", "operations_end"]: + post_data = get_valid_stub_times() + + # success + post_data[time_key] = (datetime.datetime.now() + datetime.timedelta(minutes=11)).isoformat() + model.TimesSchema().load(data=post_data, many=False, partial=True) + + # fails + with pytest.raises(ValidationError, match="The provided value must be in the future."): + post_data[time_key] = (datetime.datetime.now() - datetime.timedelta(minutes=11)).isoformat() + model.TimesSchema().load(data=post_data, many=False, partial=True) + + # fails + with pytest.raises(ValidationError, match="The provided value is in the too distant future and exceeds a threshold for 'reasonable' entries."): + post_data[time_key] = (datetime.datetime.now() + datetime.timedelta(days=367)).isoformat() + model.TimesSchema().load(data=post_data, many=False, partial=True) + return + +def test_input_validation_times_fails_when_user_is_bsmd_user(): + # create stub-data for a POST request + from BreCal.services.jwt_handler import decode_jwt + from BreCal.database.sql_utils import get_user_data_for_id + import re + + # user 4 is a BSMD user -> fails + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=4) + + with pytest.raises(ValidationError, match=re.escape("current user belongs to BSMD. Cannot post 'times' datasets.")): + InputValidationTimes.check_user_is_not_bsmd_type(user_data) + + # user 13 is not a BSMD user -> passes + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=13) + + # success + InputValidationTimes.check_user_is_not_bsmd_type(user_data) + return + +def test_input_validation_times_fails_when_participant_type_entry_already_exists(): + # the participant type already has an entry -> fails + with pytest.raises(ValidationError, match="A dataset for the participant type is already present. Participant Type:"): + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["participant_type"] = int(ParticipantType.AGENCY) + + # 2.) datasets may only be created, if the respective participant type did not already create one. 
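+ # the stub shipcall already carries a times dataset for the AGENCY participant type, so this check is expected to raise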
+ InputValidationTimes.check_if_entry_already_exists_for_participant_type(user_data, loadedModel, content) + return + +def test_input_validation_times_fails_when_participant_type_deviates_from_shipcall_participant_map(): + # success + # user id 3 is assigned as participant_type=4, but the stub assigns participant_type=4 + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content) + + # fails + # user id 4 is assigned as participant_type=1, but the stub assigns participant_type=4 + with pytest.raises(ValidationError, match="is assigned to the shipcall in a different role."): + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=4) + InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content) + return + +def test_input_validation_times_fails_when_id_references_do_not_exist(): + # success: all IDs exist + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + InputValidationTimes.check_dataset_references(content) + + # fails: IDs do not exist + # iterates once for each, berth_id, shipcall_id, participant_id and generates an artificial, non-existing ID + for key in ["berth_id", "shipcall_id", "participant_id"]: + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + content[key] = loadedModel[key] = 9912737 + + with pytest.raises(ValidationError, match=f"The referenced {key} '{content[key]}' does not exist in the database."): + InputValidationTimes.check_dataset_references(content) + return + +from BreCal.schemas.model import ParticipantType + + +def test_input_validation_times_fails_when_missing_required_fields_arrival(): + """ + evaluates every individual combination of arriving shipcalls, where one of the required values is arbitrarily missing + randomly selects one of the non-terminal ParticipantTypes, which are reasonable (not .BSMD), and validates. This makes sure, + that over time, every possible combination has been tested. + """ + # arrival + not-terminal + non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG] + for key in ["eta_berth"]+InputValidationTimes.get_post_data_type_independent_fields(): + random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0] + + # pass: all required fields exist for the current shipcall type (arrival/incoming) + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + loadedModel["participant_type"] = random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + loadedModel["participant_type"] = random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + with pytest.raises(ValidationError, match="At least one of the required fields is missing. 
Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # arrival + terminal + for key in ["operations_start"]+InputValidationTimes.get_post_data_type_independent_fields(): + # pass: all required fields exist for the current shipcall type (arrival/incoming) + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + + with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + return + +def test_input_validation_times_fails_when_missing_required_fields_departure(): + """ + evaluates every individual combination of departing shipcalls, where one of the required values is arbitrarily missing + randomly selects one of the non-terminal ParticipantTypes, which are reasonable (not .BSMD), and validates. This makes sure, + that over time, every possible combination has been tested. + """ + # departure + not-terminal + non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG] + + for key in ["etd_berth"]+InputValidationTimes.get_post_data_type_independent_fields(): + # select a *random* particiipant type, which is reasonable and *not* TERMINAL, and validate the function. + random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0] + + # pass + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 241 + loadedModel["participant_type"] = random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 241 + loadedModel["participant_type"] = random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + with pytest.raises(ValidationError, match="At least one of the required fields is missing. 
Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # departure + terminal + for key in ["operations_end"]+InputValidationTimes.get_post_data_type_independent_fields(): + # pass + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 241 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 241 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + return + +def test_input_validation_times_fails_when_missing_required_fields_shifting(): + """ + evaluates every individual combination of shifting shipcalls, where one of the required values is arbitrarily missing + randomly selects one of the non-terminal ParticipantTypes, which are reasonable (not .BSMD), and validates. This makes sure, + that over time, every possible combination has been tested. + """ + # shifting + not-terminal + non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG] + for key in ["eta_berth", "etd_berth"]+InputValidationTimes.get_post_data_type_independent_fields(): + # select a *random* particiipant type, which is reasonable and *not* TERMINAL, and validate the function. + random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0] + # pass: all required fields exist for the current shipcall type (arrival/incoming) + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 189 + loadedModel["participant_type"] =random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 189 + loadedModel["participant_type"] = random_participant_type_for_unit_test + content["participant_type"] = int(random_participant_type_for_unit_test) + with pytest.raises(ValidationError, match="At least one of the required fields is missing. 
Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # shifting + terminal + for key in ["operations_start", "operations_end"]+InputValidationTimes.get_post_data_type_independent_fields(): + # pass: all required fields exist for the current shipcall type (arrival/incoming) + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 189 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + + # fails: iteratively creates stubs, where one of the required keys is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + loadedModel["shipcall_id"] = content["shipcall_id"] = 189 + loadedModel["participant_type"] = ParticipantType.TERMINAL + content["participant_type"] = int(ParticipantType.TERMINAL) + with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"): + loadedModel[key] = content[key] = None + InputValidationTimes.check_times_required_fields_post_data(loadedModel, content) + return + + + +def test_input_validation_times_fails_when_participant_type_is_not_assigned__or__user_does_not_belong_to_the_same_participant_id(): + """ + There are two failure cases in InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines + 1.) when the participant type is simply not assigned + 2.) when the participant type matches to the user, but the participant_id is not assigned + + Test case: + shipcall_id 222 is assigned to the participants {"participant_id": 136, "type":2} and {"participant_id": 136, "type":8} + + Case 1: + When user_id 3 should be set as participant_type 4, the call fails, because type 4 is not assigned + + Case 2: + When user_id 2 (participant_id 2) should be set as participant_type 2, the call fails even though type 2 exists, + because participant_id 136 is assigned + + Case 3: + When user_id 28 (participant_id 136) is set as participant_type 2, the call passes. + """ + # fails: participant type 4 does not exist + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + participant_type = 4 + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + loadedModel["participant_id"] = content["participant_id"] = 2 + loadedModel["participant_type"] = content["participant_type"] = participant_type + + with pytest.raises(ValidationError, match=f"Could not find a matching time dataset for the provided participant_type: {participant_type}. Found Time Datasets:"): + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None) + + # fails: participant type 2 exists, but user_id 2 is part of the wrong participant_id group (user_id 28 or 29 would be) + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=2) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + participant_type = 2 + loadedModel["participant_type"] = content["participant_type"] = participant_type + with pytest.raises(ValidationError, match="The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. 
User participant_id:"): + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None) + + # pass: participant type 2 exists & user_id is part of participant_id group 136, which is correct + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28) + loadedModel["shipcall_id"] = content["shipcall_id"] = 222 + participant_type = 2 + loadedModel["participant_type"] = content["participant_type"] = participant_type + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None) + return + + +def test_input_validation_times_put_request_fails_when_id_field_is_missing(): + """used within PUT-requests. When 'id' is missing, a ValidationError is issued""" + # passes: as an 'id' is provided + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + content["id"] = 379 + InputValidationTimes.check_times_required_fields_put_data(content) + + # fails: 'id' field is missing + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + content.pop("id",None) + with pytest.raises(ValidationError, match="A PUT-request requires an 'id' reference, which was not found."): + InputValidationTimes.check_times_required_fields_put_data(content) + return + +def test_input_validation_times_delete_request_fails_when_times_id_is_deleted_already(): + # passes: id exists + times_id = 379 + InputValidationTimes.check_if_entry_is_already_deleted(times_id) + + # passes: id exists + times_id = 391 + InputValidationTimes.check_if_entry_is_already_deleted(times_id) + + # fails + times_id = 11 + with pytest.raises(ValidationError, match=f"The selected time entry is already deleted. ID: {times_id}"): + InputValidationTimes.check_if_entry_is_already_deleted(times_id) + + # fails + times_id = 4 + with pytest.raises(ValidationError, match=f"The selected time entry is already deleted. ID: {times_id}"): + InputValidationTimes.check_if_entry_is_already_deleted(times_id) + return + +def test_input_validation_times_delete_request_fails_when_times_id_does_not_exist_(): + # passes: times_id exists + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28) + times_id = 392 + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id) + + # fails: times_id does not exist + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28) + times_id = 4 + with pytest.raises(ValidationError, match=f"Unknown times_id. Could not find a matching entry for ID: {times_id}"): + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id) + return + +def test_input_validation_times_delete_request_fails_when_user_belongs_to_wrong_participant_id(): + # fails: participant_id should be 136, but user_id=3 belongs to participant_id=2 + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3) + times_id = 392 + + with pytest.raises(ValidationError, match=f"The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. 
User participant_id:"): + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id) + + # passes: participant_id should be 136, and user_id=28 belongs to participant_id=2 + user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28) + times_id = 392 + InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id) + return + + diff --git a/src/server/tests/validators/test_validation_rule_functions.py b/src/server/tests/validators/test_validation_rule_functions.py index fdd8152..e928474 100644 --- a/src/server/tests/validators/test_validation_rule_functions.py +++ b/src/server/tests/validators/test_validation_rule_functions.py @@ -12,7 +12,17 @@ from BreCal.stubs.df_times import get_df_times, random_time_perturbation, get_df @pytest.fixture(scope="session") def build_sql_proxy_connection(): import mysql.connector - conn_from_pool = mysql.connector.connect(**{'host':'localhost', 'port':3306, 'user':'root', 'password':'HalloWach_2323XXL!!', 'pool_name':'brecal_pool', 'pool_size':20, 'database':'bremen_calling', 'autocommit': True}) + import os + import json + connection_data_path = os.path.join(os.path.expanduser("~"),"secure","connection_data_local.json") + assert os.path.exists(connection_data_path) + + with open(connection_data_path, "r") as jr: + connection_data = json.load(jr) + connection_data = {k:v for k,v in connection_data.items() if k in ["host", "port", "user", "password", "pool_size", "pool_name", "database"]} + + conn_from_pool = mysql.connector.connect(**connection_data) + #conn_from_pool = mysql.connector.connect(**{'host':'localhost', 'port':3306, 'user':'root', 'password':'HalloWach_2323XXL!!', 'pool_name':'brecal_pool', 'pool_size':20, 'database':'bremen_calling_local', 'autocommit': True}) sql_handler = SQLHandler(sql_connection=conn_from_pool, read_all=True) vr = ValidationRules(sql_handler) return locals() @@ -654,6 +664,9 @@ def test_validation_rule_fct_missing_time_tug_berth_etd__shipcall_soon_but_parti def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_participant_estimated_time_undefined(build_sql_proxy_connection): """0001-L validation_rule_fct_missing_time_terminal_berth_eta""" vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = False shipcall = get_shipcall_simple() df_times = get_df_times(shipcall) @@ -684,6 +697,46 @@ def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_ # expectation: green state, no msg assert state==StatusFlags.YELLOW, f"function should return 'yellow', because the participant did not provide a time and the shipcall takes place soon (according to the agency)" + vr.ignore_terminal_flag = reset_to_default + return + +def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_participant_estimated_time_undefined__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection): + """0001-L validation_rule_fct_missing_time_terminal_berth_eta""" + vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = True + + shipcall = get_shipcall_simple() + df_times = get_df_times(shipcall) + + # according to the agency, a shipcall takes place soon (ETA/ETD) + df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "eta_berth"] = datetime.datetime.now() 
+ datetime.timedelta(minutes=ParticipantwiseTimeDelta.TERMINAL-10) + + # set times agency to be undetermined + df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_start"] = None # previously: eta_berth, which does not exist in times_terminal + + # must adapt the shipcall_participant_map, so it suits the test + agency_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "participant_id"].iloc[0] + terminal_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "participant_id"].iloc[0] + + spm = vr.sql_handler.df_dict["shipcall_participant_map"] + df = pd.DataFrame( + [ + {"id":10001, "shipcall_id":shipcall.id, "participant_id":agency_participant_id, "type":ParticipantType.AGENCY.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None}, + {"id":10002, "shipcall_id":shipcall.id, "participant_id":terminal_participant_id, "type":ParticipantType.TERMINAL.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None} + ] + ) + df.set_index("id", inplace=True) + spm = pd.concat([spm, df], axis=0, ignore_index=True) + vr.sql_handler.df_dict["shipcall_participant_map"] = spm + + # apply the validation rule + (state, msg) = vr.validation_rule_fct_missing_time_terminal_berth_eta(shipcall=shipcall, df_times=df_times) + + # expectation: green state, no msg + assert state==StatusFlags.GREEN, f"function should return 'green', becaues the ignore terminal flag is active. Hence, terminal validation rules are ignored." + vr.ignore_terminal_flag = reset_to_default return @@ -691,6 +744,9 @@ def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_ def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_participant_estimated_time_undefined(build_sql_proxy_connection): """0001-M validation_rule_fct_missing_time_terminal_berth_etd""" vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = False shipcall = get_shipcall_simple() shipcall.type = ShipcallType.OUTGOING.value @@ -723,6 +779,48 @@ def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_ # expectation: green state, no msg assert state==StatusFlags.YELLOW, f"function should return 'yellow', because the participant did not provide a time and the shipcall takes place soon (according to the agency)" + vr.ignore_terminal_flag = reset_to_default + return + +def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_participant_estimated_time_undefined__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection): + """0001-M validation_rule_fct_missing_time_terminal_berth_etd""" + vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = True + + shipcall = get_shipcall_simple() + shipcall.type = ShipcallType.OUTGOING.value + df_times = get_df_times(shipcall) + + # according to the agency, a shipcall takes place soon (ETA/ETD) + df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "etd_berth"] = datetime.datetime.now() + datetime.timedelta(minutes=ParticipantwiseTimeDelta.TERMINAL-10) + + # set times agency to be undetermined + df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_end"] = None # previously: etd_berth, which does not exist in times_terminal + + + # must adapt the 
shipcall_participant_map, so it suits the test + agency_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "participant_id"].iloc[0] + terminal_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "participant_id"].iloc[0] + + spm = vr.sql_handler.df_dict["shipcall_participant_map"] + df = pd.DataFrame( + [ + {"id":10001, "shipcall_id":shipcall.id, "participant_id":agency_participant_id, "type":ParticipantType.AGENCY.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None}, + {"id":10002, "shipcall_id":shipcall.id, "participant_id":terminal_participant_id, "type":ParticipantType.TERMINAL.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None} + ] + ) + df.set_index("id", inplace=True) + spm = pd.concat([spm, df], axis=0, ignore_index=True) + vr.sql_handler.df_dict["shipcall_participant_map"] = spm + + # apply the validation rule + (state, msg) = vr.validation_rule_fct_missing_time_terminal_berth_etd(shipcall=shipcall, df_times=df_times) + + # expectation: green state, no msg + assert state==StatusFlags.GREEN, f"function should return 'green', becaues the ignore terminal flag is active. Hence, terminal validation rules are ignored." + vr.ignore_terminal_flag = reset_to_default return @@ -912,6 +1010,10 @@ def test_validation_rule_fct_shipcall_shifting_participants_disagree_on_eta_or_e def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match(build_sql_proxy_connection): """0003-A validation_rule_fct_eta_time_not_in_operation_window""" vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = False + shipcall = get_shipcall_simple() df_times = get_df_times(shipcall) @@ -923,11 +1025,37 @@ def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match( (code, msg) = vr.validation_rule_fct_eta_time_not_in_operation_window(shipcall, df_times) assert code==StatusFlags.RED, f"status flag should be 'red', because the planned operations start is BEFORE the estimated time of arrival for the shipcall" + vr.ignore_terminal_flag = reset_to_default + return + +def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection): + """0003-A validation_rule_fct_eta_time_not_in_operation_window""" + vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = True + + shipcall = get_shipcall_simple() + df_times = get_df_times(shipcall) + + t0_time = datetime.datetime.now() # reference time for easier readability + + # the planned operations_start is before eta_berth (by one minute in this case) + df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "eta_berth"] = t0_time + datetime.timedelta(minutes=1) + df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_start"] = t0_time + datetime.timedelta(minutes=0) + + (code, msg) = vr.validation_rule_fct_eta_time_not_in_operation_window(shipcall, df_times) + assert code==StatusFlags.GREEN, f"the ignore terminal flag is active, so this validation rule is ignored. 
There should not be a violation" + vr.ignore_terminal_flag = reset_to_default return def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match(build_sql_proxy_connection): """0003-B validation_rule_fct_etd_time_not_in_operation_window""" vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = False + shipcall = get_shipcall_simple() shipcall.type = ShipcallType.SHIFTING.value df_times = get_df_times(shipcall) @@ -941,6 +1069,30 @@ def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match( (code, msg) = vr.validation_rule_fct_etd_time_not_in_operation_window(shipcall, df_times) assert code==StatusFlags.RED, f"status flag should be 'red', because the planned operations end is AFTER the estimated time of departure for the shipcall" + vr.ignore_terminal_flag = reset_to_default + return + +def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection): + """0003-B validation_rule_fct_etd_time_not_in_operation_window""" + vr = build_sql_proxy_connection['vr'] + import copy + reset_to_default = copy.deepcopy(vr.ignore_terminal_flag) + vr.ignore_terminal_flag = True + + shipcall = get_shipcall_simple() + shipcall.type = ShipcallType.SHIFTING.value + df_times = get_df_times(shipcall) + + t0_time = datetime.datetime.now() # reference time for easier readability + + # the planned operations_end is after etd_berth (by one minute in this case) + df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "etd_berth"] = t0_time + datetime.timedelta(hours=1) + df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_end"] = t0_time+datetime.timedelta(hours=1, minutes=1) + + + (code, msg) = vr.validation_rule_fct_etd_time_not_in_operation_window(shipcall, df_times) + assert code==StatusFlags.GREEN, f"the ignore terminal flag is active, so this validation rule is ignored. There should not be a violation" + vr.ignore_terminal_flag = reset_to_default return def test_validation_rule_fct_eta_time_not_in_operation_window_and_validation_rule_fct_etd_time_not_in_operation_window__always_okay(build_sql_proxy_connection): From d682cb3d267ad03393584f982f87c07c163a6123 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Mon, 10 Jun 2024 08:09:51 +0200 Subject: [PATCH 02/15] Created Notifier object, which provides the logic to create notifications and issue them to the respective users, depending on the type of notification they have selected. Building the skeleton, where the methods will be filled functionally soon. Adapted the History-schema. 
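
A rough sketch of the planned per-user channel selection (Notifier.build_notification_type_list is so far only named in a TODO; the flag-to-channel mapping below is an assumption, not the final implementation):

    def build_notification_type_list(user) -> list[str]:
        # map the user's notify_* settings to the notification channels that should be used
        channels = []
        if getattr(user, "notify_email", 0):
            channels.append("email")      # assumed to correspond to a NotificationType member
        if getattr(user, "notify_whatsapp", 0):
            channels.append("whatsapp")
        if getattr(user, "notify_signal", 0):
            channels.append("signal")
        if getattr(user, "notify_push", 0):
            channels.append("push")
        return channels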
--- src/server/BreCal/api/user.py | 4 + src/server/BreCal/database/sql_queries.py | 6 +- src/server/BreCal/impl/history.py | 2 +- src/server/BreCal/notifications/__init__.py | 0 .../notification_content_email.py | 53 +++ .../notifications/notification_functions.py | 13 + src/server/BreCal/notifications/notifier.py | 313 ++++++++++++++++++ src/server/BreCal/schemas/model.py | 11 +- .../BreCal/services/schedule_routines.py | 3 +- src/server/BreCal/stubs/notification.py | 10 +- 10 files changed, 401 insertions(+), 14 deletions(-) create mode 100644 src/server/BreCal/notifications/__init__.py create mode 100644 src/server/BreCal/notifications/notification_content_email.py create mode 100644 src/server/BreCal/notifications/notification_functions.py create mode 100644 src/server/BreCal/notifications/notifier.py diff --git a/src/server/BreCal/api/user.py b/src/server/BreCal/api/user.py index 2c3c1a0..14a03f8 100644 --- a/src/server/BreCal/api/user.py +++ b/src/server/BreCal/api/user.py @@ -11,6 +11,10 @@ bp = Blueprint('user', __name__) @bp.route('/user', methods=['put']) @auth_guard() # no restriction by role def PutUser(): + # #TODO: user validation should be extended by the notifications. When someone wants to set + # notify_email = 1, the email must be either present or part of the loadedModel + # notify_whatsapp = 1, there must be a phone number (same for notify_signal) + # notify_push = 1, there must be a phone number (#TODO_determine ... or an app-id? Unclear still) try: content = request.get_json(force=True) diff --git a/src/server/BreCal/database/sql_queries.py b/src/server/BreCal/database/sql_queries.py index 51a3dea..3f84d75 100644 --- a/src/server/BreCal/database/sql_queries.py +++ b/src/server/BreCal/database/sql_queries.py @@ -213,7 +213,7 @@ class SQLQuery(): @staticmethod def get_history()->str: - query = "SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?" + query = "SELECT id, participant_id, shipcall_id, user_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?" return query @staticmethod @@ -402,14 +402,14 @@ class SQLQuery(): @staticmethod def get_notification_post()->str: - raise NotImplementedError() + raise NotImplementedError("skeleton") # #TODO: this query is wrong and just a proxy for a POST request query = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)" return query @staticmethod def get_shipcall_put_notification_state()->str: - raise NotImplementedError() + raise NotImplementedError("skeleton") # #TODO: use evaluation_notifications_sent here and consider only the shipcall_id # #TODO: query query = ... 
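+ # a possible shape for this query (assumption; table and column names still to be confirmed):
+ # query = "UPDATE shipcall SET evaluation_notifications_sent = ?sent? WHERE id = ?shipcall_id?"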
diff --git a/src/server/BreCal/impl/history.py b/src/server/BreCal/impl/history.py index 53f159c..a2e1cb0 100644 --- a/src/server/BreCal/impl/history.py +++ b/src/server/BreCal/impl/history.py @@ -23,7 +23,7 @@ def GetHistory(options): if "shipcall_id" in options and options["shipcall_id"]: # query = SQLQuery.get_history() # data = commands.query(query, model=History.from_query_row, param={"shipcallid" : options["shipcall_id"]}) - data = commands.query("SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?", + data = commands.query("SELECT id, participant_id, shipcall_id, user_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?", model=History.from_query_row, param={"shipcallid" : options["shipcall_id"]}) diff --git a/src/server/BreCal/notifications/__init__.py b/src/server/BreCal/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/server/BreCal/notifications/notification_content_email.py b/src/server/BreCal/notifications/notification_content_email.py new file mode 100644 index 0000000..ce8d653 --- /dev/null +++ b/src/server/BreCal/notifications/notification_content_email.py @@ -0,0 +1,53 @@ +import pandas as pd +from BreCal.database.enums import ParticipantType, ShipcallType, StatusFlags + + + +#### Verbosity Functions #### +def get_default_header()->str: + # HEADER (greeting and default message) + header = "Dear Sir or Madam\n\nThank you for participating in the project 'Bremen Calling'. During analysis, our software has identified an event, which may be worth a second look. Here is the summary. \n\n" + return header + + +def get_default_footer()->str: + # FOOTER (signature) + footer = "\n\nWe would kindly ask you to have a look at the shipcall and verify, if any action is required from your side. 
\n\nKind regards\nThe 'Bremen Calling' Team" + return footer + + +def get_agency_name(sql_handler, times_df): + times_agency = times_df.loc[times_df["participant_type"]==ParticipantType.AGENCY.value,"participant_id"] + if len(times_agency)==0: + agency_name = "" + else: + agency_participant_id = times_agency.iloc[0] + agency_name = sql_handler.df_dict.get("participant").loc[agency_participant_id,"name"] + return agency_name + + +def get_ship_name(sql_handler, shipcall): + ship = sql_handler.df_dict.get("ship").loc[shipcall.ship_id] + ship_name = ship.loc["name"] # when calling ship.name, the ID is returned (pandas syntax) + return ship_name + + +def create_notification_body(sql_handler, times_df, shipcall, result)->str: + # #TODO: add 'Link zum Anlauf' + # URL: https://trello.com/c/qenZyJxR/75-als-bsmd-m%C3%B6chte-ich-%C3%BCber-gelbe-und-rote-ampeln-informiert-werden-um-die-systembeteiligung-zu-st%C3%A4rken + header = get_default_header() + footer = get_default_footer() + + agency_name = get_agency_name(sql_handler, times_df) + ship_name = get_ship_name(sql_handler, shipcall) + + verbosity_introduction = f"Respective Shipcall:\n" + traffic_state_verbosity = f"\tTraffic Light State: {StatusFlags(result[0]).name}\n" + ship_name_verbosity = f"\tShip: {ship_name} (the ship is {ShipcallType(shipcall.type).name.lower()})\n" + agency_name_verbosity = f"\tResponsible Agency: {agency_name}\n" + eta_verbosity = f"\tEstimated Arrival Time: {shipcall.eta.isoformat()}\n" if not pd.isna(shipcall.eta) else "" + etd_verbosity = f"\tEstimated Departure Time: {shipcall.etd.isoformat()}\n" if not pd.isna(shipcall.etd) else "" + error_verbosity = f"\nError Description:\n\t" + "\n\t".join(result[1]) + + message_body = "".join([header, verbosity_introduction, traffic_state_verbosity, ship_name_verbosity, agency_name_verbosity, eta_verbosity, etd_verbosity, error_verbosity, footer]) + return message_body \ No newline at end of file diff --git a/src/server/BreCal/notifications/notification_functions.py b/src/server/BreCal/notifications/notification_functions.py new file mode 100644 index 0000000..e61bfcd --- /dev/null +++ b/src/server/BreCal/notifications/notification_functions.py @@ -0,0 +1,13 @@ +import datetime +from BreCal.schemas.model import Notification +from BreCal.database.enums import NotificationType, StatusFlags + +def create_notification(id, times_id, message, level, notification_type:NotificationType, created=None, modified=None): + # #TODO_determine: determine, whether this function is still in active use. The data-model seems outdated. + created = (datetime.datetime.now()).isoformat() or created + + notification = Notification( + id=id, + times_id=times_id, acknowledged=False, level=level, type=notification_type.value, message=message, created=created, modified=modified + ) + return notification diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py new file mode 100644 index 0000000..dc74b91 --- /dev/null +++ b/src/server/BreCal/notifications/notifier.py @@ -0,0 +1,313 @@ +import typing +from BreCal.database.sql_handler import execute_sql_query_standalone +from BreCal.database.sql_queries import SQLQuery +from BreCal.schemas import model + +class Notifier(): + """ + This class provides quick access to different notification functions. + Each method is callable without initializing the Notifier object. + + Example: + Notifier.send_notifications(*args) + + The Notifier has three main methods. 
+ Notifier.send_notifications() --- can be called routinely. Identifies all candidates and notifies the users + Notifier.send_notification(shipcall) --- applies filters to identify, whether a notification is desired. If so, notifies the users + Notifer.create(user) --- 'naive' method, which simply creates a message and sends it to the user's preferred choice + + # #TODO_determine: it makes sense to go one step finer. .create could produce messages and recipients, whereas .publish may then issue those and .document may update the SQL dataset + ## naming idea: Notifier.send_notifications, Notifier.send_notification, Notifier.send (which may contain .create, .publish, .document) + """ + def __init__(self) -> None: + pass + + @staticmethod + def send_notifications(is_test:bool=False) -> None: + """ + This method is used in BreCal.services.schedule_routines and will issue notifications, once they are due. + It is purposely defined in a way, where no external dependencies or arguments are required. The only exception is the + 'is_test' boolean, as it prevents the notifications from being *actually* sent as part of the pytests. + + Steps: + - get all shipcalls + - filter: consider only those, which are not yet sent (uses shipcall.evaluation_notifications_sent) + - iterate over each remaining shipcall and apply .send_notification + - those which are unsent, shall be sent by the respective type + """ + raise NotImplementedError("skeleton") + + # get all shipcalls + all_shipcalls = NotImplementedError + + shipcalls = [shipcall for shipcall in all_shipcalls if not shipcall.evaluation_notifications_sent] + for shipcall in shipcalls: + notification_list = Notifier.send_notification(shipcall, is_test=is_test) + + # #TODO: get all notifications + # #TODO: get matching shipcall (based on shipcall_id) + + # #TODO: filter: consider only those, which are not yet sent + + # identify necessity + # #TODO: get the 'evaluation_notifications_sent' field from all shipcalls (based on shipcall_id) + # if not -> return + # USE shipcall.evaluation_notifications_sent + + # #TODO: those which are unsent, shall be created&sent by the respective type -- Note: consider the is_test argument + # iterate over the list of Notifier.build_notification_type_list + # one might use Notifier.create(..., update_database=True) + # use the History (GetHistory -- by shipcall_id) to identify all subscribed users + + # #TODO: update the shipcall dataset ('evaluation_notifications_sent') -- Note: consider the is_test argument + + # #TODO_clarify: how to handle the 'evaluation_notifications_sent', when there is no recipient? + return + + @staticmethod + def send_notification(shipcall:model.Shipcall, is_test:bool=False)->list[model.Notification]: + """ + Complex-function, which is responsible of creating notification messages, issuing them to users and optionally updating + the database. The requirement is, that the notification is required and passes through an internal set of filters. + + Steps: + - get all notifications of shipcall_id + - identify the assigned list of users + - apply all filters. When a filter triggers, exit. If not, create and send a notification. + """ + update_database = False if is_test else True + # #TODO: the concept of old state and new state must be refactored. 
+ # old state: read shipcall_id from notifications and look for the latest finding (if None -> EvaluationType.undefined) + # new state: read shipcall_id from shipcalls and look for the *current* 'evaluation' (-> EvaluationType(value)) + + # get existing notifications by shipcall_id (list) + existing_notifications = Notifier.get_existing_notifications(shipcall_id=shipcall.id) + old_state = NotImplementedError + + new_state = shipcall.evaluation + + # get User by querying all History objects of a shipcall_id + users = Notifier.get_users_via_history(shipcall_id=shipcall.id) + + # identify necessity + # state-check: Did the 'evaluation' shift to a higher level of severity? + severity_bool = Notifier.check_higher_severity(old_state, new_state) + if not severity_bool: + return None + + + # #TODO: time-based filter. There shall be 'enough' time between the evaluation time and NOW + evaluation_time = shipcall.evaluation_time + # latency_bool = #TODO_DIFFERENCE_FROM_NOW_TO_EVALUATION_TIME____THIS_METHOD_ALREADY_EXISTS(evaluation_time) + # careful: what is True, what is False? + # if latency_booL: + # return None + + notification_list = [] + for user in users: + notification = Notifier.create( + shipcall.id, + old_state, + new_state, + user, + update_database=update_database, + is_test=is_test + ) + notification_list.append(notification) + return notification_list + + @staticmethod + def publish(shipcall_id, old_state, new_state, user, update_database:bool=False)->typing.Optional[model.Notification]: + """ + Complex-function, which creates, sends and documents a notification. It serves as a convenience function. + The method does not apply internal filters to identify, whether a notification should be created in the first place. + + options: + update_database: bool. + # #TODO: instead of update_database, one may also use is_test + """ + # 1.) create + # ... = Notifier.create(shipcall_id, old_state, new_state, user) # e.g., might return a dictionary of dict[model.NotificationType, str], where str is the message + + # 2.) send + # ... = Notifier.send(...) # should contain internal 'logistics', which user the respective handlers to send notifications + + # 3.) document (mysql database) + # if update_database + # ... = Notifier.document(...) + raise NotImplementedError("skeleton") + return + + @staticmethod + def create(shipcall_id, old_state, new_state, user, update_database:bool=False)->typing.Optional[model.Notification]: + """ + Standalone function, which creates a Notification for a specific user. + + Steps: + - identify a list of notification_types, which shall be issued (based on the user's 'notify_*' settings) + - create messages based on the respective NotificationType, which the user has enabled + - send the messages + - update the shipcall dataset ('evaluation_notifications_sent') + + args: + update_database: whether to update the MySQL database by posting the notification. 
+ """ + assert user.id is not None + assert shipcall_id is not None + assert old_state is not None + assert new_state is not None + + # get Shipcall by shipcall_id + shipcall = Notifier.get_shipcall(shipcall_id=shipcall_id) + + """ + ## TODO: this might simply be removed due to incorrect concept + ## could also relocate this to the generation function, which identifies the notifications to be created + ## should be unnecessary due to shipcall.evaluation_notifications_sent + + # a) filter existing notifictions and consider only the dataset, where type (notification_type) and level (new_state) are suitable + notification_exists = Notifier.check_notification_type_and_level_exists(shipcall_id=shipcall_id, notification_type=notification_type, level=new_state, existing_notifications=existing_notifications) + if notification_exists: + return None + """ + + + # get a list of all subscribed notification types and track the state (success or failure) + successes = {} + notification_type_list = Notifier.build_notification_type_list(user) + for notification_type in notification_type_list: + # generate message based on the notification type + message = Notifier.generate_notification_message_by_type(notification_type, evaluation_message=shipcall.evaluation_message, user=user) + + # send the message + success_state = Notifier.send_notification_by_type(notification_type, message) + successes[notification_type] = success_state + + raise NotImplementedError("skeleton") + notification = ... + return notification + + @staticmethod + def find_latest_notification(notifications:list[model.Notification])->typing.Optional[model.Notification]: + """given a list of notification objects, this method returns the object, where the .created field corresponds to the *latest* notification object""" + latest_notification = sorted(notifications, key=lambda notification: notification.created, reverse=False)[-1] if len(notifications)>0 else None + return latest_notification + + @staticmethod + def get_users_via_history(shipcall_id:int)->list[model.User]: + """using the History objects, one can infer the user_id, which allows querying the Users""" + histories = execute_sql_query_standalone(query=SQLQuery.get_history(), param={"shipcallid" : shipcall_id}, model=model.History, command_type="query") + user_ids = [ + history.user_id + for history in histories + ] + users = [Notifier.get_user(user_id) for user_id in user_ids] + return users + + @staticmethod + def get_user(user_id:int)->model.User: + """Given a user_id, this method executes an SQL query to return a User""" + user = execute_sql_query_standalone(query=SQLQuery.get_user_by_id(), param={"id" : user_id}, model=model.User, command_type="single") + return user + + @staticmethod + def get_shipcall(shipcall_id:int)->model.Shipcall: + """Given a shipcall_id, this method executes an SQL query to return a Shipcall""" + shipcall = execute_sql_query_standalone(query=SQLQuery.get_shipcall_by_id(), param={"id" : shipcall_id}, model=model.Shipcall.from_query_row, command_type="single") + return shipcall + + @staticmethod + def get_existing_notifications(shipcall_id:int)->list[model.Notification]: + existing_notifications = execute_sql_query_standalone(query=SQLQuery.get_notifications(), param={"scid" : shipcall_id}, model=model.Notification, command_type="query") + return existing_notifications + + @staticmethod + def build_notification_type_list(user:model.User)->list[model.NotificationType]: + """ + based on a User, this method generates a list of notification types. 
These can be used as instructions to + generate the respective Notification datasets. + """ + notification_type_list = [] + + if user.notify_email: + notification_type_list.append(model.NotificationType.email) + + if user.notify_popup: + notification_type_list.append(model.NotificationType.push) + + if user.notify_whatsapp: + # currently not defined as a data model. Must be included / changed, once the data model of NotificationType is updated + notification_type_list.append(model.NotificationType.undefined) + + if user.notify_signal: + # currently not defined as a data model. Must be included / changed, once the data model of NotificationType is updated + notification_type_list.append(model.NotificationType.undefined) + return notification_type_list + + @staticmethod + def check_notification_type_and_level_exists(shipcall_id:int, notification_type:model.NotificationType, level:model.EvaluationType, existing_notifications:list[model.Notification])->bool: + """This method checks, whether one of the Notification elements in the provided list is a perfect match to the arguments shipcall_id, notification_type and level""" + # #TODO_determine: should a notification be *skipped*, when there already is a dataset with + # identical level and type? ---> currently enabled. + + # check, if any of the existing notifications is a perfect match for notification type & level & shipcall + matches = [note for note in existing_notifications if (int(level)==int(note.level)) and (int(note.type)==int(notification_type)) and (shipcall_id==note.shipcall_id)] + + # bool: whether there is a perfect match + exists = len(matches)>0 + raise Exception("deprecated") + return exists + + @staticmethod + def check_higher_severity(old_state:model.EvaluationType, new_state:model.EvaluationType)->bool: + """ + determines, whether the observed state change should trigger a notification. + internally, this function maps StatusFlags to an integer and determines, if the successor state is more severe than the predecessor. + + state changes trigger a notification in the following cases: + green -> yellow + green -> red + yellow -> red + + (none -> yellow) or (none -> red) + due to the values in the enumeration objects, the states are mapped to provide this function. + green=1, yellow=2, red=3, none=1. Hence, critical changes can be observed by simply checking with "greater than". + + returns bool, whether a notification should be triggered + """ + # undefined previous state: .undefined (0) + if old_state is None: + old_state = model.EvaluationType.undefined + + # old_state is always considered at least .green (1) (hence, .undefined becomes .green) + old_state = max(int(old_state), model.EvaluationType.green) + + # the IntEnum values are correctly sequenced. .red > .yellow > .green > .undefined + # as .undefined becomes .green, an old_state is always *at least* green. 
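# Illustrative truth table for the mapping just described (hedged sketch; assumes EvaluationType
# is an IntEnum with undefined=0, green=1, yellow=2, red=3, as stated above):
#     check_higher_severity(EvaluationType.green,  EvaluationType.yellow)  -> True
#     check_higher_severity(None,                  EvaluationType.red)     -> True   (None counts as green)
#     check_higher_severity(EvaluationType.yellow, EvaluationType.yellow)  -> False  (no escalation)
#     check_higher_severity(EvaluationType.red,    EvaluationType.yellow)  -> False  (de-escalation never triggers)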
+ severity_grew = int(new_state) > int(old_state) + return severity_grew + + @staticmethod + def generate_notification_message_by_type(notification_type:model.NotificationType, evaluation_message:str, user:model.User): + assert isinstance(user, model.User) + + if int(notification_type) == int(model.NotificationType.undefined): + raise NotImplementedError("skeleton") + elif int(notification_type) == int(model.NotificationType.email): + raise NotImplementedError("skeleton") + elif int(notification_type) == int(model.NotificationType.push): + raise NotImplementedError("skeleton") + #elif int(notification_type) == int(model.NotificationType.whatsapp): + #raise NotImplementedError("skeleton") + #elif int(notification_type) == int(model.NotificationType.signal): + #raise NotImplementedError("skeleton") + elif int(notification_type) == int(model.NotificationType.undefined): + raise NotImplementedError("skeleton") + else: + raise ValueError(notification_type) + return + + + +"""# build the list of evaluation times ('now', as isoformat)""" +#evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))] \ No newline at end of file diff --git a/src/server/BreCal/schemas/model.py b/src/server/BreCal/schemas/model.py index 92232e2..395905b 100644 --- a/src/server/BreCal/schemas/model.py +++ b/src/server/BreCal/schemas/model.py @@ -85,10 +85,11 @@ class ShipcallType(IntEnum): @dataclass class History: - def __init__(self, id, participant_id, shipcall_id, timestamp, eta, type, operation): + def __init__(self, id, participant_id, shipcall_id, user_id, timestamp, eta, type, operation): self.id = id self.participant_id = participant_id self.shipcall_id = shipcall_id + self.user_id = user_id self.timestamp = timestamp self.eta = eta self.type = type @@ -98,6 +99,7 @@ class History: id: int participant_id: int shipcall_id: int + user_id: int timestamp: datetime eta: datetime type: ObjectType @@ -108,6 +110,7 @@ class History: "id": self.id, "participant_id": self.participant_id, "shipcall_id": self.shipcall_id, + "user_id": self.user_id, "timestamp": self.timestamp.isoformat() if self.timestamp else "", "eta": self.eta.isoformat() if self.eta else "", "type": self.type.name, @@ -115,8 +118,8 @@ class History: } @classmethod - def from_query_row(self, id, participant_id, shipcall_id, timestamp, eta, type, operation): - return self(id, participant_id, shipcall_id, timestamp, eta, ObjectType(type), OperationType(operation)) + def from_query_row(self, id, participant_id, shipcall_id, user_id, timestamp, eta, type, operation): + return self(id, participant_id, shipcall_id, user_id, timestamp, eta, ObjectType(type), OperationType(operation)) class Error(Schema): message = fields.String(metadata={'required':True}) @@ -135,7 +138,7 @@ class Notification: """ id: int shipcall_id: int # 'shipcall record that caused the notification' - level: int # 'severity of the notification' + level: int # 'severity of the notification'. #TODO_determine: Should this be identical to EvaluationType? 
type: NotificationType # 'type of the notification' message: str # 'individual message' created: datetime diff --git a/src/server/BreCal/services/schedule_routines.py b/src/server/BreCal/services/schedule_routines.py index a36a1fc..2e0be3e 100644 --- a/src/server/BreCal/services/schedule_routines.py +++ b/src/server/BreCal/services/schedule_routines.py @@ -4,6 +4,7 @@ from BreCal.schemas import model from BreCal.local_db import getPoolConnection from BreCal.database.update_database import evaluate_shipcall_state from BreCal.database.sql_queries import create_sql_query_shipcall_get +from BreCal.notifications.notifier import Notifier import threading import schedule @@ -51,7 +52,7 @@ def add_function_to_schedule__update_shipcalls(interval_in_minutes:int, options: return def add_function_to_schedule__send_notifications(vr, interval_in_minutes:int=10): - schedule.every(interval_in_minutes).minutes.do(vr.notifier.send_notifications) + schedule.every(interval_in_minutes).minutes.do(Notifier.send_notifications) return diff --git a/src/server/BreCal/stubs/notification.py b/src/server/BreCal/stubs/notification.py index 8971a76..b1d42c0 100644 --- a/src/server/BreCal/stubs/notification.py +++ b/src/server/BreCal/stubs/notification.py @@ -1,21 +1,21 @@ import datetime from BreCal.stubs import generate_uuid1_int -from BreCal.schemas.model import Notification +from BreCal.schemas.model import Notification, NotificationType def get_notification_simple(): """creates a default notification, where 'created' is now, and modified is now+10 seconds""" notification_id = generate_uuid1_int() # uid? - times_id = generate_uuid1_int() # uid? - level = 10 - type = 0 + shipcall_id = 85 + level = 2 + type = NotificationType.email message = "hello world" created = datetime.datetime.now() modified = created+datetime.timedelta(seconds=10) notification = Notification( notification_id, - times_id, + shipcall_id, level, type, message, From 5ae2d74745e14e0f9570595449d4297588a2b3b0 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Mon, 10 Jun 2024 09:03:35 +0200 Subject: [PATCH 03/15] removed a comment. Notification types in the User's schema will remain booleans (email, whatsapp, signal, popup). Daniel clarified that some time ago. --- src/server/BreCal/schemas/model.py | 8 ++++---- src/server/BreCal/validators/time_logic.py | 13 +++++++++++-- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/server/BreCal/schemas/model.py b/src/server/BreCal/schemas/model.py index 395905b..5130c9c 100644 --- a/src/server/BreCal/schemas/model.py +++ b/src/server/BreCal/schemas/model.py @@ -507,10 +507,10 @@ class User: user_phone: str password_hash: str api_key: str - notify_email: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? - notify_whatsapp: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? - notify_signal: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? - notify_popup: bool # #TODO_clarify: should we use an IntFlag for multi-assignment? 
+ notify_email: bool + notify_whatsapp: bool + notify_signal: bool + notify_popup: bool created: datetime modified: datetime diff --git a/src/server/BreCal/validators/time_logic.py b/src/server/BreCal/validators/time_logic.py index 8ebad56..8a0e592 100644 --- a/src/server/BreCal/validators/time_logic.py +++ b/src/server/BreCal/validators/time_logic.py @@ -76,8 +76,17 @@ class TimeLogic(): minute_delta = delta / np.timedelta64(1, unit) return minute_delta - def time_delta_from_now_to_tgt(self, tgt_time, unit="m"): - return self.time_delta(datetime.datetime.now(), tgt_time=tgt_time, unit=unit) + def time_delta_from_now_to_tgt(self, tgt_time, unit="m", now_time=None): + """ + This method computes the timedelta between a target time {tgt_time} and the current timestamp. For the purpose of + reproducibility and testing, the current timestamp {now_time} can be overwritten. The default behaviour uses the + datetime.now() function. + """ + if now_time is None: + return self.time_delta(datetime.datetime.now(), tgt_time=tgt_time, unit=unit) + else: + assert isinstance(now_time,datetime.datetime), f"incorrect type for now_time: {now_time} with type {type(now_time)}" + return self.time_delta(now_time, tgt_time=tgt_time, unit=unit) def time_inbetween(self, query_time:datetime.datetime, start_time:datetime.datetime, end_time:datetime.datetime) -> bool: """ From cea615fe633aad5f62d3fdeeee8a1cf02d382844 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Tue, 30 Jul 2024 11:41:46 +0200 Subject: [PATCH 04/15] building out and preparing the EmailHandler's adaptive content. Using HTML formatting. --- src/server/BreCal/services/email_handling.py | 67 ++++++++++++++++++-- 1 file changed, 61 insertions(+), 6 deletions(-) diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py index e06021d..ba21027 100644 --- a/src/server/BreCal/services/email_handling.py +++ b/src/server/BreCal/services/email_handling.py @@ -12,6 +12,12 @@ import email from email.mime.multipart import MIMEMultipart from email.mime.application import MIMEApplication +import subprocess +import sys +import time +from tempfile import NamedTemporaryFile +import json +from cryptography.fernet import Fernet class EmailHandler(): """ @@ -36,7 +42,7 @@ class EmailHandler(): self.mail_port = mail_port self.mail_address = mail_address - self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, SMTP + self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, use smtplib.SMTP def check_state(self): """check, whether the server login took place and is open.""" @@ -59,7 +65,7 @@ class EmailHandler(): user = self.server.__dict__.get("user",None) return user is not None - def login(self, interactive:bool=True): + def login(self, interactive:bool=True, pwd=typing.Optional[bytes]): """ login on the determined mail server's mail address. By default, this function opens an interactive window to type the password without echoing (printing '*******' instead of readable characters). @@ -71,19 +77,30 @@ class EmailHandler(): (status_code, status_msg) = self.server.login(self.mail_address, password=getpass()) else: # fernet + password file - raise NotImplementedError() + assert pwd is not None, f"when non-interactive login is selected, one must provide a password" + assert isinstance(pwd, bytes), "please provide only byte-encrypted secure passwords. Those should be Fernet encoded." 
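# Hedged sketch of how such an encrypted password would be produced once, offline; the generated
# key is then stored under "fernet_key" in ~/secure/email_login_fernet_key.json, the same file
# that is read back a few lines below:
#     from cryptography.fernet import Fernet
#     key = Fernet.generate_key()                      # bytes; persist key.decode() in the JSON file
#     pwd = Fernet(key).encrypt(b"<mail password>")    # bytes; pass to login(interactive=False, pwd=pwd)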
+ + fernet_key_path = os.path.join(os.path.expanduser("~"), "secure", "email_login_fernet_key.json") + assert os.path.exists(fernet_key_path), f"cannot find fernet key file at path: {fernet_key_path}" + + with open(fernet_key_path, "r") as jr: + json_content = json.load(jr) + assert "fernet_key" in json_content + key = json_content.get("fernet_key").encode("utf8") + + (status_code, status_msg) = self.server.login(self.mail_address, password=Fernet(key).decrypt(pwd).decode()) return (status_code, status_msg) # should be: (235, b'2.7.0 Authentication successful') - def create_email(self, subject:str, message_body:str)->EmailMessage: + def create_email(self, subject:str, message_body:str, subtype:typing.Optional[str]=None, sender_address:typing.Optional[str]=None)->EmailMessage: """ Create an EmailMessage object, which contains the Email's header ("Subject"), content ("Message Body") and the sender's address ("From"). The EmailMessage object does not contain the recipients yet, as these will be defined upon sending the Email. """ msg = EmailMessage() msg["Subject"] = subject - msg["From"] = self.mail_address + msg["From"] = self.mail_address if sender_address is None else sender_address #msg["To"] = email_tgts # will be defined in self.send_email - msg.set_content(message_body) + msg.set_content(message_body, subtype=subtype) if subtype is not None else msg.set_content(message_body, subtype=subtype) return msg def build_recipients(self, email_tgts:list[str]): @@ -172,3 +189,41 @@ class EmailHandler(): self.server.quit() return + def preview_html_content(self, html:str, delete_after_s_seconds:typing.Optional[float]=None, file_path_dict:dict={}): + """ + Given an HTML-formatted text string, this method creates a temporary .html file and + spawns the local default webbrowser to preview the content. + + This method is useful to design or debug HTML files before sending them via the EmailHandler. + + When providing a floating point to the 'delete_after_s_seconds' argument, the temporary file will be + automatically removed after those seconds. The python script is blocked for the duration (using time.sleep) + + args: + file_path_dict: + it is common to refer to images via 'cid:FILE_ID' within the HTML content. The preview cannot + display this, as the attached files are missing. To circumvent this, one can provide a dictionary, which + replaced the referred key + (e.g., 'cid:FILE_ID') + with the actual path, such as a logo or remote absolute path + (e.g., 'file:///C:/Users/User/brecal/misc/logo_bremen_calling.png') + + Inspired by: https://stackoverflow.com/questions/53452322/is-there-a-way-that-i-can-preview-my-html-file + User: https://stackoverflow.com/users/355230/martineau + """ + for k, v in file_path_dict.items(): + html = html.replace(k, v) + + with NamedTemporaryFile(mode='wt', suffix='.html', delete=False, encoding="utf-8") as temp_file: + temp_file.write(html) + temp_filename = temp_file.name # Save temp file's name. 
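# Usage sketch for designing the notification template (illustrative values; the cid key follows the
# 'cid:FILE_ID' convention from the docstring, the file path is just an example):
#     handler.preview_html_content(html, delete_after_s_seconds=10.0,
#         file_path_dict={"cid:LogoBremenCalling": "file:///C:/Users/User/brecal/misc/logo_bremen_calling.png"})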
+ + command = f"{sys.executable} -m webbrowser -n {temp_filename}" + browser = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if delete_after_s_seconds is not None: + assert isinstance(delete_after_s_seconds, float) + time.sleep(delete_after_s_seconds) + if os.path.exists(temp_filename): + os.remove(temp_filename) + return \ No newline at end of file From 9cef84a5a8d7b4fd00ac2c0d76cf575bef8cce18 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Tue, 30 Jul 2024 17:20:51 +0200 Subject: [PATCH 05/15] creating an HTML Email template for notifications, which includes a logo file (#TODO: store logo within BreCal git). Built out most of the Notifier. Provided suitable SQLQueries and updated the EmailHandler. --- src/server/BreCal/database/sql_queries.py | 23 ++- src/server/BreCal/notifications/notifier.py | 165 +++++++++++++++++- src/server/BreCal/services/email_handling.py | 76 +++++++- .../BreCal/stubs/default_email_template.txt | 159 +++++++++++++++++ src/server/BreCal/stubs/email_template.py | 15 ++ 5 files changed, 428 insertions(+), 10 deletions(-) create mode 100644 src/server/BreCal/stubs/default_email_template.txt create mode 100644 src/server/BreCal/stubs/email_template.py diff --git a/src/server/BreCal/database/sql_queries.py b/src/server/BreCal/database/sql_queries.py index 3f84d75..aac4ff9 100644 --- a/src/server/BreCal/database/sql_queries.py +++ b/src/server/BreCal/database/sql_queries.py @@ -222,7 +222,7 @@ class SQLQuery(): "api_key, notify_email, notify_whatsapp, notify_signal, notify_popup, created, modified FROM user " +\ "WHERE user_name = ?username? OR user_email = ?username?" return query - + @staticmethod def get_notifications()->str: query = "SELECT id, shipcall_id, level, type, message, created, modified FROM notification " + \ @@ -266,6 +266,11 @@ class SQLQuery(): query = "SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship ORDER BY name" return query + @staticmethod + def get_ship_by_id()->str: + query = "SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship WHERE id = ?id?" + return query + @staticmethod def get_times()->str: query = "SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " + \ @@ -316,6 +321,22 @@ class SQLQuery(): query = prefix+stage1+bridge+stage2+suffix return query + + @staticmethod + def get_notifications_post(schemaModel:dict)->str: + param_keys = {key:key for key in schemaModel.keys()} + + prefix = "INSERT INTO notification (" + bridge = ") VALUES (" + suffix = ")" + + non_dynamic_keys = ["id", "created", "modified"] + + stage1 = ",".join([key for key in schemaModel.keys() if not key in non_dynamic_keys]) + stage2 = ",".join([f"?{param_keys.get(key)}?" 
for key in schemaModel.keys() if not key in non_dynamic_keys]) + + query = prefix+stage1+bridge+stage2+suffix + return query @staticmethod def get_last_insert_id()->str: diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index dc74b91..c4c538b 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -1,7 +1,18 @@ import typing +import datetime from BreCal.database.sql_handler import execute_sql_query_standalone from BreCal.database.sql_queries import SQLQuery from BreCal.schemas import model +from BreCal.brecal_utils.time_handling import difference_to_then + + +from BreCal.schemas.model import ShipcallType +eta_etd_type_dict = { + ShipcallType.arrival : "Ankunft", + ShipcallType.departure : "Abfahrt", + ShipcallType.shifting : "Wechselnd" +} + class Notifier(): """ @@ -141,6 +152,8 @@ class Notifier(): @staticmethod def create(shipcall_id, old_state, new_state, user, update_database:bool=False)->typing.Optional[model.Notification]: """ + # #TODO_refactor: drastically change this method. It should only generate notifications, but not send them. + Standalone function, which creates a Notification for a specific user. Steps: @@ -173,6 +186,7 @@ class Notifier(): # get a list of all subscribed notification types and track the state (success or failure) + raise NotImplementedError("skeleton") successes = {} notification_type_list = Notifier.build_notification_type_list(user) for notification_type in notification_type_list: @@ -183,7 +197,6 @@ class Notifier(): success_state = Notifier.send_notification_by_type(notification_type, message) successes[notification_type] = success_state - raise NotImplementedError("skeleton") notification = ... return notification @@ -197,11 +210,10 @@ class Notifier(): def get_users_via_history(shipcall_id:int)->list[model.User]: """using the History objects, one can infer the user_id, which allows querying the Users""" histories = execute_sql_query_standalone(query=SQLQuery.get_history(), param={"shipcallid" : shipcall_id}, model=model.History, command_type="query") - user_ids = [ - history.user_id - for history in histories - ] - users = [Notifier.get_user(user_id) for user_id in user_ids] + assert isinstance(histories,list) + assert all([isinstance(history,model.History) for history in histories]) + + users = [Notifier.get_user(history.user_id) for history in histories] return users @staticmethod @@ -306,8 +318,147 @@ class Notifier(): else: raise ValueError(notification_type) return + + @staticmethod + def get_eligible_shipcalls(): + """ + get all eligible shipcalls, which do not have a sent notification yet + criterion a) notification shall not be sent yet (evaluation_notifications_sent = 0) + criterion b) evaluation state is yellow or red (type 2 or 3) + """ + query = 'SELECT * FROM shipcall WHERE (evaluation_notifications_sent = ?evaluation_notifications_sent?) 
AND (evaluation = 2 OR evaluation = 3)' + evaluation_notifications_sent = 0 + eligible_shipcalls = execute_sql_query_standalone(query=query, model=model.Shipcall, param={"evaluation_notifications_sent" : evaluation_notifications_sent}) + return eligible_shipcalls + @staticmethod + def get_eligible_notifications_of_shipcall(shipcall:model.Shipcall, time_diff_threshold:float)->list[model.Notification]: + """obtain all notifications, which belong to the shipcall id""" + query = SQLQuery.get_notifications() + eligible_notifications = execute_sql_query_standalone(query=query, model=model.Notification, param={"scid" : shipcall.id}) + eligible_notifications = [notification for notification in eligible_notifications if Notifier.check_notification_exceeds_minimum_time_difference(notification, time_diff_threshold)] + eligible_notifications = [notification for notification in eligible_notifications if Notifier.check_notification_level_matches_shipcall_entry(notification, shipcall)] + return eligible_notifications + + @staticmethod + def check_notification_exceeds_minimum_time_difference(notification:model.Notification, time_diff_threshold:float): + """ + a notification may only be sent, when the created notification has been created or modified {time_diff_threshold} seconds ago. + """ + assert (notification.created is not None) or (notification.modified is not None), f"must provide either 'created' or 'modified'" + if notification.modified is not None: + return difference_to_then(notification.modified)>time_diff_threshold + else: + return difference_to_then(notification.created)>time_diff_threshold + + @staticmethod + def check_notification_level_matches_shipcall_entry(notification, shipcall): + """ + a notification may only be sent, when the shipcall entry matches the notification level. + otherwise, a user may have adapted the shipcall in the mean-time, so a notification would no longer be useful. + """ + return int(shipcall.evaluation) == int(notification.level) + + @staticmethod + def get_eligible_notifications(shipcalls:list[model.Shipcall], time_diff_threshold:float): + """obtain a list of all notifications of each element of the shipcall list.""" + eligible_notifications = [] + for shipcall in shipcalls: + eligible_notification = Notifier.get_eligible_notifications_of_shipcall(shipcall, time_diff_threshold) + eligible_notifications.extend(eligible_notification) + return eligible_notifications + + @staticmethod + def create_notifications_for_user_list(shipcall, users:list[model.User]): + for user in users: + notification_type_list = Notifier.build_notification_type_list(user) + + for notification_type in notification_type_list: + schemaModel = dict(shipcall_id = shipcall.id, level = int(shipcall.evaluation), type = notification_type, message = "", created = datetime.datetime.now(), modified=None) + query = SQLQuery.get_notifications_post(schemaModel) + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + return + + @staticmethod + def generate_notifications(shipcall_id): + """ + This one-line method creates all notifications for the provided shipcall id. It does so by obtaining the shipcall, + looking up its history, and finding all attached users. 
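Worked example (hedged sketch): for the schemaModel assembled in create_notifications_for_user_list above
(keys shipcall_id, level, type, message, created, modified, with id/created/modified treated as non-dynamic),
SQLQuery.get_notifications_post(schemaModel) is expected to yield
    INSERT INTO notification (shipcall_id,level,type,message) VALUES (?shipcall_id?,?level?,?type?,?message?)
and execute_sql_query_standalone then fills the ?key? placeholders from the same dictionary.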
+ For each user, a notification will be created for each subscribed notification type (e.g., Email) + """ + # get the respective shipcall + shipcall = Notifier.get_shipcall(shipcall_id) + + # find all attached users of the shipcall (checks the history, then reads out the user ids and builds the users) + users = Notifier.get_users_via_history(shipcall_id=shipcall.id) + + # for each user, create one notification for each subscribed notification type (e.g., Email) + Notifier.create_notifications_for_user_list(shipcall, users) + return + + @staticmethod + def create_etaetd_string(eta, etd): # #TODO_rename: function name is improvable + eta = eta.strftime("%d.%m.%Y %H:%M") if eta is not None else None + etd = etd.strftime("%d.%m.%Y %H:%M") if etd is not None else None + + eta_etd = "" + + if eta is not None and etd is not None: + eta_etd = f"{eta} - {etd}" + + if eta is not None and etd is None: + eta_etd = f"{eta}" + + if etd is None and etd is not None: + eta_etd = f"{etd}" + return eta_etd + + @staticmethod + def prepare_notification_body(notification): + # obtain the respective shipcall and ship + shipcall = execute_sql_query_standalone(query=SQLQuery.get_shipcall_by_id(), model=model.Shipcall, param={"id" : notification.shipcall_id}, command_type="single") + ship = execute_sql_query_standalone(query=SQLQuery.get_ship_by_id(), model=model.Ship, param={"id" : shipcall.ship_id}, command_type="single") + + # use ship & shipcall data models to prepare the body + ship_name = ship.name + eta_etd = Notifier.create_etaetd_string(shipcall.eta, shipcall.etd) + eta_etd_type = eta_etd_type_dict[ShipcallType(shipcall.type)] + evaluation_message = shipcall.evaluation_message + return (ship_name, evaluation_message, eta_etd, eta_etd_type) + + @staticmethod + def shipcall_put_update_evaluation_notifications_sent_flag(notification): + # change the 'evaluation_notifications_sent' flag + evaluation_notifications_sent = 1 + schemaModel = {"id":notification.shipcall_id, "evaluation_notifications_sent":evaluation_notifications_sent} + query = SQLQuery.get_shipcall_put(schemaModel) + + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + return + + @staticmethod + def check_user_is_subscribed_to_notification_type(user,notification_type): + """given a notification, one can check, whether the current user has subscribed to the respective notification_type. 
Returns a boolean""" + if int(notification_type) == int(model.NotificationType.email): + return user.notify_email + + elif int(notification_type) == int(model.NotificationType.push): + return user.notify_popup + + elif int(notification_type) == int(model.NotificationType.undefined): + pass + + ### placeholders: + #elif int(notification_type) == int(model.NotificationType.whatsapp): + #return user.notify_whatsapp + + #elif int(notification_type) == int(model.NotificationType.signal): + #return user.notify_signal + + else: # placeholder: whatsapp/signal + raise NotImplementedError(notification_type) """# build the list of evaluation times ('now', as isoformat)""" -#evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))] \ No newline at end of file +#evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))] + diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py index ba21027..a1e5a10 100644 --- a/src/server/BreCal/services/email_handling.py +++ b/src/server/BreCal/services/email_handling.py @@ -1,6 +1,8 @@ import os import typing +import datetime import smtplib +from socket import gaierror from getpass import getpass from email.message import EmailMessage import mimetypes @@ -42,7 +44,10 @@ class EmailHandler(): self.mail_port = mail_port self.mail_address = mail_address - self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, use smtplib.SMTP + try: + self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, use smtplib.SMTP + except gaierror: + raise Exception(f"'socket.gaierror' raised. This commonly happens, when there is no access to the server (e.g., by not having an internet connection)") def check_state(self): """check, whether the server login took place and is open.""" @@ -226,4 +231,71 @@ class EmailHandler(): time.sleep(delete_after_s_seconds) if os.path.exists(temp_filename): os.remove(temp_filename) - return \ No newline at end of file + return + +import typing +from email.mime.application import MIMEApplication +import mimetypes + +def add_bremen_calling_logo(msg_multipart, path): + """ + The image is not attached automatically when it is embedded to the content. To circumvent this, + one commonly creates attachments, which are referred to in the email content. + + The content body refers to 'LogoBremenCalling', which the 'Content-ID' of the logo is assigned as. + """ + with open(path, 'rb') as file: + attachment = MIMEApplication(file.read(), _subtype=mimetypes.MimeTypes().guess_type(path), Name="bremen_calling.png") + + attachment.add_header('Content-Disposition','attachment',filename=str(os.path.basename(path))) + attachment.add_header('Content-ID', '') + msg_multipart.attach(attachment) + return msg_multipart + + +def create_shipcall_evaluation_notification(email_handler, ship_name:str, evaluation_message:str, eta_etd_str:str, eta_etd_type:str, content:str, files:typing.Optional[list[str]]): + """ + email_handler : EmailHandler. Contains meta-level information about the mail server and sender's Email. + + ship_name : str. Name of the referenced ship, so the user knows the context. + evaluation_message : str. Brief description of the current evaluation state + eta_etd_str : str. Readable format of a datetime.datetime object, which is either ETA, ETD or both. Informs the user about when the shipcall is due. + eta_etd_type : str. Reference to the time, whether it arrives/leaves/shifts. 
+ + content : str (or filepath). Should refer to the template, which defines the content. This file contains HTML-structured text. + + files: (optional). List of file paths, which are included as attachments. + """ + subject = f"{ship_name} (vorauss. {eta_etd_type}: {eta_etd_str})" + + # create message_body + message_body = content # "Hello World." + evaluation_message_reformatted = evaluation_message.replace("\n", "
") + adaptive_content = f'
Betrifft: {ship_name} ({eta_etd_str})
{evaluation_message_reformatted}
' + message_body = message_body.replace("#ADAPTIVECONTENT", adaptive_content) + + msg = email_handler.create_email(subject=subject, message_body=message_body, subtype="html") + msg_multipart = email_handler.translate_mail_to_multipart(msg=msg) + + if files is not None: + for path in files: + assert os.path.exists(path), f"cannot find attachment at path: {path}" + email_handler.attach_file(path, msg=msg_multipart) + + # add the bremen calling logo, which is referred to in the email body + msg_multipart = add_bremen_calling_logo(msg_multipart, path=os.path.join("C:/Users/User/brecal/misc/logo_bremen_calling.png")) + return (msg_multipart,content) + +def send_notification(email_handler, email_tgts, msg, pwd, debug=False): + email_handler.login(interactive=False, pwd=pwd) + + try: + assert email_handler.check_login() + if not debug: + email_handler.send_email(msg, email_tgts) + else: + print(f"(send_notification INFO): debugging state. Would have sent an Email to: {email_tgts}") + + finally: + email_handler.close() + return diff --git a/src/server/BreCal/stubs/default_email_template.txt b/src/server/BreCal/stubs/default_email_template.txt new file mode 100644 index 0000000..819c668 --- /dev/null +++ b/src/server/BreCal/stubs/default_email_template.txt @@ -0,0 +1,159 @@ + + + + + + Simple Transactional Email + + + + + + + + + + + + \ No newline at end of file diff --git a/src/server/BreCal/stubs/email_template.py b/src/server/BreCal/stubs/email_template.py new file mode 100644 index 0000000..f47ca00 --- /dev/null +++ b/src/server/BreCal/stubs/email_template.py @@ -0,0 +1,15 @@ +import os + +def get_default_html_email(): + """ + dynamically finds the 'default_email_template.txt' file within the module. It opens the file and returns the content. + + __file__ returns to the file, where this function is stored (e.g., within BreCal.stubs.email_template) + using the dirname refers to the directory, where __file__ is stored. + finally, the 'default_email_template.txt' is stored within that folder + """ + html_filepath = os.path.join(os.path.dirname(__file__),"default_email_template.txt") + assert os.path.exists(html_filepath), f"could not find default email template file at path: {html_filepath}" + with open(html_filepath,"r", encoding="utf-8") as file: # encoding = "utf-8" allows for German Umlaute + content = file.read() + return content From 1bdfa8997f15e527bfbb9b18574d9daa6d3217b7 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Tue, 30 Jul 2024 17:43:17 +0200 Subject: [PATCH 06/15] clearing older methods of the Notifier class. There is now a one-line function, which connects to the Email-server, populates candidates for notifications, creates those notifications one-by-one and sends them. Finally, the database is updated, so a notification is sent only once. 
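A minimal invocation sketch of that one-line routine (assumes a reachable database and mail server; with
is_test=True the handler only prints the would-be recipients and leaves the database untouched):

    from BreCal.notifications.notifier import Notifier
    Notifier.send_notifications(is_test=True)   # dry run: print instead of send, no database update
    Notifier.send_notifications()               # live run: send mails, then flag the shipcalls as notified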
--- src/server/BreCal/notifications/notifier.py | 155 +++++++------------ src/server/BreCal/services/email_handling.py | 7 +- 2 files changed, 59 insertions(+), 103 deletions(-) diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index c4c538b..5e4cba6 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -5,12 +5,14 @@ from BreCal.database.sql_queries import SQLQuery from BreCal.schemas import model from BreCal.brecal_utils.time_handling import difference_to_then +from BreCal.services.email_handling import EmailHandler, create_shipcall_evaluation_notification, send_notification +from BreCal.database.enums import ParticipantwiseTimeDelta -from BreCal.schemas.model import ShipcallType +from BreCal.stubs.email_template import get_default_html_email eta_etd_type_dict = { - ShipcallType.arrival : "Ankunft", - ShipcallType.departure : "Abfahrt", - ShipcallType.shifting : "Wechselnd" + model.ShipcallType.arrival : "Ankunft", + model.ShipcallType.departure : "Abfahrt", + model.ShipcallType.shifting : "Wechselnd" } @@ -46,107 +48,39 @@ class Notifier(): - iterate over each remaining shipcall and apply .send_notification - those which are unsent, shall be sent by the respective type """ - raise NotImplementedError("skeleton") - - # get all shipcalls - all_shipcalls = NotImplementedError - - shipcalls = [shipcall for shipcall in all_shipcalls if not shipcall.evaluation_notifications_sent] - for shipcall in shipcalls: - notification_list = Notifier.send_notification(shipcall, is_test=is_test) - - # #TODO: get all notifications - # #TODO: get matching shipcall (based on shipcall_id) - - # #TODO: filter: consider only those, which are not yet sent - - # identify necessity - # #TODO: get the 'evaluation_notifications_sent' field from all shipcalls (based on shipcall_id) - # if not -> return - # USE shipcall.evaluation_notifications_sent - - # #TODO: those which are unsent, shall be created&sent by the respective type -- Note: consider the is_test argument - # iterate over the list of Notifier.build_notification_type_list - # one might use Notifier.create(..., update_database=True) - # use the History (GetHistory -- by shipcall_id) to identify all subscribed users - - # #TODO: update the shipcall dataset ('evaluation_notifications_sent') -- Note: consider the is_test argument - - # #TODO_clarify: how to handle the 'evaluation_notifications_sent', when there is no recipient? - return - - @staticmethod - def send_notification(shipcall:model.Shipcall, is_test:bool=False)->list[model.Notification]: - """ - Complex-function, which is responsible of creating notification messages, issuing them to users and optionally updating - the database. The requirement is, that the notification is required and passes through an internal set of filters. - - Steps: - - get all notifications of shipcall_id - - identify the assigned list of users - - apply all filters. When a filter triggers, exit. If not, create and send a notification. - """ - update_database = False if is_test else True - # #TODO: the concept of old state and new state must be refactored. 
- # old state: read shipcall_id from notifications and look for the latest finding (if None -> EvaluationType.undefined) - # new state: read shipcall_id from shipcalls and look for the *current* 'evaluation' (-> EvaluationType(value)) - - # get existing notifications by shipcall_id (list) - existing_notifications = Notifier.get_existing_notifications(shipcall_id=shipcall.id) - old_state = NotImplementedError - - new_state = shipcall.evaluation - - # get User by querying all History objects of a shipcall_id - users = Notifier.get_users_via_history(shipcall_id=shipcall.id) - - # identify necessity - # state-check: Did the 'evaluation' shift to a higher level of severity? - severity_bool = Notifier.check_higher_severity(old_state, new_state) - if not severity_bool: - return None + # set a threshold, when alarm event notifications become eligible + time_diff_threshold = float(ParticipantwiseTimeDelta.NOTIFICATION)*60 # m minutes, converted to seconds - - # #TODO: time-based filter. There shall be 'enough' time between the evaluation time and NOW - evaluation_time = shipcall.evaluation_time - # latency_bool = #TODO_DIFFERENCE_FROM_NOW_TO_EVALUATION_TIME____THIS_METHOD_ALREADY_EXISTS(evaluation_time) - # careful: what is True, what is False? - # if latency_booL: - # return None + debug = is_test # if is_test, the Emails will not be issued. Only a print message will be created. + update_database = True if not is_test else False # if_test, the database will not be updated. + time_diff_threshold = time_diff_threshold if not is_test else 0.0 # 0.0 delay when is_test is set. - notification_list = [] - for user in users: - notification = Notifier.create( - shipcall.id, - old_state, - new_state, - user, - update_database=update_database, - is_test=is_test - ) - notification_list.append(notification) - return notification_list - - @staticmethod - def publish(shipcall_id, old_state, new_state, user, update_database:bool=False)->typing.Optional[model.Notification]: - """ - Complex-function, which creates, sends and documents a notification. It serves as a convenience function. - The method does not apply internal filters to identify, whether a notification should be created in the first place. + email_handler = EmailHandler(mail_server='w01d5503.kasserver.com', mail_port=465, mail_address="max.metz@scope-sorting.com") + pwd = b'gAAAAABmqJlkXbtJTL1tFiyQNHhF_Y7sgtVI0xEx07ybwbX70Ro1Vp73CLDq49eFSYG-1SswIDQ2JBSORYlWaR-Vh2kIwPHy_lX8SxkySrRvBRzkyZP5x0I=' - options: - update_database: bool. - # #TODO: instead of update_database, one may also use is_test - """ - # 1.) create - # ... = Notifier.create(shipcall_id, old_state, new_state, user) # e.g., might return a dictionary of dict[model.NotificationType, str], where str is the message + try: + # login in advance, so the email handler uses a shared connection. It disconnects only once at the end of the call. + email_handler.login(interactive=False, pwd=pwd) - # 2.) send - # ... = Notifier.send(...) 
# should contain internal 'logistics', which user the respective handlers to send notifications + # get candidates: find all eligible shipcalls, where the evaluation state is yellow or red & the notifications are not yet sent + eligible_shipcalls = Notifier.get_eligible_shipcalls() + + # find all notifications, which belong to the shipcall ids of the eligible_shipcall list + # a time_diff_threshold is used to block those notifications, which are still fairly novel + eligible_notifications = Notifier.get_eligible_notifications(eligible_shipcalls, time_diff_threshold) + + for notification in eligible_notifications: + # get all users, which are attached to the shipcall (uses the History dataset) + users = Notifier.get_users_via_history(notification.shipcall_id) + + # filter: only consider the users, which have subscribed to the notification type + users = [user for user in users if Notifier.check_user_is_subscribed_to_notification_type(user,notification_type=notification.type)] + + # obtain the mail address of each respective user + Notifier.create_and_send_email_notification(email_handler, pwd, users, notification, update_database=update_database, debug=debug) + finally: + email_handler.close() - # 3.) document (mysql database) - # if update_database - # ... = Notifier.document(...) - raise NotImplementedError("skeleton") return @staticmethod @@ -422,7 +356,7 @@ class Notifier(): # use ship & shipcall data models to prepare the body ship_name = ship.name eta_etd = Notifier.create_etaetd_string(shipcall.eta, shipcall.etd) - eta_etd_type = eta_etd_type_dict[ShipcallType(shipcall.type)] + eta_etd_type = eta_etd_type_dict[model.ShipcallType(shipcall.type)] evaluation_message = shipcall.evaluation_message return (ship_name, evaluation_message, eta_etd, eta_etd_type) @@ -435,6 +369,25 @@ class Notifier(): schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") return + + @staticmethod + def create_and_send_email_notification(email_handler:EmailHandler, pwd:bytes, users:list[model.User], notification:model.Notification, update_database:bool=True, debug:bool=False): + email_tgts = [user.user_email for user in users if user.user_email is not None] + + ship_name, evaluation_message, eta_etd, eta_etd_type = Notifier.prepare_notification_body(notification) + + content = get_default_html_email() + files = [] # optional attachments + msg_multipart,msg_content = create_shipcall_evaluation_notification( + email_handler, ship_name, evaluation_message, eta_etd, eta_etd_type, content, files=files + ) + + # send the messages via smtlib's SSL functions + send_notification(email_handler, email_tgts, msg_multipart, pwd, debug=debug) + + if update_database: + Notifier.shipcall_put_update_evaluation_notifications_sent_flag(notification) + return @staticmethod def check_user_is_subscribed_to_notification_type(user,notification_type): diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py index a1e5a10..dc13272 100644 --- a/src/server/BreCal/services/email_handling.py +++ b/src/server/BreCal/services/email_handling.py @@ -287,7 +287,9 @@ def create_shipcall_evaluation_notification(email_handler, ship_name:str, evalua return (msg_multipart,content) def send_notification(email_handler, email_tgts, msg, pwd, debug=False): - email_handler.login(interactive=False, pwd=pwd) + already_logged_in = email_handler.check_login() + if not already_logged_in: + email_handler.login(interactive=False, pwd=pwd) try: assert 
email_handler.check_login() @@ -297,5 +299,6 @@ def send_notification(email_handler, email_tgts, msg, pwd, debug=False): print(f"(send_notification INFO): debugging state. Would have sent an Email to: {email_tgts}") finally: - email_handler.close() + if not already_logged_in: + email_handler.close() return From 3d1391ed4527735874b75c761c18c1060868eba0 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 08:28:44 +0200 Subject: [PATCH 07/15] included the Notifier in the routine. Will be executed every 15 minutes for notifications, which are at least 10 minutes old. --- src/server/BreCal/services/schedule_routines.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/server/BreCal/services/schedule_routines.py b/src/server/BreCal/services/schedule_routines.py index 2e0be3e..a1ada24 100644 --- a/src/server/BreCal/services/schedule_routines.py +++ b/src/server/BreCal/services/schedule_routines.py @@ -51,7 +51,7 @@ def add_function_to_schedule__update_shipcalls(interval_in_minutes:int, options: schedule.every(interval_in_minutes).minutes.do(UpdateShipcalls, **kwargs_) return -def add_function_to_schedule__send_notifications(vr, interval_in_minutes:int=10): +def add_function_to_schedule__send_notifications(interval_in_minutes:int=10): schedule.every(interval_in_minutes).minutes.do(Notifier.send_notifications) return @@ -65,8 +65,8 @@ def setup_schedule(update_shipcalls_interval_in_minutes:int=60): # update the evaluation state in every recent shipcall add_function_to_schedule__update_shipcalls(update_shipcalls_interval_in_minutes) - # placeholder: create/send notifications - # add_function_to_schedule__send_notifications(...) + # create/send notifications + add_function_to_schedule__send_notifications(10) return From 40393fd47654ffe73d0c92b089998b4c889fd932 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 08:35:40 +0200 Subject: [PATCH 08/15] adding the Notifier to schedule routines with an execution routine of 15 minutes. Notifications must be at least 10 minutes old and still relevant to be sent. --- src/server/BreCal/services/schedule_routines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/server/BreCal/services/schedule_routines.py b/src/server/BreCal/services/schedule_routines.py index a1ada24..5725594 100644 --- a/src/server/BreCal/services/schedule_routines.py +++ b/src/server/BreCal/services/schedule_routines.py @@ -51,7 +51,7 @@ def add_function_to_schedule__update_shipcalls(interval_in_minutes:int, options: schedule.every(interval_in_minutes).minutes.do(UpdateShipcalls, **kwargs_) return -def add_function_to_schedule__send_notifications(interval_in_minutes:int=10): +def add_function_to_schedule__send_notifications(interval_in_minutes:int=15): schedule.every(interval_in_minutes).minutes.do(Notifier.send_notifications) return @@ -66,7 +66,7 @@ def setup_schedule(update_shipcalls_interval_in_minutes:int=60): add_function_to_schedule__update_shipcalls(update_shipcalls_interval_in_minutes) # create/send notifications - add_function_to_schedule__send_notifications(10) + add_function_to_schedule__send_notifications(15) return From 19b4f34feea9f05575b156a1bb0613e45f7c1a3f Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 09:04:35 +0200 Subject: [PATCH 09/15] found the bremen_calling_logo.png file within the client's resources. Mapping towards that file when creating email notifications. This alleviates an additional dependency. 
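For reference, the wiring between the HTML body and the attachment that this relies on (illustrative; the
identifier 'LogoBremenCalling' is the Content-ID the template refers to, as described in the
add_bremen_calling_logo docstring):

    # in the HTML template:    <img src="cid:LogoBremenCalling" alt="Bremen Calling">
    # on the MIME attachment:  attachment.add_header('Content-ID', '<LogoBremenCalling>')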
--- src/server/BreCal/services/email_handling.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py index dc13272..f091cf8 100644 --- a/src/server/BreCal/services/email_handling.py +++ b/src/server/BreCal/services/email_handling.py @@ -237,13 +237,20 @@ import typing from email.mime.application import MIMEApplication import mimetypes -def add_bremen_calling_logo(msg_multipart, path): +def add_bremen_calling_logo(msg_multipart): """ The image is not attached automatically when it is embedded to the content. To circumvent this, one commonly creates attachments, which are referred to in the email content. The content body refers to 'LogoBremenCalling', which the 'Content-ID' of the logo is assigned as. """ + # find the path towards the logo file (located at 'brecal\src\BreCalClient\Resources\logo_bremen_calling.png') + src_root_folder = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) + resource_root_folder = os.path.join(src_root_folder, "BreCalClient", "Resources") + + path = os.path.join(resource_root_folder, "logo_bremen_calling.png") + assert os.path.exists(path), f"cannot find logo of bremen calling at path: {os.path.abspath(path)}" + with open(path, 'rb') as file: attachment = MIMEApplication(file.read(), _subtype=mimetypes.MimeTypes().guess_type(path), Name="bremen_calling.png") @@ -283,7 +290,7 @@ def create_shipcall_evaluation_notification(email_handler, ship_name:str, evalua email_handler.attach_file(path, msg=msg_multipart) # add the bremen calling logo, which is referred to in the email body - msg_multipart = add_bremen_calling_logo(msg_multipart, path=os.path.join("C:/Users/User/brecal/misc/logo_bremen_calling.png")) + msg_multipart = add_bremen_calling_logo(msg_multipart) return (msg_multipart,content) def send_notification(email_handler, email_tgts, msg, pwd, debug=False): From fae07bdadfdfcf27c9bca432ff628c2d3a1a1e7b Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 11:49:44 +0200 Subject: [PATCH 10/15] relocating logo_bremen_calling.png and default_email_template to a novel folder in the library: resources. Adapted the dynamic path functions in the library. Created tests, which assert, when the files are missing. Relocating functions from stubs/email_template.py to the email_handling.py to avoid confusion. 
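A sketch of the kind of resource check the new test file could contain (hedged; the actual
test_find_defaults.py is not shown here, and the path resolution is an assumption based on the new
BreCal/resources layout):

    import os
    from BreCal.services import email_handling

    def test_default_resources_exist():
        resources = os.path.join(os.path.dirname(email_handling.__file__), "..", "resources")
        assert os.path.exists(os.path.join(resources, "warning_notification_email_template.txt"))
        assert os.path.exists(os.path.join(resources, "logo_bremen_calling.png"))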
--- src/server/BreCal/notifications/notifier.py | 62 +++++++++++------- src/server/BreCal/resources/__init__.py | 0 .../BreCal/resources/logo_bremen_calling.png | Bin 0 -> 20113 bytes .../warning_notification_email_template.txt} | 0 src/server/BreCal/services/email_handling.py | 49 ++++++++++++-- src/server/BreCal/stubs/email_template.py | 15 ----- src/server/tests/resources/__init__.py | 0 .../tests/resources/test_find_defaults.py | 14 ++++ 8 files changed, 95 insertions(+), 45 deletions(-) create mode 100644 src/server/BreCal/resources/__init__.py create mode 100644 src/server/BreCal/resources/logo_bremen_calling.png rename src/server/BreCal/{stubs/default_email_template.txt => resources/warning_notification_email_template.txt} (100%) delete mode 100644 src/server/BreCal/stubs/email_template.py create mode 100644 src/server/tests/resources/__init__.py create mode 100644 src/server/tests/resources/test_find_defaults.py diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index 5e4cba6..41b89f4 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -5,10 +5,9 @@ from BreCal.database.sql_queries import SQLQuery from BreCal.schemas import model from BreCal.brecal_utils.time_handling import difference_to_then -from BreCal.services.email_handling import EmailHandler, create_shipcall_evaluation_notification, send_notification +from BreCal.services.email_handling import EmailHandler, create_shipcall_evaluation_notification, send_notification, get_default_html_email from BreCal.database.enums import ParticipantwiseTimeDelta -from BreCal.stubs.email_template import get_default_html_email eta_etd_type_dict = { model.ShipcallType.arrival : "Ankunft", model.ShipcallType.departure : "Abfahrt", @@ -58,30 +57,40 @@ class Notifier(): email_handler = EmailHandler(mail_server='w01d5503.kasserver.com', mail_port=465, mail_address="max.metz@scope-sorting.com") pwd = b'gAAAAABmqJlkXbtJTL1tFiyQNHhF_Y7sgtVI0xEx07ybwbX70Ro1Vp73CLDq49eFSYG-1SswIDQ2JBSORYlWaR-Vh2kIwPHy_lX8SxkySrRvBRzkyZP5x0I=' - try: - # login in advance, so the email handler uses a shared connection. It disconnects only once at the end of the call. - email_handler.login(interactive=False, pwd=pwd) + # get candidates: find all eligible shipcalls, where the evaluation state is yellow or red & the notifications are not yet sent + eligible_shipcalls = Notifier.get_eligible_shipcalls() - # get candidates: find all eligible shipcalls, where the evaluation state is yellow or red & the notifications are not yet sent - eligible_shipcalls = Notifier.get_eligible_shipcalls() + # find all notifications, which belong to the shipcall ids of the eligible_shipcall list + # a time_diff_threshold is used to block those notifications, which are still fairly novel + eligible_notifications = Notifier.get_eligible_notifications(eligible_shipcalls, time_diff_threshold) - # find all notifications, which belong to the shipcall ids of the eligible_shipcall list - # a time_diff_threshold is used to block those notifications, which are still fairly novel - eligible_notifications = Notifier.get_eligible_notifications(eligible_shipcalls, time_diff_threshold) + if len(eligible_notifications) > 0: # only perform a login when there are eligible notifications + try: + # login in advance, so the email handler uses a shared connection. It disconnects only once at the end of the call. 
+ email_handler.login(interactive=False, pwd=pwd) - for notification in eligible_notifications: - # get all users, which are attached to the shipcall (uses the History dataset) - users = Notifier.get_users_via_history(notification.shipcall_id) - - # filter: only consider the users, which have subscribed to the notification type - users = [user for user in users if Notifier.check_user_is_subscribed_to_notification_type(user,notification_type=notification.type)] - - # obtain the mail address of each respective user - Notifier.create_and_send_email_notification(email_handler, pwd, users, notification, update_database=update_database, debug=debug) - finally: - email_handler.close() + for notification in eligible_notifications: + eligible_users = Notifier.get_eligible_users(notification) + # create an Email and send it to each eligible_user. + # #TODO: this method must be a distributor. It should send emails for those, who want emails, and provide placeholders for other types of notifications + Notifier.create_and_send_email_notification(email_handler, pwd, eligible_users, notification, update_database=update_database, debug=debug) + finally: + email_handler.close() return + + @staticmethod + def get_eligible_users(notification): + # get all users, which are attached to the shipcall (uses the History dataset) + users = Notifier.get_users_via_history(notification.shipcall_id) + + # filter: only consider the users, which have subscribed to the notification type + eligible_users = [user for user in users if Notifier.check_user_is_subscribed_to_notification_type(user,notification_type=notification.type)] + + # filter: consider only those users, where an Email is set + # #TODO: this is Email-specific and should not be a filter for other notifications + eligible_users = [user for user in eligible_users if user.user_email is not None] + return eligible_users @staticmethod def create(shipcall_id, old_state, new_state, user, update_database:bool=False)->typing.Optional[model.Notification]: @@ -372,6 +381,12 @@ class Notifier(): @staticmethod def create_and_send_email_notification(email_handler:EmailHandler, pwd:bytes, users:list[model.User], notification:model.Notification, update_database:bool=True, debug:bool=False): + """ + # #TODO_rename: when there is more than one type of notification, this should be renamed. This method refers to a validation-state notification + + this 'naive' method creates a message and simply sends it to all users in a list of users. + Afterwards, the database will be updated, so the shipcall no longer requires a notification. + """ email_tgts = [user.user_email for user in users if user.user_email is not None] ship_name, evaluation_message, eta_etd, eta_etd_type = Notifier.prepare_notification_body(notification) @@ -385,6 +400,8 @@ class Notifier(): # send the messages via smtlib's SSL functions send_notification(email_handler, email_tgts, msg_multipart, pwd, debug=debug) + # #TODO_refactor: when there are multiple notification types, it makes sense to decouple updating the database + # from this method. 
Hence, an update would be done after *all* notifications are sent
         if update_database:
             Notifier.shipcall_put_update_evaluation_notifications_sent_flag(notification)
         return
@@ -412,6 +429,3 @@ class Notifier():
             raise NotImplementedError(notification_type)

-"""# build the list of evaluation times ('now', as isoformat)"""
-#evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))]
-
diff --git a/src/server/BreCal/resources/__init__.py b/src/server/BreCal/resources/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/server/BreCal/resources/logo_bremen_calling.png b/src/server/BreCal/resources/logo_bremen_calling.png
new file mode 100644
index 0000000000000000000000000000000000000000..bb97bf5f87800ea46bf14996ccd7f3e927cd85b8
GIT binary patch
literal 20113
[base85-encoded PNG data (20113 bytes) omitted]
diff --git a/src/server/BreCal/services/email_handling.py b/src/server/BreCal/services/email_handling.py
--- a/src/server/BreCal/services/email_handling.py
+++ b/src/server/BreCal/services/email_handling.py
+def find_warning_notification_email_template()->str:
+    """
+    dynamically finds the 'default_email_template.txt' file within the module.
+    """
+    # __file__ is BreCal/stubs/email_template.py
+    # parent of email_template.py is stubs
+    # parent of stubs is BreCal
+    brecal_root_folder = os.path.dirname(os.path.dirname(__file__)) # .../BreCal
+    resource_root_folder = os.path.join(brecal_root_folder, "resources") # .../BreCal/resources
+    html_filepath = os.path.join(resource_root_folder,"warning_notification_email_template.txt") # .../BreCal/resources/warning_notification_email_template.txt
+    assert os.path.exists(html_filepath), f"could not find default email template file at path: {html_filepath}"
+    return html_filepath
+
+def get_default_html_email()->str:
+    """
+    dynamically finds the 'default_email_template.txt' file within the module. It opens the file and returns the content.
+
+    __file__ returns to the file, where this function is stored (e.g., within BreCal.stubs.email_template)
+    using the dirname refers to the directory, where __file__ is stored.
+ finally, the 'default_email_template.txt' is stored within that folder + """ + html_filepath = find_warning_notification_email_template() + with open(html_filepath,"r", encoding="utf-8") as file: # encoding = "utf-8" allows for German Umlaute + content = file.read() + return content + +def find_bremen_calling_logo(): + """ + find the path towards the logo file (located at 'brecal\src\BreCalClient\Resources\logo_bremen_calling.png') + """ + # __file__ is services/email_handling.py + # parent of __file__ is services + # parent of services is BreCal + src_root_folder = os.path.dirname(os.path.dirname(__file__)) # .../BreCal + resource_root_folder = os.path.join(src_root_folder, "resources") + + path = os.path.join(resource_root_folder, "logo_bremen_calling.png") + assert os.path.exists(path), f"cannot find logo of bremen calling at path: {os.path.abspath(path)}" + return path + def add_bremen_calling_logo(msg_multipart): """ The image is not attached automatically when it is embedded to the content. To circumvent this, @@ -244,12 +286,7 @@ def add_bremen_calling_logo(msg_multipart): The content body refers to 'LogoBremenCalling', which the 'Content-ID' of the logo is assigned as. """ - # find the path towards the logo file (located at 'brecal\src\BreCalClient\Resources\logo_bremen_calling.png') - src_root_folder = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(__file__)))) - resource_root_folder = os.path.join(src_root_folder, "BreCalClient", "Resources") - - path = os.path.join(resource_root_folder, "logo_bremen_calling.png") - assert os.path.exists(path), f"cannot find logo of bremen calling at path: {os.path.abspath(path)}" + path = find_bremen_calling_logo() with open(path, 'rb') as file: attachment = MIMEApplication(file.read(), _subtype=mimetypes.MimeTypes().guess_type(path), Name="bremen_calling.png") diff --git a/src/server/BreCal/stubs/email_template.py b/src/server/BreCal/stubs/email_template.py deleted file mode 100644 index f47ca00..0000000 --- a/src/server/BreCal/stubs/email_template.py +++ /dev/null @@ -1,15 +0,0 @@ -import os - -def get_default_html_email(): - """ - dynamically finds the 'default_email_template.txt' file within the module. It opens the file and returns the content. - - __file__ returns to the file, where this function is stored (e.g., within BreCal.stubs.email_template) - using the dirname refers to the directory, where __file__ is stored. - finally, the 'default_email_template.txt' is stored within that folder - """ - html_filepath = os.path.join(os.path.dirname(__file__),"default_email_template.txt") - assert os.path.exists(html_filepath), f"could not find default email template file at path: {html_filepath}" - with open(html_filepath,"r", encoding="utf-8") as file: # encoding = "utf-8" allows for German Umlaute - content = file.read() - return content diff --git a/src/server/tests/resources/__init__.py b/src/server/tests/resources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/server/tests/resources/test_find_defaults.py b/src/server/tests/resources/test_find_defaults.py new file mode 100644 index 0000000..214f0d7 --- /dev/null +++ b/src/server/tests/resources/test_find_defaults.py @@ -0,0 +1,14 @@ +import pytest +import os + +def test_find_bremen_calling_logo(): + from BreCal.services.email_handling import find_bremen_calling_logo + path = find_bremen_calling_logo() + assert os.path.exists(path), f"cannot find the bremen calling logo file, which is needed for notifications (e.g., Email). 
Searched at path: \n\t{path}" + return + +def test_find_warning_notification_email_template(): + from BreCal.services.email_handling import find_warning_notification_email_template + path = find_warning_notification_email_template() + assert os.path.exists(path), f"cannot find the required email template, which is needed for warning notifications. Searched at path: \n\t{path}" + return From 0de47e26277338e4dc6d4a8367b384f25eaba421 Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 12:05:48 +0200 Subject: [PATCH 11/15] creating only one notification per notification_type, even if multiple users request the same notification_type. --- src/server/BreCal/notifications/notifier.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index 41b89f4..916059f 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -313,13 +313,17 @@ class Notifier(): @staticmethod def create_notifications_for_user_list(shipcall, users:list[model.User]): + notification_type_list = [] for user in users: - notification_type_list = Notifier.build_notification_type_list(user) + user_notification_type_list = Notifier.build_notification_type_list(user) + notification_type_list.extend(user_notification_type_list) - for notification_type in notification_type_list: - schemaModel = dict(shipcall_id = shipcall.id, level = int(shipcall.evaluation), type = notification_type, message = "", created = datetime.datetime.now(), modified=None) - query = SQLQuery.get_notifications_post(schemaModel) - schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + # get the unique notification types + notification_type_list = list(set(notification_type_list)) + for notification_type in notification_type_list: + schemaModel = dict(shipcall_id = shipcall.id, level = int(shipcall.evaluation), type = notification_type, message = "", created = datetime.datetime.now(), modified=None) + query = SQLQuery.get_notifications_post(schemaModel) + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") return @staticmethod From b078386520b042cf54dafe5a0d359782c14ac7cc Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 12:30:37 +0200 Subject: [PATCH 12/15] refactored the account-data of the Email-server, so it can be easily adapted later on. Decoupled into a novel file notifications/accounts.py with some simple unit tests. 
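One observation on the credentials moved into accounts.py below: the mail_pwd constant has the shape of a Fernet token rather than a plain password. How EmailHandler actually consumes it is not shown in this series; if it is a Fernet token, decryption would look roughly like the sketch below (the helper name, key handling and environment variable are assumptions, not project code):

    # Hedged sketch: decrypting a Fernet-style token, IF that is how mail_pwd is stored.
    import os
    from cryptography.fernet import Fernet

    def decrypt_mail_password(token: bytes, key: bytes) -> str:
        # the key is a 32-byte urlsafe base64 value held outside the repository
        return Fernet(key).decrypt(token).decode("utf-8")

    # usage (hypothetical): decrypt_mail_password(mail_pwd, os.environ["BRECAL_MAIL_KEY"].encode())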
--- src/server/BreCal/notifications/accounts.py | 7 +++++++ src/server/BreCal/notifications/notifier.py | 9 ++++---- src/server/tests/notifications/__init__.py | 0 .../tests/notifications/test_accounts.py | 21 +++++++++++++++++++ 4 files changed, 33 insertions(+), 4 deletions(-) create mode 100644 src/server/BreCal/notifications/accounts.py create mode 100644 src/server/tests/notifications/__init__.py create mode 100644 src/server/tests/notifications/test_accounts.py diff --git a/src/server/BreCal/notifications/accounts.py b/src/server/BreCal/notifications/accounts.py new file mode 100644 index 0000000..0e3bbd0 --- /dev/null +++ b/src/server/BreCal/notifications/accounts.py @@ -0,0 +1,7 @@ +"""This file contains login information to register into distinct notification accounts.""" + + +mail_server = 'w01d5503.kasserver.com' +mail_port=465 +mail_address="max.metz@scope-sorting.com" +mail_pwd = b'gAAAAABmqJlkXbtJTL1tFiyQNHhF_Y7sgtVI0xEx07ybwbX70Ro1Vp73CLDq49eFSYG-1SswIDQ2JBSORYlWaR-Vh2kIwPHy_lX8SxkySrRvBRzkyZP5x0I=' diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index 916059f..3f0f591 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -54,8 +54,9 @@ class Notifier(): update_database = True if not is_test else False # if_test, the database will not be updated. time_diff_threshold = time_diff_threshold if not is_test else 0.0 # 0.0 delay when is_test is set. - email_handler = EmailHandler(mail_server='w01d5503.kasserver.com', mail_port=465, mail_address="max.metz@scope-sorting.com") - pwd = b'gAAAAABmqJlkXbtJTL1tFiyQNHhF_Y7sgtVI0xEx07ybwbX70Ro1Vp73CLDq49eFSYG-1SswIDQ2JBSORYlWaR-Vh2kIwPHy_lX8SxkySrRvBRzkyZP5x0I=' + + from BreCal.notifications.accounts import mail_server, mail_port, mail_address, mail_pwd + email_handler = EmailHandler(mail_server=mail_server, mail_port=mail_port, mail_address=mail_address) # get candidates: find all eligible shipcalls, where the evaluation state is yellow or red & the notifications are not yet sent eligible_shipcalls = Notifier.get_eligible_shipcalls() @@ -67,14 +68,14 @@ class Notifier(): if len(eligible_notifications) > 0: # only perform a login when there are eligible notifications try: # login in advance, so the email handler uses a shared connection. It disconnects only once at the end of the call. - email_handler.login(interactive=False, pwd=pwd) + email_handler.login(interactive=False, pwd=mail_pwd) for notification in eligible_notifications: eligible_users = Notifier.get_eligible_users(notification) # create an Email and send it to each eligible_user. # #TODO: this method must be a distributor. 
It should send emails for those, who want emails, and provide placeholders for other types of notifications - Notifier.create_and_send_email_notification(email_handler, pwd, eligible_users, notification, update_database=update_database, debug=debug) + Notifier.create_and_send_email_notification(email_handler, mail_pwd, eligible_users, notification, update_database=update_database, debug=debug) finally: email_handler.close() return diff --git a/src/server/tests/notifications/__init__.py b/src/server/tests/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/server/tests/notifications/test_accounts.py b/src/server/tests/notifications/test_accounts.py new file mode 100644 index 0000000..8cf21fe --- /dev/null +++ b/src/server/tests/notifications/test_accounts.py @@ -0,0 +1,21 @@ +import pytest +from BreCal.notifications.accounts import mail_server, mail_port, mail_address, mail_pwd + + +def test_mail_server(): + assert isinstance(mail_server, str) + assert not "@" in mail_server + return + +def test_mail_port(): + assert isinstance(mail_port, int) + return + +def test_mail_address(): + assert isinstance(mail_address, str) + assert "@" in mail_address + return + +def test_mail_pwd(): + assert isinstance(mail_pwd, bytes), f"must be a bytes-encoded password to protect the account" + return From 0de0baf1d487c17d45b3d15ae08339d11198aacc Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 14:13:41 +0200 Subject: [PATCH 13/15] the evaluation function (traffic-light validation) additionally performs the notification function to generate notifications. Integrated within the scheduler. --- src/server/BreCal/notifications/notifier.py | 33 ++++++++++++++++--- .../BreCal/services/schedule_routines.py | 1 + 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index 3f0f591..c97eeeb 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -76,6 +76,9 @@ class Notifier(): # create an Email and send it to each eligible_user. # #TODO: this method must be a distributor. It should send emails for those, who want emails, and provide placeholders for other types of notifications Notifier.create_and_send_email_notification(email_handler, mail_pwd, eligible_users, notification, update_database=update_database, debug=debug) + + # #TODO: except... logging? + finally: email_handler.close() return @@ -334,14 +337,34 @@ class Notifier(): looking up its history, and finding all attached users. 
For each user, a notification will be created for each subscribed notification type (e.g., Email) """ - # get the respective shipcall shipcall = Notifier.get_shipcall(shipcall_id) + notifications = execute_sql_query_standalone(query=SQLQuery.get_notifications(), param={"scid" : shipcall_id}, model=model.Notification, command_type="query") + latest_notification = Notifier.find_latest_notification(notifications) - # find all attached users of the shipcall (checks the history, then reads out the user ids and builds the users) - users = Notifier.get_users_via_history(shipcall_id=shipcall.id) + old_state = model.EvaluationType(latest_notification.level) if latest_notification is not None else model.EvaluationType.undefined + new_state = shipcall.evaluation - # for each user, create one notification for each subscribed notification type (e.g., Email) - Notifier.create_notifications_for_user_list(shipcall, users) + # identify, whether the severity of the shipcall has increased to see, whether a notification is required + severity_increase = Notifier.check_higher_severity(old_state=old_state, new_state=new_state) + + # when the severity increases, set the 'evaluation_notifications_sent' argument to 0 (False) + if severity_increase: + ### UPDATE Shipcall ### + # prepare and create a query + evaluation_notifications_sent = 0 + schemaModel = {"id":shipcall.id, "evaluation_notifications_sent":evaluation_notifications_sent} # #TODO: should this require the 'modified' tag to be adapted? + query = SQLQuery.get_shipcall_put(schemaModel) + + # execute the PUT-Request + schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") + + ### Generate Notifications ### + # find all attached users of the shipcall (checks the history, then reads out the user ids and builds the users) + users = Notifier.get_users_via_history(shipcall_id=shipcall.id) + + # for each user, identify the notification_types, which must be generated. Finally, create those + # notifications with a POST-request + Notifier.create_notifications_for_user_list(shipcall, users) return @staticmethod diff --git a/src/server/BreCal/services/schedule_routines.py b/src/server/BreCal/services/schedule_routines.py index 5725594..e46ead9 100644 --- a/src/server/BreCal/services/schedule_routines.py +++ b/src/server/BreCal/services/schedule_routines.py @@ -39,6 +39,7 @@ def UpdateShipcalls(options:dict = {'past_days':2}): for shipcall_id in shipcall_ids: # apply 'Traffic Light' evaluation to obtain 'GREEN', 'YELLOW' or 'RED' evaluation state. The function internally updates the mysql database evaluate_shipcall_state(mysql_connector_instance=pooledConnection, shipcall_id=shipcall_id) # new_id (last insert id) refers to the shipcall id + Notifier.generate_notifications(shipcall_id) pooledConnection.close() From 0d13ffd626307dfad1635311ccf3eda6e70ca93a Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 15:18:47 +0200 Subject: [PATCH 14/15] manually overwritting the email address of all recipients of an Email notification with the proxy-mail 'bremencalling@bsmd.de'. This is useful to test the feature before enabling it to the live-system. 
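For context on this patch and the follow-up that replaces it: both are variants of building the recipient list, either routing every mail to the proxy mailbox or always adding the BSMD address on top of the real recipients. A hedged sketch of how the two behaviours could sit behind a single flag (function name and flag are illustrative only; the project's actual method is in the next diff):

    # Hedged sketch: combining the test-mode override (this patch) with the
    # always-CC behaviour of the follow-up patch. Not project code.
    def build_recipients(user_emails, bsmd_address="bremencalling@bsmd.de", test_mode=False):
        if test_mode:
            targets = [bsmd_address]                                   # route everything to the proxy mailbox
        else:
            targets = [email for email in user_emails if email is not None]
            targets.append(bsmd_address)                               # always inform the BSMD
        return sorted(set(targets))                                    # deduplicate; sorted for a stable order

    # e.g. build_recipients(["a@example.org", None, "a@example.org"], test_mode=False)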
--- src/server/BreCal/notifications/notifier.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index c97eeeb..30db745 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -415,12 +415,17 @@ class Notifier(): this 'naive' method creates a message and simply sends it to all users in a list of users. Afterwards, the database will be updated, so the shipcall no longer requires a notification. """ - email_tgts = [user.user_email for user in users if user.user_email is not None] + #email_tgts = [user.user_email for user in users if user.user_email is not None] + email_tgts = ["bremencalling@bsmd.de" for user in users if user.user_email is not None] - ship_name, evaluation_message, eta_etd, eta_etd_type = Notifier.prepare_notification_body(notification) + # avoid multi-mails, when (for some reason) multiple users share the same email address. + email_tgts = list(set(email_tgts)) + # prepare and build the Email content content = get_default_html_email() files = [] # optional attachments + ship_name, evaluation_message, eta_etd, eta_etd_type = Notifier.prepare_notification_body(notification) + msg_multipart,msg_content = create_shipcall_evaluation_notification( email_handler, ship_name, evaluation_message, eta_etd, eta_etd_type, content, files=files ) From 29b611c780be5375653e0ca771712bc27f01ed2a Mon Sep 17 00:00:00 2001 From: Max Metz Date: Wed, 31 Jul 2024 15:30:06 +0200 Subject: [PATCH 15/15] instead of overwriting the recipient's addresses, the bsmd address will simply (always) be added to the list of recipients. --- src/server/BreCal/notifications/notifier.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/src/server/BreCal/notifications/notifier.py b/src/server/BreCal/notifications/notifier.py index 30db745..4e38d7e 100644 --- a/src/server/BreCal/notifications/notifier.py +++ b/src/server/BreCal/notifications/notifier.py @@ -407,6 +407,18 @@ class Notifier(): schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute") return + @staticmethod + def build_email_targets_validation_notification(users)->list[str]: + # readout the email address of all users + email_tgts = [user.user_email for user in users if user.user_email is not None] + + # additionally, always inform the BSMD + email_tgts.append("bremencalling@bsmd.de") # #TODO: for testing, use "bremencalling@bsmd.de". For live system, use "report@bsmd.de" + + # avoid multi-mails, when (for some reason) multiple users share the same email address. + email_tgts = list(set(email_tgts)) + return email_tgts + @staticmethod def create_and_send_email_notification(email_handler:EmailHandler, pwd:bytes, users:list[model.User], notification:model.Notification, update_database:bool=True, debug:bool=False): """ @@ -415,11 +427,8 @@ class Notifier(): this 'naive' method creates a message and simply sends it to all users in a list of users. Afterwards, the database will be updated, so the shipcall no longer requires a notification. """ - #email_tgts = [user.user_email for user in users if user.user_email is not None] - email_tgts = ["bremencalling@bsmd.de" for user in users if user.user_email is not None] - - # avoid multi-mails, when (for some reason) multiple users share the same email address. 
- email_tgts = list(set(email_tgts)) + # get a list of all recipients + email_tgts = Notifier.build_email_targets_validation_notification(users) # prepare and build the Email content content = get_default_html_email()