Added input validation for API requests. Refactored several methods and added docstrings. Resolved marshmallow warnings in BreCal.schemas.model. Added unit tests, now totaling 215. Added proposals to refactor all SQL queries into an object at BreCal.database.sql_queries.SQLQuery for better standardization. Created a helper object for handling emails, which prepares the notification feature.

This commit is contained in:
Max Metz 2024-06-10 07:27:58 +02:00
parent e33833235d
commit e10e9ef88b
48 changed files with 5049 additions and 252 deletions

View File

@ -34,7 +34,7 @@ from BreCal.stubs.df_times import get_df_times
from BreCal.services.schedule_routines import setup_schedule, run_schedule_permanently_in_background
def create_app(test_config=None):
def create_app(test_config=None, instance_path=None):
app = Flask(__name__, instance_relative_config=True)
app.config.from_mapping(
@ -45,7 +45,11 @@ def create_app(test_config=None):
else:
app.config.from_mapping(test_config)
if instance_path is not None:
app.instance_path = instance_path
try:
import os
print(f'Instance path = {app.instance_path}')
os.makedirs(app.instance_path)
except OSError:
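For orientation, a minimal sketch of how the widened factory signature could be exercised from a test; the TESTING config key, the pytest tmp_path fixture, and the package-level import of create_app are assumptions, not part of this diff:

# hypothetical pytest usage of create_app(test_config=None, instance_path=None)
from BreCal import create_app  # assumed export of the factory

def test_create_app_uses_custom_instance_path(tmp_path):
    app = create_app(test_config={"TESTING": True}, instance_path=str(tmp_path))
    assert app.instance_path == str(tmp_path)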

View File

@ -1,12 +1,17 @@
from flask import Blueprint, request
from webargs.flaskparser import parser
from marshmallow import Schema, fields
from marshmallow import Schema, fields, ValidationError
from ..schemas import model
from .. import impl
from ..services.auth_guard import auth_guard
from ..services.auth_guard import auth_guard, check_jwt
from BreCal.validators.input_validation import validate_posted_shipcall_data, check_if_user_is_bsmd_type
from BreCal.validators.input_validation_shipcall import InputValidationShipcall
from BreCal.database.sql_handler import execute_sql_query_standalone
import logging
import json
import traceback
import werkzeug
bp = Blueprint('shipcalls', __name__)
@ -14,7 +19,16 @@ bp = Blueprint('shipcalls', __name__)
@auth_guard() # no restriction by role
def GetShipcalls():
if 'Authorization' in request.headers:
token = request.headers.get('Authorization')
token = request.headers.get('Authorization') # see impl/login for the token encoding (a JWT token).
"""
from BreCal.services.jwt_handler import decode_jwt
jwt = token.split('Bearer ')[1] # string key
payload = decode_jwt(jwt) # dictionary, which includes 'id' (user id) and 'participant_id'
# oneline:
payload = decode_jwt(request.headers.get("Authorization").split("Bearer ")[-1])
"""
options = {}
options["participant_id"] = request.args.get("participant_id")
options["past_days"] = request.args.get("past_days", default=1, type=int)
@ -31,8 +45,21 @@ def PostShipcalls():
try:
content = request.get_json(force=True)
loadedModel = model.ShipcallSchema().load(data=content, many=False, partial=True)
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# validate the posted shipcall data & the user's authority
InputValidationShipcall.evaluate_post_data(user_data, loadedModel, content)
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400
except Exception as ex:
logging.error(ex)
logging.error(traceback.format_exc())
print(ex)
return json.dumps("bad format"), 400
@ -45,8 +72,24 @@ def PutShipcalls():
try:
content = request.get_json(force=True)
logging.info(content)
loadedModel = model.ShipcallSchema().load(data=content, many=False, partial=True)
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# validate the PUT shipcall data and the user's authority
InputValidationShipcall.evaluate_put_data(user_data, loadedModel, content)
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400
except werkzeug.exceptions.Forbidden as ex:
logging.error(ex)
print(ex)
return json.dumps({"message":ex.description}), 403
except Exception as ex:
logging.error(ex)
print(ex)

View File

@ -1,11 +1,14 @@
from flask import Blueprint, request
from .. import impl
from ..services.auth_guard import auth_guard
from marshmallow import EXCLUDE
from ..services.auth_guard import auth_guard, check_jwt
from marshmallow import EXCLUDE, ValidationError
from ..schemas import model
import json
import logging
from BreCal.validators.input_validation import check_if_user_is_bsmd_type
from BreCal.validators.input_validation_ship import InputValidationShip
bp = Blueprint('ships', __name__)
@bp.route('/ships', methods=['get'])
@ -24,8 +27,21 @@ def GetShips():
def PostShip():
try:
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# check whether the user belongs to a participant of type ParticipantType.BSMD
# as ParticipantType is an IntFlag, a user belonging to multiple groups is properly evaluated.
is_bsmd = check_if_user_is_bsmd_type(user_data)
if not is_bsmd:
raise ValidationError(f"current user does not belong to BSMD. Cannot post shipcalls. Found user data: {user_data}")
content = request.get_json(force=True)
loadedModel = model.ShipSchema().load(data=content, many=False, partial=True)
# validate the request data & user permissions
InputValidationShip.evaluate_post_data(user_data, loadedModel, content)
except Exception as ex:
logging.error(ex)
print(ex)
@ -39,8 +55,15 @@ def PostShip():
def PutShip():
try:
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
content = request.get_json(force=True)
loadedModel = model.ShipSchema().load(data=content, many=False, partial=True, unknown=EXCLUDE)
loadedModel = model.Ship().load(data=content, many=False, partial=True, unknown=EXCLUDE)
# validate the request data & user permissions
InputValidationShip.evaluate_put_data(user_data, loadedModel, content)
except Exception as ex:
logging.error(ex)
print(ex)
@ -53,13 +76,20 @@ def PutShip():
@auth_guard() # no restriction by role
def DeleteShip():
# TODO check if I am allowed to delete this thing by deriving the participant from the bearer token
try:
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
ship_id = request.args.get("id")
if 'id' in request.args:
options = {}
options["id"] = request.args.get("id")
else:
return json.dumps("no id provided"), 400
# validate the request data & user permissions
InputValidationShip.evaluate_delete_data(user_data, ship_id)
except Exception as ex:
logging.error(ex)
print(ex)
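The InputValidationShip class itself is not part of this excerpt; the routes above only rely on an interface along these lines. A hedged sketch that reuses check_if_user_is_bsmd_type from this commit, not the actual implementation in BreCal.validators.input_validation_ship:

from marshmallow import ValidationError
from BreCal.validators.input_validation import check_if_user_is_bsmd_type

class InputValidationShipSketch:
    """illustrative stand-in for InputValidationShip; the real class is not shown in this diff"""
    @staticmethod
    def evaluate_post_data(user_data, loaded_model, content):
        # a failed check raises ValidationError, which the route maps to an HTTP 400 response
        if not check_if_user_is_bsmd_type(user_data):
            raise ValidationError(f"current user does not belong to BSMD. Found user data: {user_data}")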

View File

@ -1,9 +1,11 @@
from flask import Blueprint, request
from ..schemas import model
from .. import impl
from ..services.auth_guard import auth_guard
from ..services.auth_guard import auth_guard, check_jwt
import json
import logging
from marshmallow import ValidationError
from BreCal.validators.input_validation_times import InputValidationTimes
bp = Blueprint('times', __name__)
@ -30,6 +32,17 @@ def PostTimes():
# body = parser.parse(schema, request, location='json')
loadedModel = model.TimesSchema().load(data=content, many=False, partial=True)
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# validate the request
InputValidationTimes.evaluate_post_data(user_data, loadedModel, content)
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps(f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"), 400
except Exception as ex:
logging.error(ex)
print(ex)
@ -46,6 +59,17 @@ def PutTimes():
content = request.get_json(force=True)
loadedModel = model.TimesSchema().load(data=content, many=False, partial=True)
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# validate the request
InputValidationTimes.evaluate_put_data(user_data, loadedModel, content)
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps(f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"), 400
except Exception as ex:
logging.error(ex)
print(ex)
@ -58,11 +82,16 @@ def PutTimes():
@auth_guard() # no restriction by role
def DeleteTimes():
# TODO check if I am allowed to delete this thing by deriving the participant from the bearer token
if 'id' in request.args:
options = {}
options["id"] = request.args.get("id")
# read the user data from the JWT token (set when login is performed)
user_data = check_jwt()
# validate the request
InputValidationTimes.evaluate_delete_data(user_data, times_id = request.args.get("id"))
return impl.times.DeleteTimes(options)
else:
logging.warning("Times delete missing id argument")

View File

@ -4,6 +4,7 @@ from .. import impl
from ..services.auth_guard import auth_guard
import json
import logging
from marshmallow import ValidationError
bp = Blueprint('user', __name__)
@ -15,6 +16,11 @@ def PutUser():
content = request.get_json(force=True)
loadedModel = model.UserSchema().load(data=content, many=False, partial=True)
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps(f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"), 400
except Exception as ex:
logging.error(ex)
print(ex)

View File

@ -1,8 +1,8 @@
from enum import Enum, IntFlag
from enum import IntEnum, Enum, IntFlag
class ParticipantType(IntFlag):
"""determines the type of a participant"""
NONE = 0
undefined = 0
BSMD = 1
TERMINAL = 2
PILOT = 4
@ -11,12 +11,17 @@ class ParticipantType(IntFlag):
PORT_ADMINISTRATION = 32
TUG = 64
class ShipcallType(Enum):
class ShipcallType(IntEnum):
"""determines the type of a shipcall, as this changes the applicable validation rules"""
undefined = 0
INCOMING = 1
OUTGOING = 2
SHIFTING = 3
@classmethod
def _missing_(cls, value):
return cls.undefined
class ParticipantwiseTimeDelta():
"""stores the time delta for every participant, which triggers the validation rules in the rule set '0001'"""
AGENCY = 1200.0 # 20 h * 60 min/h = 1200 min
@ -26,7 +31,9 @@ class ParticipantwiseTimeDelta():
TUG = 960.0 # 16 h * 60 min/h = 960 min
TERMINAL = 960.0 # 16 h * 60 min/h = 960 min
class StatusFlags(Enum):
NOTIFICATION = 10.0 # after n minutes, an evaluation may raise a notification
class StatusFlags(IntEnum):
"""
these enumerators ensure that each traffic light validation rule state corresponds to a value, which will be used in the ValidationRules object to identify
the necessity of notifications.
@ -36,3 +43,26 @@ class StatusFlags(Enum):
YELLOW = 2
RED = 3
class PierSide(IntEnum):
"""These enumerators determine the pier side of a shipcall."""
PORTSIDE = 0 # Port/Backbord
STARBOARD_SIDE = 1 # Starboard / Steuerbord
class NotificationType(IntFlag):
"""determines the method by which a notification is distributed to users. Flagging allows selecting multiple notification types."""
UNDEFINED = 0
EMAIL = 1
POPUP = 2
MESSENGER = 4
class ParticipantFlag(IntFlag):
"""
| 1 | If this flag is set on a shipcall record with participant type Agency (8),
all participants of type BSMD (1) may edit the record.
"""
undefined = 0
BSMD = 1
@classmethod
def _missing_(cls, value):
return cls.undefined
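A short illustration of how the reworked enum types behave (plain Python semantics, values taken from the definitions above):

# IntFlag members combine with bitwise operators and support containment checks
combined = ParticipantType.BSMD | ParticipantType.TUG   # 1 + 64 = 65
assert ParticipantType.BSMD in combined

# the _missing_ hook maps unknown values to `undefined` instead of raising
assert ShipcallType(99) is ShipcallType.undefined
assert ShipcallType(2) is ShipcallType.OUTGOING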

View File

@ -1,9 +1,11 @@
import numpy as np
import pandas as pd
import pydapper
import datetime
import typing
from BreCal.schemas.model import Shipcall, Ship, Participant, Berth, User, Times
from BreCal.schemas.model import Shipcall, Ship, Participant, Berth, User, Times, ShipcallParticipantMap
from BreCal.database.enums import ParticipantType
from BreCal.local_db import getPoolConnection
def pandas_series_to_data_model():
return
@ -19,7 +21,80 @@ def set_participant_type(x, participant_df)->int:
participant_type = participant_df.loc[participant_id, "type"]
return participant_type
def get_synchronous_shipcall_times_standalone(query_time:pd.Timestamp, all_df_times:pd.DataFrame, delta_threshold=900)->int:
"""
This function counts all entries in {all_df_times} whose timestamps lie close to {query_time}.
It does so by:
1.) selecting all eta_berth & etd_berth entries
2.) measuring the timedelta towards {query_time}
3.) converting the timedelta to total absolute seconds (positive or negative time differences do not matter)
4.) applying a {delta_threshold} to identify whether two times are too close together
5.) counting the times, where the timedelta is below the threshold
returns: counts
"""
assert isinstance(query_time,pd.Timestamp)
# get a timedelta for each valid (not Null) time entry
time_deltas_eta = [(query_time.to_pydatetime()-time_.to_pydatetime()) for time_ in all_df_times.loc[:,"eta_berth"] if not pd.isnull(time_)]
time_deltas_etd = [(query_time.to_pydatetime()-time_.to_pydatetime()) for time_ in all_df_times.loc[:,"etd_berth"] if not pd.isnull(time_)]
# consider both, eta and etd times
time_deltas = time_deltas_eta + time_deltas_etd
# convert the timedelta to absolute total seconds
time_deltas = [abs(delta.total_seconds()) for delta in time_deltas]
# consider only those time deltas, which are <= the determined threshold
# create a list of booleans
time_deltas_filtered = [delta <= delta_threshold for delta in time_deltas]
# booleans can be added/counted in Python by using sum()
counts = sum(time_deltas_filtered) # int
return counts
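A small usage sketch for the helper above; the DataFrame content is made up for illustration, only the eta_berth and etd_berth columns matter:

import pandas as pd

df_times = pd.DataFrame({
    "eta_berth": [pd.Timestamp("2024-06-10 08:00"), pd.Timestamp("2024-06-10 09:00"), pd.NaT],
    "etd_berth": [pd.NaT, pd.NaT, pd.Timestamp("2024-06-10 08:10")],
})
# 08:00 and 08:10 fall within the default 900 s window around 08:05, 09:00 does not -> counts == 2
counts = get_synchronous_shipcall_times_standalone(pd.Timestamp("2024-06-10 08:05"), df_times)
assert counts == 2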
def execute_sql_query_standalone(query, param={}, pooledConnection=None, model=None, command_type="query"):
"""
execute an arbitrary query with a set of parameters, return the output and convert it to a list.
when the pooled connection is rebuilt, it will be closed at the end of the function.
"""
rebuild_pooled_connection = pooledConnection is None
if rebuild_pooled_connection:
pooledConnection = getPoolConnection()
commands = pydapper.using(pooledConnection)
# participant_query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?";
# creates a generator
try:
if command_type=="query":
if model is None:
schemas = commands.query(query, model=dict, param=param, buffered=False)
else:
schemas = commands.query(query, model=model, param=param, buffered=False)
# creates a list of results from the generator
schemas = [schema for schema in schemas]
elif command_type=="execute":
schemas = commands.execute(query, param=param)
elif command_type=="single":
sentinel = object()
# pulls a *single* row from the query. Typically, these queries require an ID within the param dictionary.
# when providing a model, such as model.Shipcall, the dataset is immediately translated into a data model.
schemas = commands.query_single_or_default(query, sentinel, param=param) if model is None else commands.query_single_or_default(query, sentinel, param=param, model=model)
if schemas is sentinel:
raise Exception("no such record")
else:
raise ValueError(command_type)
finally: # if needed, ensure that the pooled connection is closed.
if rebuild_pooled_connection:
pooledConnection.close()
return schemas
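Usage sketches for the three command types; the statements and parameter names are taken from queries defined elsewhere in this commit:

# "query": returns a list of dictionaries (or data models, if model= is given)
rows = execute_sql_query_standalone(
    "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?",
    param={"shipcall_id": 1})

# "single": returns exactly one row or raises "no such record"
shipcall_row = execute_sql_query_standalone(
    "SELECT * FROM shipcall where id = ?id?", param={"id": 1}, command_type="single")

# "execute": runs a write statement and returns the number of affected rows
affected = execute_sql_query_standalone(
    "UPDATE ship SET deleted = 1 WHERE id = ?id?", param={"id": 1}, command_type="execute")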
class SQLHandler():
"""
@ -36,6 +111,15 @@ class SQLHandler():
if read_all:
self.read_all(self.all_schemas)
def execute_sql_query(self, sql_connection, query, param):
"""
this method is best used in combination with a python context-manager, such as:
with mysql.connector.connect(**mysql_connection_data) as sql_connection:
schema = sql_handler.execute_sql_query(sql_connection, query)
"""
schemas = execute_sql_query_standalone(query, param, pooledConnection=sql_connection)
return schemas
def get_all_schemas_from_mysql(self):
with self.sql_connection.cursor(buffered=True) as cursor:
cursor.execute("SHOW TABLES")
@ -50,7 +134,8 @@ class SQLHandler():
'ship'->BreCal.schemas.model.Ship object
"""
self.str_to_model_dict = {
"shipcall":Shipcall, "ship":Ship, "participant":Participant, "berth":Berth, "user":User, "times":Times
"shipcall":Shipcall, "ship":Ship, "participant":Participant, "berth":Berth, "user":User, "times":Times,
"shipcall_participant_map":ShipcallParticipantMap
}
return
@ -70,9 +155,13 @@ class SQLHandler():
data = [{k:v for k,v in zip(column_names, dat)} for dat in data]
# 4.) build a dataframe from the respective data models (which ensures the correct data type)
df = self.build_df_from_data_and_name(data, table_name)
return df
def build_df_from_data_and_name(self, data, table_name):
data_model = self.str_to_model_dict.get(table_name)
if data_model is not None:
df = pd.DataFrame([data_model(**dat) for dat in data])
df = pd.DataFrame([data_model(**dat) for dat in data], columns=list(data_model.__annotations__.keys()))
else:
df = pd.DataFrame([dat for dat in data])
return df
@ -94,11 +183,7 @@ class SQLHandler():
# 4.) build a dataframe from the respective data models (which ensures the correct data type)
data_model = self.str_to_model_dict.get(table_name)
if data_model is not None:
df = pd.DataFrame([data_model(**dat) for dat in data])
else:
df = pd.DataFrame([dat for dat in data])
df = self.build_df_from_data_and_name(data, table_name)
if 'id' in df.columns:
df = df.set_index('id', inplace=False) # avoid inplace updates, so the raw sql remains unchanged
return df
@ -332,6 +417,10 @@ class SQLHandler():
def get_unique_ship_counts(self, all_df_times:pd.DataFrame, times_agency:pd.DataFrame, query:str, rounding:str="min", maximum_threshold=3):
"""given a dataframe of all agency times, get all unique ship counts, their values (datetime) and the string tags. returns a tuple (values,unique,counts)"""
# #deprecated!
import warnings
warnings.warn(f"SQLHandler.get_unique_ship_counts is deprecated. Instead, please use SQLHandler.count_synchronous_shipcall_times")
# optional: rounding
if rounding is not None:
all_df_times.loc[:, query] = pd.to_datetime(all_df_times.loc[:, query]).dt.round(rounding) # e.g., 'min' --- # correcting the error: 'AttributeError: Can only use .dt accessor with datetimelike values'
@ -347,3 +436,7 @@ class SQLHandler():
# get unique entries and counts
counts = len(values) # unique, counts = np.unique(values, return_counts=True)
return counts # (values, unique, counts)
def count_synchronous_shipcall_times(self, query_time:pd.Timestamp, all_df_times:pd.DataFrame, delta_threshold=900)->int:
"""count all times entries, which are too close to the query_time. The {delta_threshold} determines the threshold. returns counts (int)"""
return get_synchronous_shipcall_times_standalone(query_time, all_df_times, delta_threshold)

View File

@ -0,0 +1,418 @@
import logging
def create_sql_query_shipcall_get(options:dict)->str:
"""
creates an SQL query, which selects all shipcalls from the mysql database.
the agency eta times are used to order the entries.
args:
options : dict. A dictionary, which must contain the 'past_days' key (int). Determines the range
by which shipcalls are filtered.
"""
query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " +
"flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " +
"tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " +
"evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " +
"FROM shipcall s " +
"LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " +
"WHERE " +
"(type = 1 AND " +
"((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " +
"(eta >= DATE(NOW() - INTERVAL %d DAY)))) OR " +
"((type = 2 OR type = 3) AND " +
"((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " +
"(etd >= DATE(NOW() - INTERVAL %d DAY)))) " +
"ORDER BY eta") % (options["past_days"], options["past_days"], options["past_days"], options["past_days"])
return query
def create_sql_query_shipcall_post(schemaModel:dict)->str:
query = "INSERT INTO shipcall ("
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "participants":
continue
if key == "created":
continue
if key == "modified":
continue
if key == "evaluation":
continue
if key == "evaluation_message":
continue
if key == "type_value":
continue
if key == "evaluation_value":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += key
query += ") VALUES ("
isNotFirst = False
for key in schemaModel.keys():
param_key = key
if key == "id":
continue
if key == "participants":
continue
if key == "created":
continue
if key == "modified":
continue
if key == "evaluation":
continue
if key == "evaluation_message":
continue
if key == "type":
param_key = "type_value"
if key == "type_value":
continue
if key == "evaluation":
param_key = "evaluation_value"
if key == "evaluation_value":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += "?" + param_key + "?"
query += ")"
return query
def create_sql_query_shipcall_put(schemaModel:dict)->str:
query = "UPDATE shipcall SET "
isNotFirst = False
for key in schemaModel.keys():
param_key = key
if key == "id":
continue
if key == "participants":
continue
if key == "created":
continue
if key == "modified":
continue
if key == "evaluation":
continue
if key == "evaluation_message":
continue
if key == "type":
param_key = "type_value"
if key == "type_value":
continue
if key == "evaluation":
param_key = "evaluation_value"
if key == "evaluation_value":
continue
if isNotFirst:
query += ", "
isNotFirst = True
query += key + " = ?" + param_key + "? "
query += "WHERE id = ?id?"
return query
def create_sql_query_history_post()->str:
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 1)"
return query
def create_sql_query_history_put()->str:
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 2)"
return query
def create_sql_query_user_put(schemaModel:dict):
query = "UPDATE user SET "
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "old_password":
continue
if key == "new_password":
continue
if isNotFirst:
query += ", "
isNotFirst = True
query += key + " = ?" + key + "? "
query += "WHERE id = ?id?"
return query
def create_sql_query_ship_post(schemaModel:dict):
query = "INSERT INTO ship ("
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += key
query += ") VALUES ("
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += "?" + key + "?"
query += ")"
return query
def create_sql_query_ship_put(schemaModel:dict):
query = "UPDATE ship SET "
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ", "
isNotFirst = True
query += key + " = ?" + key + "? "
query += "WHERE id = ?id?"
return query
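As a worked example, a hypothetical schemaModel of {"id": 5, "name": "Vessel A", "imo": 1234567} yields the following pydapper-style statement; the refactored SQLQuery.get_ship_put below produces the identical string, which is what the equivalence pytests mentioned in its comment rely on:

query = create_sql_query_ship_put({"id": 5, "name": "Vessel A", "imo": 1234567})
assert query == "UPDATE ship SET name = ?name? , imo = ?imo? WHERE id = ?id?"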
class SQLQuery():
"""
This class provides quick access to different SQL query functions, which create default queries for the BreCal package.
Each method is callable without initializing the SQLQuery object.
Example:
SQLQuery.get_berth()
When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues.
"""
def __init__(self) -> None:
pass
@staticmethod
def get_berth()->str:
query = "SELECT id, name, `lock`, owner_id, authority_id, created, modified, deleted FROM berth WHERE deleted = 0 ORDER BY name"
return query
@staticmethod
def get_history()->str:
query = "SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?"
return query
@staticmethod
def get_user()->str:
query = "SELECT id, participant_id, first_name, last_name, user_name, user_email, user_phone, password_hash, " +\
"api_key, notify_email, notify_whatsapp, notify_signal, notify_popup, created, modified FROM user " +\
"WHERE user_name = ?username? OR user_email = ?username?"
return query
@staticmethod
def get_notifications()->str:
query = "SELECT id, shipcall_id, level, type, message, created, modified FROM notification " + \
"WHERE shipcall_id = ?scid?"
return query
@staticmethod
def get_participant_by_user_id()->str:
query = "SELECT p.id as id, p.name as name, p.street as street, p.postal_code as postal_code, p.city as city, p.type as type, p.flags as flags, p.created as created, p.modified as modified, p.deleted as deleted FROM participant p INNER JOIN user u WHERE u.participant_id = p.id and u.id = ?userid?"
return query
@staticmethod
def get_participants()->str:
query = "SELECT id, name, street, postal_code, city, type, flags, created, modified, deleted FROM participant p ORDER BY p.name"
return query
@staticmethod
def get_shipcalls(options:dict={'past_days':3})->str:
# a pytest proves this method to be identical to create_sql_query_shipcall_get(options)
assert 'past_days' in list(options.keys()), f"there must be a key 'past_days' in the options, which determines how recent the returned list of shipcalls shall be." # part of a pytest.raises
past_days = options['past_days']
query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " + \
"flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " + \
"tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " + \
"evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " + \
"FROM shipcall s " + \
"LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " + \
"WHERE " + \
"(type = 1 AND " + \
f"((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL {past_days} DAY)) OR " + \
f"(eta >= DATE(NOW() - INTERVAL {past_days} DAY)))) OR " + \
"((type = 2 OR type = 3) AND " + \
f"((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL {past_days} DAY)) OR " + \
f"(etd >= DATE(NOW() - INTERVAL {past_days} DAY)))) " + \
"ORDER BY eta")
return query
@staticmethod
def get_ships()->str:
query = "SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship ORDER BY name"
return query
@staticmethod
def get_times()->str:
query = "SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " + \
"zone_entry, zone_entry_fixed, operations_start, operations_end, remarks, shipcall_id, participant_id, " + \
"berth_id, berth_info, pier_side, participant_type, created, modified, ata, atd, eta_interval_end, etd_interval_end FROM times " + \
"WHERE times.shipcall_id = ?scid?"
return query
@staticmethod
def get_user_by_id():
query = "SELECT * FROM user where id = ?id?"
return query
@staticmethod
def get_user_put(schemaModel:dict):
# a pytest proves this method to be identical to create_sql_query_user_put(schemaModel)
prefix = "UPDATE user SET "
suffix = "WHERE id = ?id?"
center = [f"{key} = ?{key}? " for key in schemaModel.keys() if key not in ["id", "old_password", "new_password"]]
query = prefix + ", ".join(center) + suffix
return query
@staticmethod
def get_update_user_password()->str:
query = "UPDATE user SET password_hash = ?password_hash? WHERE id = ?id?"
return query
@staticmethod
def get_participants()->str:
query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?"
return query
@staticmethod
def get_shipcall_post(schemaModel:dict)->str:
# a pytest proves this method to be identical to create_sql_query_shipcall_post(schemaModel)
param_keys = {key:key for key in schemaModel.keys()}
param_keys["type"] = "type_value"
param_keys["evaluation"] = "evaluation_value"
prefix = "INSERT INTO shipcall ("
bridge = ") VALUES ("
suffix = ")"
stage1 = ",".join([key for key in schemaModel.keys() if not key in ["id","participants","created","modified","evaluation","evaluation_message","type_value","evaluation_value"]])
stage2 = ",".join([f"?{param_keys.get(key)}?" for key in schemaModel.keys() if not key in ["id","participants","created","modified","evaluation","evaluation_message","type_value","evaluation_value"]])
query = prefix+stage1+bridge+stage2+suffix
return query
@staticmethod
def get_last_insert_id()->str:
query = "select last_insert_id()"
return query
@staticmethod
def get_shipcall_post_last_insert_id()->str:
"""alias function. May be deleted soon"""
query = SQLQuery.get_last_insert_id()
return query
@staticmethod
def get_shipcall_post_update_shipcall_participant_map()->str:
query = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)"
return query
@staticmethod
def create_sql_query_history_post()->str:
query = create_sql_query_history_post()
return query
@staticmethod
def get_shipcall_by_id()->str:
query = "SELECT * FROM shipcall where id = ?id?"
return query
@staticmethod
def get_shipcall_put(schemaModel:dict)->str:
# a pytest proves this method to be identical to create_sql_query_shipcall_put(schemaModel)
param_keys = {key:key for key in schemaModel.keys()}
param_keys["type"] = "type_value"
param_keys["evaluation"] = "evaluation_value"
prefix = "UPDATE shipcall SET "
suffix = "WHERE id = ?id?"
body = ", ".join([f"{key} = ?{param_keys.get(key)}? " for key in schemaModel.keys() if key not in ["id", "participants", "created", "modified", "evaluation", "evaluation_message", "type_value", "evaluation_value"]])
query = prefix + body + suffix
return query
@staticmethod
def get_shipcall_participant_map_by_shipcall_id()->str:
query = "SELECT id, participant_id, type FROM shipcall_participant_map where shipcall_id = ?id?"
return query
@staticmethod
def get_shipcall_participant_map_delete_by_id()->str:
query = "DELETE FROM shipcall_participant_map WHERE id = ?existing_id?"
return query
@staticmethod
def create_sql_query_history_put()->str:
query = create_sql_query_history_put()
return query
@staticmethod
def get_ship_post(schemaModel:dict)->str:
# a pytest proves this method to be identical to create_sql_query_ship_post(schemaModel)
prefix = "INSERT INTO ship ("
suffix = ")"
bridge = ") VALUES ("
stage1 = ",".join([key for key in schemaModel.keys() if not key in ["id", "created", "modified"]])
stage2 = ",".join([f"?{key}?" for key in schemaModel.keys() if not key in ["id", "created", "modified"]])
query = prefix + stage1 + bridge + stage2 + suffix
return query
@staticmethod
def get_ship_put(schemaModel:dict)->str:
# a pytest proves this method to be identical to create_sql_query_ship_put(schemaModel)
prefix = "UPDATE ship SET "
suffix = "WHERE id = ?id?"
body = ", ".join([f"{key} = ?{key}? " for key in schemaModel.keys() if not key in ["id","created","modified"]])
query = prefix + body + suffix
return query
@staticmethod
def get_ship_delete_by_id()->str:
query = "UPDATE ship SET deleted = 1 WHERE id = ?id?"
return query
@staticmethod
def get_notification_post()->str:
raise NotImplementedError()
# #TODO: this query is wrong and just a proxy for a POST request
query = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)"
return query
@staticmethod
def get_shipcall_put_notification_state()->str:
raise NotImplementedError()
# #TODO: use evaluation_notifications_sent here and consider only the shipcall_id
# #TODO: query
query = ...
return query
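A sketch of how the query builders can be combined with execute_sql_query_standalone from BreCal.database.sql_handler. This is one possible pairing; the impl modules further down instead keep their inline pydapper calls, with the SQLQuery equivalents left as comments:

from BreCal.database.sql_handler import execute_sql_query_standalone
from BreCal.database.sql_queries import SQLQuery

# fetch all berths using the standardized query object instead of an inline SQL string
berths = execute_sql_query_standalone(SQLQuery.get_berth())

# fetch the times records of one shipcall
times = execute_sql_query_standalone(SQLQuery.get_times(), param={"scid": 42})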

View File

@ -0,0 +1,14 @@
from BreCal.database.sql_handler import execute_sql_query_standalone
import datetime
def get_user_data_for_id(user_id:int, expiration_time:int=90):
"""debugging function, which is useful to pull user_data from the database, which may be used to create stub data and unit tests"""
query = "SELECT * FROM user where id = ?id?"
pdata = execute_sql_query_standalone(query=query, param={"id":user_id})
pdata = pdata[0] if len(pdata)>0 else None
assert pdata is not None, f"could not find user with id {user_id}"
user_data = {k:v for k,v in pdata.items() if k in ['id','participant_id','first_name','last_name','user_name','user_phone','user_email']}
user_data["exp"] = (datetime.datetime.now()+datetime.timedelta(minutes=expiration_time)).timestamp()
return user_data
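A short sketch of the intended workflow; stubbing check_jwt with the returned dictionary is an assumption about how this data would be used in tests:

# pull a realistic user_data dictionary once, then reuse it as canned stub data
user_data = get_user_data_for_id(user_id=1)
assert set(user_data) >= {"id", "participant_id", "user_name", "exp"}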

View File

@ -34,6 +34,7 @@ def update_shipcall_in_mysql_database(sql_connection, shipcall:Shipcall, relevan
def build_mysql_query_to_update_shipcall(shipcall, relevant_keys:list):
"""builds a mysql query, which updates the shipcall table. In particular, the provided shipcall will be updated for each key in {relevant_keys}"""
# #TODO: refactor into SQLQuery
schemaModel = shipcall.__dict__
# prepare prefix and suffix. Then build the body of the query
@ -68,7 +69,6 @@ def evaluate_shipcall_state(mysql_connector_instance, shipcall_id:int=None, debu
with mysql.connector.connect(**mysql_connection_data) as mysql_connector_instance:
evaluate_shipcall_state(mysql_connector_instance)
returns None
"""
sql_handler = SQLHandler(sql_connection=mysql_connector_instance, read_all=True)
vr = ValidationRules(sql_handler)

View File

@ -4,6 +4,7 @@ import pydapper
from ..schemas import model
from .. import local_db
from BreCal.database.sql_queries import SQLQuery
def GetBerths(token):
"""
@ -13,6 +14,8 @@ def GetBerths(token):
try:
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_berth()
# data = commands.query(query, model=model.Berth)
data = commands.query("SELECT id, name, `lock`, owner_id, authority_id, created, modified, deleted FROM berth WHERE deleted = 0 ORDER BY name", model=model.Berth)
return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'}

View File

@ -7,6 +7,7 @@ from ..schemas import model
from ..schemas.model import History
from .. import local_db
from BreCal.database.sql_queries import SQLQuery
def GetHistory(options):
@ -20,6 +21,8 @@ def GetHistory(options):
commands = pydapper.using(pooledConnection)
if "shipcall_id" in options and options["shipcall_id"]:
# query = SQLQuery.get_history()
# data = commands.query(query, model=History.from_query_row, param={"shipcallid" : options["shipcall_id"]})
data = commands.query("SELECT id, participant_id, shipcall_id, timestamp, eta, type, operation FROM history WHERE shipcall_id = ?shipcallid?",
model=History.from_query_row,
param={"shipcallid" : options["shipcall_id"]})

View File

@ -6,6 +6,7 @@ import bcrypt
from ..schemas import model
from .. import local_db
from ..services import jwt_handler
from BreCal.database.sql_queries import SQLQuery
def GetUser(options):
@ -14,11 +15,13 @@ def GetUser(options):
hash = bcrypt.hashpw(options["password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8')
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_user()
# data = commands.query(query, model=model.User, param={"username" : options["username"]})
data = commands.query("SELECT id, participant_id, first_name, last_name, user_name, user_email, user_phone, password_hash, " +
"api_key, notify_email, notify_whatsapp, notify_signal, notify_popup, created, modified FROM user " +
"WHERE user_name = ?username? OR user_email = ?username?",
model=model.User, param={"username" : options["username"]})
# print(data)
if len(data) == 1:
if bcrypt.checkpw(options["password"].encode("utf-8"), bytes(data[0].password_hash, "utf-8")):
result = {

View File

@ -4,6 +4,7 @@ import pydapper
from ..schemas import model
from .. import local_db
from BreCal.database.sql_queries import SQLQuery
def GetNotifications(options):
"""
@ -16,6 +17,8 @@ def GetNotifications(options):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_notifications()
# data = commands.query(query, model=model.Notification.from_query_row, param={"scid" : options["shipcall_id"]})
data = commands.query("SELECT id, shipcall_id, level, type, message, created, modified FROM notification " +
"WHERE shipcall_id = ?scid?", model=model.Notification.from_query_row, param={"scid" : options["shipcall_id"]})
pooledConnection.close()

View File

@ -4,6 +4,7 @@ import pydapper
from ..schemas import model
from .. import local_db
from BreCal.database.sql_queries import SQLQuery
def GetParticipant(options):
"""
@ -16,8 +17,10 @@ def GetParticipant(options):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
if "user_id" in options and options["user_id"]:
# query = SQLQuery.get_participant_by_user_id()
data = commands.query("SELECT p.id as id, p.name as name, p.street as street, p.postal_code as postal_code, p.city as city, p.type as type, p.flags as flags, p.created as created, p.modified as modified, p.deleted as deleted FROM participant p INNER JOIN user u WHERE u.participant_id = p.id and u.id = ?userid?", model=model.Participant, param={"userid" : options["user_id"]})
else:
# query = SQLQuery.get_participants()
data = commands.query("SELECT id, name, street, postal_code, city, type, flags, created, modified, deleted FROM participant p ORDER BY p.name", model=model.Participant)
return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'}

View File

@ -8,6 +8,8 @@ from .. import local_db
from ..services.auth_guard import check_jwt
from BreCal.database.update_database import evaluate_shipcall_state
from BreCal.database.sql_queries import create_sql_query_shipcall_get, create_sql_query_shipcall_post, create_sql_query_shipcall_put, create_sql_query_history_post, create_sql_query_history_put, SQLQuery
from marshmallow import Schema, fields, ValidationError
def GetShipcalls(options):
"""
@ -18,23 +20,14 @@ def GetShipcalls(options):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, " +
"flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, " +
"tidal_window_to, rain_sensitive_cargo, recommended_tugs, anchored, moored_lock, canceled, evaluation, " +
"evaluation_message, evaluation_time, evaluation_notifications_sent, s.created as created, s.modified as modified, time_ref_point " +
"FROM shipcall s " +
"LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 " +
"WHERE " +
"(type = 1 AND " +
"((t.id IS NOT NULL AND t.eta_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " +
"(eta >= DATE(NOW() - INTERVAL %d DAY)))) OR " +
"((type = 2 OR type = 3) AND " +
"((t.id IS NOT NULL AND t.etd_berth >= DATE(NOW() - INTERVAL %d DAY)) OR " +
"(etd >= DATE(NOW() - INTERVAL %d DAY)))) " +
"ORDER BY eta") % (options["past_days"], options["past_days"], options["past_days"], options["past_days"])
# query = SQLQuery.get_shipcalls(options)
query = create_sql_query_shipcall_get(options)
data = commands.query(query, model=model.Shipcall.from_query_row, buffered=True)
for shipcall in data:
# participant_query = SQLQuery.get_participants()
# participants = commands.query(participant_query, model=dict, param={"shipcall_id" : shipcall.id}, buffered=False)
# for record in participants:
participant_query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?";
for record in commands.query(participant_query, model=dict, param={"shipcall_id" : shipcall.id}, buffered=False):
# model.Participant_Assignment = model.Participant_Assignment()
@ -58,18 +51,29 @@ def GetShipcalls(options):
def PostShipcalls(schemaModel):
"""
This function *executes* a post-request for shipcalls. The function is accessible as part of an API route.
The common sequence is:
a) issue a request to the Flask API
b) BreCal.api.shipcalls.PostShipcalls, to verify the incoming request (which includes an authentication guard)
c) BreCal.impl.shipcalls.PostShipcalls, to execute the incoming request
:param schemaModel: The deserialized dict of the request
e.g.,
{
'ship_id': 1, 'type': 1, 'eta': datetime.datetime(2023, 7, 23, 7, 18, 19),
'voyage': '43B', 'tug_required': False, 'pilot_required': True, 'flags': 0,
'pier_side': False, 'bunkering': True, 'recommended_tugs': 2, 'type_value': 1, 'evaluation_value': 0
}
"""
# TODO: Validate the upload data
# This creates a *new* entry
try:
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_shipcall_post(schemaModel) # create_sql_query_shipcall_post(schemaModel)
query = "INSERT INTO shipcall ("
isNotFirst = False
for key in schemaModel.keys():
@ -122,12 +126,15 @@ def PostShipcalls(schemaModel):
isNotFirst = True
query += "?" + param_key + "?"
query += ")"
commands.execute(query, schemaModel)
# lquery = SQLQuery.get_shipcall_post_last_insert_id()
# new_id = commands.execute_scalar(lquery)
new_id = commands.execute_scalar("select last_insert_id()")
# add participant assignments if we have a list of participants
if 'participants' in schemaModel:
# pquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map()
pquery = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)"
for participant_assignment in schemaModel["participants"]:
commands.execute(pquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]})
@ -138,11 +145,17 @@ def PostShipcalls(schemaModel):
# save history data
# TODO: set ETA properly
user_data = check_jwt()
# query = SQLQuery.create_sql_query_history_post()
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 1)"
commands.execute(query, {"scid" : new_id, "pid" : user_data["participant_id"], "uid" : user_data["id"]})
return json.dumps({"id" : new_id}), 201, {'Content-Type': 'application/json; charset=utf-8'}
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400
except Exception as ex:
logging.error(traceback.format_exc())
logging.error(ex)
@ -168,14 +181,19 @@ def PutShipcalls(schemaModel):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
user_data = check_jwt()
# test if object to update is found
sentinel = object()
# query = SQLQuery.get_shipcall_by_id()
# theshipcall = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]})
theshipcall = commands.query_single_or_default("SELECT * FROM shipcall where id = ?id?", sentinel, param={"id" : schemaModel["id"]})
if theshipcall is sentinel:
pooledConnection.close()
return json.dumps("no such record"), 404, {'Content-Type': 'application/json; charset=utf-8'}
# query = SQLQuery.get_shipcall_put(schemaModel)
query = "UPDATE shipcall SET "
isNotFirst = False
for key in schemaModel.keys():
@ -206,8 +224,10 @@ def PutShipcalls(schemaModel):
query += key + " = ?" + param_key + "? "
query += "WHERE id = ?id?"
affected_rows = commands.execute(query, param=schemaModel)
# pquery = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pquery = "SELECT id, participant_id, type FROM shipcall_participant_map where shipcall_id = ?id?"
pdata = commands.query(pquery,param={"id" : schemaModel["id"]}) # existing list of assignments
@ -220,6 +240,7 @@ def PutShipcalls(schemaModel):
found_participant = True
break
if not found_participant:
# nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map()
nquery = "INSERT INTO shipcall_participant_map (shipcall_id, participant_id, type) VALUES (?shipcall_id?, ?participant_id?, ?type?)"
commands.execute(nquery, param={"shipcall_id" : schemaModel["id"], "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]})
@ -231,20 +252,23 @@ def PutShipcalls(schemaModel):
found_participant = True
break;
if not found_participant:
# dquery = SQLQuery.get_shipcall_participant_map_delete_by_id()
dquery = "DELETE FROM shipcall_participant_map WHERE id = ?existing_id?"
commands.execute(dquery, param={"existing_id" : elem["id"]})
# apply 'Traffic Light' evaluation to obtain 'GREEN', 'YELLOW' or 'RED' evaluation state. The function internally updates the mysql database
# evaluate_shipcall_state(mysql_connector_instance=pooledConnection, shipcall_id=schemaModel["id"]) # schemaModel["id"] refers to the shipcall id
# save history data
# TODO: set ETA properly
user_data = check_jwt()
# query = SQLQuery.create_sql_query_history_put()
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 1, 2)"
commands.execute(query, {"scid" : schemaModel["id"], "pid" : user_data["participant_id"], "uid" : user_data["id"]})
return json.dumps({"id" : schemaModel["id"]}), 200
except ValidationError as ex:
logging.error(ex)
print(ex)
return json.dumps({"message":f"bad format. \nError Messages: {ex.messages}. \nValid Data: {ex.valid_data}"}), 400
except Exception as ex:
logging.error(traceback.format_exc())
logging.error(ex)

View File

@ -4,6 +4,7 @@ import pydapper
from ..schemas import model
from .. import local_db
from BreCal.database.sql_queries import SQLQuery
def GetShips(token):
"""
@ -14,6 +15,8 @@ def GetShips(token):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_ships()
# data = commands.query(query, model=model.Ship)
data = commands.query("SELECT id, name, imo, callsign, participant_id, length, width, is_tug, bollard_pull, eni, created, modified, deleted FROM ship ORDER BY name", model=model.Ship)
return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'}
@ -46,6 +49,7 @@ def PostShip(schemaModel):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.create_sql_query_ship_post(schemaModel)
query = "INSERT INTO ship ("
isNotFirst = False
for key in schemaModel.keys():
@ -75,6 +79,8 @@ def PostShip(schemaModel):
query += ")"
commands.execute(query, schemaModel)
# nquery = SQLQuery.get_last_insert_id()
# new_id = commands.execute_scalar(nquery)
new_id = commands.execute_scalar("select last_insert_id()")
pooledConnection.close()
@ -100,6 +106,7 @@ def PutShip(schemaModel):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.create_sql_query_ship_put(schemaModel)
query = "UPDATE ship SET "
isNotFirst = False
for key in schemaModel.keys():
@ -140,6 +147,8 @@ def DeleteShip(options):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_ship_delete_by_id()
# affected_rows = commands.execute(query, param={"id" : options["id"]})
affected_rows = commands.execute("UPDATE ship SET deleted = 1 WHERE id = ?id?", param={"id" : options["id"]})
pooledConnection.close()

View File

@ -6,6 +6,7 @@ import pydapper
from ..schemas import model
from .. import local_db
from ..services.auth_guard import check_jwt
from BreCal.database.sql_queries import SQLQuery
from BreCal.database.update_database import evaluate_shipcall_state
@ -20,6 +21,8 @@ def GetTimes(options):
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_times()
# data = commands.query(query, model=model.Times, param={"scid" : options["shipcall_id"]})
data = commands.query("SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " +
"zone_entry, zone_entry_fixed, operations_start, operations_end, remarks, shipcall_id, participant_id, " +
"berth_id, berth_info, pier_side, participant_type, created, modified, ata, atd, eta_interval_end, etd_interval_end FROM times " +

View File

@ -5,6 +5,7 @@ import bcrypt
from ..schemas import model
from .. import local_db
from BreCal.database.sql_queries import SQLQuery, create_sql_query_user_put
def PutUser(schemaModel):
"""
@ -21,13 +22,21 @@ def PutUser(schemaModel):
# test if object to update is found
sentinel = object()
# query = SQLQuery.get_user_by_id()
# theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
theuser = commands.query_single_or_default("SELECT * FROM user where id = ?id?", sentinel, param={"id" : schemaModel["id"]}, model=model.User)
if theuser is sentinel:
pooledConnection.close()
# #TODO: result = {"message":"no such record"} -> json.dumps
return json.dumps("no such record"), 404, {'Content-Type': 'application/json; charset=utf-8'}
# see if we need to update public fields
# #TODO_determine: this filter blocks Put-Requests, which update the 'notify_email', 'notify_whatsapp', 'notify_signal', 'notify_popup' fields
# should this be refactored?
# Also, what about the 'user_name'?
# 'participant_id' would also not trigger an update in isolation
if "first_name" in schemaModel or "last_name" in schemaModel or "user_phone" in schemaModel or "user_email" in schemaModel:
# query = SQLQuery.get_user_put(schemaModel)
query = "UPDATE user SET "
isNotFirst = False
for key in schemaModel.keys():
@ -49,6 +58,7 @@ def PutUser(schemaModel):
if "old_password" in schemaModel and schemaModel["old_password"] and "new_password" in schemaModel and schemaModel["new_password"]:
if bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")): # old pw matches
password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8')
# query = SQLQuery.get_update_user_password()
query = "UPDATE user SET password_hash = ?password_hash? WHERE id = ?id?"
commands.execute(query, param={"password_hash" : password_hash, "id" : schemaModel["id"]})
else:

View File

@ -7,16 +7,16 @@ import sys
config_path = None
def initPool(instancePath):
def initPool(instancePath, connection_filename="connection_data_devel.json"):
try:
global config_path
if(config_path == None):
config_path = os.path.join(instancePath,'../../../secure/connection_data_devel.json');
config_path = os.path.join(instancePath,f'../../../secure/{connection_filename}') #connection_data_devel.json');
print (config_path)
if not os.path.exists(config_path):
print ('cannot find ' + config_path)
print ('cannot find ' + os.path.abspath(config_path))
print("instance path", instancePath)
sys.exit(1)

View File

@ -1,5 +1,5 @@
from dataclasses import field, dataclass
from marshmallow import Schema, fields, post_load, INCLUDE, ValidationError
from marshmallow import Schema, fields, INCLUDE, ValidationError, validate, validates, post_load
from marshmallow.fields import Field
from marshmallow_enum import EnumField
from enum import IntEnum
@ -9,6 +9,11 @@ from typing import List
import json
import datetime
from BreCal.validators.time_logic import validate_time_is_in_not_too_distant_future
from BreCal.validators.validation_base_utils import check_if_string_has_special_characters
from BreCal.database.enums import ParticipantType, ParticipantFlag
# from BreCal. ... import check_if_user_is_bsmd_type
def obj_dict(obj):
if isinstance(obj, datetime.datetime):
@ -50,9 +55,19 @@ class EvaluationType(IntEnum):
return cls.undefined
class NotificationType(IntEnum):
"""
Any user has the attributes
'notify_email' -> NotificationType.email
'notify_popup' -> NotificationType.push
'notify_whatsapp' -> undeclared
'notify_signal' -> undeclared
"""
undefined = 0
email = 1
push = 2
# whatsapp = 3
# signal = 4
@classmethod
def _missing_(cls, value):
return cls.undefined
@ -104,7 +119,7 @@ class History:
return self(id, participant_id, shipcall_id, timestamp, eta, ObjectType(type), OperationType(operation))
class Error(Schema):
message = fields.String(required=True)
message = fields.String(metadata={'required':True})
class GetVerifyInlineResp(Schema):
@ -112,11 +127,17 @@ class GetVerifyInlineResp(Schema):
@dataclass
class Notification:
"""
Base data class for any notification.
Description:
'An entry corresponds to an alarm given by a violated rule during times update'
"""
id: int
shipcall_id: int
level: int
type: NotificationType
message: str
shipcall_id: int # 'shipcall record that caused the notification'
level: int # 'severity of the notification'
type: NotificationType # 'type of the notification'
message: str # 'individual message'
created: datetime
modified: datetime
@ -142,69 +163,100 @@ class Participant(Schema):
street: str
postal_code: str
city: str
type: int
type: int # fields.Enum(ParticipantType ...)
flags: int
created: datetime
modified: datetime
deleted: bool
@validates("type")
def validate_type(self, value):
# e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7
max_int = sum([int(val) for val in list(ParticipantType._value2member_map_.values())])
min_int = 0
valid_type = 0 <= value < max_int
if not valid_type:
raise ValidationError(f"the provided integer is not supported for default behaviour of the ParticipantType IntFlag. Your choice: {value}. Supported values are: 0 <= value {max_int}")
@validates("flags")
def validate_flags(self, value):
# e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7
max_int = sum([int(val) for val in list(ParticipantFlag._value2member_map_.values())])
min_int = 0
valid_type = 0 <= value < max_int
if not valid_type:
raise ValidationError(f"the provided integer is not supported for default behaviour of the ParticipantFlag IntFlag. Your choice: {value}. Supported values are: 0 <= value {max_int}")
class ParticipantList(Participant):
pass
class ParticipantAssignmentSchema(Schema):
participant_id = fields.Int()
type = fields.Int()
participant_id = fields.Integer()
type = fields.Integer()
class ShipcallSchema(Schema):
def __init__(self):
super().__init__(unknown=None)
pass
id = fields.Int()
ship_id = fields.Int()
type = fields.Enum(ShipcallType, required=True)
eta = fields.DateTime(Required = False, allow_none=True)
voyage = fields.Str(allow_none=True, metadata={'Required':False}) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. Additional metadata: {'Required': False}
etd = fields.DateTime(Required = False, allow_none=True)
arrival_berth_id = fields.Int(Required = False, allow_none=True)
departure_berth_id = fields.Int(Required = False, allow_none=True)
tug_required = fields.Bool(Required = False, allow_none=True)
pilot_required = fields.Bool(Required = False, allow_none=True)
flags = fields.Int(Required = False, allow_none=True)
pier_side = fields.Bool(Required = False, allow_none=True)
bunkering = fields.Bool(Required = False, allow_none=True)
replenishing_terminal = fields.Bool(Required = False, allow_none=True)
replenishing_lock = fields.Bool(Required = False, allow_none=True)
draft = fields.Float(Required = False, allow_none=True)
tidal_window_from = fields.DateTime(Required = False, allow_none=True)
tidal_window_to = fields.DateTime(Required = False, allow_none=True)
rain_sensitive_cargo = fields.Bool(Required = False, allow_none=True)
recommended_tugs = fields.Int(Required = False, allow_none=True)
anchored = fields.Bool(Required = False, allow_none=True)
moored_lock = fields.Bool(Required = False, allow_none=True)
canceled = fields.Bool(Required = False, allow_none=True)
evaluation = fields.Enum(EvaluationType, required=False, allow_none=True, default=EvaluationType.undefined)
id = fields.Integer(metadata={'required':True})
ship_id = fields.Integer(metadata={'required':True})
#type = fields.Enum(ShipcallType, default=ShipcallType.undefined) # type = fields.Integer() # make enum: shipcall type. add validator
type = fields.Integer(metadata={'required':True}) # make enum: shipcall type. add validator
eta = fields.DateTime(metadata={'required':False}, allow_none=True)
voyage = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=16)]) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. Additional metadata: {'Required': False}
etd = fields.DateTime(metadata={'required':False}, allow_none=True)
arrival_berth_id = fields.Integer(metadata={'required':False}, allow_none=True)
departure_berth_id = fields.Integer(metadata={'required':False}, allow_none=True)
tug_required = fields.Bool(metadata={'required':False}, allow_none=True)
pilot_required = fields.Bool(metadata={'required':False}, allow_none=True)
flags = fields.Integer(metadata={'required':False}, allow_none=True)
pier_side = fields.Bool(metadata={'required':False}, allow_none=True)
bunkering = fields.Bool(metadata={'required':False}, allow_none=True)
replenishing_terminal = fields.Bool(metadata={'required':False}, allow_none=True)
replenishing_lock = fields.Bool(metadata={'required':False}, allow_none=True)
draft = fields.Float(metadata={'required':False}, allow_none=True, validate=[validate.Range(min=0, max=20, min_inclusive=False, max_inclusive=True)])
tidal_window_from = fields.DateTime(metadata={'required':False}, allow_none=True)
tidal_window_to = fields.DateTime(metadata={'required':False}, allow_none=True)
rain_sensitive_cargo = fields.Bool(metadata={'required':False}, allow_none=True)
recommended_tugs = fields.Integer(metadata={'required':False}, allow_none=True, validate=[validate.Range(min=0, max=10, min_inclusive=True, max_inclusive=True)])
anchored = fields.Bool(metadata={'required':False}, allow_none=True)
moored_lock = fields.Bool(metadata={'required':False}, allow_none=True)
canceled = fields.Bool(metadata={'required':False}, allow_none=True)
evaluation = fields.Enum(EvaluationType, metadata={'required':False}, allow_none=True, default=EvaluationType.undefined)
evaluation_message = fields.Str(allow_none=True, metadata={'Required':False}) # Solving: RemovedInMarshmallow4Warning: Passing field metadata as keyword arguments is deprecated. Use the explicit `metadata=...` argument instead. Additional metadata: {'Required': False}
evaluation_time = fields.DateTime(Required = False, allow_none=True)
evaluation_notifications_sent = fields.Bool(Required = False, allow_none=True)
time_ref_point = fields.Int(Required = False, allow_none=True)
evaluation_time = fields.DateTime(metadata={'required':False}, allow_none=True)
evaluation_notifications_sent = fields.Bool(metadata={'required':False}, allow_none=True)
time_ref_point = fields.Integer(metadata={'required':False}, allow_none=True)
participants = fields.List(fields.Nested(ParticipantAssignmentSchema))
created = fields.DateTime(Required = False, allow_none=True)
modified = fields.DateTime(Required = False, allow_none=True)
created = fields.DateTime(metadata={'required':False}, allow_none=True)
modified = fields.DateTime(metadata={'required':False}, allow_none=True)
@post_load
def make_shipcall(self, data, **kwargs):
if 'type' in data:
data['type_value'] = data['type'].value
data['type_value'] = int(data['type'])
else:
data['type_value'] = ShipcallType.undefined
data['type_value'] = int(ShipcallType.undefined)
if 'evaluation' in data:
if data['evaluation']:
data['evaluation_value'] = data['evaluation'].value
data['evaluation_value'] = int(data['evaluation'])
else:
data['evaluation_value'] = EvaluationType.undefined
data['evaluation_value'] = int(EvaluationType.undefined)
return data
@validates("type")
def validate_type(self, value):
valid_shipcall_type = int(value) in [item.value for item in ShipcallType]
if not valid_shipcall_type:
raise ValidationError(f"the provided type is not a valid shipcall type.")
@dataclass
class Participant_Assignment:
def __init__(self, participant_id, type):
@ -215,6 +267,9 @@ class Participant_Assignment:
participant_id: int
type: int # a variant would be to use the IntFlag type (with appropriate serialization)
def to_json(self):
return self.__dict__
@dataclass
class Shipcall:
@ -230,7 +285,7 @@ class Shipcall:
tug_required: bool
pilot_required: bool
flags: int
pier_side: bool
pier_side: bool # enumerator object in database/enum/PierSide
bunkering: bool
replenishing_terminal: bool
replenishing_lock: bool
@ -297,35 +352,91 @@ class ShipcallId(Schema):
# this is the way!
class TimesSchema(Schema):
def __init__(self):
super().__init__(unknown=None)
pass
id = fields.Int(Required=False)
eta_berth = fields.DateTime(Required = False, allow_none=True)
eta_berth_fixed = fields.Bool(Required = False, allow_none=True)
etd_berth = fields.DateTime(Required = False, allow_none=True)
etd_berth_fixed = fields.Bool(Required = False, allow_none=True)
lock_time = fields.DateTime(Required = False, allow_none=True)
lock_time_fixed = fields.Bool(Required = False, allow_none=True)
zone_entry = fields.DateTime(Required = False, allow_none=True)
zone_entry_fixed = fields.Bool(Required = False, allow_none=True)
operations_start = fields.DateTime(Required = False, allow_none=True)
operations_end = fields.DateTime(Required = False, allow_none=True)
remarks = fields.String(Required = False, allow_none=True)
participant_id = fields.Int(Required = True)
berth_id = fields.Int(Required = False, allow_none = True)
berth_info = fields.String(Required = False, allow_none=True)
pier_side = fields.Bool(Required = False, allow_none = True)
shipcall_id = fields.Int(Required = True)
participant_type = fields.Int(Required = False, allow_none=True)
ata = fields.DateTime(Required = False, allow_none=True)
atd = fields.DateTime(Required = False, allow_none=True)
eta_interval_end = fields.DateTime(Required = False, allow_none=True)
etd_interval_end = fields.DateTime(Required = False, allow_none=True)
created = fields.DateTime(Required = False, allow_none=True)
modified = fields.DateTime(Required = False, allow_none=True)
id = fields.Integer(metadata={'required':False})
eta_berth = fields.DateTime(metadata={'required':False}, allow_none=True)
eta_berth_fixed = fields.Bool(metadata={'required':False}, allow_none=True)
etd_berth = fields.DateTime(metadata={'required':False}, allow_none=True)
etd_berth_fixed = fields.Bool(metadata={'required':False}, allow_none=True)
lock_time = fields.DateTime(metadata={'required':False}, allow_none=True)
lock_time_fixed = fields.Bool(metadata={'required':False}, allow_none=True)
zone_entry = fields.DateTime(metadata={'required':False}, allow_none=True)
zone_entry_fixed = fields.Bool(metadata={'required':False}, allow_none=True)
operations_start = fields.DateTime(metadata={'required':False}, allow_none=True)
operations_end = fields.DateTime(metadata={'required':False}, allow_none=True)
remarks = fields.String(metadata={'required':False}, allow_none=True, validate=[validate.Length(max=512)])
participant_id = fields.Integer(metadata={'required':True})
berth_id = fields.Integer(metadata={'required':False}, allow_none = True)
berth_info = fields.String(metadata={'required':False}, allow_none=True, validate=[validate.Length(max=512)])
pier_side = fields.Bool(metadata={'required':False}, allow_none = True)
shipcall_id = fields.Integer(metadata={'required':True})
    participant_type = fields.Integer(metadata={'required':False}, allow_none=True) # TODO: could become Enum: fields.Enum(ParticipantType, metadata={'required':False}, allow_none=True, default=ParticipantType.undefined)
ata = fields.DateTime(metadata={'required':False}, allow_none=True)
atd = fields.DateTime(metadata={'required':False}, allow_none=True)
eta_interval_end = fields.DateTime(metadata={'required':False}, allow_none=True)
etd_interval_end = fields.DateTime(metadata={'required':False}, allow_none=True)
created = fields.DateTime(metadata={'required':False}, allow_none=True)
modified = fields.DateTime(metadata={'required':False}, allow_none=True)
@validates("participant_type")
def validate_participant_type(self, value):
# #TODO: it may also make sense to block multi-assignments, whereas a value could be BSMD+AGENCY
# while the validation fails when one of those multi-assignments is BSMD, it passes in cases,
# such as AGENCY+PILOT
# a participant type should not be .BSMD
if not isinstance(value, ParticipantType):
value = ParticipantType(value)
if ParticipantType.BSMD in value:
raise ValidationError(f"the participant_type must not be .BSMD")
@validates("eta_berth")
def validate_eta_berth(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
@validates("etd_berth")
def validate_etd_berth(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
@validates("lock_time")
def validate_lock_time(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
@validates("zone_entry")
def validate_zone_entry(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
@validates("operations_start")
def validate_operations_start(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
@validates("operations_end")
def validate_operations_end(self, value):
# violation when time is not in the future, but also does not exceed a threshold for the 'reasonable' future
# when 'value' is 'None', a ValidationError is not issued.
valid_time = validate_time_is_in_not_too_distant_future(raise_validation_error=True, value=value, months=12)
return
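# Hedged sketch (assumption; the helper lives in the validators package and is not shown in this
# diff): validate_time_is_in_not_too_distant_future is expected to ignore None and to raise a
# ValidationError when the value lies in the past or more than `months` months ahead, roughly:
#
# def validate_time_is_in_not_too_distant_future(value, months=12, raise_validation_error=True):
#     if value is None:
#         return True
#     now = datetime.datetime.now()
#     upper_bound = now + datetime.timedelta(days=31 * months)
#     valid = now <= value <= upper_bound
#     if not valid and raise_validation_error:
#         raise ValidationError(f"datetime must lie between now and roughly {months} months ahead: {value}")
#     return valid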
# deserialize PUT object target
@ -333,13 +444,26 @@ class UserSchema(Schema):
def __init__(self):
super().__init__(unknown=None)
pass
id = fields.Int(required=True)
first_name = fields.Str(allow_none=True, metadata={'Required':False})
last_name = fields.Str(allow_none=True, metadata={'Required':False})
user_phone = fields.Str(allow_none=True, metadata={'Required':False})
user_email = fields.Str(allow_none=True, metadata={'Required':False})
old_password = fields.Str(allow_none=True, metadata={'Required':False})
new_password = fields.Str(allow_none=True, metadata={'Required':False})
id = fields.Integer(metadata={'required':True})
first_name = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)])
last_name = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)])
user_phone = fields.String(allow_none=True, metadata={'Required':False})
user_email = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=64)])
old_password = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(max=128)])
new_password = fields.String(allow_none=True, metadata={'Required':False}, validate=[validate.Length(min=6, max=128)])
# #TODO: the user schema does not (yet) include the 'notify_' fields
@validates("user_phone")
def validate_user_phone(self, value):
valid_characters = list(map(str,range(0,10)))+["+", " "]
if not all([v in valid_characters for v in value]):
raise ValidationError(f"one of the phone number values is not valid.")
@validates("user_email")
def validate_user_email(self, value):
if not "@" in value:
raise ValidationError(f"invalid email address")
@dataclass
class Times:
@ -380,10 +504,10 @@ class User:
user_phone: str
password_hash: str
api_key: str
notify_email: bool
notify_whatsapp: bool
notify_signal: bool
notify_popup: bool
notify_email: bool # #TODO_clarify: should we use an IntFlag for multi-assignment?
notify_whatsapp: bool # #TODO_clarify: should we use an IntFlag for multi-assignment?
notify_signal: bool # #TODO_clarify: should we use an IntFlag for multi-assignment?
notify_popup: bool # #TODO_clarify: should we use an IntFlag for multi-assignment?
created: datetime
modified: datetime
@ -409,13 +533,13 @@ class ShipSchema(Schema):
super().__init__(unknown=None)
pass
id = fields.Int(Required=False)
id = fields.Int(metadata={'required':False})
name = fields.String(allow_none=False, metadata={'Required':True})
imo = fields.Int(allow_none=False, metadata={'Required':True})
callsign = fields.String(allow_none=True, metadata={'Required':False})
participant_id = fields.Int(allow_none=True, metadata={'Required':False})
length = fields.Float(allow_none=True, metadata={'Required':False})
width = fields.Float(allow_none=True, metadata={'Required':False})
length = fields.Float(allow_none=True, metadata={'Required':False}, validate=[validate.Range(min=0, max=1000, min_inclusive=False, max_inclusive=False)])
width = fields.Float(allow_none=True, metadata={'Required':False}, validate=[validate.Range(min=0, max=100, min_inclusive=False, max_inclusive=False)])
is_tug = fields.Bool(allow_none=True, metadata={'Required':False}, default=False)
bollard_pull = fields.Int(allow_none=True, metadata={'Required':False})
eni = fields.Int(allow_none=True, metadata={'Required':False})
@ -423,6 +547,34 @@ class ShipSchema(Schema):
modified = fields.DateTime(allow_none=True, metadata={'Required':False})
deleted = fields.Bool(allow_none=True, metadata={'Required':False}, default=False)
@validates("name")
def validate_name(self, value):
character_length = len(str(value))
        if character_length >= 64:
            raise ValidationError("'name' argument may contain at most 63 characters")
if check_if_string_has_special_characters(value):
raise ValidationError(f"'name' argument should not have special characters.")
return
@validates("imo")
def validate_imo(self, value):
imo_length = len(str(value))
if imo_length != 7:
raise ValidationError(f"'imo' should be a 7-digit number")
return
@validates("callsign")
def validate_callsign(self, value):
if value is not None:
callsign_length = len(str(value))
if callsign_length>8:
raise ValidationError(f"'callsign' argument should not have more than 8 characters")
if check_if_string_has_special_characters(value):
raise ValidationError(f"'callsign' argument should not have special characters.")
return
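# Hedged sketch (assumption; check_if_string_has_special_characters comes from
# BreCal.validators.validation_base_utils and is not shown in this diff): it presumably returns
# True when the text contains characters outside a small whitelist, roughly:
#
# from string import ascii_letters, digits
# def check_if_string_has_special_characters(text, allowed=ascii_letters + digits + " "):
#     return any(ch not in allowed for ch in str(text))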
class TimesId(Schema):
pass
@ -442,3 +594,22 @@ class Shipcalls(Shipcall):
class TimesList(Times):
pass
@dataclass
class ShipcallParticipantMap:
id: int
shipcall_id: int
participant_id: int
type : ShipcallType
created: datetime
modified: datetime
def to_json(self):
return {
"id": self.id,
"shipcall_id": self.shipcall_id,
"participant_id": self.participant_id,
"type": self.type.name,
"created": self.created.isoformat() if self.created else "",
"modified": self.modified.isoformat() if self.modified else "",
}

View File

@ -9,6 +9,7 @@ def check_jwt():
if not token:
raise Exception('Missing access token')
jwt = token.split('Bearer ')[1]
try:
return decode_jwt(jwt)
except Exception as e:

View File

@ -0,0 +1,174 @@
import os
import typing
import smtplib
from getpass import getpass
from email.message import EmailMessage
import mimetypes
import email
# from email.mime.base import MIMEBase
# from email.mime.image import MIMEImage
# from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
class EmailHandler():
"""
Creates an EmailHandler, which is capable of connecting to a mail server at a respective port,
as well as logging into a specific user's mail address.
Upon creating messages, these can be sent via this handler.
Options:
        mail_server: address of the server, such as 'smtp.gmail.com' or 'w01d5503.kasserver.com'
mail_port:
25 - SMTP Port, to send emails
110 - POP3 Port, to receive emails
143 - IMAP Port, to receive from IMAP
465 - SSL Port of SMTP
587 - alternative SMTP Port
993 - SSL/TLS-Port of IMAP
995 - SSL/TLS-Port of POP3
mail_address: a specific user's Email address, which will be used to send Emails. Example: "my_user@gmail.com"
"""
def __init__(self, mail_server:str, mail_port:int, mail_address:str):
self.mail_server = mail_server
self.mail_port = mail_port
self.mail_address = mail_address
self.server = smtplib.SMTP_SSL(self.mail_server, self.mail_port) # alternatively, SMTP
def check_state(self):
"""check, whether the server login took place and is open."""
try:
(status_code, status_msg) = self.server.noop()
return status_code==250 # 250: b'2.0.0 Ok'
except smtplib.SMTPServerDisconnected:
return False
def check_connection(self):
"""check, whether the server object is connected to the server. If not, connect it. """
try:
self.server.ehlo()
except smtplib.SMTPServerDisconnected:
self.server.connect(self.mail_server, self.mail_port)
return
def check_login(self)->bool:
"""check, whether the server object is logged in as a user"""
user = self.server.__dict__.get("user",None)
return user is not None
def login(self, interactive:bool=True):
"""
        log in to the configured mail address on the mail server. By default, this function prompts
        interactively for the password via getpass, without echoing the typed characters.
returns (status_code, status_msg)
"""
self.check_connection()
if interactive:
(status_code, status_msg) = self.server.login(self.mail_address, password=getpass())
else:
# fernet + password file
raise NotImplementedError()
return (status_code, status_msg) # should be: (235, b'2.7.0 Authentication successful')
def create_email(self, subject:str, message_body:str)->EmailMessage:
"""
Create an EmailMessage object, which contains the Email's header ("Subject"), content ("Message Body") and the sender's address ("From").
The EmailMessage object does not contain the recipients yet, as these will be defined upon sending the Email.
"""
msg = EmailMessage()
msg["Subject"] = subject
msg["From"] = self.mail_address
#msg["To"] = email_tgts # will be defined in self.send_email
msg.set_content(message_body)
return msg
def build_recipients(self, email_tgts:list[str]):
"""
        email headers do not accept Python lists. Instead, the addresses are joined into a comma-space-separated string.
Example:
            ['mail1@mail.com', 'mail2@mail.com'] becomes
            'mail1@mail.com, mail2@mail.com'
"""
return ', '.join(email_tgts)
    def open_mime_application(self, path:str)->MIMEApplication:
        """open a local file, read the bytes into a MIMEApplication object, which is built with the proper subtype (based on the file extension)"""
        # mimetypes.guess_type returns a (type, encoding) tuple; only the subtype part (e.g. 'pdf') is passed to MIMEApplication
        mime_type, _ = mimetypes.guess_type(path)
        subtype = mime_type.split('/')[-1] if mime_type else 'octet-stream'
        with open(path, 'rb') as file:
            attachment = MIMEApplication(file.read(), _subtype=subtype)
        attachment.add_header('Content-Disposition','attachment',filename=str(os.path.basename(path)))
        return attachment
def attach_file(self, path:str, msg:email.mime.multipart.MIMEMultipart)->None:
"""
attach a file to the message. This function opens the file, reads its bytes, defines the mime type by the
path extension. The filename is appended as the header.
mimetypes: # https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types/Common_types
"""
attachment = self.open_mime_application(path)
msg.attach(attachment)
return
def send_email(self, msg:EmailMessage, email_tgts:list[str], cc_tgts:typing.Optional[list[str]]=None, bcc_tgts:typing.Optional[list[str]]=None, debug:bool=False)->typing.Union[dict,EmailMessage]:
"""
send a prepared email message to recipients (email_tgts), copy (cc_tgts) and blind copy (bcc_tgts).
Returns a dictionary of feedback, which is commonly empty and the EmailMessage.
        On failure, this function raises an SMTP exception instead of returning the default outputs.
"""
# Set the Recipients
msg["To"] = self.build_recipients(email_tgts)
# optionally, add CC and BCC (copy and blind-copy)
if cc_tgts is not None:
msg["Cc"] = self.build_recipients(cc_tgts)
if bcc_tgts is not None:
msg["Bcc"] = self.build_recipients(bcc_tgts)
# when debugging, do not send the Email, but return the EmailMessage.
if debug:
return {}, msg
assert self.check_login(), f"currently not logged in. Cannot send an Email. Make sure to properly use self.login first. "
# send the prepared EmailMessage via the server.
feedback = self.server.send_message(msg)
return feedback, msg
def translate_mail_to_multipart(self, msg:EmailMessage):
"""EmailMessage does not support HTML and attachments. Hence, one can convert an EmailMessage object."""
if msg.is_multipart():
return msg
# create a novel MIMEMultipart email
msg_new = MIMEMultipart("mixed")
headers = list((k, v) for (k, v) in msg.items() if k not in ("Content-Type", "Content-Transfer-Encoding"))
# add the headers of msg to the new message
for k,v in headers:
msg_new[k] = v
# delete the headers from msg
for k,v in headers:
del msg[k]
# attach the remainder of the msg, such as the body, to the MIMEMultipart
msg_new.attach(msg)
return msg_new
def print_email_attachments(self, msg:MIMEMultipart)->list[str]:
"""return a list of lines of an Email, which contain 'filename=' as a list. """
return [line_ for line_ in msg.as_string().split("\n") if "filename=" in line_]
def close(self):
self.server.__dict__.pop("user",None)
self.server.__dict__.pop("password",None)
# quit the server connection (internally uses .close)
self.server.quit()
return
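# Hedged usage sketch (illustrative server, port and addresses only, not part of this commit):
#
# handler = EmailHandler("smtp.example.com", 465, "sender@example.com")
# handler.login(interactive=True)                                   # prompts for the password via getpass
# msg = handler.create_email("BreCal notification", "hello world")
# feedback, msg = handler.send_email(msg, ["recipient@example.com"], debug=True)  # debug=True: nothing is sent
# handler.close()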

View File

@ -7,11 +7,41 @@ def create_api_key():
return secrets.token_urlsafe(16)
def generate_jwt(payload, lifetime=None):
"""
creates an encoded token, which is based on the 'SECRET_KEY' environment variable. The environment variable
is set when the .wsgi application is started or can theoretically be set on system-level.
args:
payload:
json-dictionary with key:value pairs.
lifetime:
When a 'lifetime' (integer) is provided, the payload will be extended by an expiration key 'exp', which is
valid for the next {lifetime} minutes.
returns: token, a JWT-encoded string
"""
if lifetime:
payload['exp'] = (datetime.datetime.now() + datetime.timedelta(minutes=lifetime)).timestamp()
return jwt.encode(payload, os.environ.get('SECRET_KEY'), algorithm="HS256")
def decode_jwt(token):
"""
this function reverts the {generate_jwt} function. An encoded JWT token is decoded into a JSON dictionary.
The function is commonly used to decode a login-token and obtain a 'user_data' variable, which is a dictionary.
Example of 'user_data':
{
'id': 1,
'participant_id': 1,
'first_name': 'Firstname',
'last_name': 'Lastname',
'user_name': 'xUsername01',
'user_phone': '+01 123 456 7890',
'user_email': 'firstname.lastname@internet.com',
'exp': 1716881626.056438 # expiration timestamp
}
"""
return jwt.decode(token, os.environ.get('SECRET_KEY'), algorithms=["HS256"])
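# Hedged round-trip sketch (requires the SECRET_KEY environment variable to be set):
#
# token = generate_jwt({"id": 1, "participant_id": 1}, lifetime=60)
# payload = decode_jwt(token)
# assert payload["id"] == 1 and "exp" in payload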

View File

@ -3,6 +3,7 @@ import pydapper
from BreCal.schemas import model
from BreCal.local_db import getPoolConnection
from BreCal.database.update_database import evaluate_shipcall_state
from BreCal.database.sql_queries import create_sql_query_shipcall_get
import threading
import schedule
@ -26,16 +27,8 @@ def UpdateShipcalls(options:dict = {'past_days':2}):
pooledConnection = getPoolConnection()
commands = pydapper.using(pooledConnection)
query = ("SELECT s.id as id, ship_id, type, eta, voyage, etd, arrival_berth_id, departure_berth_id, tug_required, pilot_required, "
"flags, s.pier_side, bunkering, replenishing_terminal, replenishing_lock, draft, tidal_window_from, tidal_window_to, rain_sensitive_cargo, recommended_tugs, "
"anchored, moored_lock, canceled, evaluation, evaluation_message, evaluation_notifications_sent, evaluation_time, s.created as created, s.modified as modified, time_ref_point FROM shipcall s " +
"LEFT JOIN times t ON t.shipcall_id = s.id AND t.participant_type = 8 "
"WHERE "
"(type = 1 AND (COALESCE(t.eta_berth, eta) >= DATE(NOW() - INTERVAL %d DAY))) OR "
"((type = 2 OR type = 3) AND (COALESCE(t.etd_berth, etd) >= DATE(NOW() - INTERVAL %d DAY)))"
"ORDER BY s.id") % (options["past_days"], options["past_days"])
# obtain data from the MYSQL database
# obtain data from the MYSQL database (uses 'options' to filter the resulting data by the ETA, considering those entries of 'past_days'-range)
query = create_sql_query_shipcall_get(options)
data = commands.query(query, model=model.Shipcall)
# get the shipcall ids, which are of interest
@ -57,6 +50,11 @@ def add_function_to_schedule__update_shipcalls(interval_in_minutes:int, options:
schedule.every(interval_in_minutes).minutes.do(UpdateShipcalls, **kwargs_)
return
def add_function_to_schedule__send_notifications(vr, interval_in_minutes:int=10):
schedule.every(interval_in_minutes).minutes.do(vr.notifier.send_notifications)
return
def setup_schedule(update_shipcalls_interval_in_minutes:int=60):
    logging.getLogger('schedule').setLevel(logging.INFO)  # set the logging level of the schedule module to INFO

View File

@ -7,7 +7,6 @@ def get_notification_simple():
"""creates a default notification, where 'created' is now, and modified is now+10 seconds"""
notification_id = generate_uuid1_int() # uid?
times_id = generate_uuid1_int() # uid?
acknowledged = False
level = 10
type = 0
message = "hello world"
@ -17,7 +16,6 @@ def get_notification_simple():
notification = Notification(
notification_id,
times_id,
acknowledged,
level,
type,
message,

View File

@ -30,3 +30,8 @@ def get_participant_simple():
deleted
)
return participant
def get_stub_list_of_valid_participants():
participants = [{'participant_id': 2, 'type': 4}, {'participant_id': 3, 'type': 1}, {'participant_id': 4, 'type': 2}, {'participant_id': 5, 'type': 8}]
return participants

View File

@ -36,3 +36,27 @@ def get_ship_simple():
)
return ship
def get_stub_valid_ship():
post_data = {
'name': 'BOTHNIABORG',
'imo': 9267728,
'callsign': "PBIO",
'participant_id': None,
'length': 153.05,
'width': 21.8,
'is_tug': 0,
'bollard_pull': None,
'eni': None,
'created': '2023-10-04 11:52:32',
'modified': None,
'deleted': 0
}
return post_data
def get_stub_valid_ship_loaded_model(post_data=None):
from BreCal.schemas import model
if post_data is None:
post_data = get_stub_valid_ship()
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return loadedModel

View File

@ -3,20 +3,25 @@ from BreCal.stubs import generate_uuid1_int
from BreCal.schemas.model import Shipcall
from dataclasses import field
import json
import datetime
from BreCal.schemas.model import ShipcallType
from BreCal.stubs.participant import get_stub_list_of_valid_participants
def get_shipcall_simple():
# only used for the stub
base_time = datetime.datetime.now()
shipcall_id = generate_uuid1_int()
ship_id = generate_uuid1_int()
shipcall_id = 124 # generate_uuid1_int()
ship_id = 5 # generate_uuid1_int()
eta = base_time+datetime.timedelta(hours=3, minutes=12)
role_type = 1
voyage = "987654321"
etd = base_time+datetime.timedelta(hours=6, minutes=12) # should never be before eta
arrival_berth_id = generate_uuid1_int()
departure_berth_id = generate_uuid1_int()
arrival_berth_id = 140 #generate_uuid1_int()
departure_berth_id = 140 #generate_uuid1_int()
tug_required = False
pilot_required = False
@ -37,11 +42,13 @@ def get_shipcall_simple():
recommended_tugs = 2 # assert 0<recommended_tugs<={threshold}. E.g., 20 should not be exceeded.
anchored = False
moored_lock = False # de: 'Festmacherschleuse', en: 'moored lock'
canceled = False
time_ref_point = 0
evaluation = None
evaluation_message = ""
evaluation_time = None
evaluation_time = datetime.datetime.now()
evaluation_notifications_sent = False
created = datetime.datetime.now()
modified = created+datetime.timedelta(seconds=10)
@ -76,10 +83,125 @@ def get_shipcall_simple():
evaluation_message,
evaluation_time,
evaluation_notifications_sent,
time_ref_point,
created,
modified,
participants,
)
return shipcall
def create_postman_stub_shipcall():
"""
this function returns the common stub, which is used to POST data to shipcalls via POSTMAN. However,
the stub-function is updated with a dynamic ETA in the future, so the POST-request does not fail.
Also provides a stub arrival_berth_id, so the POST-request succeeds.
"""
shipcall = {
'ship_id': 1,
'type': 1,
'eta': (datetime.datetime.now()+datetime.timedelta(hours=3)).isoformat(),
'voyage': '43B',
'arrival_berth_id':142,
'tug_required': False,
'pilot_required': True,
'flags': 0,
'pier_side': False,
'bunkering': True,
'recommended_tugs': 2
}
return shipcall
def get_stub_valid_shipcall_base():
tidal_window_from = (datetime.datetime.now()+datetime.timedelta(minutes=15)).isoformat()
tidal_window_to = (datetime.datetime.now()+datetime.timedelta(minutes=115)).isoformat()
shipcall_base = {
'ship_id': 1,
'voyage': '43B',
'tug_required': False,
'pilot_required': True,
'flags': 0,
'pier_side': False,
'bunkering': True,
'recommended_tugs': 2,
'tidal_window_from' : tidal_window_from,
'tidal_window_to' : tidal_window_to
}
return shipcall_base
def get_stub_valid_shipcall_arrival():
eta = (datetime.datetime.now()+datetime.timedelta(minutes=45)).isoformat()
post_data = {
**get_stub_valid_shipcall_base(),
**{
'type': int(ShipcallType.arrival),
'eta': eta,
'participants':get_stub_list_of_valid_participants(),
'arrival_berth_id':139,
}
}
return post_data
def get_stub_valid_shipcall_departure():
etd = (datetime.datetime.now()+datetime.timedelta(minutes=45)).isoformat()
post_data = {
**get_stub_valid_shipcall_base(),
**{
'type': int(ShipcallType.departure),
'etd': etd,
'participants':get_stub_list_of_valid_participants(),
'departure_berth_id':139,
}
}
return post_data
def get_stub_valid_shipcall_shifting():
eta = (datetime.datetime.now()+datetime.timedelta(minutes=45)).isoformat()
etd = (datetime.datetime.now()+datetime.timedelta(minutes=60)).isoformat()
post_data = {
**get_stub_valid_shipcall_base(),
**{
'type': int(ShipcallType.shifting),
'eta': eta,
'etd': etd,
'participants':get_stub_list_of_valid_participants(),
'arrival_berth_id':139,
'departure_berth_id':139,
}
}
return post_data
def get_stub_shipcall_arrival_invalid_missing_eta():
post_data = get_stub_valid_shipcall_arrival()
post_data.pop("eta", None)
return post_data
def get_stub_shipcall_departure_invalid_missing_etd():
post_data = get_stub_valid_shipcall_departure()
post_data.pop("etd", None)
return post_data
def get_stub_shipcall_shifting_invalid_missing_eta():
post_data = get_stub_valid_shipcall_shifting()
post_data.pop("eta", None)
return post_data
def get_stub_shipcall_shifting_invalid_missing_etd():
post_data = get_stub_valid_shipcall_shifting()
post_data.pop("etd", None)
return post_data
def get_stub_shipcall_arrival_invalid_missing_type():
post_data = get_stub_valid_shipcall_arrival()
post_data.pop("type", None)
return post_data
def get_stub_valid_ship_loaded_model(post_data):
from BreCal.schemas import model
loadedModel = model.ShipcallSchema().load(data=post_data, many=False, partial=True)
return loadedModel

View File

@ -5,10 +5,12 @@ users will thereby be able to modify these values
import datetime
from BreCal.stubs import generate_uuid1_int
from BreCal.schemas.model import Times
from BreCal.schemas.model import Times, ParticipantType
from BreCal.database.sql_utils import get_user_data_for_id
def get_times_full_simple():
def get_times_full_simple(return_dataclass=True):
# only used for the stub
base_time = datetime.datetime.now()
@ -28,6 +30,11 @@ def get_times_full_simple():
zone_entry = etd_berth+datetime.timedelta(hours=0, minutes=15)
zone_entry_fixed = False
ata = eta_berth+datetime.timedelta(hours=0, minutes=15)
atd = etd_berth+datetime.timedelta(hours=0, minutes=15)
eta_interval_end = eta_berth + datetime.timedelta(hours=0, minutes=25)
etd_interval_end = etd_berth + datetime.timedelta(hours=0, minutes=25)
operations_start = zone_entry+datetime.timedelta(hours=1, minutes=30)
operations_end = operations_start+datetime.timedelta(hours=4, minutes=30)
@ -44,6 +51,7 @@ def get_times_full_simple():
created = datetime.datetime.now()
modified = created+datetime.timedelta(seconds=10)
if return_dataclass:
times = Times(
id=times_id,
eta_berth=eta_berth,
@ -63,9 +71,66 @@ def get_times_full_simple():
pier_side=pier_side,
participant_type=participant_type,
shipcall_id=shipcall_id,
ata=ata,
atd=atd,
eta_interval_end=eta_interval_end,
etd_interval_end=etd_interval_end,
created=created,
modified=modified,
)
else:
times = dict(
id=times_id,
eta_berth=eta_berth,
eta_berth_fixed=eta_berth_fixed,
etd_berth=etd_berth,
etd_berth_fixed=etd_berth_fixed,
lock_time=lock_time,
lock_time_fixed=lock_time_fixed,
zone_entry=zone_entry,
zone_entry_fixed=zone_entry_fixed,
operations_start=operations_start,
operations_end=operations_end,
remarks=remarks,
participant_id=participant_id,
berth_id=berth_id,
berth_info=berth_info,
pier_side=pier_side,
participant_type=participant_type,
shipcall_id=shipcall_id,
ata=ata,
atd=atd,
eta_interval_end=eta_interval_end,
etd_interval_end=etd_interval_end,
created=created,
modified=modified,)
times = {k:v.isoformat() if isinstance(v, datetime.datetime) else v for k,v in times.items()}
return times
def get_valid_stub_times():
"""create a stub entry for a times dataset, which is valid"""
times_entry = get_times_full_simple(return_dataclass=False)
times_entry.pop('id',None)
times_entry["participant_id"] = 136
times_entry["participant_type"] = int(ParticipantType.PILOT)
times_entry["shipcall_id"] = 222
times_entry["berth_id"] = 143
times_entry["remarks"] = "stub entry."
return times_entry
def get_valid_stub_times_loaded_model(post_data=None):
from BreCal.schemas import model
if post_data is None:
post_data = get_valid_stub_times()
loadedModel = model.TimesSchema().load(data=post_data, many=False, partial=True)
return loadedModel
def get_valid_stub_for_pytests(user_id:int=3):
user_data = get_user_data_for_id(user_id=user_id)
post_data = get_valid_stub_times()
content = post_data
loadedModel = get_valid_stub_times_loaded_model(post_data=post_data)
return user_data, loadedModel, content

View File

@ -19,6 +19,11 @@ def get_user_simple():
created = datetime.datetime.now()
modified = created+datetime.timedelta(seconds=10)
notify_email = True
notify_whatsapp = True
notify_signal = True
notify_popup = True
user = User(
user_id,
participant_id,
@ -29,6 +34,10 @@ def get_user_simple():
user_phone,
password_hash,
api_key,
notify_email,
notify_whatsapp,
notify_signal,
notify_popup,
created,
modified
)

View File

@ -1,8 +1,126 @@
####################################### InputValidation #######################################
import json
import datetime
from abc import ABC, abstractmethod
from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant
from marshmallow import ValidationError
from string import ascii_letters, digits
from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType
from BreCal.impl.participant import GetParticipant
from BreCal.impl.ships import GetShips
from BreCal.impl.berths import GetBerths
from BreCal.database.enums import ParticipantType
from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, get_participant_id_dictionary, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, get_berth_id_dictionary, get_ship_id_dictionary
from BreCal.validators.validation_base_utils import check_if_string_has_special_characters
def validation_error_default_asserts(response):
"""creates assertions, when the response does not fail as expected. This function is extensively used in the input validation pytests"""
assert response.status_code == 400
assert 'message' in list(response.json().keys())
return
def validate_posted_shipcall_data(user_data:dict, loadedModel:dict, content:dict):
"""this function applies more complex validation functions to data, which is sent to a post-request of shipcalls"""
# DEPRECATED: this function has been refactored into InputValidationShipcall (see methods for POST and PUT evaluation)
# #TODO_refactor: this function is pretty complex. One may instead build an object, which calls the methods separately.
##### Section 1: check user_data #####
# check, whether the user belongs to a participant, which is of type ParticipantType.BSMD
# as ParticipantType is an IntFlag, a user belonging to multiple groups is properly evaluated.
is_bsmd = check_if_user_is_bsmd_type(user_data)
if not is_bsmd:
raise ValidationError(f"current user does not belong to BSMD. Cannot post shipcalls. Found user data: {user_data}")
##### Section 2: check loadedModel #####
valid_ship_id = check_if_ship_id_is_valid(ship_id=loadedModel.get("ship_id", None))
if not valid_ship_id:
raise ValidationError(f"provided an invalid ship id, which is not found in the database: {loadedModel.get('ship_id', None)}")
valid_arrival_berth_id = check_if_berth_id_is_valid(berth_id=loadedModel.get("arrival_berth_id", None))
if not valid_arrival_berth_id:
raise ValidationError(f"provided an invalid arrival berth id, which is not found in the database: {loadedModel.get('arrival_berth_id', None)}")
valid_departure_berth_id = check_if_berth_id_is_valid(berth_id=loadedModel.get("departure_berth_id", None))
if not valid_departure_berth_id:
raise ValidationError(f"provided an invalid departure berth id, which is not found in the database: {loadedModel.get('departure_berth_id', None)}")
valid_participant_ids = check_if_participant_ids_are_valid(participants=loadedModel.get("participants",[]))
if not valid_participant_ids:
raise ValidationError(f"one of the provided participant ids is invalid. Could not find one of these in the database: {loadedModel.get('participants', None)}")
##### Section 3: check content #####
# loadedModel fills missing values, sometimes using optional values. Hence, check content
# the following keys should not be set in a POST-request.
for forbidden_key in ["canceled", "evaluation", "evaluation_message"]:
value = content.get(forbidden_key, None)
if value is not None:
raise ValidationError(f"'{forbidden_key}' may not be set on POST. Found: {value}")
voyage_str_is_invalid = check_if_string_has_special_characters(text=content.get("voyage",""))
if voyage_str_is_invalid:
raise ValidationError(f"there are invalid characters in the 'voyage'-string. Please use only digits and ASCII letters. Allowed: {ascii_letters+digits}. Found: {content.get('voyage')}")
##### Section 4: check loadedModel & content #####
# #TODO_refactor: these methods should be placed in separate locations
    # existence checks in content
# datetime checks in loadedModel (datetime.datetime objects). Dates should be in the future.
time_now = datetime.datetime.now()
type_ = loadedModel.get("type", int(ShipcallType.undefined))
if int(type_)==int(ShipcallType.undefined):
raise ValidationError(f"providing 'type' is mandatory. Missing key!")
elif int(type_)==int(ShipcallType.arrival):
eta = loadedModel.get("eta")
if (content.get("eta", None) is None):
raise ValidationError(f"providing 'eta' is mandatory. Missing key!")
if content.get("arrival_berth_id", None) is None:
raise ValidationError(f"providing 'arrival_berth_id' is mandatory. Missing key!")
if not eta >= time_now:
raise ValidationError(f"'eta' must be in the future. Incorrect datetime provided.")
elif int(type_)==int(ShipcallType.departure):
etd = loadedModel.get("etd")
if (content.get("etd", None) is None):
raise ValidationError(f"providing 'etd' is mandatory. Missing key!")
if content.get("departure_berth_id", None) is None:
raise ValidationError(f"providing 'departure_berth_id' is mandatory. Missing key!")
if not etd >= time_now:
raise ValidationError(f"'etd' must be in the future. Incorrect datetime provided.")
elif int(type_)==int(ShipcallType.shifting):
eta = loadedModel.get("eta")
etd = loadedModel.get("etd")
# * arrival_berth_id / departure_berth_id (depending on type, see above)
if (content.get("eta", None) is None) or (content.get("etd", None) is None):
raise ValidationError(f"providing 'eta' and 'etd' is mandatory. Missing one of those keys!")
if (content.get("arrival_berth_id", None) is None) or (content.get("departure_berth_id", None) is None):
raise ValidationError(f"providing 'arrival_berth_id' & 'departure_berth_id' is mandatory. Missing key!")
        if (not eta >= time_now) or (not etd >= time_now) or (not etd >= eta):
            raise ValidationError(f"'eta' and 'etd' must be in the future and 'etd' may not lie before 'eta'. Incorrect datetime provided.")
tidal_window_from = loadedModel.get("tidal_window_from", None)
tidal_window_to = loadedModel.get("tidal_window_to", None)
if tidal_window_to is not None:
if not tidal_window_to >= time_now:
raise ValidationError(f"'tidal_window_to' must be in the future. Incorrect datetime provided.")
if tidal_window_from is not None:
if not tidal_window_from >= time_now:
raise ValidationError(f"'tidal_window_from' must be in the future. Incorrect datetime provided.")
    # #TODO: assert tidal_window_to > tidal_window_from
# #TODO: len of participants > 0, if agency
# * assigned participant for agency
return
class InputValidation():
def __init__(self):

View File

@ -0,0 +1,140 @@
import typing
import json
import datetime
from abc import ABC, abstractmethod
from marshmallow import ValidationError
from string import ascii_letters, digits
from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType
from BreCal.impl.participant import GetParticipant
from BreCal.impl.ships import GetShips
from BreCal.impl.berths import GetBerths
from BreCal.database.enums import ParticipantType, ParticipantFlag
from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data
from BreCal.database.sql_handler import execute_sql_query_standalone
from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag
from BreCal.validators.validation_base_utils import check_if_string_has_special_characters
import werkzeug
class InputValidationShip():
"""
This class combines a complex set of individual input validation functions into a joint object.
It uses static methods, so the object does not need to be instantiated, but functions can be called immediately.
Example:
InputValidationShip.evaluate(user_data, loadedModel, content)
When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues.
"""
def __init__(self) -> None:
pass
@staticmethod
def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict):
# 1.) Only users of type BSMD are allowed to POST
InputValidationShip.check_user_is_bsmd_type(user_data)
# 2.) The ship IMOs are used as matching keys. They must be unique in the database.
InputValidationShip.check_ship_imo_already_exists(loadedModel)
# 3.) Check for reasonable Values (see BreCal.schemas.model.ShipSchema)
InputValidationShip.optionally_evaluate_bollard_pull_value(content)
return
@staticmethod
def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict):
# 1.) Only users of type BSMD are allowed to PUT
InputValidationShip.check_user_is_bsmd_type(user_data)
# 2.) The IMO number field may not be changed
InputValidationShip.put_content_may_not_contain_imo_number(content)
# 3.) Check for reasonable Values (see BreCal.schemas.model.ShipSchema)
InputValidationShip.optionally_evaluate_bollard_pull_value(content)
# 4.) ID field is mandatory
InputValidationShip.content_contains_ship_id(content)
return
@staticmethod
def evaluate_delete_data(user_data:dict, ship_id:int):
# 1.) Only users of type BSMD are allowed to PUT
InputValidationShip.check_user_is_bsmd_type(user_data)
# 2.) The dataset entry may not be deleted already
InputValidationShip.check_if_entry_is_already_deleted(ship_id)
return
@staticmethod
def optionally_evaluate_bollard_pull_value(content:dict):
bollard_pull = content.get("bollard_pull",None)
is_tug = content.get("is_tug", None)
if bollard_pull is not None:
if not is_tug:
raise ValidationError(f"'bollard_pull' is only allowed, when a ship is a tug ('is_tug').")
            if (not (0 < bollard_pull < 500)) and is_tug:
raise ValidationError(f"when a ship is a tug, the bollard pull must be 0 < value < 500. ")
@staticmethod
def check_user_is_bsmd_type(user_data:dict):
is_bsmd = check_if_user_is_bsmd_type(user_data)
if not is_bsmd:
raise ValidationError(f"current user does not belong to BSMD. Cannot post, put or delete ships. Found user data: {user_data}")
@staticmethod
def check_ship_imo_already_exists(loadedModel:dict):
# get the ships, convert them to a list of JSON dictionaries
response, status_code, header = GetShips(token=None)
ships = json.loads(response)
# extract only the 'imo' values
ship_imos = [ship.get("imo") for ship in ships]
# check, if the imo in the POST-request already exists in the list
imo_already_exists = loadedModel.get("imo") in ship_imos
if imo_already_exists:
raise ValidationError(f"the provided ship IMO {loadedModel.get('imo')} already exists. A ship may only be added, if there is no other ship with the same IMO number.")
return
@staticmethod
def put_content_may_not_contain_imo_number(content:dict):
put_data_ship_imo = content.get("imo",None)
if put_data_ship_imo is not None:
raise ValidationError(f"The IMO number field may not be changed since it serves the purpose of a primary (matching) key.")
return
@staticmethod
def content_contains_ship_id(content:dict):
put_data_ship_id = content.get('id',None)
if put_data_ship_id is None:
raise ValidationError(f"The id field is required.")
return
@staticmethod
def check_if_entry_is_already_deleted(ship_id:int):
"""
        When calling a delete request for ships, the dataset may not already be deleted. This method
        makes sure that the request contains an ID, that the ID has a matching entry in the database, and
        that the database entry is not already marked as deleted.
"""
if ship_id is None:
raise ValidationError(f"The ship_id must be provided.")
response, status_code, header = GetShips(token=None)
ships = json.loads(response)
existing_database_entries = [ship for ship in ships if ship.get("id")==ship_id]
if len(existing_database_entries)==0:
raise ValidationError(f"Could not find a ship with the specified ID. Selected: {ship_id}")
existing_database_entry = existing_database_entries[0]
deletion_state = existing_database_entry.get("deleted",None)
if deletion_state:
raise ValidationError(f"The selected ship entry is already deleted.")
return
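# Hedged usage sketch (assumption: the ships POST route wires this validator in the same way the
# shipcalls route does; the variable names below mirror that route and are not guaranteed by this diff):
#
# user_data = check_jwt()
# loadedModel = model.ShipSchema().load(data=content, many=False, partial=True)
# InputValidationShip.evaluate_post_data(user_data, loadedModel, content)   # raises ValidationError on violation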

View File

@ -0,0 +1,398 @@
import typing
import json
import datetime
from abc import ABC, abstractmethod
from marshmallow import ValidationError
from string import ascii_letters, digits
from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType
from BreCal.impl.participant import GetParticipant
from BreCal.impl.ships import GetShips
from BreCal.impl.berths import GetBerths
from BreCal.database.enums import ParticipantType, ParticipantFlag
from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data
from BreCal.database.sql_handler import execute_sql_query_standalone
from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag
from BreCal.validators.validation_base_utils import check_if_string_has_special_characters
import werkzeug
class InputValidationShipcall():
"""
This class combines a complex set of individual input validation functions into a joint object.
It uses static methods, so the object does not need to be instantiated, but functions can be called immediately.
Example:
InputValidationShipcall.evaluate(user_data, loadedModel, content)
When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues.
"""
def __init__(self) -> None:
pass
@staticmethod
def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict):
"""
this function combines multiple validation functions to verify data, which is sent to the API as a shipcall's POST-request
checks:
1. permission: only participants that belong to the BSMD group are allowed to POST shipcalls
        2. reference checks: all referred objects within the Shipcall must exist
        3. existence of required fields
4. reasonable values: validates the values within the Shipcall
"""
# check for permission (only BSMD-type participants)
InputValidationShipcall.check_user_is_bsmd_type(user_data)
# check references (referred IDs must exist)
InputValidationShipcall.check_referenced_ids(loadedModel)
        # POST-request only: check the existence of required fields based on the ShipcallType
InputValidationShipcall.check_required_fields_exist_based_on_type(loadedModel, content)
        # POST-request only: check the existence of a participant list, when the user is of type agency
InputValidationShipcall.check_participant_list_not_empty_when_user_is_agency(loadedModel)
# check for reasonable values in the shipcall fields
InputValidationShipcall.check_shipcall_values(loadedModel, content, forbidden_keys=["canceled", "evaluation", "evaluation_message"])
return
@staticmethod
def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict):
"""
this function combines multiple validation functions to verify data, which is sent to the API as a shipcall's PUT-request
checks:
1. whether the user belongs to participant group type BSMD
2. users of the agency may edit the shipcall, when the shipcall-participant-map entry lists them
        3. existence of required fields
4. all value-rules of the POST evaluation
5. a canceled shipcall may not be changed
"""
# check for permission (only BSMD-type participants)
# #TODO: are both, bsmd and agency, user types accepted?
InputValidationShipcall.check_user_is_bsmd_type(user_data)
# check, whether an agency is listed in the shipcall-participant-map
InputValidationShipcall.check_agency_in_shipcall_participant_map(user_data, loadedModel, content)
# the ID field is required, all missing fields will be ignored in the update
InputValidationShipcall.check_required_fields_of_put_request(content)
# check for reasonable values in the shipcall fields and checks for forbidden keys.
InputValidationShipcall.check_shipcall_values(loadedModel, content, forbidden_keys=["evaluation", "evaluation_message"])
# a canceled shipcall cannot be selected
# Note: 'canceled' is allowed in PUT-requests, if it is not already set (which is checked by InputValidationShipcall.check_shipcall_is_cancel)
InputValidationShipcall.check_shipcall_is_canceled(loadedModel, content)
return
@staticmethod
def check_shipcall_values(loadedModel:dict, content:dict, forbidden_keys:list=["canceled", "evaluation", "evaluation_message"]):
"""
individually checks each value provided in the loadedModel/content.
This function validates, whether the values are reasonable.
Also, some data may not be set in a POST-request.
"""
# Note: BreCal.schemas.model.ShipcallSchema has an internal validation, which the marshmallow library provides. This is used
# to verify values individually, when the schema is loaded with data.
# This function focuses on more complex input validation, which may require more sophisticated methods
        # loadedModel fills missing values, sometimes using optional values. Hence, the 'content'-variable is preferred for some of these verifications
# voyage shall not contain special characters
voyage_str_is_invalid = check_if_string_has_special_characters(text=content.get("voyage",""))
if voyage_str_is_invalid:
raise ValidationError(f"there are invalid characters in the 'voyage'-string. Please use only digits and ASCII letters. Allowed: {ascii_letters+digits}. Found: {content.get('voyage')}")
# the 'flags' integer must be valid
flags_value = content.get("flags", 0)
        if not check_if_int_is_valid_flag(flags_value, enum_object=ParticipantFlag):
            raise ValidationError(f"incorrect value provided for 'flags'. Must be a valid combination of the flags.")
# time values must use future-dates
InputValidationShipcall.check_times_are_in_future(loadedModel, content)
# some arguments must not be provided
InputValidationShipcall.check_forbidden_arguments(content, forbidden_keys=forbidden_keys)
return
@staticmethod
def check_agency_in_shipcall_participant_map(user_data:dict, loadedModel:dict, content:dict, spm_shipcall_data:typing.Optional[list]=None):
"""
When the request is issued by a user of type 'AGENCY', there must be special caution. Agency users cannot self-assign as participants
of a shipcall. Further, when no AGENCY is assigned to the shipcall, a PUT-request is not feasible. In those cases, the
BSMD must first assign an agency, before a PUT-request can assign further participants.
Upon violation, this method issues 'Forbidden'-Exceptions with HTTP status code 403. There are four reasons for violations:
a) an agency tries to self-assign for a shipcall
b) there is no assigned agency for the current shipcall
c) an agency is assigned, but the current agency-user belongs to a different participant_id
d) the user must be of ParticipantType BSMD or AGENCY
args:
spm_shipcall_data:
a list of entries obtained from the ShipcallParticipantMap. These are deserialized dictionaries.
e.g., [{'participant_id': 136, 'type': 8}, ]
"""
if spm_shipcall_data is None:
# read the ShipcallParticipantMap entry of the current shipcall_id. This is used within the input validation of a PUT request
spm_shipcall_data = execute_sql_query_standalone(
query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?",
param={"shipcall_id":loadedModel["id"]},
pooledConnection=None
)
# which role should be set by the PUT request? If the agency is about to be set, an error will be created
# read the user data from the JWT token (set when login is performed)
user_type = get_participant_type_from_user_data(user_data) # decode JWT -> get 'type' value
# select the matching entries from the ShipcallParticipantMap
agency_entries = [spm_entry for spm_entry in spm_shipcall_data if int(spm_entry.get("type"))==int(ParticipantType.AGENCY)] # find all entries of type AGENCY (there should be at max. 1)
# when the request stems from an AGENCY user, and the user wants to PUT an AGENCY role, the request should fail
# boolean: check, whether any of the assigned participants is of type AGENCY
types = [participant.get("type") for participant in loadedModel["participants"]] # readout the participants from the loadedModel, which shall be assigned by the PUT request
any_type_is_agency = any([int(type_) == int(ParticipantType.AGENCY) for type_ in types]) # check, whether *any* of the participants is an agency
if not (int(user_type) in [int(ParticipantType.AGENCY), int(ParticipantType.BSMD)]):
# user not AGENCY or BSMD
raise werkzeug.exceptions.Forbidden(f"PUT Requests for shipcalls can only be issued by AGENCY or BSMD users.") # Forbidden: 403
if (int(user_type) == int(ParticipantType.AGENCY)) & (any_type_is_agency):
# self-assignment: agency sets agency participant
raise werkzeug.exceptions.Forbidden(f"An agency cannot self-register for a shipcall. The request is issued by an agency-user and tries to assign an AGENCY as the participant of the shipcall.") # Forbidden: 403
if len(agency_entries)>0:
# agency participant exists: participant id must be the same as shipcall participant map entry
matching_spm_entry = [spm_entry for spm_entry in spm_shipcall_data if (spm_entry.get("participant_id")==user_data["id"]) & (int(spm_entry.get("type"))==int(ParticipantType.AGENCY))]
if len(matching_spm_entry)==0:
# An AGENCY was found, but a different participant_id is assigned to that AGENCY
raise werkzeug.exceptions.Forbidden(f"A different participant_id is assigned as the AGENCY of this shipcall. Provided ID: {user_data.get('id')}, Assigned ShipcallParticipantMap: {agency_entries}") # Forbidden: 403
else:
# a matching agency was found: no violation
return
else:
# agency participant does not exist: there is no assigned agency role for the shipcall {shipcall_id}
raise werkzeug.exceptions.Forbidden(f"There is no assigned agency for this shipcall. Shipcall ID: {loadedModel['id']}") # Forbidden: 403
return
@staticmethod
def check_user_is_bsmd_type(user_data):
"""
check, whether the user belongs to a participant, which is of type ParticipantType.BSMD
as ParticipantType is an IntFlag, a user belonging to multiple groups is properly evaluated.
"""
is_bsmd = check_if_user_is_bsmd_type(user_data)
if not is_bsmd:
raise ValidationError(f"current user does not belong to BSMD. Cannot post or put shipcalls. Found user data: {user_data}")
return
@staticmethod
def check_referenced_ids(loadedModel):
"""
check, whether the referenced entries exist (e.g., when a Ship ID is referenced, but does not exist, the validation fails)
"""
# get all IDs from the loadedModel
ship_id = loadedModel.get("ship_id", None)
arrival_berth_id = loadedModel.get("arrival_berth_id", None)
departure_berth_id = loadedModel.get("departure_berth_id", None)
participants = loadedModel.get("participants",[])
valid_ship_id = check_if_ship_id_is_valid(ship_id=ship_id)
if not valid_ship_id:
raise ValidationError(f"provided an invalid ship id, which is not found in the database: {ship_id}")
valid_arrival_berth_id = check_if_berth_id_is_valid(berth_id=arrival_berth_id)
if not valid_arrival_berth_id:
raise ValidationError(f"provided an invalid arrival berth id, which is not found in the database: {arrival_berth_id}")
valid_departure_berth_id = check_if_berth_id_is_valid(berth_id=departure_berth_id)
if not valid_departure_berth_id:
raise ValidationError(f"provided an invalid departure berth id, which is not found in the database: {departure_berth_id}")
valid_participant_ids = check_if_participant_ids_are_valid(participants=participants)
if not valid_participant_ids:
raise ValidationError(f"one of the provided participant ids is invalid. Could not find one of these in the database: {participants}")
valid_participant_types = check_if_participant_ids_and_types_are_valid(participants=participants)
if not valid_participant_types:
raise ValidationError(f"every participant id and type should be listed only once. Found multiple entries for one of the participants.")
@staticmethod
def check_forbidden_arguments(content:dict, forbidden_keys=["canceled", "evaluation", "evaluation_message"]):
"""
a post-request must not contain the arguments 'canceled', 'evaluation', 'evaluation_message'.
a put-request must not contain the arguments 'evaluation', 'evaluation_message'
"""
# the following keys should not be set in a POST-request.
for forbidden_key in forbidden_keys:
value = content.get(forbidden_key, None)
if value is not None:
raise ValidationError(f"'{forbidden_key}' may not be set on POST. Found: {value}")
return
@staticmethod
def check_required_fields_exist_based_on_type(loadedModel:dict, content:dict):
"""
depending on the ShipcallType, some fields are *required* in a POST-request
"""
type_ = loadedModel.get("type", int(ShipcallType.undefined))
ship_id = content.get("ship_id", None)
eta = content.get("eta", None)
etd = content.get("etd", None)
arrival_berth_id = content.get("arrival_berth_id", None)
departure_berth_id = content.get("departure_berth_id", None)
if ship_id is None:
raise ValidationError(f"providing 'ship_id' is mandatory. Missing key!")
if int(type_)==int(ShipcallType.undefined):
raise ValidationError(f"providing 'type' is mandatory. Missing key!")
# arrival: arrival_berth_id & eta must exist
elif int(type_)==int(ShipcallType.arrival):
if eta is None:
raise ValidationError(f"providing 'eta' is mandatory. Missing key!")
if arrival_berth_id is None:
raise ValidationError(f"providing 'arrival_berth_id' is mandatory. Missing key!")
# departure: departure_berth_id and etd must exist
elif int(type_)==int(ShipcallType.departure):
if etd is None:
raise ValidationError(f"providing 'etd' is mandatory. Missing key!")
if departure_berth_id is None:
raise ValidationError(f"providing 'departure_berth_id' is mandatory. Missing key!")
# shifting: arrival_berth_id, departure_berth_id, eta and etd must exist
elif int(type_)==int(ShipcallType.shifting):
if (eta is None) or (etd is None):
raise ValidationError(f"providing 'eta' and 'etd' is mandatory. Missing one of those keys!")
if (arrival_berth_id is None) or (departure_berth_id is None):
raise ValidationError(f"providing 'arrival_berth_id' & 'departure_berth_id' is mandatory. Missing key!")
else:
raise ValidationError(f"incorrect 'type' provided!")
return
@staticmethod
def check_times_are_in_future(loadedModel:dict, content:dict):
"""
Dates should be in the future. Depending on the ShipcallType, specific values are checked.
Performs datetime checks on the loadedModel (datetime.datetime objects).
"""
# obtain the current datetime to check, whether the provided values are in the future
time_now = datetime.datetime.now()
type_ = loadedModel.get("type", int(ShipcallType.undefined))
eta = loadedModel.get("eta")
etd = loadedModel.get("etd")
tidal_window_from = loadedModel.get("tidal_window_from", None)
tidal_window_to = loadedModel.get("tidal_window_to", None)
# Estimated arrival or departure times
InputValidationShipcall.check_times_in_future_based_on_type(type_, time_now, eta, etd)
# Tidal Window
InputValidationShipcall.check_tidal_window_in_future(time_now, tidal_window_from, tidal_window_to)
return
@staticmethod
def check_times_in_future_based_on_type(type_, time_now, eta, etd):
"""
checks, whether the ETA & ETD times are in the future.
based on the type, this function checks:
arrival: eta
departure: etd
shifting: eta & etd
"""
if int(type_)==int(ShipcallType.undefined):
raise ValidationError(f"providing 'type' is mandatory. Missing key!")
elif int(type_)==int(ShipcallType.arrival):
if not eta > time_now:
raise ValidationError(f"'eta' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETA: {eta}.")
elif int(type_)==int(ShipcallType.departure):
if not etd > time_now:
raise ValidationError(f"'etd' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETD: {etd}.")
elif int(type_)==int(ShipcallType.shifting):
if (not eta > time_now) or (not etd > time_now):
raise ValidationError(f"'eta' and 'etd' must be in the future. Incorrect datetime provided. Current Time: {time_now}. ETA: {eta}. ETD: {etd}")
if (not etd > eta):
raise ValidationError(f"'etd' must be larger than 'eta'. The ship cannot depart, before it has arrived. Found: ETA {eta}, ETD: {etd}")
return
@staticmethod
def check_tidal_window_in_future(time_now, tidal_window_from, tidal_window_to):
if tidal_window_to is not None:
if not tidal_window_to >= time_now:
raise ValidationError(f"'tidal_window_to' must be in the future. Incorrect datetime provided.")
if tidal_window_from is not None:
if not tidal_window_from >= time_now:
raise ValidationError(f"'tidal_window_from' must be in the future. Incorrect datetime provided.")
if (tidal_window_to is not None) and (tidal_window_from is not None):
if tidal_window_to < tidal_window_from:
raise ValidationError(f"'tidal_window_to' must take place after 'tidal_window_from'. Incorrect datetime provided. Found 'tidal_window_to': {tidal_window_to}, 'tidal_window_from': {tidal_window_to}.")
return
@staticmethod
def check_participant_list_not_empty_when_user_is_agency(loadedModel):
"""
For each POST request, one of the participants in the list must be assigned as a ParticipantType.AGENCY
"""
participants = loadedModel.get("participants", [])
is_agency_participant = [ParticipantType.AGENCY in ParticipantType(participant.get("type")) for participant in participants]
if not any(is_agency_participant):
raise ValidationError(f"One of the assigned participants *must* be of type 'ParticipantType.AGENCY'. Found list of participants: {participants}")
return
@staticmethod
def check_shipcall_is_canceled(loadedModel, content):
# read the shipcall_id from the PUT data
shipcall_id = loadedModel.get("id")
# get all shipcalls in the database
shipcalls = get_shipcall_id_dictionary()
# search for the matching shipcall in the database
shipcall = shipcalls.get(shipcall_id,{})
# if the *existing* shipcall in the database is canceled, it may not be changed
if shipcall.get("canceled", False):
raise ValidationError(f"The shipcall with id 'shipcall_id' is canceled. A canceled shipcall may not be changed.")
return
@staticmethod
def check_required_fields_of_put_request(content:dict):
shipcall_id = content.get("id", None)
if shipcall_id is None:
raise ValidationError(f"A PUT request requires an 'id' to refer to.")
"""
# copy
def validate_posted_shipcall_data(user_data:dict, loadedModel:dict, content:dict):
##### Section 1: check user_data #####
# DONE: refactored
##### Section 2: check loadedModel #####
# DONE: refactored
##### Section 3: check content #####
# DONE: refactored
##### Section 4: check loadedModel & content #####
# DONE: refactored ET and BERTH ID existence check
# DONE: refactored 'time in future' checks
return
"""

View File

@ -0,0 +1,402 @@
import typing
import json
import datetime
from abc import ABC, abstractmethod
from marshmallow import ValidationError
from string import ascii_letters, digits
from BreCal.schemas.model import Ship, Shipcall, Berth, User, Participant, ShipcallType, Times
from BreCal.impl.participant import GetParticipant
from BreCal.impl.ships import GetShips
from BreCal.impl.berths import GetBerths
from BreCal.impl.times import GetTimes
from BreCal.database.enums import ParticipantType, ParticipantFlag
from BreCal.validators.input_validation_utils import check_if_user_is_bsmd_type, check_if_ship_id_is_valid, check_if_berth_id_is_valid, check_if_participant_ids_are_valid, check_if_participant_ids_and_types_are_valid, check_if_shipcall_id_is_valid, get_shipcall_id_dictionary, get_participant_type_from_user_data, get_participant_id_dictionary, check_if_participant_id_is_valid_standalone
from BreCal.database.sql_handler import execute_sql_query_standalone
from BreCal.validators.validation_base_utils import check_if_int_is_valid_flag, check_if_string_has_special_characters
import werkzeug
def build_post_data_type_dependent_required_fields_dict()->dict[ShipcallType,dict[ParticipantType,typing.Optional[list[str]]]]:
"""
The required fields of a POST-request depend on ShipcallType and ParticipantType. This function creates
a dictionary, which maps those types to a list of required fields.
The participant types 'undefined' and 'bsmd' should not be used in POST-requests. They return 'None'.
"""
post_data_type_dependent_required_fields_dict = {
ShipcallType.arrival:{
ParticipantType.undefined:None, # should not be set in POST requests
ParticipantType.BSMD:None, # should not be set in POST requests
ParticipantType.TERMINAL:["operations_start"],
ParticipantType.AGENCY:["eta_berth"],
ParticipantType.MOORING:["eta_berth"],
ParticipantType.PILOT:["eta_berth"],
ParticipantType.PORT_ADMINISTRATION:["eta_berth"],
ParticipantType.TUG:["eta_berth"],
},
ShipcallType.departure:{
ParticipantType.undefined:None, # should not be set in POST requests
ParticipantType.BSMD:None, # should not be set in POST requests
ParticipantType.TERMINAL:["operations_end"],
ParticipantType.AGENCY:["etd_berth"],
ParticipantType.MOORING:["etd_berth"],
ParticipantType.PILOT:["etd_berth"],
ParticipantType.PORT_ADMINISTRATION:["etd_berth"],
ParticipantType.TUG:["etd_berth"],
},
ShipcallType.shifting:{
ParticipantType.undefined:None, # should not be set in POST requests
ParticipantType.BSMD:None, # should not be set in POST requests
ParticipantType.TERMINAL:["operations_start", "operations_end"],
ParticipantType.AGENCY:["eta_berth", "etd_berth"],
ParticipantType.MOORING:["eta_berth", "etd_berth"],
ParticipantType.PILOT:["eta_berth", "etd_berth"],
ParticipantType.PORT_ADMINISTRATION:["eta_berth", "etd_berth"],
ParticipantType.TUG:["eta_berth", "etd_berth"],
},
}
return post_data_type_dependent_required_fields_dict
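# Sketch (not part of the commit): reading the lookup table above for one example combination.
_required = build_post_data_type_dependent_required_fields_dict()[ShipcallType.arrival][ParticipantType.TERMINAL]
assert _required == ["operations_start"] # a TERMINAL posting times for an arrival must provide 'operations_start'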
class InputValidationTimes():
"""
This class combines a complex set of individual input validation functions into a joint object.
It uses static methods, so the object does not need to be instantiated, but functions can be called immediately.
Example:
InputValidationTimes.evaluate_post_data(user_data, loadedModel, content)
When the data violates one of the rules, a marshmallow.ValidationError is raised, which details the issues.
"""
def __init__(self) -> None:
pass
@staticmethod
def evaluate_post_data(user_data:dict, loadedModel:dict, content:dict):
# 0.) Check for the presence of required fields
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# 1.) datasets may only be created, if the current user fits the appropriate type in the ShipcallParticipantMap
InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content)
# 2.) datasets may only be created, if the respective participant type did not already create one.
InputValidationTimes.check_if_entry_already_exists_for_participant_type(user_data, loadedModel, content)
# 3.) only users who are *not* of type BSMD may post times datasets.
InputValidationTimes.check_user_is_not_bsmd_type(user_data)
# 4.) Reference checking
InputValidationTimes.check_dataset_references(content)
# 5.) Value checking
InputValidationTimes.check_dataset_values(user_data, loadedModel, content)
return
@staticmethod
def evaluate_put_data(user_data:dict, loadedModel:dict, content:dict):
# 1.) Check for the presence of required fields
InputValidationTimes.check_times_required_fields_put_data(content)
# 2.) Only users of the same participant_id, which the times dataset refers to, can update the entry
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None)
# 3.) Reference checking
InputValidationTimes.check_dataset_references(content)
# 4.) Value checking
InputValidationTimes.check_dataset_values(user_data, loadedModel, content)
return
@staticmethod
def evaluate_delete_data(user_data:dict, times_id:int):
# #TODO_determine: is times_id always an int or does the request.args call provide a string?
times_id = int(times_id) if not isinstance(times_id, int) else times_id
# 1.) The dataset entry may not be deleted already
InputValidationTimes.check_if_entry_is_already_deleted(times_id)
# 2.) Only users of the same participant_id, which the times dataset refers to, can delete the entry
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id)
return
@staticmethod
def check_if_entry_is_already_deleted(times_id:int):
"""
When issuing a DELETE request for times, the dataset must not already be deleted. This method
makes sure that the requested ID still has a matching entry in the database.
When a times dataset is deleted, it is removed from the database directly, so deleted entries
are identified by querying the database and checking whether there is a match for the times id.
"""
# perform an SQL query. Creates a pooled connection internally, queries the database, then closes the connection.
query = "SELECT shipcall_id FROM times WHERE id = ?id?"
pdata = execute_sql_query_standalone(query=query, param={"id":times_id}, pooledConnection=None)
if len(pdata)==0:
raise ValidationError(f"The selected time entry is already deleted. ID: {times_id}")
return
@staticmethod
def check_user_is_not_bsmd_type(user_data:dict):
"""a new dataset may only be created by a user who is *not* belonging to participant group BSMD"""
is_bsmd = check_if_user_is_bsmd_type(user_data)
if is_bsmd:
raise ValidationError(f"current user belongs to BSMD. Cannot post 'times' datasets. Found user data: {user_data}")
return
@staticmethod
def check_dataset_values(user_data:dict, loadedModel:dict, content:dict):
"""
this method validates POST and PUT data. Most of the dataset arguments are validated directly in the
BreCal.schemas.model.TimesSchema, using @validates. This is exclusive for 'simple' validation rules.
This applies to:
"remarks" & "berth_info"
"eta_berth", "etd_berth", "lock_time", "zone_entry", "operations_start", "operations_end"
"""
# while InputValidationTimes.check_user_is_not_bsmd_type already validates a user, this method
# validates the times dataset.
# ensure loadedModel["participant_type"] is of type ParticipantType
if not isinstance(loadedModel["participant_type"], ParticipantType):
loadedModel["participant_type"] = ParticipantType(loadedModel["participant_type"])
if ParticipantType.BSMD in loadedModel["participant_type"]:
raise ValidationError(f"current user belongs to BSMD. Cannot post times datasets. Found user data: {user_data}")
return
@staticmethod
def check_dataset_references(content:dict):
"""
When IDs are referenced, they must exist in the database. This method individually validates the existence of the referenced
berth ID, participant ID and shipcall ID.
Note: whenever an ID is 'None', no exception is raised, because a different method is responsible for capturing non-existent mandatory fields.
"""
# extract the IDs
berth_id, participant_id, shipcall_id = content.get("berth_id"), content.get("participant_id"), content.get("shipcall_id")
valid_berth_id_reference = check_if_berth_id_is_valid(berth_id)
if not valid_berth_id_reference:
raise ValidationError(f"The referenced berth_id '{berth_id}' does not exist in the database.")
valid_shipcall_id_reference = check_if_shipcall_id_is_valid(shipcall_id)
if not valid_shipcall_id_reference:
raise ValidationError(f"The referenced shipcall_id '{shipcall_id}' does not exist in the database.")
valid_participant_id_reference = check_if_participant_id_is_valid_standalone(participant_id)
if not valid_participant_id_reference:
raise ValidationError(f"The referenced participant_id '{participant_id}' does not exist in the database.")
return
@staticmethod
def check_times_required_fields_post_data(loadedModel:dict, content:dict):
"""
Depending on ShipcallType and ParticipantType, there is a rather complex set of required fields.
Independent of those types, any POST request for times should always include the default fields.
The dependent and independent fields are validated by checking, whether the respective value in 'content'
is undefined (returns None). When any of these fields is undefined, a ValidationError is raised.
"""
participant_type = loadedModel["participant_type"]
shipcall_id = loadedModel["shipcall_id"]
# build a dictionary of id:item pairs, so one can select the respective participant
# must look-up the shipcall_type based on the shipcall_id
shipcalls = get_shipcall_id_dictionary()
shipcall_type = ShipcallType[shipcalls.get(shipcall_id,{}).get("type",ShipcallType.undefined.name)]
if (participant_type is None) or (int(shipcall_type) == int(ShipcallType.undefined)):
raise ValidationError(f"At least one of the required fields is missing. Missing: 'participant_type' or 'shipcall_type'")
# build a list of required fields based on shipcall and participant type, as well as type-independent fields
independent_required_fields = InputValidationTimes.get_post_data_type_independent_fields()
dependent_required_fields = InputValidationTimes.get_post_data_type_dependent_fields(shipcall_type, participant_type)
required_fields = independent_required_fields + dependent_required_fields
# generate a list of booleans, where each element shows, whether one of the required fields is missing.
missing_required_fields = [
content.get(field,None) is None for field in required_fields
]
if any(missing_required_fields):
# create a tuple of (field_key, bool) to describe to a user, which one of the fields may be missing
verbosity_tuple = [(field, missing) for field, missing in zip(required_fields, missing_required_fields) if missing]
raise ValidationError(f"At least one of the required fields is missing. Missing: {verbosity_tuple}")
return
@staticmethod
def check_times_required_fields_put_data(content:dict):
"""in a PUT request, only the 'id' is a required field. All other fields are simply ignored, when they are not provided."""
if content.get("id") is None:
raise ValidationError(f"A PUT-request requires an 'id' reference, which was not found.")
return
@staticmethod
def get_post_data_type_independent_fields()->list[str]:
"""
Independent of the ShipcallType and ParticipantType, any POST request for times should always include the default fields.
"""
independent_required_fields = [
"shipcall_id", "participant_id", "participant_type"
]
return independent_required_fields
@staticmethod
def get_post_data_type_dependent_fields(shipcall_type:typing.Union[int, ShipcallType], participant_type:typing.Union[int, ParticipantType]):
"""
Depending on ShipcallType and ParticipantType, there is a rather complex set of required fields.
Arriving shipcalls need arrival times (e.g., 'eta'), Departing shipcalls need departure times (e.g., 'etd') and
Shifting shipcalls need both times (e.g., 'eta' and 'etd').
Further, the ParticipantType determines the set of relevant times. In particular, the terminal uses
'operations_start' and 'operations_end', while other users use 'eta_berth' or 'etd_berth'.
"""
# ensure that both, shipcall_type and participant_type, refer to the enumerators, as opposed to integers.
if not isinstance(shipcall_type, ShipcallType):
shipcall_type = ShipcallType(shipcall_type)
if not isinstance(participant_type, ParticipantType):
participant_type = ParticipantType(participant_type)
# build a dictionary, which maps shipcall type and participant type to a list of fields
dependent_required_fields_dict = build_post_data_type_dependent_required_fields_dict()
# select shipcall type & participant type
dependent_required_fields = dependent_required_fields_dict.get(shipcall_type,{}).get(participant_type,None)
return dependent_required_fields
@staticmethod
def check_if_user_fits_shipcall_participant_map(user_data:dict, loadedModel:dict, content:dict, spm_shipcall_data=None):
"""
a new dataset may only be created, if the user belongs to the participant group (participant_id),
which is assigned to the shipcall within the ShipcallParticipantMap
This method does not validate, what the POST-request contains, but it validates, whether the *user* is
authorized to send the request.
options:
spm_shipcall_data:
data from the ShipcallParticipantMap, which refers to the respective shipcall ID. The SPM can be
an optional argument to allow for much easier unit testing.
"""
# identify shipcall_id
shipcall_id = loadedModel["shipcall_id"]
# identify user's participant_id & type (get all participants; then filter these for the {participant_id})
participant_id = user_data["participant_id"] #participants = get_participant_id_dictionary() #participant_type = ParticipantType(participants.get(participant_id,{}).get("type"))
participant_type = ParticipantType(loadedModel["participant_type"]) if not isinstance(loadedModel["participant_type"],ParticipantType) else loadedModel["participant_type"]
# get ShipcallParticipantMap for the shipcall_id
if spm_shipcall_data is None:
# read the ShipcallParticipantMap entry of the current shipcall_id. This is used within the input validation of a PUT request
# creates a list of {'participant_id: ..., 'type': ...} elements
spm_shipcall_data = execute_sql_query_standalone(
query = "SELECT participant_id, type FROM shipcall_participant_map WHERE shipcall_id=?shipcall_id?",
param={"shipcall_id":shipcall_id},
pooledConnection=None
)
# check, if participant_id is assigned to the ShipcallParticipantMap
matching_spm = [
spm
for spm in spm_shipcall_data
if spm.get("participant_id")==participant_id
]
if not len(matching_spm)>0:
raise ValidationError(f'The participant group with id {participant_id} is not assigned to the shipcall. Found ShipcallParticipantMap: {spm_shipcall_data}')
# check, if the assigned participant_id is assigned with the same role
matching_spm_element = matching_spm[0]
matching_spm_element_participant_type = ParticipantType(matching_spm_element.get("type"))
if matching_spm_element_participant_type not in participant_type:
raise ValidationError(f'The participant group with id {participant_id} is assigned to the shipcall in a different role. Request Role: {participant_type}, ShipcallParticipantMap Role Assignment: {matching_spm_element_participant_type}')
return
@staticmethod
def check_if_entry_already_exists_for_participant_type(user_data:dict, loadedModel:dict, content:dict):
"""determines, whether a dataset for the participant type is already present"""
# determine participant_type and shipcall_id from the loadedModel
participant_type = loadedModel["participant_type"]
if not isinstance(participant_type, ParticipantType): # ensure the correct data type
participant_type = ParticipantType(participant_type)
shipcall_id = loadedModel["shipcall_id"]
# get all times entries of the shipcall_id from the database
times, status_code, headers = GetTimes(options={"shipcall_id":shipcall_id})
times = json.loads(times)
# check, if there is already a dataset for the participant type
participant_type_exists_already = any([ParticipantType(time_.get("participant_type",0)) in participant_type for time_ in times])
if participant_type_exists_already:
raise ValidationError(f"A dataset for the participant type is already present. Participant Type: {participant_type}. Times Datasets: {times}")
return
@staticmethod
def check_user_belongs_to_same_group_as_dataset_determines(user_data:dict, loadedModel:typing.Optional[dict]=None, times_id:typing.Optional[int]=None):
"""
This method checks, whether a user belongs to the same participant_id, as the dataset entry refers to.
It is used in, both, PUT requests and DELETE requests, but uses different arguments to determine the matching
time dataset entry.
PUT:
loadedModel is unbundled to identify the matching times entry by the shipcall id
DELETE:
times_id is used to directly identify the matching times entry
"""
assert not ((loadedModel is None) and (times_id is None)), f"must provide either loadedModel OR times_id. Both are 'None'"
assert (loadedModel is None) or (times_id is None), f"must provide either loadedModel OR times_id. Both are defined."
# identify the user's participant id
user_participant_id = user_data["participant_id"]
if loadedModel is not None:
shipcall_id = loadedModel["shipcall_id"]
participant_type = loadedModel["participant_type"]
# get all times entries of the shipcall_id from the database as a list of {'participant_id':..., 'participant_type':...} elements
query = "SELECT participant_id, participant_type FROM times WHERE shipcall_id = ?shipcall_id?"
times = execute_sql_query_standalone(query=query, param={"shipcall_id":shipcall_id}, pooledConnection=None)
# get the matching datasets, where the participant id is identical
time_datasets_of_participant_type = [time_ for time_ in times if time_.get("participant_type")==participant_type]
# when there are no matching participants, raise a ValidationError
if not len(time_datasets_of_participant_type)>0:
raise ValidationError(f"Could not find a matching time dataset for the provided participant_type: {participant_type}. Found Time Datasets: {times}")
# take the first match. There should always be only one match.
time_datasets_of_participant_type = time_datasets_of_participant_type[0]
participant_id_of_times_dataset = time_datasets_of_participant_type.get("participant_id")
if times_id is not None:
# perform an SQL query. Creates a pooled connection internally, queries the database, then closes the connection.
query = "SELECT participant_id FROM times WHERE id = ?id?"
pdata = execute_sql_query_standalone(query=query, param={"id":times_id}, pooledConnection=None)
# extracts the participant_id from the first matching entry, if applicable
if not len(pdata)>0:
# this case is usually covered by the InputValidationTimes.check_if_entry_is_already_deleted method already
raise ValidationError(f"Unknown times_id. Could not find a matching entry for ID: {times_id}")
else:
participant_id_of_times_dataset = pdata[0].get("participant_id")
if user_participant_id != participant_id_of_times_dataset:
raise ValidationError(f"The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. User participant_id: {user_participant_id}; Dataset participant_id: {participant_id_of_times_dataset}")
return
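# Sketch (not part of the commit): assembling the full list of required fields for a times POST.
# The combination (shifting shipcall, AGENCY participant) is only an example.
independent = InputValidationTimes.get_post_data_type_independent_fields() # ["shipcall_id", "participant_id", "participant_type"]
dependent = InputValidationTimes.get_post_data_type_dependent_fields(ShipcallType.shifting, ParticipantType.AGENCY) # ["eta_berth", "etd_berth"]
required_fields = independent + dependent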

View File

@ -0,0 +1,182 @@
import logging
import json
from collections import Counter
from BreCal.impl.participant import GetParticipant
from BreCal.impl.ships import GetShips
from BreCal.impl.berths import GetBerths
from BreCal.impl.shipcalls import GetShipcalls
from BreCal.database.enums import ParticipantType
def get_participant_id_dictionary():
"""
get a dictionary of all participants, where the key is the participant's id, and the value is a dictionary
of common participant data (not a data model).
"""
# get all participants
response,status_code,header = GetParticipant(options={})
# build a dictionary of id:item pairs, so one can select the respective participant
participants = json.loads(response)
participants = {items.get("id"):items for items in participants}
return participants
def get_berth_id_dictionary():
# get all berths
response,status_code,header = GetBerths(token=None)
# build a dictionary of id:item pairs, so one can select the respective berth
berths = json.loads(response)
berths = {items.get("id"):items for items in berths}
return berths
def get_ship_id_dictionary():
# get all ships
response,status_code,header = GetShips(token=None)
# build a dictionary of id:item pairs, so one can select the respective ship
ships = json.loads(response)
ships = {items.get("id"):items for items in ships}
return ships
def get_shipcall_id_dictionary():
# get all shipcalls
response,status_code,header = GetShipcalls(options={'past_days':30000})
# build a dictionary of id:item pairs, so one can select the respective shipcall
shipcalls = json.loads(response)
shipcalls = {items.get("id"):items for items in shipcalls}
return shipcalls
def get_participant_type_from_user_data(user_data:dict)->ParticipantType:
# user_data = decode token
participant_id = user_data.get("participant_id")
# build a dictionary of id:item pairs, so one can select the respective participant
participants = get_participant_id_dictionary()
participant = participants.get(participant_id,{})
participant_type = ParticipantType(participant.get("type",0))
return participant_type
def check_if_user_is_bsmd_type(user_data:dict)->bool:
"""
given a dictionary of user data, determine the respective participant id and read, whether
that participant is a .BSMD-type
Note: ParticipantType is an IntFlag.
Hence, ParticipantType(1) is ParticipantType.BSMD,
and ParticipantType(7) is [ParticipantType.BSMD, ParticipantType.TERMINAL, ParticipantType.PILOT]
both would return 'True'
returns: boolean. Whether the participant id is a .BSMD type element
"""
# use the decoded JWT token and extract the participant type
participant_type = get_participant_type_from_user_data(user_data)
# boolean check: is the participant of type .BSMD?
is_bsmd = ParticipantType.BSMD in participant_type
return is_bsmd
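# Sketch (not part of the commit), restating the docstring's IntFlag examples as asserts
# (the concrete values BSMD == 1 and the combined value 7 are taken from the docstring above):
assert ParticipantType.BSMD in ParticipantType(1)
assert ParticipantType.BSMD in ParticipantType(7) # combined flag that includes BSMD
assert ParticipantType.BSMD not in ParticipantType(2) # a flag value without the BSMD bit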
def check_if_ship_id_is_valid(ship_id):
"""check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a shipcall POST-request, does not have to include all IDs at once"""
if ship_id is None:
return True
# build a dictionary of id:item pairs, so one can select the respective participant
ships = get_ship_id_dictionary()
# boolean check
ship_id_is_valid = ship_id in list(ships.keys())
return ship_id_is_valid
def check_if_berth_id_is_valid(berth_id):
"""check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a shipcall POST-request, does not have to include all IDs at once"""
if berth_id is None:
return True
# build a dictionary of id:item pairs, so one can select the respective participant
berths = get_berth_id_dictionary()
# boolean check
berth_id_is_valid = berth_id in list(berths.keys())
return berth_id_is_valid
def check_if_shipcall_id_is_valid(shipcall_id:int):
"""check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a request, may not have to include all IDs at once"""
if shipcall_id is None:
return True
# build a dictionary of id:item pairs, so one can select the respective participant
shipcalls = get_shipcall_id_dictionary()
# boolean check
shipcall_id_is_valid = shipcall_id in list(shipcalls.keys())
return shipcall_id_is_valid
def check_if_participant_id_is_valid_standalone(participant_id:int):
"""check, whether the provided ID is valid. If it is 'None', it will be considered valid. This is, because a request, may not have to include all IDs at once"""
if participant_id is None:
return True
# build a dictionary of id:item pairs, so one can select the respective participant
participants = get_participant_id_dictionary()
# boolean check
participant_id_is_valid = participant_id in list(participants.keys())
return participant_id_is_valid
def check_if_participant_id_is_valid(participant:dict):
"""
Check whether the provided ID is valid. If it is 'None', it is considered valid, because a shipcall POST-request does not have to include all IDs at once.
Following the common BreCal.schemas.model.ParticipantAssignmentSchema, a participant dictionary contains the keys:
'participant_id' : int
'type' : ParticipantType
"""
# #TODO1: Daniel Schick: 'types may only appear once and must not include type "BSMD"'
participant_id = participant.get("participant_id", None)
participant_id_is_valid = check_if_participant_id_is_valid_standalone(participant_id)
return participant_id_is_valid
def check_if_participant_ids_are_valid(participants:list[dict]):
"""
args:
participants (list of participant-elements)
Following the common BreCal.schemas.model.ParticipantAssignmentSchema, a participant dictionary contains the keys:
'participant_id' : int
'type' : ParticipantType
"""
# missing list ('None') -> invalid
if participants is None:
return False
# check each participant id individually
valid_participant_ids = [check_if_participant_id_is_valid(participant) for participant in participants]
# boolean check, whether all participant ids are valid
return all(valid_participant_ids)
def check_if_participant_ids_and_types_are_valid(participants:list[dict[str,int]]):
# creates a Counter object, which counts the number of unique elements
# key of counter: type, value of counter: number of listings in 'participants'
# e.g., {1: 4, 2: 1, 8: 1} (type 1 occurs 4 times in this example)
counter_type = Counter([participant.get("type") for participant in participants])
counter_id = Counter([participant.get("participant_id") for participant in participants])
# obtains the maximum count from the counter's values
max_count_type = max(list(counter_type.values())) if len(list(counter_type.values()))>0 else 0
max_count_ids = max(list(counter_id.values())) if len(list(counter_id.values()))>0 else 0
# when 0 or 1 count for the participant ids or types, return true. Return false, when there is more than one entry.
return max_count_type <= 1 and max_count_ids <= 1
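# Sketch (not part of the commit): the duplicate check above, applied to a hypothetical participants list.
# The participant IDs are made up; only uniqueness is evaluated here, no database access is needed.
sample_participants = [
{"participant_id": 10, "type": int(ParticipantType.AGENCY)},
{"participant_id": 11, "type": int(ParticipantType.TERMINAL)},
]
assert check_if_participant_ids_and_types_are_valid(sample_participants) # each id and type listed once
assert not check_if_participant_ids_and_types_are_valid(sample_participants + [{"participant_id": 10, "type": int(ParticipantType.AGENCY)}]) # duplicate -> invalid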

View File

@ -2,6 +2,8 @@ import datetime
import numpy as np
import pandas as pd
from marshmallow import ValidationError
def validate_time_exceeds_threshold(value:datetime.datetime, seconds:int=60, minutes:int=60, hours:int=24, days:int=30, months:int=12)->bool:
"""returns a boolean when the input value is very distant in the future. The parameters provide the threshold"""
# time difference in seconds. Positive: in the future, Negative: in the past
@ -10,6 +12,37 @@ def validate_time_exceeds_threshold(value:datetime.datetime, seconds:int=60, min
threshold = seconds*minutes*hours*days*months
return time_>=threshold
def validate_time_is_in_future(value:datetime.datetime):
"""returns a boolean when the input value is in the future."""
current_time = datetime.datetime.now()
return value >= current_time
def validate_time_is_in_not_too_distant_future(raise_validation_error:bool, value:datetime.datetime, seconds:int=60, minutes:int=60, hours:int=24, days:int=30, months:int=12)->bool:
"""
combines two boolean operations. Returns True when both conditions are met.
a) value is in the future
b) value is not too distant (e.g., at max. 1 year in the future)
When the value is 'None', the validation will be skipped. A ValidationError is never issued, but the method returns 'False'.
options:
raise_validation_error: boolean. If set to True, this method issues a marshmallow.ValidationError, when the conditions fail.
"""
if value is None:
return False
is_in_future = validate_time_is_in_future(value)
is_too_distant = validate_time_exceeds_threshold(value, seconds, minutes, hours, days, months)
if raise_validation_error:
if not is_in_future:
raise ValidationError(f"The provided value must be in the future. Current Time: {datetime.datetime.now()}, Value: {value}")
if is_too_distant:
raise ValidationError(f"The provided value is in the too distant future and exceeds a threshold for 'reasonable' entries. Found: {value}")
return is_in_future and (not is_too_distant)
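# Sketch (not part of the commit): with the default thresholds (roughly one year), a value a few
# hours ahead passes, while a value far in the future does not.
_soon = datetime.datetime.now() + datetime.timedelta(hours=6)
_far = datetime.datetime.now() + datetime.timedelta(days=800) # beyond the default threshold
assert validate_time_is_in_not_too_distant_future(raise_validation_error=False, value=_soon)
assert not validate_time_is_in_not_too_distant_future(raise_validation_error=False, value=_far)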
class TimeLogic():
def __init__(self):
return

View File

@ -0,0 +1,20 @@
from string import ascii_letters, digits
def check_if_string_has_special_characters(text:str):
"""
check, whether there are any characters within the provided string, which are not found in the ascii letters or digits
ascii_letters: abcd (...) and ABCD (...)
digits: 0123 (...)
Source: https://stackoverflow.com/questions/57062794/is-there-a-way-to-check-if-a-string-contains-special-characters
User: https://stackoverflow.com/users/10035985/andrej-kesely
returns bool
"""
return bool(set(text).difference(ascii_letters + digits))
def check_if_int_is_valid_flag(value, enum_object):
# e.g., when an IntFlag has the values 1,2,4; the maximum valid value is 7
max_int = sum([int(val) for val in list(enum_object._value2member_map_.values())])
return 0 < value <= max_int
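# Sketch (not part of the commit): both helpers exercised with throw-away inputs.
# _SampleFlag is a hypothetical IntFlag with the values 1, 2 and 4 mentioned in the comment above,
# so the maximum valid combined value is 7.
from enum import IntFlag

class _SampleFlag(IntFlag):
    A = 1
    B = 2
    C = 4

assert check_if_int_is_valid_flag(7, _SampleFlag)
assert not check_if_int_is_valid_flag(8, _SampleFlag)
assert not check_if_string_has_special_characters("Bremen2024") # letters and digits only
assert check_if_string_has_special_characters("Bremen-2024!") # '-' and '!' count as special characters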

View File

@ -38,14 +38,16 @@ error_message_dict = {
"validation_rule_fct_etd_time_not_in_tidal_window":"The tidal window does not fit to the agency's estimated time of departure (ETD) {Rule #0004B}",
# 0005 A+B
"validation_rule_fct_too_many_identical_eta_times":"There are more than three ships with the same planned time of arrival (ETA) {Rule #0005A}",
"validation_rule_fct_too_many_identical_etd_times":"There are more than three ships with the same planned time of departure (ETD) {Rule #0005B}",
"validation_rule_fct_too_many_identical_eta_times":"More than three shipcalls are planned at the same time as the defined ETA {Rule #0005A}",
"validation_rule_fct_too_many_identical_etd_times":"More than three shipcalls are planned at the same time as the defined ETD {Rule #0005B}",
# 0006 A+B
"validation_rule_fct_agency_and_terminal_berth_id_disagreement":"Agency and Terminal are planning with different berths (the berth_id deviates). {Rule #0006A}",
"validation_rule_fct_agency_and_terminal_pier_side_disagreement":"Agency and Terminal are planning with different pier sides (the pier_side deviates). {Rule #0006B}",
}
class ValidationRuleBaseFunctions():
"""
Base object with individual functions, which the {ValidationRuleFunctions}-child refers to.
@ -72,6 +74,18 @@ class ValidationRuleBaseFunctions():
"""return the default output of a validation function with no validation: a tuple of (GREEN state, None)"""
return (StatusFlags.GREEN, None)
def check_if_header_exists(self, df_times:pd.DataFrame, participant_type:ParticipantType)->bool:
"""
Given a pandas DataFrame, which contains times entries for a specific shipcall id,
this function checks, whether one of the times entries belongs to the requested ParticipantType.
returns bool
"""
# empty DataFrames form a special case, as they might miss the 'participant_type' column.
if len(df_times)==0:
return False
return participant_type in df_times.loc[:,"participant_type"].values
def check_time_delta_violation_query_time_to_now(self, query_time:pd.Timestamp, key_time:pd.Timestamp, threshold:float)->bool:
"""
# base function for all validation rules in the group {0001} A-L
@ -144,7 +158,6 @@ class ValidationRuleBaseFunctions():
return violation_state
df_times = df_times.loc[df_times["participant_type"].isin(participant_types),:]
agency_time = [time_ for time_ in agency_times.loc[:,query].tolist() if isinstance(time_, pd.Timestamp)]
# for the given query, e.g., 'eta_berth', sample all times from the pandas DataFrame
# exclude missing entries and consider only pd.Timestamp entries (which ignores pd.NaT/null entries)
@ -172,6 +185,7 @@ class ValidationRuleBaseFunctions():
violation_state = any(time_difference_exceeds_threshold)
# this (previous) solution compares times to the reference (agency) time and checks if the difference is greater than 15 minutes
# agency_time = [time_ for time_ in agency_times.loc[:,query].tolist() if isinstance(time_, pd.Timestamp)]
# violation_state = ((np.max(estimated_times) - agency_time[0]) > pd.Timedelta("15min")) or ((agency_time[0] - np.min(estimated_times)) > pd.Timedelta("15min"))
# this solution to the rule compares all times to each other. When there is a total difference of more than 15 minutes, a violation occurs
@ -762,10 +776,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
return self.get_no_violation_default_output()
# check, if the header is filled in (agency & terminal)
if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1:
return self.get_no_violation_default_output() # rule not applicable
if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL):
#if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1:
return self.get_no_violation_default_output() # rule not applicable
# get agency & terminal times
@ -805,10 +821,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
return self.get_no_violation_default_output()
# check, if the header is filled in (agency & terminal)
if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
return self.get_no_violation_default_output() # rule not applicable
if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL):
return self.get_no_violation_default_output() # rule not applicable
# get agency & terminal times
@ -845,7 +863,8 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
return self.get_no_violation_default_output()
# check, if the header is filled in (agency)
if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1:
# if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
return self.get_no_violation_default_output()
times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value)
@ -876,7 +895,8 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
return self.get_no_violation_default_output()
# check, if the header is filled in (agency)
if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1:
# if len(df_times.loc[df_times["participant_type"].isin([ParticipantType.AGENCY.value])]) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
return self.get_no_violation_default_output()
times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value)
@ -898,16 +918,19 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
"""
Code: #0005-A
Type: Global Rule
Description: this validation rule checks, whether there are too many shipcalls with identical ETA times.
Description: this validation rule checks whether too many shipcalls are planned at (nearly) the same time as the agency's ETA.
"""
times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]
# check, if the header is filled in (agency)
if len(times_agency) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): # if len(times_agency) != 1:
return self.get_no_violation_default_output()
# when ANY of the unique values exceeds the threshold, a violation is observed
query = "eta_berth"
violation_state = self.check_unique_shipcall_counts(query, times_agency=times_agency, rounding=rounding, maximum_threshold=maximum_threshold, all_times_agency=all_times_agency)
# get the agency's query time
times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]
query_time = times_agency.iloc[0].eta_berth
# count the number of times, where a times entry is very close to the query time (uses an internal threshold, such as 15 minutes)
counts = self.sql_handler.count_synchronous_shipcall_times(query_time, all_df_times=all_times_agency)
violation_state = counts > maximum_threshold
if violation_state:
validation_name = "validation_rule_fct_too_many_identical_eta_times"
@ -919,16 +942,19 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
"""
Code: #0005-B
Type: Global Rule
Description: this validation rule checks, whether there are too many shipcalls with identical ETD times.
Description: this validation rule checks whether too many shipcalls are planned at (nearly) the same time as the agency's ETD.
"""
times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]
# check, if the header is filled in (agency)
if len(times_agency) != 1:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY): #if len(times_agency) != 1:
return self.get_no_violation_default_output()
# when ANY of the unique values exceeds the threshold, a violation is observed
query = "etd_berth"
violation_state = self.check_unique_shipcall_counts(query, times_agency=times_agency, rounding=rounding, maximum_threshold=maximum_threshold, all_times_agency=all_times_agency)
# get the agency's query time
times_agency = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]
query_time = times_agency.iloc[0].etd_berth
# count the number of times, where a times entry is very close to the query time (uses an internal threshold, such as 15 minutes)
counts = self.sql_handler.count_synchronous_shipcall_times(query_time, all_df_times=all_times_agency)
violation_state = counts > maximum_threshold
if violation_state:
validation_name = "validation_rule_fct_too_many_identical_etd_times"
@ -943,10 +969,12 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
Description: This validation rule checks, whether agency and terminal agree with their designated berth place by checking berth_id.
"""
# check, if the header is filled in (agency & terminal)
if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
return self.get_no_violation_default_output() # rule not applicable
if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL):
return self.get_no_violation_default_output() # rule not applicable
times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value)
@ -979,13 +1007,14 @@ class ValidationRuleFunctions(ValidationRuleBaseFunctions):
Description: This validation rule checks, whether agency and terminal agree with their designated pier side by checking pier_side.
"""
# check, if the header is filled in (agency & terminal)
if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value]) == 0:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.AGENCY):
return self.get_no_violation_default_output() # rule not applicable
if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0:
# if len(df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value]) == 0:
if not self.check_if_header_exists(df_times, participant_type=ParticipantType.TERMINAL):
return self.get_no_violation_default_output() # rule not applicable
times_agency = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.AGENCY.value)
times_terminal = self.sql_handler.get_times_for_participant_type(df_times, participant_type=ParticipantType.TERMINAL.value)
# when one of the two values is null, the state is GREEN

View File

@ -3,6 +3,7 @@ import logging
import re
import numpy as np
import pandas as pd
import datetime
from BreCal.database.enums import StatusFlags
from BreCal.validators.validation_rule_functions import ValidationRuleFunctions
from BreCal.schemas.model import Shipcall
@ -17,10 +18,6 @@ class ValidationRules(ValidationRuleFunctions):
"""
def __init__(self, sql_handler): # use the entire data that is provided for this query (e.g., json input)
super().__init__(sql_handler)
self.validation_state = self.determine_validation_state()
# currently flagged: notification_state initially was based on using one ValidationRules object for each query. This is deprecated.
# self.notification_state = self.determine_notification_state() # (state:str, should_notify:bool)
return
def evaluate(self, shipcall):
@ -31,9 +28,9 @@ class ValidationRules(ValidationRuleFunctions):
returns: (evaluation_state, violations)
"""
# prepare df_times, which every validation rule tends to use
df_times = self.sql_handler.df_dict.get('times', pd.DataFrame()) # -> pd.DataFrame
all_df_times = self.sql_handler.df_dict.get('times', pd.DataFrame()) # -> pd.DataFrame
if len(df_times)==0:
if len(all_df_times)==0:
return (StatusFlags.GREEN.value, [])
spm = self.sql_handler.df_dict["shipcall_participant_map"]
@ -74,16 +71,26 @@ class ValidationRules(ValidationRuleFunctions):
return evaluation_state, violations
def evaluate_shipcalls(self, shipcall_df:pd.DataFrame)->pd.DataFrame:
"""apply 'evaluate_shipcall_from_df' to each individual shipcall in {shipcall_df}. Returns shipcall_df ('evaluation' and 'evaluation_message' are updated)"""
results = shipcall_df.apply(lambda x: self.evaluate_shipcall_from_df(x), axis=1).values
"""apply 'evaluate_shipcall_from_df' to each individual shipcall in {shipcall_df}. Returns shipcall_df ('evaluation', 'evaluation_message', 'evaluation_time' and 'evaluation_notifications_sent' are updated)"""
evaluation_states_old = [state_old for state_old in shipcall_df.loc[:,"evaluation"]]
evaluation_states_old = [state_old if not pd.isna(state_old) else 0 for state_old in evaluation_states_old]
results = shipcall_df.apply(lambda x: self.evaluate_shipcall_from_df(x), axis=1).values # returns tuple (state, message)
# unbundle individual results. evaluation_state becomes an integer, violation
evaluation_state = [StatusFlags(res[0]).value for res in results]
# unbundle individual results: evaluation_states_new becomes a list of integers, violations a list of strings (or None)
evaluation_states_new = [StatusFlags(res[0]).value for res in results]
violations = [",\r\n".join(res[1]) if len(res[1])>0 else None for res in results]
violations = [self.concise_evaluation_message_if_too_long(violation) for violation in violations]
shipcall_df.loc[:,"evaluation"] = evaluation_state
# build the list of evaluation times ('now', as isoformat)
#evaluation_time = self.get_notification_times(evaluation_states_new)
# build the list of 'evaluation_notifications_sent'. The value is 'False', when a notification should be created
#evaluation_notifications_sent = self.get_notification_states(evaluation_states_old, evaluation_states_new)
shipcall_df.loc[:,"evaluation"] = evaluation_states_new
shipcall_df.loc[:,"evaluation_message"] = violations
#shipcall_df.loc[:,"evaluation_time"] = evaluation_time
#shipcall_df.loc[:,"evaluation_notifications_sent"] = evaluation_notifications_sent
return shipcall_df
def concise_evaluation_message_if_too_long(self, violation):
@ -101,35 +108,26 @@ class ValidationRules(ValidationRuleFunctions):
violation = f"Evaluation message too long. Violated Rules: {concise}"
return violation
def determine_validation_state(self) -> str:
"""
this method determines the validation state of a shipcall. The state is either ['green', 'yellow', 'red'] and signals,
whether an entry causes issues within the workflow of users.
def undefined_method(self) -> str:
"""this function should apply the ValidationRules to the respective .shipcall, in regards to .times"""
return (StatusFlags.GREEN, False) # (state:str, should_notify:bool)
returns: validation_state_new (str)
def determine_notification_state(self, state_old, state_new):
"""
(validation_state_new, description) = self.undefined_method()
# should there also be notifications for critical validation states? In principle, the traffic light itself provides that notification.
self.validation_state = validation_state_new
return validation_state_new
def determine_notification_state(self) -> (str, bool):
this method determines state changes in the notification state. When the state increases, a user is notified about it.
state order: (NONE = GREEN < YELLOW < RED)
"""
this method determines state changes in the notification state. When the state is changed to yellow or red,
a user is notified about it. The only exception for this rule is when the state was yellow or red before,
as the user has then already been notified.
# identify a state increase
should_notify = self.identify_notification_state_change(state_old=state_old, state_new=state_new)
returns: notification_state_new (str), should_notify (bool)
"""
(state_new, description) = self.undefined_method() # determine the successor
should_notify = self.identify_notification_state_change(state_new)
self.notification_state = state_new # overwrite the predecessor
return state_new, should_notify
# when a state increases, a notification must be sent. Thereby, the field should be set to False ({evaluation_notifications_sent})
evaluation_notifications_sent = False if bool(should_notify) else None
return evaluation_notifications_sent
def identify_notification_state_change(self, state_new) -> bool:
def identify_notification_state_change(self, state_old, state_new) -> bool:
"""
determines, whether the observed state change should trigger a notification.
internally, this function maps a color string to an integer and determines, if the successor state is more severe than the predecessor.
internally, this function maps StatusFlags to an integer and determines, if the successor state is more severe than the predecessor.
state changes trigger a notification in the following cases:
green -> yellow
@ -143,10 +141,41 @@ class ValidationRules(ValidationRuleFunctions):
returns bool, whether a notification should be triggered
"""
# state_old is always considered at least 'Green' (1)
state_old = max(copy.copy(self.notification_state) if "notification_state" in list(self.__dict__.keys()) else StatusFlags.NONE, StatusFlags.GREEN.value)
return state_new.value > state_old.value
if state_old is None:
state_old = StatusFlags.NONE.value
state_old = max(int(state_old), StatusFlags.GREEN.value)
return int(state_new) > int(state_old)
def undefined_method(self) -> str:
"""this function should apply the ValidationRules to the respective .shipcall, in regards to .times"""
# #TODO_traffic_state
return (StatusFlags.GREEN, False) # (state:str, should_notify:bool)
def get_notification_times(self, evaluation_states_new)->list[datetime.datetime]:
"""# build the list of evaluation times ('now', as isoformat)"""
evaluation_times = [datetime.datetime.now().isoformat() for _i in range(len(evaluation_states_new))]
return evaluation_times
def get_notification_states(self, evaluation_states_old, evaluation_states_new)->list[bool]:
"""# build the list of 'evaluation_notifications_sent'. The value is 'False', when a notification should be created"""
evaluation_notifications_sent = [self.determine_notification_state(state_old=int(state_old), state_new=int(state_new)) for state_old, state_new in zip(evaluation_states_old, evaluation_states_new)]
return evaluation_notifications_sent
def inspect_shipcall_evaluation(vr, sql_handler, shipcall_id):
"""
# debug only!
a simple debugging function, which serves to inspect the evaluation of a single shipcall id. It returns the result and all related data.
returns: result, shipcall_df (filtered by shipcall id), shipcall, spm (shipcall participant map, filtered by shipcall id), times_df (filtered by shipcall id)
"""
shipcall_df = sql_handler.df_dict.get("shipcall").loc[shipcall_id:shipcall_id,:]
shipcall = Shipcall(**{**{"id":shipcall_id},**sql_handler.df_dict.get("shipcall").loc[shipcall_id].to_dict()})
result = vr.evaluate(shipcall=shipcall)
notification_state = vr.identify_notification_state_change(state_old=int(shipcall.evaluation), state_new=int(result[0]))
print(f"Previous state: {int(shipcall.evaluation)}, New State: {result[0]}, Notification State: {notification_state}")
times_df = sql_handler.df_dict.get("times")
times_df = times_df.loc[times_df["shipcall_id"]==shipcall_id]
spm = sql_handler.df_dict["shipcall_participant_map"]
spm = spm.loc[spm["shipcall_id"]==shipcall_id]
return result, shipcall_df, shipcall, spm, times_df
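# Sketch (not part of the commit): the notification trigger expressed with StatusFlags values.
# identify_notification_state_change only compares the integer values of the old and new state, so it can be
# called without a sql_handler; the YELLOW member is assumed to exist with a value above GREEN, as the
# docstring's state order (NONE = GREEN < YELLOW < RED) describes.
assert ValidationRules.identify_notification_state_change(None, state_old=StatusFlags.GREEN.value, state_new=StatusFlags.YELLOW.value) # escalation -> notify
assert not ValidationRules.identify_notification_state_change(None, state_old=StatusFlags.YELLOW.value, state_new=StatusFlags.GREEN.value) # de-escalation -> no notification
assert not ValidationRules.identify_notification_state_change(None, state_old=None, state_new=StatusFlags.GREEN.value) # missing old state is treated as GREEN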

View File

View File

@ -0,0 +1,527 @@
import pytest
import os
import bcrypt
import pydapper
from BreCal import local_db
from BreCal.database.sql_handler import execute_sql_query_standalone
from BreCal.database.sql_queries import SQLQuery
from BreCal.schemas import model
from BreCal.stubs.user import get_user_simple
instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance")
local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json")
def test_sql_query_every_call_returns_str():
assert isinstance(SQLQuery.get_berth(), str)
return
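# Sketch of the assumed SQLQuery shape exercised by these tests (illustrative only, not the actual
# implementation): a namespace of static methods, each returning a pydapper-parameterised query string.
#
#   class SQLQuery:
#       @staticmethod
#       def get_berth() -> str:
#           return "SELECT * FROM berth"                                    # hypothetical statement
#
#       @staticmethod
#       def get_user() -> str:
#           return "SELECT * FROM user WHERE user_name = ?username?"        # hypothetical statement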
def test_sql_query_get_berths():
schemas = execute_sql_query_standalone(query=SQLQuery.get_berth(), param={})
berths = [model.Berth(**schema) for schema in schemas]
assert all([isinstance(berth, model.Berth) for berth in berths]), f"one of the returned schemas is not a Berth object"
return
def test_sql_query_get_history():
options = {"shipcall_id":157}
history = execute_sql_query_standalone(query=SQLQuery.get_history(), param={"shipcallid" : options["shipcall_id"]}, model=model.History.from_query_row)
assert all([isinstance(hist,model.History) for hist in history])
return
def test_sql_query_get_user():
options = {"username":"maxm"}
users = execute_sql_query_standalone(query=SQLQuery.get_user(), param={"username" : options["username"]}, model=model.User)
assert all([isinstance(user,model.User) for user in users])
assert users[0].user_name==options["username"]
return
def test_sql_get_notifications():
import mysql.connector
# unfortunately, there currently is *no* notification in the database.
with pytest.raises(mysql.connector.errors.ProgrammingError, match="Unknown column 'shipcall_id' in 'field list'"):
options = {"shipcall_id":417}
notifications = execute_sql_query_standalone(query=SQLQuery.get_notifications(), param={"scid" : options["shipcall_id"]}, model=model.Notification.from_query_row)
assert all([isinstance(notification,model.Notification) for notification in notifications])
return
def test_sql_get_participants():
participants = execute_sql_query_standalone(query=SQLQuery.get_participants(), param={}, model=model.Participant)
assert all([isinstance(participant,model.Participant) for participant in participants])
return
def test_sql_get_participant_by_user_id():
options = {"user_id":29}
query = SQLQuery.get_participant_by_user_id()
participants = execute_sql_query_standalone(query=query, param={"userid" : options["user_id"]}, model=model.Participant)
assert all([isinstance(participant,model.Participant) for participant in participants])
assert len(participants)==1, f"there should only be one match for the respective user id"
assert participants[0].id == 136, f"user 29 belongs to participant_id 136"
return
def test_sql_get_shipcalls():
from BreCal.database.sql_queries import create_sql_query_shipcall_get
# different styles for the same outcome
options = {"past_days":3000}
query = create_sql_query_shipcall_get(options)
shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row)
assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls])
query = SQLQuery.get_shipcalls() # defaults to 'past_days'=3
shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row)
assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls])
query = SQLQuery.get_shipcalls({'past_days':3000})
shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row)
assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls])
# fails: options must contain 'past_days' key
with pytest.raises(AssertionError, match="there must be a key 'past_days' in the options, which determines"):
query = SQLQuery.get_shipcalls(options={})
shipcalls = execute_sql_query_standalone(query=query, param=options, model=model.Shipcall.from_query_row)
return
def test_sql_get_ships():
ships = execute_sql_query_standalone(query=SQLQuery.get_ships(), model=model.Ship)
assert all([isinstance(ship, model.Ship) for ship in ships])
return
def test_sql_get_times():
options = {'shipcall_id':153}
times = execute_sql_query_standalone(query=SQLQuery.get_times(), model=model.Times, param={"scid" : options["shipcall_id"]})
assert all([isinstance(time_,model.Times) for time_ in times])
assert times[0].shipcall_id==options["shipcall_id"]
return
def test_sql_get_user_by_id():
# success: id 29 exists
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 29
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert not theuser is sentinel, f"failed GET user query"
assert theuser.id == schemaModel["id"]
# fails: id 292212 does not exist (returns default, which is the sentinel object in this case)
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 292212
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert theuser is sentinel
return
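# Note on the sentinel pattern above: pydapper's query_single_or_default(query, default, ...) returns the
# supplied default when no row matches, so passing a unique sentinel object lets the test distinguish
# "no such user" from a row whose fields merely happen to be empty.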
def test_sql_get_user_put():
"""the PUT query for a user must be built based on a schemaModel, as the available data is dynamic and the query must be adaptive."""
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 29
query = "UPDATE user SET "
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "old_password":
continue
if key == "new_password":
continue
if isNotFirst:
query += ", "
isNotFirst = True
query += key + " = ?" + key + "? "
query += "WHERE id = ?id?"
assert SQLQuery.get_user_put(schemaModel) == query
return
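# For illustration (an assumed example, not taken from the codebase): a schemaModel containing only
# {"id": 29, "first_name": "Max", "last_name": "Metz"} would yield a pydapper-style statement such as
#   "UPDATE user SET first_name = ?first_name? , last_name = ?last_name? WHERE id = ?id?"
# where the ?key? placeholders are bound from the param dictionary at execution time.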
def test_sql_user_put_set_lastname_check_unset_lastname_check():
"""
Simply put, this method updates the last_name of user 29 to "Metz", verifies the change,
and then proceeds to set it back to "Mustermann", and verifies the change.
"""
# 1.) SET the last_name of user_id 29 to 'Metz' by a PUT command
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 29
schemaModel["last_name"] = "Metz" # -> "Metz" -> "Mustermann"
schemaModel = {k:v for k,v in schemaModel.items() if k in ["id", "last_name"]}
query = SQLQuery.get_user_put(schemaModel)
affected_rows = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute")
assert affected_rows==1, f"exactly one row should be affected by this call."
# 2.) GET the user_id 29 and verify the last_name is 'Metz'
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert not theuser is sentinel, f"failed GET user query"
assert theuser.last_name=="Metz", f"PUT command has been unsuccessful."
# 3.) SET the last_name of user_id 29 to 'Mustermann' by a PUT command
schemaModel = theuser.__dict__
schemaModel["last_name"] = "Mustermann"
schemaModel = {k:v for k,v in schemaModel.items() if k in ["id", "last_name"]}
query = SQLQuery.get_user_put(schemaModel)
affected_rows = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute")
assert affected_rows==1, f"exactly one row should be affected by this call."
# 4.) GET the user_id 29 and verify the last_name is 'Mustermann'
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert not theuser is sentinel, f"failed GET user query"
assert theuser.last_name=="Mustermann", f"PUT command has been unsuccessful."
return
def test_sql_user_update_password():
"""This test updates the default password of user 29 from 'Start1234' to 'Start4321' and afterwards sets it back to 'Start1234'."""
# #TODO: this test very openly displays the password of 'maxm'. It makes sense to create a stub user in the database, which can be
# used for these tests, so that only an account without any importance or privileged role is exposed.
# Set. Update the password of user 29 from 'Start1234' to 'Start4321'
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 29
schemaModel["old_password"] = "Start1234"
schemaModel["new_password"] = "Start4321"
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert not theuser is sentinel, f"failed GET user query"
assert bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")), f"old password does not match the database entry"
password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8')
query = SQLQuery.get_update_user_password()
affected_rows = execute_sql_query_standalone(query=query, param={"password_hash" : password_hash, "id" : schemaModel["id"]}, command_type="execute")
assert affected_rows == 1
# 2.) Revert. Set password back to the default (from 'Start4321' to 'Start1234')
schemaModel = get_user_simple().__dict__
schemaModel["id"] = 29
schemaModel["old_password"] = "Start4321"
schemaModel["new_password"] = "Start1234"
sentinel = object()
query = SQLQuery.get_user_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theuser = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.User)
pooledConnection.close()
assert not theuser is sentinel, f"failed GET user query"
assert bcrypt.checkpw(schemaModel["old_password"].encode("utf-8"), bytes(theuser.password_hash, "utf-8")), f"old password does not match the database entry"
password_hash = bcrypt.hashpw(schemaModel["new_password"].encode('utf-8'), bcrypt.gensalt( 12 )).decode('utf8')
query = SQLQuery.get_update_user_password()
affected_rows = execute_sql_query_standalone(query=query, param={"password_hash" : password_hash, "id" : schemaModel["id"]}, command_type="execute")
assert affected_rows == 1
return
def test_sql_get_participants_of_shipcall_id():
shipcall_id = 389
query = SQLQuery.get_participants()
participants = execute_sql_query_standalone(query=query, model=dict, param={"shipcall_id" : shipcall_id})
assert all([part.get("participant_id") is not None for part in participants])
assert all([part.get("type") is not None for part in participants])
# try to convert every participant into a model.Participant_Assignment
participants = [
model.Participant_Assignment(part["participant_id"], part["type"])
for part in participants
]
assert all([isinstance(part,model.Participant_Assignment) for part in participants])
return
def test_sql_get_all_shipcalls_and_assign_participants():
"""
this test reproduces the SQL query within BreCal.impl.shipcalls to make sure that the
query first returns all shipcalls and then assigns all participants of the respective shipcall to it.
"""
# get all shipcalls
options = {'past_days':30000}
query = SQLQuery.get_shipcalls(options)
shipcalls = execute_sql_query_standalone(query=query, model=model.Shipcall.from_query_row)
assert all([isinstance(shipcall,model.Shipcall) for shipcall in shipcalls])
# for every shipcall, assign all of its participants to it
for shipcall in shipcalls:
participant_query = SQLQuery.get_participants()
participants = execute_sql_query_standalone(query=participant_query, model=dict, param={"shipcall_id" : shipcall.id})
for record in participants:
pa = model.Participant_Assignment(record["participant_id"], record["type"])
shipcall.participants.append(pa)
assert any([
any([isinstance(participant, model.Participant_Assignment) for participant in shipcall.participants])
for shipcall in shipcalls
]), f"at least one of the shipcalls should have an assigned model.Participant_Assignment"
return
def test_sqlquery_get_shipcal_post_identical_to_create_sql_query_shipcall_post():
from BreCal.database.sql_queries import create_sql_query_shipcall_post
from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model
post_data = get_stub_valid_shipcall_shifting()
schemaModel = get_stub_valid_ship_loaded_model(post_data)
query1 = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise)
query2 = create_sql_query_shipcall_post(schemaModel)
assert query1==query2
return
def test_sql_post_shipcall():
"""issues a post-request with stub data and adds it to the database."""
from BreCal.database.sql_queries import create_sql_query_shipcall_post
from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model
pooledConnection = local_db.getPoolConnection()
try:
commands = pydapper.using(pooledConnection)
post_data = get_stub_valid_shipcall_shifting()
post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters).
schemaModel = get_stub_valid_ship_loaded_model(post_data)
query = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise)
schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute", pooledConnection=pooledConnection)
assert schemas==1, f"unsuccessful query execution. Query: {query}"
# within the same pooledConnection, ask for the last inserted id
query = SQLQuery.get_shipcall_post_last_insert_id()
new_id = commands.execute_scalar(query)
assert new_id > 0, f"the new id should be greater than 0."
# add participant assignments if we have a list of participants
if 'participants' in schemaModel:
pquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map()
for participant_assignment in schemaModel["participants"]:
schemas = execute_sql_query_standalone(query=pquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute", pooledConnection=pooledConnection)
from BreCal.stubs.user import get_user_simple
# assign an artificial user with id 29 (maxm) & participant type 136
user_data = get_user_simple().__dict__
user_data["id"] = 29
user_data["participant_id"] = 136
# POST in the history
query = SQLQuery.create_sql_query_history_post()
schemas = execute_sql_query_standalone(query=query, param={"scid" : new_id, "pid" : user_data["participant_id"], "uid" : user_data["id"]}, command_type="execute", pooledConnection=pooledConnection)
assert schemas == 1, f"unsuccessful history POST"
finally:
pooledConnection.close()
return
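# Design note (an assumption about the query behind get_shipcall_post_last_insert_id): MySQL tracks
# LAST_INSERT_ID() per connection, which is why the id lookup above runs on the same pooledConnection
# as the INSERT. A minimal sketch of the pattern:
#   pooledConnection = local_db.getPoolConnection()
#   try:
#       commands = pydapper.using(pooledConnection)
#       commands.execute(SQLQuery.get_shipcall_post(schemaModel), param=schemaModel)   # INSERT
#       new_id = commands.execute_scalar(SQLQuery.get_shipcall_post_last_insert_id())  # same connection -> correct id
#   finally:
#       pooledConnection.close()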
def test_sql_create_history_post_matches_legacy_function():
from BreCal.database.sql_queries import create_sql_query_history_post
query_refactored = SQLQuery.create_sql_query_history_post()
query_legacy = create_sql_query_history_post()
assert isinstance(query_refactored,str)
assert query_refactored==query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version"
return
def test_sql_get_shipcall_by_id():
schemaModel = {"id":63}
sentinel = object()
query = SQLQuery.get_shipcall_by_id()
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
theshipcall = commands.query_single_or_default(query, sentinel, param={"id" : schemaModel["id"]}, model=model.Shipcall)
pooledConnection.close()
assert not theshipcall is sentinel, f"failed GET shipcall query"
assert theshipcall.id==schemaModel["id"]
return
def test_sql_get_shipcall_by_id_short_version():
schemaModel = {"id":63}
# when model is defined, returns the data model
query = SQLQuery.get_shipcall_by_id()
schemas = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, model=model.Shipcall, command_type="single")
assert schemas.id==schemaModel["id"]
assert isinstance(schemas, model.Shipcall)
# when model = None, returns a dictionary
query = SQLQuery.get_shipcall_by_id()
schemas = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="single")
assert isinstance(schemas, dict)
assert schemas.get("id")==schemaModel["id"]
return
def test_sql_get_shipcall_put_refactored_equals_extended_version():
from BreCal.database.sql_queries import create_sql_query_shipcall_put
from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model
post_data = get_stub_valid_shipcall_shifting()
post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters).
schemaModel = get_stub_valid_ship_loaded_model(post_data)
legacy_query = create_sql_query_shipcall_put(schemaModel)
refactored_query = SQLQuery.get_shipcall_put(schemaModel)
assert refactored_query == legacy_query, f"version conflict. the refactored query must precisely match the legacy query!"
return
def test_sql_get_shipcall_participant_map_by_shipcall_id():
schemaModel = {"id":152}
query = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pdata = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="query") # existing list of assignments
assert len(pdata)==4, f"there should be four assigned participants for the shipcall with id {schemaModel.get('id')}"
return
def test_sql__get_shipcall__get_spm__optionally_update_shipcall():
schemaModel = {'id': 152}
query = SQLQuery.get_shipcall_by_id()
shipcall = execute_sql_query_standalone(query=query, param={"id" : schemaModel["id"]}, command_type="single", model=model.Shipcall)
query = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pdata = execute_sql_query_standalone(query=query, param={"id" : shipcall.id}, command_type="query") # existing list of assignments
assert len(pdata)==4, f"there should be four assigned participants for the shipcall with id {shipcall.id}"
for participant_assignment in shipcall.participants:
found_participant = False
for elem in pdata:
if elem["participant_id"] == participant_assignment["participant_id"] and elem["type"] == participant_assignment["type"]:
found_participant = True
break
if not found_participant:
nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map()
ndata = execute_sql_query_standalone(query=nquery, param={"shipcall_id" : shipcall.id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute") # existing list of assignments
return
def test_sql__shipcall_post__get_last_insert_id__get_spm__update_participants__verify_changes():
"""
this combinatorial test:
1.) creates a novel shipcall
2.) obtains the ID of the just-created shipcall
3.) reads the participant map for that ID and verifies that there are no participants listed
4.) iteratively updates the participant map of the ID (using proxy data)
5.) verifies the update
"""
from BreCal.stubs.shipcall import get_stub_valid_shipcall_shifting, get_stub_valid_ship_loaded_model
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
try:
# 1.) create shipcall
post_data = get_stub_valid_shipcall_shifting()
post_data["voyage"] = "pytestRS71" # perform tagging to identify the shipcalls created by pytests (<16 characters, no special characters).
schemaModel = get_stub_valid_ship_loaded_model(post_data)
query = SQLQuery.get_shipcall_post(schemaModel) # refactored variant of create_sql_query_shipcall_post (more concise)
schemas = execute_sql_query_standalone(query=query, param=schemaModel, command_type="execute", pooledConnection=pooledConnection)
assert schemas==1, f"unsuccessful query execution. Query: {query}"
# 2.) obtain the ID of the novel shipcall
# within the same pooledConnection, ask for the last inserted id
query = SQLQuery.get_shipcall_post_last_insert_id()
new_id = commands.execute_scalar(query)
assert new_id > 0, f"the new id should be greater than 0."
# 3.) read the ShipcallParticipantMap for the novel id
query = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments
assert len(pdata)==0, f"as the POST query does not include participants in this case, the novel id should not have assigned participants."
### proxy data ###
# loop across passed participant ids, creating entries for those not present in pdata
schemaModel = {'id': new_id, "participants":[{'id': 128, 'participant_id': 2, 'type': 4}, {'id': 129, 'participant_id': 3, 'type': 1}, {'id': 130, 'participant_id': 4, 'type': 2}, {'id': 131, 'participant_id': 6, 'type': 8}]}
# 4.) assign the participants
for participant_assignment in schemaModel["participants"]:
found_participant = False
for elem in pdata:
if elem["participant_id"] == participant_assignment["participant_id"] and elem["type"] == participant_assignment["type"]:
found_participant = True
break
if not found_participant:
nquery = SQLQuery.get_shipcall_post_update_shipcall_participant_map()
ndata = execute_sql_query_standalone(query=nquery, param={"shipcall_id" : new_id, "participant_id" : participant_assignment["participant_id"], "type" : participant_assignment["type"]}, command_type="execute") # existing list of assignments
# 5.) verify the update (5 participants, including the false one)
query = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments
assert len(pdata)==5, f"due to the PUT, there shall now be five participants, as defined in schemaModel."
# 6.) delete the incorrect participant (last entry in the list in this case)
dquery = SQLQuery.get_shipcall_participant_map_delete_by_id()
ddata = execute_sql_query_standalone(query=dquery, param={"existing_id" : pdata[-1].get("id")}, command_type="execute")
# 7.) verify the update (now 4 participants)
query = SQLQuery.get_shipcall_participant_map_by_shipcall_id()
pdata = execute_sql_query_standalone(query=query, param={"id" : new_id}, command_type="query") # existing list of assignments
assert len(pdata)==4, f"after the DELETE, there should be four participants again."
finally:
pooledConnection.close()
return
def test_sql_query_get_shipcalls_is_identical_to_legacy_query():
from BreCal.database.sql_queries import create_sql_query_shipcall_get
options = {'past_days':7}
query_refactored = SQLQuery.get_shipcalls(options)
query_legacy = create_sql_query_shipcall_get(options)
assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version"
return
def test_sql_query_post_ship_is_identical_to_legacy_query():
from BreCal.database.sql_queries import SQLQuery, create_sql_query_ship_post, create_sql_query_ship_put
from BreCal.stubs.ship import get_stub_valid_ship_loaded_model
schemaModel = get_stub_valid_ship_loaded_model()
query_refactored = SQLQuery.get_ship_post(schemaModel)
query_legacy = create_sql_query_ship_post(schemaModel)
assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version"
return
def test_sql_query_put_ship_is_identical_to_legacy_query():
from BreCal.database.sql_queries import SQLQuery, create_sql_query_ship_post, create_sql_query_ship_put
from BreCal.stubs.ship import get_stub_valid_ship_loaded_model
schemaModel = get_stub_valid_ship_loaded_model()
query_refactored = SQLQuery.get_ship_put(schemaModel)
query_legacy = create_sql_query_ship_put(schemaModel)
assert query_refactored == query_legacy, f"the refactored code to generate the query must be absolutely identical to the legacy version"
return
#schemas = execute_sql_query_standalone(query=SQLQuery.get_berth(), param={})

View File

@@ -0,0 +1,80 @@
from marshmallow import ValidationError
import pytest
from BreCal.schemas.model import ShipcallSchema
@pytest.fixture(scope="function") # function: destroy fixture at the end of each test
def prepare_shipcall_content():
import datetime
from BreCal.stubs.shipcall import get_shipcall_simple
shipcall_stub = get_shipcall_simple()
content = shipcall_stub.__dict__
content["participants"] = []
content = {k:v.isoformat() if isinstance(v, datetime.datetime) else v for k,v in content.items()}
return locals()
def test_shipcall_input_validation_draft(prepare_shipcall_content):
content = prepare_shipcall_content["content"]
content["draft"] = 24.11
schemaModel = ShipcallSchema()
with pytest.raises(ValidationError, match="Must be greater than 0 and less than or equal to 20."):
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return
def test_shipcall_input_validation_voyage(prepare_shipcall_content):
content = prepare_shipcall_content["content"]
content["voyage"] = "".join(list(map(str,list(range(0,24))))) # 38 characters
schemaModel = ShipcallSchema()
with pytest.raises(ValidationError, match="Longer than maximum length "):
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return
@pytest.fixture(scope="function") # function: destroy fixture at the end of each test
def prepare_user_content():
import datetime
from BreCal.stubs.user import get_user_simple
from BreCal.schemas.model import UserSchema
schemaModel = UserSchema()
user_stub = get_user_simple()
content = user_stub.__dict__
content = {k:v.isoformat() if isinstance(v, datetime.datetime) else v for k,v in content.items()}
content = {k:v for k,v in content.items() if k in list(schemaModel.fields.keys())}
content["old_password"] = "myfavoritedog123"
content["new_password"] = "SecuRepassW0rd!"
return locals()
def test_input_validation_user_phone_number_is_valid(prepare_user_content):
content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"]
content["user_phone"] = "+49123 45678912" # whitespace and + are valid
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return
def test_input_validation_user_phone_number_is_invalid(prepare_user_content):
content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"]
content["user_phone"] = "+49123 45678912!" # ! is invalid
with pytest.raises(ValidationError, match="one of the phone number values is not valid."):
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return
def test_input_validation_new_password_too_short(prepare_user_content):
content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"]
content["new_password"] = "1234" # must have between 6 and 128 characters
with pytest.raises(ValidationError, match="Length must be between 6 and 128."):
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return
def test_input_validation_user_email_invalid(prepare_user_content):
content, schemaModel = prepare_user_content["content"], prepare_user_content["schemaModel"]
content["user_email"] = "userbrecal.com" # forgot @ -> invalid
with pytest.raises(ValidationError, match="invalid email address"):
loadedModel = schemaModel.load(data=content, many=False, partial=True)
return

View File

@@ -8,13 +8,14 @@ def test_create_app():
import sys
from BreCal import get_project_root
project_root = get_project_root("brecal")
project_root = os.path.join(os.path.expanduser("~"), "brecal")
lib_location = os.path.join(project_root, "src", "server")
sys.path.append(lib_location)
from BreCal import create_app
os.chdir(os.path.join(lib_location,"BreCal")) # set the current directory to ~/brecal/src/server/BreCal, so the config is found
application = create_app()
instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance")
application = create_app(test_config=None, instance_path=instance_path)
return
if __name__=="__main__":

View File

@@ -0,0 +1,226 @@
import pytest
import os
import jwt
import json
import requests
import datetime
import werkzeug
import re
from marshmallow import ValidationError
from BreCal import local_db
from BreCal.schemas import model
from BreCal.impl.ships import GetShips
from BreCal.schemas.model import Participant_Assignment, EvaluationType, ShipcallType
from BreCal.stubs.ship import get_stub_valid_ship, get_stub_valid_ship_loaded_model
from BreCal.validators.input_validation import validation_error_default_asserts
from BreCal.schemas.model import ParticipantType
from BreCal.validators.input_validation_ship import InputValidationShip
instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance")
local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json")
@pytest.fixture(scope="session")
def get_stub_token():
"""
performs a login to the user 'maxm' and returns the respective url and the token. The token will be used in
further requests in the following format (example of post-request):
requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
"""
port = 9013
url = f"http://127.0.0.1:{port}"
# set the JWT key
os.environ['SECRET_KEY'] = 'zdiTz8P3jXOc7jztIQAoelK4zztyuCpJ'
try:
response = requests.post(f"{url}/login", json=jwt.decode("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6Im1heG0iLCJwYXNzd29yZCI6IlN0YXJ0MTIzNCJ9.uIrbz3g-IwwTLz6C1zXELRGtAtRJ_myYJ4J4x0ozjAI", key=os.environ.get("SECRET_KEY"), algorithms=["HS256"]))
except requests.ConnectionError as err:
raise AssertionError(f"could not establish a connection to the default url. Did you start a local server instance (backed by the local database) at port {port}? Looking for a connection to {url}")
user = response.json()
token = user.get("token")
return locals()
def test_input_validation_ship_fails_when_length_is_incorrect():
with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 1000.")):
post_data = get_stub_valid_ship()
post_data["length"] = 0
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 1000.")):
post_data = get_stub_valid_ship()
post_data["length"] = 1000
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
# success
post_data = get_stub_valid_ship()
post_data["length"] = 123
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_ship_fails_when_width_is_incorrect():
with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 100.")):
post_data = get_stub_valid_ship()
post_data["width"] = 0
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("Must be greater than 0 and less than 100.")):
post_data = get_stub_valid_ship()
post_data["width"] = 100
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
# success
post_data = get_stub_valid_ship()
post_data["width"] = 12
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_ship_fails_when_name_is_incorrect():
with pytest.raises(ValidationError, match=re.escape("'name' argument should have at max. 63 characters")):
post_data = get_stub_valid_ship()
post_data["name"] = "0123456789012345678901234567890123456789012345678901234567890123"
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("'name' argument should not have special characters.")):
post_data = get_stub_valid_ship()
post_data["name"] = '👽'
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
post_data = get_stub_valid_ship()
post_data["name"] = "012345678901234567890123456789012345678901234567890123456789012"
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_ship_fails_when_callsign_is_incorrect():
with pytest.raises(ValidationError, match=re.escape("'callsign' argument should not have more than 8 characters")):
post_data = get_stub_valid_ship()
post_data["callsign"] = "123456789"
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("'callsign' argument should not have special characters.")):
post_data = get_stub_valid_ship()
post_data["callsign"] = '👽'
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
# success
post_data = get_stub_valid_ship()
post_data["callsign"] = 'PBIO'
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
# success
post_data = get_stub_valid_ship()
post_data["callsign"] = None
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_ship_fails_when_imo_is_incorrect():
# imo must have exactly 7 digits and can't be None
with pytest.raises(ValidationError, match=re.escape("'imo' should be a 7-digit number")):
post_data = get_stub_valid_ship()
post_data["imo"] = 123456
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("'imo' should be a 7-digit number")):
post_data = get_stub_valid_ship()
post_data["imo"] = 12345678
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
with pytest.raises(ValidationError, match=re.escape("Field may not be null.")):
post_data = get_stub_valid_ship()
post_data["imo"] = None
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
# success
post_data = get_stub_valid_ship()
post_data["imo"] = 1234567
loadedModel = model.ShipSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_ship_fails_when_bollard_pull_and_tug_values_are_set():
ivs = InputValidationShip()
with pytest.raises(ValidationError, match=re.escape("'bollard_pull' is only allowed, when a ship is a tug ('is_tug').")):
content = {'is_tug':0, 'bollard_pull':230}
ivs.optionally_evaluate_bollard_pull_value(content)
with pytest.raises(ValidationError, match=re.escape("'bollard_pull' is only allowed, when a ship is a tug ('is_tug').")):
content = {'is_tug':None, 'bollard_pull':230}
ivs.optionally_evaluate_bollard_pull_value(content)
content = {'is_tug':0, 'bollard_pull':None}
ivs.optionally_evaluate_bollard_pull_value(content)
content = {'is_tug':1, 'bollard_pull':None}
ivs.optionally_evaluate_bollard_pull_value(content)
content = {'is_tug':1, 'bollard_pull':125}
ivs.optionally_evaluate_bollard_pull_value(content)
with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")):
content = {'is_tug':1, 'bollard_pull':-1}
ivs.optionally_evaluate_bollard_pull_value(content)
with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")):
content = {'is_tug':1, 'bollard_pull':0}
ivs.optionally_evaluate_bollard_pull_value(content)
with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")):
content = {'is_tug':1, 'bollard_pull':500}
ivs.optionally_evaluate_bollard_pull_value(content)
with pytest.raises(ValidationError, match=re.escape("when a ship is a tug, the bollard pull must be 0 < value < 500.")):
content = {'is_tug':1, 'bollard_pull':501}
ivs.optionally_evaluate_bollard_pull_value(content)
return
def test_input_validation_ship_post_request_fails_when_ship_imo_already_exists():
# get the ships, convert them to a list of JSON dictionaries
response, status_code, header = GetShips(token=None)
ships = json.loads(response)
# extract only the 'imo' values
ship_imos = [ship.get("imo") for ship in ships]
post_data = get_stub_valid_ship()
post_data["imo"] = ship_imos[-1] # assign one of the IMOs, which already exist
loadedModel = get_stub_valid_ship_loaded_model(post_data)
content = post_data
with pytest.raises(ValidationError, match="the provided ship IMO 9186687 already exists. A ship may only be added, if there is no other ship with the same IMO number."):
InputValidationShip.check_ship_imo_already_exists(loadedModel)
return
def test_input_validation_ship_put_request_fails_when_ship_imo_should_be_changed():
# get the ships, convert them to a list of JSON dictionaries
response, status_code, header = GetShips(token=None)
ships = json.loads(response)
selected_ship = ships[-1] # select one of the ships; in this case the last one.
put_data = get_stub_valid_ship()
put_data["imo"] = selected_ship.get("imo")+1 # assign one of the IMOs, which already exist
loadedModel = get_stub_valid_ship_loaded_model(put_data)
content = put_data
with pytest.raises(ValidationError, match=re.escape("The IMO number field may not be changed since it serves the purpose of a primary (matching) key.")):
InputValidationShip.put_content_may_not_contain_imo_number(content)
return
def test_input_validation_ship_put_request_fails_when_ship_id_is_missing():
put_data = get_stub_valid_ship()
put_data.pop("id",None) # make sure there is no ID within the put data for this test
loadedModel = get_stub_valid_ship_loaded_model(put_data)
content = put_data
with pytest.raises(ValidationError, match="The id field is required."):
InputValidationShip.content_contains_ship_id(content)
return

View File

@@ -0,0 +1,737 @@
import pytest
import os
import jwt
import json
import requests
import datetime
import werkzeug
from marshmallow import ValidationError
from BreCal import local_db
from BreCal.schemas.model import Participant_Assignment, EvaluationType, ShipcallType
from BreCal.stubs.shipcall import create_postman_stub_shipcall, get_stub_valid_shipcall_arrival, get_stub_valid_shipcall_departure, get_stub_valid_shipcall_shifting, get_stub_shipcall_arrival_invalid_missing_eta, get_stub_shipcall_shifting_invalid_missing_eta, get_stub_shipcall_shifting_invalid_missing_etd, get_stub_shipcall_arrival_invalid_missing_type, get_stub_shipcall_departure_invalid_missing_etd
from BreCal.stubs.participant import get_stub_list_of_valid_participants
from BreCal.validators.input_validation import validation_error_default_asserts
from BreCal.schemas.model import ParticipantType
from BreCal.validators.input_validation_shipcall import InputValidationShipcall
instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance")
local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json")
@pytest.fixture(scope="session")
def get_stub_token():
"""
performs a login to the user 'maxm' and returns the respective url and the token. The token will be used in
further requests in the following format (example of post-request):
requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
"""
port = 9013
url = f"http://127.0.0.1:{port}"
# set the JWT key
os.environ['SECRET_KEY'] = 'zdiTz8P3jXOc7jztIQAoelK4zztyuCpJ'
try:
response = requests.post(f"{url}/login", json=jwt.decode("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6Im1heG0iLCJwYXNzd29yZCI6IlN0YXJ0MTIzNCJ9.uIrbz3g-IwwTLz6C1zXELRGtAtRJ_myYJ4J4x0ozjAI", key=os.environ.get("SECRET_KEY"), algorithms=["HS256"]))
except requests.ConnectionError as err:
raise AssertionError(f"could not establish a connection to the default url. Did you start a local server instance (backed by the local database) at port {port}? Looking for a connection to {url}")
user = response.json()
token = user.get("token")
return locals()
@pytest.fixture(scope="session")
def get_shipcall_id_after_stub_post_request(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
assert response.status_code==201
shipcall_id = response.json().get("id")
assert shipcall_id is not None
return locals()
def test_shipcall_post_request_fails_when_ship_id_is_invalid(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["ship_id"] = 1234562
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
with pytest.raises(ValidationError, match=f"provided an invalid ship id"):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
return
def test_shipcall_post_request_fails_when_arrival_berth_id_is_invalid(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["arrival_berth_id"] = 1234562
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
with pytest.raises(ValidationError, match=f"provided an invalid arrival berth id"):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
return
def test_shipcall_post_request_fails_when_departure_berth_id_is_invalid(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["departure_berth_id"] = 1234562
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
with pytest.raises(ValidationError, match=f"provided an invalid departure berth id"):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
return
def test_shipcall_post_request_fails_when_participant_ids_are_invalid(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["participants"] = [Participant_Assignment(1234562,4).to_json()] # identical to: [{'participant_id': 1234562, 'type': 4}]
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
with pytest.raises(ValidationError, match=f"one of the provided participant ids is invalid"):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
return
def test_shipcall_post_request_fails_when_forbidden_keys_are_set(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
for forbidden_key, forbidden_value in zip(["canceled", "evaluation", "evaluation_message"], [1, EvaluationType.red.name, "random error message"]):
post_data = original_post_data.copy()
post_data[forbidden_key] = forbidden_value
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
assert response.status_code==400
with pytest.raises(ValidationError, match=f"may not be set on POST. "):
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
return
def test_shipcall_post_request_fails_when_draft_is_out_of_range(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["draft"] = 0
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match=f"Must be greater than 0 and less than or equal to "):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["draft"] = 21
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match=f"Must be greater than 0 and less than or equal to "):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data["draft"] = 20
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code==201, f"the request should accept 20.0 as a valid 'draft' value"
return
def test_shipcall_post_request_fails_when_recommended_tugs_is_out_of_range(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data = original_post_data.copy()
post_data["recommended_tugs"] = 10
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code == 201
post_data = original_post_data.copy()
post_data["recommended_tugs"] = 0
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code == 201
post_data = original_post_data.copy()
post_data["recommended_tugs"] = 11
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match=f"Must be greater than or equal to 0 and less than or equal to"):
assert response.status_code==400
raise ValidationError(response.json()) # because the response does not raise a ValidationError, we artificially create it to check the pytest.raises outcome
def test_shipcall_post_request_fails_when_voyage_string_is_invalid(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
# Accept
post_data = original_post_data.copy()
post_data["voyage"] = "abcdefghijklmnop"
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code==201
# Fail: too long string
post_data = original_post_data.copy()
post_data["voyage"] = "abcdefghijklmnopq"
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Longer than maximum length 16"):
assert response.status_code==400
raise ValidationError(response.json())
# Fail: special characters
post_data = original_post_data.copy()
post_data["voyage"] = '👽'
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Please use only digits and ASCII letters."):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_arrival_and_not_in_future(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
# accept
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.arrival
post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code == 201
# error
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.arrival
post_data["eta"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="must be in the future. Incorrect datetime provided"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_departure_and_not_in_future(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_departure() # create_postman_stub_shipcall()
# accept
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code == 201
# error
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data["etd"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="must be in the future. Incorrect datetime provided"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_shifting_and_not_in_future(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_departure() # create_postman_stub_shipcall()
# accept
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
assert response.status_code == 201
# error
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
post_data["etd"] = (datetime.datetime.now() - datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="must be in the future. Incorrect datetime provided"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_arrival_and_missing_eta(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data = original_post_data.copy()
post_data.pop("eta", None)
post_data["type"] = ShipcallType.arrival
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Missing key!"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_departure_and_missing_etd(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data = original_post_data.copy()
post_data.pop("etd", None)
post_data["type"] = ShipcallType.departure
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Missing key!"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_shifting_and_missing_eta(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data.pop("eta", None)
post_data["etd"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Missing key!"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_request_fails_when_type_shifting_and_missing_etd(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
original_post_data = get_stub_valid_shipcall_arrival() # create_postman_stub_shipcall()
post_data = original_post_data.copy()
post_data["type"] = ShipcallType.departure
post_data["eta"] = (datetime.datetime.now() + datetime.timedelta(hours=3)).isoformat()
post_data.pop("etd", None)
response = requests.post(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data)
with pytest.raises(ValidationError, match="Missing key!"):
assert response.status_code==400
raise ValidationError(response.json())
return
def test_shipcall_post_invalid_tidal_window_to_smaller_than_tidal_window_from(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_shifting()
post_data["tidal_window_to"] = (datetime.datetime.fromisoformat(post_data["tidal_window_from"])-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'tidal_window_to\' must take place after \'tidal_window_from\'" in response.json().get("message","")
return
def test_shipcall_post_invalid_tidal_windows_must_be_in_future(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_shifting()
post_data["tidal_window_from"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'tidal_window_from\' must be in the future. " in response.json().get("message","")
post_data = get_stub_valid_shipcall_shifting()
post_data["tidal_window_to"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'tidal_window_to\' must be in the future. " in response.json().get("message","")
return
def test_shipcall_post_invalid_canceled_must_not_be_set(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
stubs = [("canceled", 1), ("evaluation", "green"), ("evaluation_message", "this is an error message")]
for key, value in stubs:
post_data = get_stub_valid_shipcall_shifting()
post_data[key] = value
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert f"\'{key}\' may not be set on POST. Found:" in response.json().get("message","")
return
def test_shipcall_post_invalid_participant_type_listed_multiple_times(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
participants = get_stub_list_of_valid_participants()
# create a duplicate entry for 'id' and 'type'. Either of the two should raise an exception.
participants.append(participants[0])
post_data = get_stub_valid_shipcall_shifting()
post_data["participants"] = participants
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert f"every participant id and type should be listed only once. Found multiple entries for one of the participants." in response.json().get("message","")
return
def test_shipcall_post_invalid_participants_missing_agency(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = {}
response = requests.get(
f"{url}/participants", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
participants = response.json()
participant_id_dict = {item.get("id"):{"participant_id":item.get("id"), "type":item.get("type")} for item in response.json()}
# e.g., [{'participant_id': 2, 'type': 4}, {'participant_id': 3, 'type': 1}, {'participant_id': 4, 'type': 2}]
participants = [participant_id_dict[2], participant_id_dict[3], participant_id_dict[4]]
post_data = get_stub_valid_shipcall_shifting()
post_data["participants"] = participants
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "One of the assigned participants *must* be of type \'ParticipantType.AGENCY\'" in response.json().get("message","")
return
def test_shipcall_post_invalid_etd_smaller_than_eta(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_shifting()
post_data["etd"] = (datetime.datetime.fromisoformat(post_data["eta"])-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'etd\' must be larger than \'eta\'. " in response.json().get("message","")
return
def test_shipcall_post_invalid_eta_and_etd_must_be_in_future(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_shifting()
post_data["etd"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'eta\' and \'etd\' must be in the future. " in response.json().get("message","")
post_data = get_stub_valid_shipcall_shifting()
post_data["eta"] = (datetime.datetime.now()-datetime.timedelta(minutes=1)).isoformat()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "\'eta\' and \'etd\' must be in the future. " in response.json().get("message","")
return
def test_shipcall_post_request_missing_mandatory_keys(get_stub_token): # fixture: some sort of local API start in the background
"""
Creates a valid shipcall entry and modifies it by dropping one of the mandatory keys. This test ensures
that a ValidationError is raised whenever one of the mandatory keys is missing.
"""
url = get_stub_token.get("url")
token = get_stub_token.get("token")
post_data = get_stub_shipcall_arrival_invalid_missing_eta()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'eta\' is mandatory." in response.json().get("message","")
post_data = get_stub_shipcall_departure_invalid_missing_etd()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'etd\' is mandatory." in response.json().get("message","")
post_data = get_stub_shipcall_shifting_invalid_missing_eta()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'eta\' and \'etd\' is mandatory." in response.json().get("message","")
post_data = get_stub_shipcall_shifting_invalid_missing_etd()
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'eta\' and \'etd\' is mandatory." in response.json().get("message","")
# the following keys all share the same logic and will be tested in sequence
for KEY in ["eta", "arrival_berth_id", "type", "ship_id"]:
# artificially remove the KEY from a valid shipcall entry
post_data = get_stub_valid_shipcall_arrival()
post_data.pop(KEY,None)
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert f"providing \'{KEY}\' is mandatory." in response.json().get("message","")
# BERTH ID (arrival or departure, based on type)
post_data = get_stub_valid_shipcall_arrival()
post_data.pop("arrival_berth_id",None)
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'arrival_berth_id\' is mandatory." in response.json().get("message","")
post_data = get_stub_valid_shipcall_departure()
post_data.pop("departure_berth_id",None)
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'departure_berth_id\' is mandatory." in response.json().get("message","")
post_data = get_stub_valid_shipcall_shifting()
post_data.pop("arrival_berth_id",None)
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'arrival_berth_id\' & \'departure_berth_id\' is mandatory." in response.json().get("message","")
post_data = get_stub_valid_shipcall_shifting()
post_data.pop("departure_berth_id",None)
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "providing \'arrival_berth_id\' & \'departure_berth_id\' is mandatory." in response.json().get("message","")
return
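# Hedged sketch of the per-type mandatory-field matrix this test walks through. The field
# names are taken from the asserted error messages; collecting them in a dict like this is
# an illustration, not the committed validator.
MANDATORY_SHIPCALL_FIELDS = {
    "arrival": ["eta", "arrival_berth_id", "type", "ship_id"],
    "departure": ["etd", "departure_berth_id", "type", "ship_id"],
    "shifting": ["eta", "etd", "arrival_berth_id", "departure_berth_id", "type", "ship_id"],
}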
def test_shipcall_post_invalid_agency_missing_participant_list(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_shifting()
# keep all participants, but drop the agency
post_data["participants"] = [
participant for participant in post_data.get("participants")
if not int(participant.get("type")) == int(ParticipantType.AGENCY)
]
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
assert "One of the assigned participants *must* be of type \'ParticipantType.AGENCY\'" in response.json().get("message","")
return
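# Hedged sketch: both agency-related failures above reduce to requiring at least one posted
# participant of type ParticipantType.AGENCY. Helper name and signature are assumptions for
# illustration only.
def _contains_agency(participants):
    return any(int(participant.get("type")) == int(ParticipantType.AGENCY) for participant in participants)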
def test_shipcall_post_type_is_wrong(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
post_data = get_stub_valid_shipcall_arrival()
# type 1 should be successful (201)
post_data["type"] = 1
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
assert response.status_code == 201
# type 51 should not be successful (400 BAD REQUEST)
post_data["type"] = 51
response = requests.post(
f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, json=post_data
)
validation_error_default_asserts(response)
return
def test_shipcall_put_request_fails_when_different_participant_id_is_assigned(get_shipcall_id_after_stub_post_request):
url, token, shipcall_id = get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["shipcall_id"]
post_data = get_stub_valid_shipcall_arrival()
post_data["id"] = shipcall_id
user_data = {'id':6, 'participant_id':1}
loadedModel = post_data
content = post_data
spm_shipcall_data = [{'participant_id': 6, 'type': 4},
{'participant_id': 3, 'type': 1},
{'participant_id': 4, 'type': 2},
{'participant_id': 5, 'type': 8}]
# agency with different participant id is assigned
ivs = InputValidationShipcall()
with pytest.raises(werkzeug.exceptions.Forbidden, match=f"A different participant_id is assigned as the AGENCY of this shipcall. "):
ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data)
return
def test_shipcall_put_request_success(get_shipcall_id_after_stub_post_request):
url, token, shipcall_id = get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["shipcall_id"]
post_data = get_stub_valid_shipcall_arrival()
post_data["id"] = shipcall_id
# success happens, when shipcall data is valid, the user is authorized and the assigned spm shipcall data is suitable
user_data = {'id':6, 'participant_id':1}
loadedModel = post_data
content = post_data
spm_shipcall_data = [{'participant_id': 6, 'type': 8},
{'participant_id': 3, 'type': 1},
{'participant_id': 4, 'type': 2},
{'participant_id': 5, 'type': 4}]
# success
ivs = InputValidationShipcall()
ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data)
return
def test_shipcall_put_request_fails_when_no_agency_is_assigned(get_shipcall_id_after_stub_post_request):
url, token, shipcall_id = get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["shipcall_id"]
post_data = get_stub_valid_shipcall_arrival()
post_data["id"] = shipcall_id
user_data = {'id':6, 'participant_id':1}
loadedModel = post_data
content = post_data
spm_shipcall_data = [
{'participant_id': 3, 'type': 1},
{'participant_id': 4, 'type': 2},
{'participant_id': 5, 'type': 4}]
# no agency assigned
ivs = InputValidationShipcall()
with pytest.raises(werkzeug.exceptions.Forbidden, match=f"There is no assigned agency for this shipcall."):
ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data)
return
def test_shipcall_put_request_fails_when_user_is_not_authorized(get_shipcall_id_after_stub_post_request):
url, token, shipcall_id = get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["shipcall_id"]
post_data = get_stub_valid_shipcall_arrival()
post_data["id"] = shipcall_id
# user '1' is artificially set as participant 2, which has ParticipantType 4 (pilot), and is not authorized as an agency
user_data = {'id':1, 'participant_id':2}
loadedModel = post_data
content = post_data
spm_shipcall_data = [
{'participant_id': 2, 'type': 8},
{'participant_id': 3, 'type': 1},
{'participant_id': 4, 'type': 2},
{'participant_id': 5, 'type': 4}]
# current user is not authorized
ivs = InputValidationShipcall()
with pytest.raises(werkzeug.exceptions.Forbidden, match=f"PUT Requests for shipcalls can only be issued by AGENCY or BSMD users."):
ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data)
return
def test_shipcall_put_request_fails_when_user_tries_self_assignment(get_shipcall_id_after_stub_post_request):
url, token, shipcall_id = get_shipcall_id_after_stub_post_request["url"], get_shipcall_id_after_stub_post_request["token"], get_shipcall_id_after_stub_post_request["shipcall_id"]
post_data = get_stub_valid_shipcall_arrival()
post_data["id"] = shipcall_id
user_data = {'id':1, 'participant_id':6}
loadedModel = post_data
content = post_data
spm_shipcall_data = [{'participant_id': 6, 'type': 8},
{'participant_id': 3, 'type': 1},
{'participant_id': 4, 'type': 2},
{'participant_id': 5, 'type': 4}]
# self-assignment. User is participant 6, and wants to assign participant 6.
ivs = InputValidationShipcall()
with pytest.raises(werkzeug.exceptions.Forbidden, match=f"An agency cannot self-register for a shipcall. The request is issued by an agency-user and tries to assign an AGENCY as the participant of the shipcall."):
ivs.check_agency_in_shipcall_participant_map(user_data, loadedModel, content, spm_shipcall_data)
return
def test_shipcall_put_request_fails_input_validation_shipcall_when_shipcall_is_canceled(get_stub_token):
url, token = get_stub_token["url"], get_stub_token["token"]
# get all shipcalls and grab shipcall with ID 4
# TODO: there must be a simpler way to fetch a single shipcall by id (see the helper sketched after this test)
response = requests.get(f"{url}/shipcalls", headers={"Content-Type":"text", "Authorization":f"Bearer {token}"}, params={"past_days":30000})
assert response.status_code==200
assert isinstance(response.json(), list)
shipcalls = response.json()
shipcall_id = 4
sh4 = [sh for sh in shipcalls if sh.get("id")==shipcall_id][0]
put_data = {k:v for k,v in sh4.items() if k in ["eta", "type", "ship_id", "arrival_berth_id", "participants"]}
put_data["id"] = shipcall_id
loadedModel = put_data
content = put_data
# a canceled shipcall cannot be selected
with pytest.raises(ValidationError, match="The shipcall with id 'shipcall_id' is canceled. A canceled shipcall may not be changed."):
InputValidationShipcall.check_shipcall_is_canceled(loadedModel, content)
return
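# Hedged sketch addressing the TODO above: a helper that fetches a single shipcall by id,
# using only the GET /shipcalls call already exercised in these tests (client-side filtering).
# A dedicated single-shipcall endpoint, if one exists, would be preferable.
def _get_shipcall_by_id(url, token, shipcall_id, past_days=30000):
    response = requests.get(
        f"{url}/shipcalls",
        headers={"Content-Type": "text", "Authorization": f"Bearer {token}"},
        params={"past_days": past_days},
    )
    assert response.status_code == 200
    matches = [sh for sh in response.json() if sh.get("id") == shipcall_id]
    assert matches, f"no shipcall found for id {shipcall_id}"
    return matches[0]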

View File

@ -0,0 +1,398 @@
import pytest
import os
import random
import datetime
from marshmallow import ValidationError
from BreCal import local_db
from BreCal.schemas import model
from BreCal.schemas.model import ParticipantType
from BreCal.validators.input_validation_times import InputValidationTimes
from BreCal.stubs.times_full import get_valid_stub_times, get_valid_stub_for_pytests
instance_path = os.path.join(os.path.expanduser('~'), "brecal", "src", "server", "instance", "instance")
local_db.initPool(os.path.dirname(instance_path), connection_filename="connection_data_local.json")
def test_input_validation_times_fails_when_berth_info_exceeds_length_limit():
# success
post_data = get_valid_stub_times()
post_data["berth_info"] = "a"*512 # 512 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
post_data["berth_info"] = "" # 0 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
# failure
with pytest.raises(ValidationError, match="Longer than maximum length 512."):
post_data["berth_info"] = "a"*513 # 513 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_times_fails_when_remarks_exceeds_length_limit():
# success
post_data = get_valid_stub_times()
post_data["remarks"] = "a"*512 # 512 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
post_data["remarks"] = "" # 0 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
# failure
with pytest.raises(ValidationError, match="Longer than maximum length 512."):
post_data["remarks"] = "a"*513 # 513 characters
model.TimesSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_times_fails_when_participant_type_is_bsmd():
# BSMD -> Failure
post_data = get_valid_stub_times()
post_data["participant_type"] = int(ParticipantType.BSMD)
with pytest.raises(ValidationError, match="the participant_type must not be .BSMD"):
model.TimesSchema().load(data=post_data, many=False, partial=True)
# IntFlag property: BSMD & AGENCY -> Failure
post_data = get_valid_stub_times()
post_data["participant_type"] = int(ParticipantType(ParticipantType.BSMD+ParticipantType.AGENCY))
with pytest.raises(ValidationError, match="the participant_type must not be .BSMD"):
model.TimesSchema().load(data=post_data, many=False, partial=True)
return
def test_input_validation_times_fails_when_time_key_is_not_reasonable():
"""
Every time key (e.g., 'eta_berth' or 'zone_entry') must be reasonable. The validation expects
these values to be 'in the future' (later than datetime.datetime.now()) and not 'in the too distant
future' (e.g., more than one year from now).
"""
for time_key in ["eta_berth", "etd_berth", "lock_time", "zone_entry", "operations_start", "operations_end"]:
post_data = get_valid_stub_times()
# success
post_data[time_key] = (datetime.datetime.now() + datetime.timedelta(minutes=11)).isoformat()
model.TimesSchema().load(data=post_data, many=False, partial=True)
# fails
with pytest.raises(ValidationError, match="The provided value must be in the future."):
post_data[time_key] = (datetime.datetime.now() - datetime.timedelta(minutes=11)).isoformat()
model.TimesSchema().load(data=post_data, many=False, partial=True)
# fails
with pytest.raises(ValidationError, match="The provided value is in the too distant future and exceeds a threshold for 'reasonable' entries."):
post_data[time_key] = (datetime.datetime.now() + datetime.timedelta(days=367)).isoformat()
model.TimesSchema().load(data=post_data, many=False, partial=True)
return
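# Hedged sketch (not the production schema validator): the 'reasonable time window' described
# in the docstring above could be implemented roughly like this. The one-year horizon is an
# assumption derived from the 367-day failure case.
def _check_time_is_reasonable(value: datetime.datetime) -> None:
    now = datetime.datetime.now()
    if value <= now:
        raise ValidationError("The provided value must be in the future.")
    if value > now + datetime.timedelta(days=366):
        raise ValidationError("The provided value is in the too distant future and exceeds a threshold for 'reasonable' entries.")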
def test_input_validation_times_fails_when_user_is_bsmd_user():
# create stub-data for a POST request
from BreCal.services.jwt_handler import decode_jwt
from BreCal.database.sql_utils import get_user_data_for_id
import re
# user 4 is a BSMD user -> fails
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=4)
with pytest.raises(ValidationError, match=re.escape("current user belongs to BSMD. Cannot post 'times' datasets.")):
InputValidationTimes.check_user_is_not_bsmd_type(user_data)
# user 13 is not a BSMD user -> passes
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=13)
# success
InputValidationTimes.check_user_is_not_bsmd_type(user_data)
return
def test_input_validation_times_fails_when_participant_type_entry_already_exists():
# the participant type already has an entry -> fails
with pytest.raises(ValidationError, match="A dataset for the participant type is already present. Participant Type:"):
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["participant_type"] = int(ParticipantType.AGENCY)
# 2.) datasets may only be created if the respective participant type has not already created one.
InputValidationTimes.check_if_entry_already_exists_for_participant_type(user_data, loadedModel, content)
return
def test_input_validation_times_fails_when_participant_type_deviates_from_shipcall_participant_map():
# success
# user id 3 is assigned as participant_type=4, and the stub also assigns participant_type=4
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content)
# fails
# user id 4 is assigned as participant_type=1, but the stub assigns participant_type=4
with pytest.raises(ValidationError, match="is assigned to the shipcall in a different role."):
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=4)
InputValidationTimes.check_if_user_fits_shipcall_participant_map(user_data, loadedModel, content)
return
def test_input_validation_times_fails_when_id_references_do_not_exist():
# success: all IDs exist
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
InputValidationTimes.check_dataset_references(content)
# fails: IDs do not exist
# iterates once for each, berth_id, shipcall_id, participant_id and generates an artificial, non-existing ID
for key in ["berth_id", "shipcall_id", "participant_id"]:
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
content[key] = loadedModel[key] = 9912737
with pytest.raises(ValidationError, match=f"The referenced {key} '{content[key]}' does not exist in the database."):
InputValidationTimes.check_dataset_references(content)
return
def test_input_validation_times_fails_when_missing_required_fields_arrival():
"""
Evaluates arriving shipcalls where one of the required values is missing. For each run, a random
non-terminal ParticipantType (never .BSMD) is selected and validated, so that over repeated runs
every possible combination gets tested.
"""
# arrival + not-terminal
non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG]
for key in ["eta_berth"]+InputValidationTimes.get_post_data_type_independent_fields():
random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0]
# pass: all required fields exist for the current shipcall type (arrival/incoming)
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
loadedModel["participant_type"] = random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
loadedModel["participant_type"] = random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# arrival + terminal
for key in ["operations_start"]+InputValidationTimes.get_post_data_type_independent_fields():
# pass: all required fields exist for the current shipcall type (arrival/incoming)
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
return
def test_input_validation_times_fails_when_missing_required_fields_departure():
"""
Evaluates departing shipcalls where one of the required values is missing. For each run, a random
non-terminal ParticipantType (never .BSMD) is selected and validated, so that over repeated runs
every possible combination gets tested.
"""
# departure + not-terminal
non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG]
for key in ["etd_berth"]+InputValidationTimes.get_post_data_type_independent_fields():
# select a *random* participant type that is reasonable and *not* TERMINAL, and validate the function.
random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0]
# pass
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 241
loadedModel["participant_type"] = random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 241
loadedModel["participant_type"] = random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# departure + terminal
for key in ["operations_end"]+InputValidationTimes.get_post_data_type_independent_fields():
# pass
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 241
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 241
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
return
def test_input_validation_times_fails_when_missing_required_fields_shifting():
"""
Evaluates shifting shipcalls where one of the required values is missing. For each run, a random
non-terminal ParticipantType (never .BSMD) is selected and validated, so that over repeated runs
every possible combination gets tested.
"""
# shifting + not-terminal
non_terminal_list = [ParticipantType.AGENCY, ParticipantType.MOORING, ParticipantType.PILOT, ParticipantType.PORT_ADMINISTRATION, ParticipantType.TUG]
for key in ["eta_berth", "etd_berth"]+InputValidationTimes.get_post_data_type_independent_fields():
# select a *random* participant type that is reasonable and *not* TERMINAL, and validate the function.
random_participant_type_for_unit_test = random.sample(non_terminal_list,k=1)[0]
# pass: all required fields exist for the current shipcall type (arrival/incoming)
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 189
loadedModel["participant_type"] =random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 189
loadedModel["participant_type"] = random_participant_type_for_unit_test
content["participant_type"] = int(random_participant_type_for_unit_test)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# shifting + terminal
for key in ["operations_start", "operations_end"]+InputValidationTimes.get_post_data_type_independent_fields():
# pass: all required fields exist for the current shipcall type (arrival/incoming)
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 189
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
# fails: iteratively creates stubs, where one of the required keys is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
loadedModel["shipcall_id"] = content["shipcall_id"] = 189
loadedModel["participant_type"] = ParticipantType.TERMINAL
content["participant_type"] = int(ParticipantType.TERMINAL)
with pytest.raises(ValidationError, match="At least one of the required fields is missing. Missing:"):
loadedModel[key] = content[key] = None
InputValidationTimes.check_times_required_fields_post_data(loadedModel, content)
return
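# Hedged sketch of the required-field matrix the three tests above exercise (on top of the
# type-independent fields returned by InputValidationTimes.get_post_data_type_independent_fields()).
# Field names come from the tests; keying by (shipcall type, terminal or not) is an assumption
# about how the validator could be organized, not the committed code.
REQUIRED_TIME_FIELDS = {
    ("arrival", "terminal"): ["operations_start"],
    ("arrival", "non_terminal"): ["eta_berth"],
    ("departure", "terminal"): ["operations_end"],
    ("departure", "non_terminal"): ["etd_berth"],
    ("shifting", "terminal"): ["operations_start", "operations_end"],
    ("shifting", "non_terminal"): ["eta_berth", "etd_berth"],
}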
def test_input_validation_times_fails_when_participant_type_is_not_assigned__or__user_does_not_belong_to_the_same_participant_id():
"""
There are two failure cases in InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines
1.) when the participant type is simply not assigned
2.) when the participant type matches to the user, but the participant_id is not assigned
Test case:
shipcall_id 222 is assigned to the participants {"participant_id": 136, "type":2} and {"participant_id": 136, "type":8}
Case 1:
When user_id 3 should be set as participant_type 4, the call fails, because type 4 is not assigned
Case 2:
When user_id 2 (participant_id 2) should be set as participant_type 2, the call fails even though type 2 exists,
because participant_id 136 is assigned
Case 3:
When user_id 28 (participant_id 136) is set as participant_type 2, the call passes.
"""
# fails: participant type 4 does not exist
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
participant_type = 4
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
loadedModel["participant_id"] = content["participant_id"] = 2
loadedModel["participant_type"] = content["participant_type"] = participant_type
with pytest.raises(ValidationError, match=f"Could not find a matching time dataset for the provided participant_type: {participant_type}. Found Time Datasets:"):
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None)
# fails: participant type 2 exists, but user_id 2 is part of the wrong participant_id group (user_id 28 or 29 would be)
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=2)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
participant_type = 2
loadedModel["participant_type"] = content["participant_type"] = participant_type
with pytest.raises(ValidationError, match="The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. User participant_id:"):
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None)
# pass: participant type 2 exists & user_id is part of participant_id group 136, which is correct
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28)
loadedModel["shipcall_id"] = content["shipcall_id"] = 222
participant_type = 2
loadedModel["participant_type"] = content["participant_type"] = participant_type
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=loadedModel, times_id=None)
return
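# Hedged sketch of the matching logic the three cases above describe. The control flow is
# inferred from the expected error messages, not copied from the implementation, and
# 'times_datasets' is a hypothetical parameter holding the time entries already stored
# for the shipcall.
def _check_user_matches_participant_group(user_data, participant_type, times_datasets):
    matching = [entry for entry in times_datasets if entry.get("type") == participant_type]
    if not matching:
        raise ValidationError(
            f"Could not find a matching time dataset for the provided participant_type: {participant_type}. "
            f"Found Time Datasets: {times_datasets}"
        )
    if matching[0].get("participant_id") != user_data.get("participant_id"):
        raise ValidationError(
            "The dataset may only be changed by a user belonging to the same participant group as the "
            f"times dataset is referring to. User participant_id: {user_data.get('participant_id')}"
        )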
def test_input_validation_times_put_request_fails_when_id_field_is_missing():
"""used within PUT-requests. When 'id' is missing, a ValidationError is issued"""
# passes: as an 'id' is provided
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
content["id"] = 379
InputValidationTimes.check_times_required_fields_put_data(content)
# fails: 'id' field is missing
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
content.pop("id",None)
with pytest.raises(ValidationError, match="A PUT-request requires an 'id' reference, which was not found."):
InputValidationTimes.check_times_required_fields_put_data(content)
return
def test_input_validation_times_delete_request_fails_when_times_id_is_deleted_already():
# passes: id exists
times_id = 379
InputValidationTimes.check_if_entry_is_already_deleted(times_id)
# passes: id exists
times_id = 391
InputValidationTimes.check_if_entry_is_already_deleted(times_id)
# fails
times_id = 11
with pytest.raises(ValidationError, match=f"The selected time entry is already deleted. ID: {times_id}"):
InputValidationTimes.check_if_entry_is_already_deleted(times_id)
# fails
times_id = 4
with pytest.raises(ValidationError, match=f"The selected time entry is already deleted. ID: {times_id}"):
InputValidationTimes.check_if_entry_is_already_deleted(times_id)
return
def test_input_validation_times_delete_request_fails_when_times_id_does_not_exist_():
# passes: times_id exists
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28)
times_id = 392
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id)
# fails: times_id does not exist
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28)
times_id = 4
with pytest.raises(ValidationError, match=f"Unknown times_id. Could not find a matching entry for ID: {times_id}"):
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id)
return
def test_input_validation_times_delete_request_fails_when_user_belongs_to_wrong_participant_id():
# fails: participant_id should be 136, but user_id=3 belongs to participant_id=2
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=3)
times_id = 392
with pytest.raises(ValidationError, match=f"The dataset may only be changed by a user belonging to the same participant group as the times dataset is referring to. User participant_id:"):
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id)
# passes: participant_id should be 136, and user_id=28 belongs to participant_id=136
user_data, loadedModel, content = get_valid_stub_for_pytests(user_id=28)
times_id = 392
InputValidationTimes.check_user_belongs_to_same_group_as_dataset_determines(user_data, loadedModel=None, times_id=times_id)
return

View File

@ -12,7 +12,17 @@ from BreCal.stubs.df_times import get_df_times, random_time_perturbation, get_df
@pytest.fixture(scope="session")
def build_sql_proxy_connection():
import mysql.connector
conn_from_pool = mysql.connector.connect(**{'host':'localhost', 'port':3306, 'user':'root', 'password':'HalloWach_2323XXL!!', 'pool_name':'brecal_pool', 'pool_size':20, 'database':'bremen_calling', 'autocommit': True})
import os
import json
connection_data_path = os.path.join(os.path.expanduser("~"),"secure","connection_data_local.json")
assert os.path.exists(connection_data_path)
with open(connection_data_path, "r") as jr:
connection_data = json.load(jr)
connection_data = {k:v for k,v in connection_data.items() if k in ["host", "port", "user", "password", "pool_size", "pool_name", "database"]}
conn_from_pool = mysql.connector.connect(**connection_data)
#conn_from_pool = mysql.connector.connect(**{'host':'localhost', 'port':3306, 'user':'root', 'password':'HalloWach_2323XXL!!', 'pool_name':'brecal_pool', 'pool_size':20, 'database':'bremen_calling_local', 'autocommit': True})
sql_handler = SQLHandler(sql_connection=conn_from_pool, read_all=True)
vr = ValidationRules(sql_handler)
return locals()
@ -654,6 +664,9 @@ def test_validation_rule_fct_missing_time_tug_berth_etd__shipcall_soon_but_parti
def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_participant_estimated_time_undefined(build_sql_proxy_connection):
"""0001-L validation_rule_fct_missing_time_terminal_berth_eta"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = False
shipcall = get_shipcall_simple()
df_times = get_df_times(shipcall)
@ -684,6 +697,46 @@ def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_
# expectation: yellow state
assert state==StatusFlags.YELLOW, f"function should return 'yellow', because the participant did not provide a time and the shipcall takes place soon (according to the agency)"
vr.ignore_terminal_flag = reset_to_default
return
def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_participant_estimated_time_undefined__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection):
"""0001-L validation_rule_fct_missing_time_terminal_berth_eta"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = True
shipcall = get_shipcall_simple()
df_times = get_df_times(shipcall)
# according to the agency, a shipcall takes place soon (ETA/ETD)
df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "eta_berth"] = datetime.datetime.now() + datetime.timedelta(minutes=ParticipantwiseTimeDelta.TERMINAL-10)
# set the terminal's time to be undetermined
df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_start"] = None # previously: eta_berth, which does not exist in times_terminal
# must adapt the shipcall_participant_map, so it suits the test
agency_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "participant_id"].iloc[0]
terminal_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "participant_id"].iloc[0]
spm = vr.sql_handler.df_dict["shipcall_participant_map"]
df = pd.DataFrame(
[
{"id":10001, "shipcall_id":shipcall.id, "participant_id":agency_participant_id, "type":ParticipantType.AGENCY.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None},
{"id":10002, "shipcall_id":shipcall.id, "participant_id":terminal_participant_id, "type":ParticipantType.TERMINAL.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None}
]
)
df.set_index("id", inplace=True)
spm = pd.concat([spm, df], axis=0, ignore_index=True)
vr.sql_handler.df_dict["shipcall_participant_map"] = spm
# apply the validation rule
(state, msg) = vr.validation_rule_fct_missing_time_terminal_berth_eta(shipcall=shipcall, df_times=df_times)
# expectation: green state, no msg
assert state==StatusFlags.GREEN, f"function should return 'green', because the ignore terminal flag is active. Hence, terminal validation rules are ignored."
vr.ignore_terminal_flag = reset_to_default
return
@ -691,6 +744,9 @@ def test_validation_rule_fct_missing_time_terminal_berth_eta__shipcall_soon_but_
def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_participant_estimated_time_undefined(build_sql_proxy_connection):
"""0001-M validation_rule_fct_missing_time_terminal_berth_etd"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = False
shipcall = get_shipcall_simple()
shipcall.type = ShipcallType.OUTGOING.value
@ -723,6 +779,48 @@ def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_
# expectation: yellow state
assert state==StatusFlags.YELLOW, f"function should return 'yellow', because the participant did not provide a time and the shipcall takes place soon (according to the agency)"
vr.ignore_terminal_flag = reset_to_default
return
def test_validation_rule_fct_missing_time_terminal_berth_etd__shipcall_soon_but_participant_estimated_time_undefined__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection):
"""0001-M validation_rule_fct_missing_time_terminal_berth_etd"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = True
shipcall = get_shipcall_simple()
shipcall.type = ShipcallType.OUTGOING.value
df_times = get_df_times(shipcall)
# according to the agency, a shipcall takes place soon (ETA/ETD)
df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "etd_berth"] = datetime.datetime.now() + datetime.timedelta(minutes=ParticipantwiseTimeDelta.TERMINAL-10)
# set the terminal's time to be undetermined
df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_end"] = None # previously: etd_berth, which does not exist in times_terminal
# must adapt the shipcall_participant_map, so it suits the test
agency_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "participant_id"].iloc[0]
terminal_participant_id = df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "participant_id"].iloc[0]
spm = vr.sql_handler.df_dict["shipcall_participant_map"]
df = pd.DataFrame(
[
{"id":10001, "shipcall_id":shipcall.id, "participant_id":agency_participant_id, "type":ParticipantType.AGENCY.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None},
{"id":10002, "shipcall_id":shipcall.id, "participant_id":terminal_participant_id, "type":ParticipantType.TERMINAL.value, "created":pd.Timestamp(datetime.datetime.now().isoformat()), "modified":None}
]
)
df.set_index("id", inplace=True)
spm = pd.concat([spm, df], axis=0, ignore_index=True)
vr.sql_handler.df_dict["shipcall_participant_map"] = spm
# apply the validation rule
(state, msg) = vr.validation_rule_fct_missing_time_terminal_berth_etd(shipcall=shipcall, df_times=df_times)
# expectation: green state, no msg
assert state==StatusFlags.GREEN, f"function should return 'green', because the ignore terminal flag is active. Hence, terminal validation rules are ignored."
vr.ignore_terminal_flag = reset_to_default
return
@ -912,6 +1010,10 @@ def test_validation_rule_fct_shipcall_shifting_participants_disagree_on_eta_or_e
def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match(build_sql_proxy_connection):
"""0003-A validation_rule_fct_eta_time_not_in_operation_window"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = False
shipcall = get_shipcall_simple()
df_times = get_df_times(shipcall)
@ -923,11 +1025,37 @@ def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match(
(code, msg) = vr.validation_rule_fct_eta_time_not_in_operation_window(shipcall, df_times)
assert code==StatusFlags.RED, f"status flag should be 'red', because the planned operations start is BEFORE the estimated time of arrival for the shipcall"
vr.ignore_terminal_flag = reset_to_default
return
def test_validation_rule_fct_eta_time_not_in_operation_window__times_dont_match__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection):
"""0003-A validation_rule_fct_eta_time_not_in_operation_window"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = True
shipcall = get_shipcall_simple()
df_times = get_df_times(shipcall)
t0_time = datetime.datetime.now() # reference time for easier readability
# the planned operations_start is before eta_berth (by one minute in this case)
df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "eta_berth"] = t0_time + datetime.timedelta(minutes=1)
df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_start"] = t0_time + datetime.timedelta(minutes=0)
(code, msg) = vr.validation_rule_fct_eta_time_not_in_operation_window(shipcall, df_times)
assert code==StatusFlags.GREEN, f"the ignore terminal flag is active, so this validation rule is ignored. There should not be a violation"
vr.ignore_terminal_flag = reset_to_default
return
def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match(build_sql_proxy_connection):
"""0003-B validation_rule_fct_etd_time_not_in_operation_window"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = False
shipcall = get_shipcall_simple()
shipcall.type = ShipcallType.SHIFTING.value
df_times = get_df_times(shipcall)
@ -941,6 +1069,30 @@ def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match(
(code, msg) = vr.validation_rule_fct_etd_time_not_in_operation_window(shipcall, df_times)
assert code==StatusFlags.RED, f"status flag should be 'red', because the planned operations end is AFTER the estimated time of departure for the shipcall"
vr.ignore_terminal_flag = reset_to_default
return
def test_validation_rule_fct_etd_time_not_in_operation_window__times_dont_match__no_violation_because_terminal_flag_is_active(build_sql_proxy_connection):
"""0003-B validation_rule_fct_etd_time_not_in_operation_window"""
vr = build_sql_proxy_connection['vr']
import copy
reset_to_default = copy.deepcopy(vr.ignore_terminal_flag)
vr.ignore_terminal_flag = True
shipcall = get_shipcall_simple()
shipcall.type = ShipcallType.SHIFTING.value
df_times = get_df_times(shipcall)
t0_time = datetime.datetime.now() # reference time for easier readability
# the planned operations_end is after etd_berth (by one minute in this case)
df_times.loc[df_times["participant_type"]==ParticipantType.AGENCY.value, "etd_berth"] = t0_time + datetime.timedelta(hours=1)
df_times.loc[df_times["participant_type"]==ParticipantType.TERMINAL.value, "operations_end"] = t0_time+datetime.timedelta(hours=1, minutes=1)
(code, msg) = vr.validation_rule_fct_etd_time_not_in_operation_window(shipcall, df_times)
assert code==StatusFlags.GREEN, f"the ignore terminal flag is active, so this validation rule is ignored. There should not be a violation"
vr.ignore_terminal_flag = reset_to_default
return
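# Hedged refactoring suggestion (not part of this commit): the repeated save/set/restore
# handling of 'vr.ignore_terminal_flag' in the tests above could be centralized in a small
# context manager, e.g.:
#
#     with _terminal_flag(vr, True):
#         (state, msg) = vr.validation_rule_fct_missing_time_terminal_berth_eta(shipcall=shipcall, df_times=df_times)
import contextlib

@contextlib.contextmanager
def _terminal_flag(vr, value):
    previous = vr.ignore_terminal_flag
    vr.ignore_terminal_flag = value
    try:
        yield vr
    finally:
        vr.ignore_terminal_flag = previous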
def test_validation_rule_fct_eta_time_not_in_operation_window_and_validation_rule_fct_etd_time_not_in_operation_window__always_okay(build_sql_proxy_connection):