git_brcal/src/server/BreCal/impl/times.py
import json
import logging
import traceback
import pydapper
from enum import Enum, Flag
from ..schemas import model
from .. import local_db
from ..services.auth_guard import check_jwt
from BreCal.database.sql_queries import SQLQuery
from BreCal.database.update_database import evaluate_shipcall_state


def GetTimes(options):
    """
    :param options: A dictionary containing all the parameters for the operation.
        options["shipcall_id"]: **Id**. *Example: 42*. Id of the referenced ship call.
    """
pooledConnection = None
try:
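        # Borrow a connection from the shared pool for this request; it is released
        # again in the 'finally' block below.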
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
# query = SQLQuery.get_times()
# data = commands.query(query, model=model.Times, param={"scid" : options["shipcall_id"]})
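        # pydapper binds the ?scid? placeholder from the 'param' dict and maps each
        # result row onto a model.Times object.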
data = commands.query("SELECT id, eta_berth, eta_berth_fixed, etd_berth, etd_berth_fixed, lock_time, lock_time_fixed, " +
"zone_entry, zone_entry_fixed, operations_start, operations_end, remarks, shipcall_id, participant_id, " +
"berth_id, berth_info, pier_side, participant_type, created, modified, ata, atd, eta_interval_end, etd_interval_end FROM times " +
"WHERE times.shipcall_id = ?scid?", model=model.Times, param={"scid" : options["shipcall_id"]})
except Exception as ex:
logging.error(traceback.format_exc())
logging.error(ex)
print(ex)
result = {}
result["error_field"] = "call failed"
return json.dumps(result), 500, {'Content-Type': 'application/json; charset=utf-8'}
finally:
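        # Always release the connection, also when the query fails, to avoid connection
        # starvation (for pooled connections, close() typically returns the handle to
        # the pool rather than closing the underlying socket).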
if pooledConnection is not None:
pooledConnection.close()
return json.dumps(data, default=model.obj_dict), 200, {'Content-Type': 'application/json; charset=utf-8'}


def PostTimes(schemaModel):
    """
    :param schemaModel: The deserialized model of the record to be inserted.
    """
# TODO: Validate the upload data
# This creates a *new* entry
pooledConnection = None
try:
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
query = "INSERT INTO times ("
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += key
query += ") VALUES ("
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ","
isNotFirst = True
query += "?" + key + "?"
query += ")"
schemaModel = {k:v.value if isinstance(v, (Enum, Flag)) else v for k,v in schemaModel.items()}
commands.execute(query, schemaModel)
new_id = commands.execute_scalar("select last_insert_id()")
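        # last_insert_id() is connection-scoped in MySQL, so this returns the
        # auto-increment id of the row inserted above on this pooled connection.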
# apply 'Traffic Light' evaluation to obtain 'GREEN', 'YELLOW' or 'RED' evaluation state. The function internally updates the mysql database 'shipcall'
evaluate_shipcall_state(mysql_connector_instance=pooledConnection, shipcall_id=schemaModel["shipcall_id"]) # every times data object refers to the 'shipcall_id'
# save history data
# TODO: set ETA properly
user_data = check_jwt()
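        # History codes (inferred from usage in this module): operation 1 = create,
        # 2 = update, 3 = delete; type 2 presumably marks 'times' records. The eta
        # column is filled with CURRENT_TIMESTAMP as a placeholder (see TODO above).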
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 2, 1)"
commands.execute(query, {"scid" : schemaModel["shipcall_id"], "pid" : user_data["participant_id"], "uid" : user_data["id"]})
return json.dumps({"id" : new_id}), 201, {'Content-Type': 'application/json; charset=utf-8'}
except Exception as ex:
logging.error(traceback.format_exc())
logging.error(ex)
print(ex)
result = {}
result["error_field"] = "call failed"
return json.dumps(result), 500, {'Content-Type': 'application/json; charset=utf-8'}
finally:
if pooledConnection is not None:
pooledConnection.close()


def PutTimes(schemaModel):
    """
    :param schemaModel: The deserialized model of the record to be updated.
    """
# This updates an *existing* entry
pooledConnection = None
try:
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
query = "UPDATE times SET "
isNotFirst = False
for key in schemaModel.keys():
if key == "id":
continue
if key == "created":
continue
if key == "modified":
continue
if isNotFirst:
query += ", "
isNotFirst = True
query += key + " = ?" + key + "? "
query += "WHERE id = ?id?"
schemaModel = {k:v.value if isinstance(v, (Enum, Flag)) else v for k,v in schemaModel.items()}
affected_rows = commands.execute(query, param=schemaModel)
# apply 'Traffic Light' evaluation to obtain 'GREEN', 'YELLOW' or 'RED' evaluation state. The function internally updates the mysql database 'shipcall'
evaluate_shipcall_state(mysql_connector_instance=pooledConnection, shipcall_id=schemaModel["shipcall_id"]) # every times data object refers to the 'shipcall_id'
# save history data
# TODO: set ETA properly
user_data = check_jwt()
if "participant_id" in user_data and "id" in user_data:
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?scid?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 2, 2)"
commands.execute(query, {"pid" : user_data["participant_id"], "scid" : schemaModel["shipcall_id"], "uid" : user_data["id"]})
else:
logging.error("user_data does not contain participant_id or id")
# if affected_rows == 1: # this doesn't work as expected
return json.dumps({"id" : schemaModel["id"]}), 200, {'Content-Type': 'application/json; charset=utf-8'}
except Exception as ex:
logging.error(traceback.format_exc())
logging.error(ex)
print(ex)
result = {}
result["error_field"] = "call failed"
return json.dumps(result), 500, {'Content-Type': 'application/json; charset=utf-8'}
finally:
if pooledConnection is not None:
pooledConnection.close()


def DeleteTimes(options):
    """
    :param options: A dictionary containing all the parameters for the operation.
        options["id"]: Id of the times record to delete.
    """
pooledConnection = None
try:
pooledConnection = local_db.getPoolConnection()
commands = pydapper.using(pooledConnection)
shipcall_id = commands.execute_scalar("SELECT shipcall_id FROM times WHERE id = ?id?", param={"id" : options["id"]})
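        # Look up the parent shipcall_id before deleting the row, so the history
        # entry written below can still reference it.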
affected_rows = commands.execute("DELETE FROM times WHERE id = ?id?", param={"id" : options["id"]})
# TODO: set ETA properly?
# save history data
user_data = check_jwt()
query = "INSERT INTO history (participant_id, shipcall_id, user_id, timestamp, eta, type, operation) VALUES (?pid?, ?shipcall_id?, ?uid?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, 2, 3)"
commands.execute(query, {"pid" : user_data["participant_id"], "shipcall_id" : shipcall_id, "uid" : user_data["id"]})
if affected_rows == 1:
return json.dumps({"id" : options["id"]}), 200, {'Content-Type': 'application/json; charset=utf-8'}
result = {}
result["error_field"] = "no such record"
return json.dumps(result), 404, {'Content-Type': 'application/json; charset=utf-8'}
    except Exception as ex:
        logging.error(traceback.format_exc())
        logging.error(ex)
        print(ex)
result = {}
result["error_field"] = "call failed"
return json.dumps(result), 500, {'Content-Type': 'application/json; charset=utf-8'}
finally:
if pooledConnection is not None:
pooledConnection.close()