Index: cloudsync/common/enums.py =================================================================== diff -u -rd2799fd4159bbf89974027611ef0f854a5d94121 -rb8938353017252efea9c1eaa459f76386f6d14ac --- cloudsync/common/enums.py (.../enums.py) (revision d2799fd4159bbf89974027611ef0f854a5d94121) +++ cloudsync/common/enums.py (.../enums.py) (revision b8938353017252efea9c1eaa459f76386f6d14ac) @@ -1,128 +1,134 @@ -"""Implementation of all enumerations used in the application""" - -from enum import Enum, unique - - -class RootEnum(Enum): - - @classmethod - def has_value(cls, value): - return value in cls._value2member_map_ - - @classmethod - def mapped_int_value(cls, value): - try: - return cls._value2member_map_[value] - except Exception: - return None - - @classmethod - def mapped_str_value(cls, value): - try: - return cls._value2member_map_[int(value)] - except Exception: - return None - -@unique -class HDOpModes(RootEnum): - MODE_FAUL = 0 # Fault mode - MODE_SERV = 1 # Service mode - MODE_INIT = 2 # Initialization & POST mode - MODE_STAN = 3 # Standby mode - MODE_TPAR = 4 # Treatment Parameters mode - MODE_PRET = 5 # Pre-Treatment mode - MODE_TREA = 6 # Treatment mode - MODE_POST = 7 # Post-Treatment mode - NUM_OF_MODES = 8 # Number of HD operation modes - - -@unique -class HDOpSubModes(RootEnum): - SUBMODE_START = 0 - SUBMODE_WAIT_FOR_TREATMENT = 1 - SUBMODE_WAIT_FOR_DISINFECT = 2 - SUBMODE_DG_FLUSH_IN_PROGRESS = 3 - SUBMODE_DG_HEAT_DISINFECT_IN_PROGRESS = 4 - SUBMODE_DG_CHEMICAL_DISINFECT_IN_PROGRESS = 5 - NUM_OF_MODES = 6 - - -@unique -class DeviceStates(RootEnum): - INACTIVE_NOT_OK = 1 - INACTIVE_OK = 2 - ACTIVE_OK = 3 - ACTIVE_READY = 4 - ACTIVE_IN_TREATMENT = 5 - ACTIVE_NOT_READY = 6 - ACTIVE_NOT_OK = 7 - DECOMMISSIONED = 8 - UNKNOWN_STATE = 9 - - -@unique -class InboundMessageIDs(RootEnum): - # REGISTRATION - UI2CS_REQ_REGISTRATION = 1001 - UI2CS_SEND_DEVICE_INFO = 1002 - UI2CS_SEND_CREDENTIALS_SAVED = 1003 - UI2CS_SEND_CHECKIN = 1004 - UI2CS_SEND_FACTORY_RESET_CONFIRMATION = 1005 - - # OPERATION - UI2CS_SEND_DEVICE_STATE = 1006 - UI2CS_SEND_TREATMENT_REPORT = 1007 - UI2CS_REQ_DECOMMISSION = 1009 - UI2CS_REQ_LOG_ROTATION = 1011 - - # INCOMING ERROR - UI2CS_ERROR = 1999 - - -@unique -class OutboundMessageIDs(RootEnum): - # REGISTRATION - CS2UI_REQ_DEVICE_INFO = 2002 - CS2UI_REQ_SAVE_CREDENTIALS = 2003 - CS2UI_REQ_CHECKIN = 2004 - CS2UI_REQ_FACTORY_RESET = 2005 - - # OPERATION - CS2UI_REQ_DEVICE_STATE = 2006 - CS2UI_REQ_TX_CODE_DISPLAY = 2008 - CS2UI_REQ_DEVICE_DECOMMISSIONED = 2009 - CS2UI_REQ_LOG_ROTATION = 2011 - - # OUTGOING ERROR - CS2UI_ERROR = 2999 - -@unique -class NetworkRequestType(RootEnum): - MFT2CS_REQ_SET_CREDENTIALS = 4 - MFT2CS_REQ_INIT_CONNECTIVITY_TEST = 5 - MFT2CS_REQ_FACTORY_RESET = 13 - CS2MFT_REQ_REGISTRATION = 101 - CS2DCS_REQ_SET_DEVICE_STATE = 201 - CS2DCS_REQ_SEND_TREATMENT_REPORT = 202 - -@unique -class ErrorIDs(RootEnum): - GENERIC_ERROR = 900 - CS_REQ_REGISTRATION_ERROR = 901 - CS_SEND_DEVICE_STATE_ERROR = 906 - CS_SEND_TREATMENT_REPORT_ERROR = 907 - CS_REQ_CHECKIN_ERROR = 908 - CS_REQ_DECOMMISSION_ERROR = 909 - CS_BAD_CRC_ERROR = 910 - CS_DEVICE_VALIDATION_RESULT_ERROR = 920 - CS_SET_PATIENT_DEVICE_ASSOCIATION_ERROR = 921 - CS_GET_NEW_TOKEN_WITH_CERT_ERROR = 922 - CS_VERIFY_TOKEN_ERROR = 923 - CS_VALIDATE_DEVICE_ERROR = 924 - CS_CHECK_IF_PATIENT_WITH_EMR_ID_EXISTS_ERROR = 925 - CS_CREATE_TEMPORARY_PATIENT_ERROR = 926 - CS_SAVE_CREDENTIALS_ERROR = 927 - CS_UNKNOWN_DEVICE_STATE_ERROR = 928 - CS_SAVE_CONFIG_ERROR = 929 - CS_LOG_ROTATION_ERROR = 930 - 
+"""Implementation of all enumerations used in the application""" + +from enum import Enum, unique + + +class RootEnum(Enum): + + @classmethod + def has_value(cls, value): + return value in cls._value2member_map_ + + @classmethod + def mapped_int_value(cls, value): + try: + return cls._value2member_map_[value] + except Exception: + return None + + @classmethod + def mapped_str_value(cls, value): + try: + return cls._value2member_map_[int(value)] + except Exception: + return None + +@unique +class HDOpModes(RootEnum): + MODE_FAUL = 0 # Fault mode + MODE_SERV = 1 # Service mode + MODE_INIT = 2 # Initialization & POST mode + MODE_STAN = 3 # Standby mode + MODE_TPAR = 4 # Treatment Parameters mode + MODE_PRET = 5 # Pre-Treatment mode + MODE_TREA = 6 # Treatment mode + MODE_POST = 7 # Post-Treatment mode + NUM_OF_MODES = 8 # Number of HD operation modes + + +@unique +class HDOpSubModes(RootEnum): + SUBMODE_START = 0 + SUBMODE_WAIT_FOR_TREATMENT = 1 + SUBMODE_WAIT_FOR_DISINFECT = 2 + SUBMODE_DG_FLUSH_IN_PROGRESS = 3 + SUBMODE_DG_HEAT_DISINFECT_IN_PROGRESS = 4 + SUBMODE_DG_CHEMICAL_DISINFECT_IN_PROGRESS = 5 + NUM_OF_MODES = 6 + + +@unique +class DeviceStates(RootEnum): + INACTIVE_NOT_OK = 1 + INACTIVE_OK = 2 + ACTIVE_OK = 3 + ACTIVE_READY = 4 + ACTIVE_IN_TREATMENT = 5 + ACTIVE_NOT_READY = 6 + ACTIVE_NOT_OK = 7 + DECOMMISSIONED = 8 + UNKNOWN_STATE = 9 + + +@unique +class InboundMessageIDs(RootEnum): + # REGISTRATION + UI2CS_REQ_REGISTRATION = 1001 + UI2CS_SEND_DEVICE_INFO = 1002 + UI2CS_SEND_CREDENTIALS_SAVED = 1003 + UI2CS_SEND_CHECKIN = 1004 + UI2CS_SEND_FACTORY_RESET_CONFIRMATION = 1005 + + # OPERATION + UI2CS_SEND_DEVICE_STATE = 1006 + UI2CS_SEND_TREATMENT_REPORT = 1007 + UI2CS_REQ_DECOMMISSION = 1009 + UI2CS_UPLOAD_DEVICE_LOG = 1010 # DEVICE_LOG + UI2CS_REQ_LOG_ROTATION = 1011 + + # INCOMING ERROR + UI2CS_ERROR = 1999 + + +@unique +class OutboundMessageIDs(RootEnum): + # REGISTRATION + CS2UI_REQ_DEVICE_INFO = 2002 + CS2UI_REQ_SAVE_CREDENTIALS = 2003 + CS2UI_REQ_CHECKIN = 2004 + CS2UI_REQ_FACTORY_RESET = 2005 + + # OPERATION + CS2UI_REQ_DEVICE_STATE = 2006 + CS2UI_REQ_TX_CODE_DISPLAY = 2008 + CS2UI_REQ_DEVICE_DECOMMISSIONED = 2009 + CS2UI_DEVICE_LOG_UPLOADED = 2010 + CS2UI_REQ_LOG_ROTATION = 2011 + + # OUTGOING ERROR + CS2UI_ERROR = 2999 + +@unique +class NetworkRequestType(RootEnum): + MFT2CS_REQ_SET_CREDENTIALS = 4 + MFT2CS_REQ_INIT_CONNECTIVITY_TEST = 5 + MFT2CS_REQ_FACTORY_RESET = 13 + CS2MFT_REQ_REGISTRATION = 101 + CS2DCS_REQ_SET_DEVICE_STATE = 201 + CS2DCS_REQ_SEND_TREATMENT_REPORT = 202 + CS2DCS_REQ_SEND_DEVICE_LOG = 203 + CS2DCS_REQ_SEND_CS_LOG = 204 + + +@unique +class ErrorIDs(RootEnum): + GENERIC_ERROR = 900 + CS_REQ_REGISTRATION_ERROR = 901 + CS_SEND_DEVICE_STATE_ERROR = 906 + CS_SEND_TREATMENT_REPORT_ERROR = 907 + CS_REQ_CHECKIN_ERROR = 908 + CS_REQ_DECOMMISSION_ERROR = 909 + CS_BAD_CRC_ERROR = 910 + CS_DEVICE_VALIDATION_RESULT_ERROR = 920 + CS_SET_PATIENT_DEVICE_ASSOCIATION_ERROR = 921 + CS_GET_NEW_TOKEN_WITH_CERT_ERROR = 922 + CS_VERIFY_TOKEN_ERROR = 923 + CS_VALIDATE_DEVICE_ERROR = 924 + CS_CHECK_IF_PATIENT_WITH_EMR_ID_EXISTS_ERROR = 925 + CS_CREATE_TEMPORARY_PATIENT_ERROR = 926 + CS_SAVE_CREDENTIALS_ERROR = 927 + CS_UNKNOWN_DEVICE_STATE_ERROR = 928 + CS_SAVE_CONFIG_ERROR = 929 + CS_DEVICE_LOG_ERROR = 930 + CS_LOG_ERROR = 931 + CS_LOG_ROTATION_ERROR = 932 Index: cloudsync/handlers/ui_cs_request_handler.py =================================================================== diff -u -rd2799fd4159bbf89974027611ef0f854a5d94121 -rb8938353017252efea9c1eaa459f76386f6d14ac --- 
cloudsync/handlers/ui_cs_request_handler.py (.../ui_cs_request_handler.py) (revision d2799fd4159bbf89974027611ef0f854a5d94121) +++ cloudsync/handlers/ui_cs_request_handler.py (.../ui_cs_request_handler.py) (revision b8938353017252efea9c1eaa459f76386f6d14ac) @@ -57,7 +57,7 @@ else: return False - def handle_message(self, message: UICSMessage): + def handle_message(self, message: UICSMessage) -> None: self.logger.info('UI2CS Message: {0}'.format(message)) message_data = message.timestamp + message.sequence + message.ID + message.size @@ -256,3 +256,47 @@ error_body += ",{0}".format(parameter) error = Error(error_body=error_body) self.error_handler.enqueue_error(error=error) + + # UPLOAD DEVICE LOG TO CLOUD REQUEST + elif InboundMessageIDs.mapped_str_value(message.ID) == InboundMessageIDs.UI2CS_UPLOAD_DEVICE_LOG and \ + (message.g_config[CONFIG_DEVICE][CONFIG_DEVICE_MODE] == 'operation'): + self.logger.info("UI2CS_UPLOAD_DEVICE_LOG request received") + + if (len(message.parameters) != 2) or (message.parameters[0] is None) or (message.parameters[1] is None): + error = Error( + "{0},2,{1},invalid # of parameters for file upload request".format( + OutboundMessageIDs.CS2UI_ERROR.value, + ErrorIDs.CS_DEVICE_LOG_ERROR.value)) + self.error_handler.enqueue_error(error=error) + else: + try: + + local_checksum = helpers_sha256_checksum(message.parameters[0]) + assert (local_checksum == message.parameters[1]), 'No valid sha256 value.' + + hd_serial_number = message.g_config[CONFIG_DEVICE][CONFIG_DEVICE_HD_SERIAL] + dg_serial_number = message.g_config[CONFIG_DEVICE][CONFIG_DEVICE_DG_SERIAL] + sw_version = message.g_config[CONFIG_DEVICE][CONFIG_DEVICE_SW_VERSION] + + self.logger.debug('hd: {0},dg: {1},sw: {2}'.format(hd_serial_number, dg_serial_number, sw_version)) + + device_log_json = helpers_construct_device_log_json(message.parameters[0]) + + if device_log_json: + device_log_json['serialNumber'] = message.g_config[CONFIG_DEVICE][CONFIG_DEVICE_HD_SERIAL] + + g_utils.logger.debug("Device log {0}".format(device_log_json)) + + helpers_add_to_network_queue(network_request_handler=self.network_request_handler, + request_type=NetworkRequestType.CS2DCS_REQ_SEND_DEVICE_LOG, + url='', + payload=device_log_json, + method='', + g_config=message.g_config, + success_message='CS2DCS_REQ_SEND_DEVICE_LOG request added to network queue') + + except Exception as e: + error = Error("{0},2,{1},{2}".format(OutboundMessageIDs.CS2UI_ERROR.value, + ErrorIDs.CS_DEVICE_LOG_ERROR.value, + e)) + self.error_handler.enqueue_error(error=error) Index: cloudsync/utils/globals.py =================================================================== diff -u -rd2799fd4159bbf89974027611ef0f854a5d94121 -rb8938353017252efea9c1eaa459f76386f6d14ac --- cloudsync/utils/globals.py (.../globals.py) (revision d2799fd4159bbf89974027611ef0f854a5d94121) +++ cloudsync/utils/globals.py (.../globals.py) (revision b8938353017252efea9c1eaa459f76386f6d14ac) @@ -1,132 +1,156 @@ -"""Object holding all application global constants""" -import os - -# PATHS -PATH_HOME = os.getcwd() + '/' - -# Configuration groups -CONFIG_DEVICE = 'device' -CONFIG_DEVICE_IP = 'ip' -CONFIG_DEVICE_PORT = 'port' -CONFIG_DEVICE_NAME = 'name' -CONFIG_DEVICE_HD_SERIAL = 'hd_serial' -CONFIG_DEVICE_DG_SERIAL = 'dg_serial' -CONFIG_DEVICE_SW_VERSION = 'sw_version' -CONFIG_DEVICE_MODE = 'mode' -CONFIG_DEVICE_STATE = 'device_state' - -CONFIG_KEBORMED = "kebormed_paas" -CONFIG_KEBORMED_MFT_URL = "url_mft" -CONFIG_KEBORMED_DCS_URL = "url_dcs" -CONFIG_KEBORMED_IDP_CLIENT_SECRET = 
"idp_client_secret" -CONFIG_KEBORMED_DEVICE_IDENTITY_URL = "url_device_identity" -CONFIG_KEBORMED_REACHABILITY_URL = "url_reachability" -CONFIG_KEBORMED_DIA_ORG_ID = "dia_org_id" - -# CONFIG -CONFIG_PATH = os.path.join(PATH_HOME, "cloudsync/config/config.json") - -DECOMMISSION_CS_PATH = "/var/configurations/CloudSync/" -DECOMMISSION_FOLDERS = ['config', 'jwt', 'credentials'] - -OPERATION_CONFIG_PATH = "/var/configurations/CloudSync/config/" -OPERATION_CONFIG_FILE_PATH = os.path.join(OPERATION_CONFIG_PATH, "config.json") - -# LOGS -CS_LOG_PATH = "/media/sd-card/cloudsync/log" -CS_LOG_FILE = os.path.join(CS_LOG_PATH, "cloudsync.log") - -# DEVICE TOKEN -TOKEN_CACHING_PATH = "/var/configurations/CloudSync/jwt/" -DEVICE_KEBORMED_ACCESS_TOKEN_PATH = os.path.join(TOKEN_CACHING_PATH, "access_token.json") - -DEVICE_TOKEN_VALIDATION = "/api/common/info" - -# CREDENTIALS -CREDENTIALS_PATH = "/var/configurations/CloudSync/credentials/" - -CERTIFICATE_X509_FILE_NAME = "client_certificate.pem" -PRIVATE_KEY_FILE_NAME = "client_private_key.pem" -PUBLIC_KEY_FILE_NAME = "client_public_key.pem" - -CREDENTIALS_CERTIFICATE_X509 = os.path.join(CREDENTIALS_PATH, CERTIFICATE_X509_FILE_NAME) -CREDENTIALS_PRIVATE_KEY = os.path.join(CREDENTIALS_PATH, PRIVATE_KEY_FILE_NAME) -CREDENTIALS_PUBLIC_KEY = os.path.join(CREDENTIALS_PATH, PUBLIC_KEY_FILE_NAME) - -# UI2CS VALUES -UI2CS_FILE_CHANNELS_PATH = "/media/sd-card/cloudsync" -UI2CS_FILE_LOG_PATH = "/media/sd-card/cloudsync/log" -# PATH for running off device -# UI2CS_FILE_CHANNELS_PATH = "data/busses" - - -# TREATMENT REPORT SECTIONS -TITLE = "[Title]" -TREATMENT_PRESCRIPTION = "[Treatment Prescription]" -TREATMENT_PARAMETERS = "[Treatment Parameters]" -POST_TREATMENT_DATA = "[Post-Treatment Data]" -EXTRA = "[Extra]" -TREATMENT_DATA = "[Treatment Data]" -TREATMENT_ALARMS = "[Treatment Alarms]" -TREATMENT_EVENTS = "[Treatment Events]" - -# TREATMENT SECTION CHARACTERS -SECTION_START_CHARACTER = "[" -SECTION_STOP_CHARACTER = "]" - -# UI APP - TREATMENT REPORT VALUES -TREATMENT_CODE = "Tx Code" -PATIENT_ID = "Patient ID" -TREATMENT_DURATION = "Treatment Duration" -BLOOD_FLOW_RATE = "Blood Flow Rate" -DIALYSATE_FLOW_RATE = "Dialysate Flow Rate" -ACID_CONCENTRATE_TYPE = "Acid ConcentrateType" -BICARBONATE_CONCENTRATE_TYPE = "Bicarbonate Concentrate Type" -POTASSIUM_CONCENTRATION = "Potassium Concentration" -CALCIUM_CONCENTRATION = "Calcium Concentration" -BICARBONATE_CONCENTRATION = "Bicarbonate Concentration" -SODIUM_CONCENTRATION = "Sodium Concentration" -DIALYSATE_TEMPERATURE = "Dialysate Temperature" -DIALYZER_TYPE = "Dialyzer Type" -HEPARIN_TYPE = "Heparin Type" -HEPARIN_CONCENTRATION = "Heparin Concentration" -HEPARIN_BOLUS_VOLUME = "Heparin Bolus Volume" -HEPARIN_DISPENSE_RATE = "Heparin Dispense Rate" -HEPARIN_STOP = "Heparin Stop" -TREATMENT_START_DATE_TIME = "Treatment Start DateTime" -TREATMENT_END_DATE_TIME = "Treatment End DateTime" -ACTUAL_TREATMENT_DURATION = "Actual Treatment Duration" -DIALYSATE_VOLUME_USED = "Dialysate Volume Used" -PRESCRIBED_UF_VOLUME = "Prescribed UF Volume" -TARGET_UF_VOLUME = "Target UF Volume" -ACTUAL_UF_VOLUME = "Actual UF Volume" -PRESCRIBED_UF_RATE = "Prescribed UF Rate" -TARGET_UF_RATE = "Target UF Rate" -ACTUAL_UF_RATE = "Actual UF Rate" -SALINE_BOLUS_VOLUME = "Saline Bolus Volume" -HEPARIN_DELIVERED_VOLUME = "Heparin Delivered Volume" -WATER_SAMPLE_TEST_RESULT = "Water Sample Test Result" - -# TREATMENT TEMPLATE PATH -TREATMENT_REPORT_TEMPLATE_PATH = "cloudsync/config/treatment_report_template.json" - -# USER_AGENT -USER_AGENT 
= "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36" -CONTENT_TYPE = "application/json" -BEARER_HOLDER = "Bearer {0}" -TOO_MANY_REDIRECTS_HOLDER = "{0},2,{1},Too many redirects" -GENERAL_EXCEPTION_HOLDER = "{0},2,{1},{2}" -REGISTRATION_TIMEOUT_HOLDER = "{0},2,{1},Registration timeout" - -# HTTP CODES -OK = 200 -SUCCESS = 204 -BAD_REQUEST = 400 -UNAUTHORIZED = 401 -NOT_FOUND = 404 -UNASSIGNED = 427 -INTERNAL_SERVER_ERROR = 500 - -# TIME CONSTANTS -S_MS_CONVERSION_FACTOR = 1000 - +"""Object holding all application global constants""" +import os + +# PATHS +PATH_HOME = os.getcwd() + '/' + +# Configuration groups +CONFIG_DEVICE = 'device' +CONFIG_DEVICE_IP = 'ip' +CONFIG_DEVICE_PORT = 'port' +CONFIG_DEVICE_NAME = 'name' +CONFIG_DEVICE_HD_SERIAL = 'hd_serial' +CONFIG_DEVICE_DG_SERIAL = 'dg_serial' +CONFIG_DEVICE_SW_VERSION = 'sw_version' +CONFIG_DEVICE_MODE = 'mode' +CONFIG_DEVICE_STATE = 'device_state' +CONFIG_LOG_LEVEL = 'log_level' +CONFIG_LOG_LEVEL_DURATION = 'log_level_duration' + +CONFIG_KEBORMED = "kebormed_paas" +CONFIG_KEBORMED_MFT_URL = "url_mft" +CONFIG_KEBORMED_DCS_URL = "url_dcs" +CONFIG_KEBORMED_IDP_CLIENT_SECRET = "idp_client_secret" +CONFIG_KEBORMED_DEVICE_IDENTITY_URL = "url_device_identity" +CONFIG_KEBORMED_REACHABILITY_URL = "url_reachability" +CONFIG_KEBORMED_DIA_ORG_ID = "dia_org_id" + +# DEFAULTS +DEFAULT_REACHABILITY_URL = "https://google.com" + +# CONFIG +CONFIG_PATH = os.path.join(PATH_HOME, "cloudsync/config/config.json") + +DECOMMISSION_CS_PATH = "/var/configurations/CloudSync/" +DECOMMISSION_FOLDERS = ['config', 'jwt', 'credentials'] + +OPERATION_CONFIG_PATH = "/var/configurations/CloudSync/config/" +OPERATION_CONFIG_FILE_PATH = os.path.join(OPERATION_CONFIG_PATH, "config.json") + +# LOGS +CS_LOG_PATH = "/media/sd-card/cloudsync/log" +CS_LOG_FILE = os.path.join(CS_LOG_PATH, "cloudsync.log") + +# DEVICE TOKEN +TOKEN_CACHING_PATH = "/var/configurations/CloudSync/jwt/" +DEVICE_KEBORMED_ACCESS_TOKEN_PATH = os.path.join(TOKEN_CACHING_PATH, "access_token.json") + +DEVICE_TOKEN_VALIDATION = "/api/common/info" + +# CREDENTIALS +CREDENTIALS_PATH = "/var/configurations/CloudSync/credentials/" + +CERTIFICATE_X509_FILE_NAME = "client_certificate.pem" +PRIVATE_KEY_FILE_NAME = "client_private_key.pem" +PUBLIC_KEY_FILE_NAME = "client_public_key.pem" + +CREDENTIALS_CERTIFICATE_X509 = os.path.join(CREDENTIALS_PATH, CERTIFICATE_X509_FILE_NAME) +CREDENTIALS_PRIVATE_KEY = os.path.join(CREDENTIALS_PATH, PRIVATE_KEY_FILE_NAME) +CREDENTIALS_PUBLIC_KEY = os.path.join(CREDENTIALS_PATH, PUBLIC_KEY_FILE_NAME) + +# UI2CS VALUES +UI2CS_FILE_CHANNELS_PATH = "/media/sd-card/cloudsync" +UI2CS_FILE_LOG_PATH = "/media/sd-card/cloudsync/log" +# PATH for running off device +# UI2CS_FILE_CHANNELS_PATH = "data/busses" + + +# TREATMENT REPORT SECTIONS +TITLE = "[Title]" +TREATMENT_PRESCRIPTION = "[Treatment Prescription]" +TREATMENT_PARAMETERS = "[Treatment Parameters]" +POST_TREATMENT_DATA = "[Post-Treatment Data]" +EXTRA = "[Extra]" +TREATMENT_DATA = "[Treatment Data]" +TREATMENT_ALARMS = "[Treatment Alarms]" +TREATMENT_EVENTS = "[Treatment Events]" + +# TREATMENT SECTION CHARACTERS +SECTION_START_CHARACTER = "[" +SECTION_STOP_CHARACTER = "]" + +# UI APP - TREATMENT REPORT VALUES +TREATMENT_CODE = "Tx Code" +PATIENT_ID = "Patient ID" +TREATMENT_DURATION = "Treatment Duration" +BLOOD_FLOW_RATE = "Blood Flow Rate" +DIALYSATE_FLOW_RATE = "Dialysate Flow Rate" +ACID_CONCENTRATE_TYPE = "Acid ConcentrateType" +BICARBONATE_CONCENTRATE_TYPE = 
"Bicarbonate Concentrate Type" +POTASSIUM_CONCENTRATION = "Potassium Concentration" +CALCIUM_CONCENTRATION = "Calcium Concentration" +BICARBONATE_CONCENTRATION = "Bicarbonate Concentration" +SODIUM_CONCENTRATION = "Sodium Concentration" +DIALYSATE_TEMPERATURE = "Dialysate Temperature" +DIALYZER_TYPE = "Dialyzer Type" +HEPARIN_TYPE = "Heparin Type" +HEPARIN_CONCENTRATION = "Heparin Concentration" +HEPARIN_BOLUS_VOLUME = "Heparin Bolus Volume" +HEPARIN_DISPENSE_RATE = "Heparin Dispense Rate" +HEPARIN_STOP = "Heparin Stop" +TREATMENT_START_DATE_TIME = "Treatment Start DateTime" +TREATMENT_END_DATE_TIME = "Treatment End DateTime" +ACTUAL_TREATMENT_DURATION = "Actual Treatment Duration" +DIALYSATE_VOLUME_USED = "Dialysate Volume Used" +PRESCRIBED_UF_VOLUME = "Prescribed UF Volume" +TARGET_UF_VOLUME = "Target UF Volume" +ACTUAL_UF_VOLUME = "Actual UF Volume" +PRESCRIBED_UF_RATE = "Prescribed UF Rate" +TARGET_UF_RATE = "Target UF Rate" +ACTUAL_UF_RATE = "Actual UF Rate" +SALINE_BOLUS_VOLUME = "Saline Bolus Volume" +HEPARIN_DELIVERED_VOLUME = "Heparin Delivered Volume" +WATER_SAMPLE_TEST_RESULT = "Water Sample Test Result" +# new fields +TARGET_WEIGHT = " Target Weight" +TOTAL_CHLORINE = "Total Chlorine" +PH = "PH" +CONDUCTIVITY = "Conductivity" +MACHINE_WIPED_DOWN = "Machine Wiped Down" +FILTER_LIFE = "Filter Life" +LAST_CHEMICAL_DISINFECTION = "Last Chemical Disinfection" +LAST_HEAT_DISINFECTION = "Last Heat Disinfection" + +# TREATMENT TEMPLATE PATH +TREATMENT_REPORT_TEMPLATE_PATH = "cloudsync/config/treatment_report_template.json" + +# DEVICE LOGS TEMPLATE PATH +DEVICE_LOG_TEMPLATE_PATH = "cloudsync/config/device_log_template.json" + +# CS LOGS TEMPLATE PATH +CS_LOG_TEMPLATE_PATH = "cloudsync/config/device_log_template.json" + +# CS LOG STATE FILE PATH +CS_LOG_STATE_FILE_PATH = "cloudsync/config/log_state.json" + +# USER_AGENT +USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36" +CONTENT_TYPE = "application/json" +BEARER_HOLDER = "Bearer {0}" +TOO_MANY_REDIRECTS_HOLDER = "{0},2,{1},Too many redirects" +GENERAL_EXCEPTION_HOLDER = "{0},2,{1},{2}" +REGISTRATION_TIMEOUT_HOLDER = "{0},2,{1},Registration timeout" +FILE_NOT_FOUND = "{0},2,{1},UI logfile not found" + +# HTTP CODES +OK = 200 +SUCCESS = 204 +BAD_REQUEST = 400 +UNAUTHORIZED = 401 +NOT_FOUND = 404 +UNASSIGNED = 427 +INTERNAL_SERVER_ERROR = 500 + +# TIME CONSTANTS +S_MS_CONVERSION_FACTOR = 1000 + Index: cloudsync/utils/helpers.py =================================================================== diff -u -rd2799fd4159bbf89974027611ef0f854a5d94121 -rb8938353017252efea9c1eaa459f76386f6d14ac --- cloudsync/utils/helpers.py (.../helpers.py) (revision d2799fd4159bbf89974027611ef0f854a5d94121) +++ cloudsync/utils/helpers.py (.../helpers.py) (revision b8938353017252efea9c1eaa459f76386f6d14ac) @@ -1,530 +1,768 @@ -"""Implementation of helper methods""" - -import os -import shutil -import json -import datetime -import hashlib -import socket -import subprocess - -from time import time, sleep -from typing import Union, Any -from logging import Logger - -from cloudsync.utils.singleton import SingletonMeta -from cloudsync.common.enums import * -from cloudsync.handlers.network_request import NetworkRequest -from cloudsync.utils.reachability import * -from cloudsync.utils.globals import * - - -class GUtils(metaclass=SingletonMeta): - - def __init__(self): - self.logger = None - self.reachability_provider = None - - def add_logger(self, logger: Logger): - self.logger = logger 
- - def add_reachability_provider(self, reachability_provider: ReachabilityProvider): - self.reachability_provider = reachability_provider - - -g_utils = GUtils() - -CRC_LIST = [ - 0, 49, 98, 83, 196, 245, 166, 151, 185, 136, 219, 234, 125, 76, 31, 46, - 67, 114, 33, 16, 135, 182, 229, 212, 250, 203, 152, 169, 62, 15, 92, 109, - 134, 183, 228, 213, 66, 115, 32, 17, 63, 14, 93, 108, 251, 202, 153, 168, - 197, 244, 167, 150, 1, 48, 99, 82, 124, 77, 30, 47, 184, 137, 218, 235, - 61, 12, 95, 110, 249, 200, 155, 170, 132, 181, 230, 215, 64, 113, 34, 19, - 126, 79, 28, 45, 186, 139, 216, 233, 199, 246, 165, 148, 3, 50, 97, 80, - 187, 138, 217, 232, 127, 78, 29, 44, 2, 51, 96, 81, 198, 247, 164, 149, - 248, 201, 154, 171, 60, 13, 94, 111, 65, 112, 35, 18, 133, 180, 231, 214, - 122, 75, 24, 41, 190, 143, 220, 237, 195, 242, 161, 144, 7, 54, 101, 84, - 57, 8, 91, 106, 253, 204, 159, 174, 128, 177, 226, 211, 68, 117, 38, 23, - 252, 205, 158, 175, 56, 9, 90, 107, 69, 116, 39, 22, 129, 176, 227, 210, - 191, 142, 221, 236, 123, 74, 25, 40, 6, 55, 100, 85, 194, 243, 160, 145, - 71, 118, 37, 20, 131, 178, 225, 208, 254, 207, 156, 173, 58, 11, 88, 105, - 4, 53, 102, 87, 192, 241, 162, 147, 189, 140, 223, 238, 121, 72, 27, 42, - 193, 240, 163, 146, 5, 52, 103, 86, 120, 73, 26, 43, 188, 141, 222, 239, - 130, 179, 224, 209, 70, 119, 36, 21, 59, 10, 89, 104, 255, 206, 157, 172 -] - - -def helpers_is_int(val: str) -> bool: - """ - Determines if the value can be converted to an int - - :param val: the value in string form - :return: True if can be converted to an int, false otherwise - """ - - try: - int(val) - return True - except ValueError: - return False - - -def helpers_is_float(val: str) -> bool: - """ - Determines if the value can be converted to a float - - :param val: the value in string form - :return: True if can be converted to a float, false otherwise - """ - try: - float(val) - except ValueError: - return False - return True - - -def helpers_try_numeric(val: str) -> Union[int, float, str]: - """ - Tries to convert the value to numeric. If it's not possible - leaves it as a string - - :param val: the value to convert - :return: the converted value if possible, otherwise it is returned as is - """ - - if helpers_is_int(val): - return int(val) - elif helpers_is_float(val): - return float(val) - else: - return val - - -def helpers_parse_treatment_log(path: str) -> dict: - """ - Converts a treatment.log file to a python dictionary - The treatment log needs to be formatted better. - Until then, this is a (non-ideal) way to read it. 
- - :param path: the path to the treatment log file - :return: the parsed treatment log file - """ - if not os.path.exists(path): - print("Path does not exist.") - return {} - - result = {} - with open(path, 'r') as f: - lines = f.readlines() - group = "NoGroup" - for line_ in lines: - line = line_.replace("\n", "") - if "[" and "]" in line: - group = line.split("[")[1].split("]")[0] - if group not in result: - result[group] = {} - else: - if group in ["Title", "Treatment Parameters", "Post-Treatment Data", "Extra"]: - tokens = line.split(",") - if len(tokens) > 1: - subgroup = tokens[0] - result[group][subgroup] = {} - result[group][subgroup]["value"] = helpers_try_numeric(tokens[1]) - if len(tokens) > 2: - if tokens[2] == "": - result[group][subgroup]["units"] = None - else: - result[group][subgroup]["units"] = tokens[2] - elif group in ["Treatment Data", "Treatment Alarms", "Treatment Events"]: - tokens = line.split(",") - tokens_converted = [] - for token in tokens: - tokens_converted.append(helpers_try_numeric(token)) - result[group]["data"] = result[group].get("data", []) + [tokens_converted] - - return result - - -def helpers_device_state_to_cloud_state(hd_mode: HDOpModes, hd_sub_mode: HDOpSubModes) -> DeviceStates: - """ - Inactive Not OK – N/A – HD f/w will not know active vs. inactive – UI or cloud will have to maintain assigned tenant, active/inactive and Ok/Not OK while inactive - Inactive OK – N/A - Active OK – N/A - - Active Ready – mode is 3 (standby) and sub-mode is 1 (wait for treatment) - Active In Treatment – mode between 4 and 7 (treatment params .. post treatment) - Active Not Ready – mode/sub-mode is anything other than ready, in-treatment, or not OK - Active Not OK – mode is 0 (fault) - - Decommissioned – N/A – HD f/w will not know if system is decommissioned - """ - if hd_mode == HDOpModes.MODE_STAN: - return DeviceStates.ACTIVE_READY - if (hd_mode == HDOpModes.MODE_TPAR) or (hd_mode == HDOpModes.MODE_PRET) or (hd_mode == HDOpModes.MODE_TREA) or ( - hd_mode == HDOpModes.MODE_POST): - return DeviceStates.ACTIVE_IN_TREATMENT - if hd_mode == HDOpModes.MODE_FAUL: - return DeviceStates.ACTIVE_NOT_OK - - return DeviceStates.ACTIVE_NOT_READY - - -def helpers_get_stored_token() -> Union[str, None]: - """ - Returns the stored token - :return: The token if found, otherwise returns None - """ - data = None - # print('token path: {0}'.format(DEVICE_KEBORMED_ACCESS_TOKEN_PATH)) - if not os.path.exists(DEVICE_KEBORMED_ACCESS_TOKEN_PATH): - return None - with open(DEVICE_KEBORMED_ACCESS_TOKEN_PATH, 'r') as f: - try: - data = json.load(f) - except json.decoder.JSONDecodeError: - return None - - if data is None: - return None - - return data.get("access_token", None) - - -def helpers_read_config(path: str) -> dict: - """ - Read the configuration - :param path: the path to the configuration - :return: the loaded configuration - """ - if os.path.exists(path): - with open(path, 'r') as f: - config = json.load(f) - return config - else: - g_utils.logger.error("Operation configuration file does not exist: {0}".format(path)) - raise FileNotFoundError(f"Operation configuration file does not exist: {path}") - - -def helpers_write_config(folder_path: str, file_path: str, config: dict) -> None: - """ - Writes the config to the provided path - If folder_path is provided, it first checks if the folder exists and creates it if it doesn't - - :param folder_path: the path for the config folder - :param file_path: the path where the config json will be written - :param config: the config dictionary 
- :return: None - """ - if folder_path is not None: - if not os.path.exists(folder_path): - os.makedirs(folder_path) - - with open(file_path, 'w') as f: - json.dump(config, f, indent=4) - - -def helpers_read_access_token(path: str) -> dict: - """ - Reads the access token json file, returns it as a dict - :param path: The path to the access token json file - :return: - """ - data = {} - if os.path.exists(path): - with open(path, 'r', encoding="utf-8-sig") as f: - data = json.load(f) - return data - - -def helpers_read_treatment_report_template(path: str) -> dict: - """ - Read the treatment report template - :param path: the path to the template - :return: the loaded template - """ - if os.path.exists(path): - with open(path, 'r') as f: - template = json.load(f) - return template - else: - g_utils.logger.error("Configuration file does not exist: {0}".format(path)) - return {} - - -def log_func(func): - """ - Log the function and the parameters passed to it - @param func: The decorated function - @return: The wrapper function - """ - - def _wrapper(*args, **kwargs): - g_utils.logger.debug("Calling {0} args: {1} kwargs: {2}".format(func.__name__, - tuple(args), - kwargs)) - return func(*args, **kwargs) - - return _wrapper - - -def helpers_read_treatment_log_file(path: str): - treatment_data = helpers_read_treatment_report_template(TREATMENT_REPORT_TEMPLATE_PATH) - - try: - f = open(path) - - counter = 0 - treatment_log_lines = f.readlines() - - # print("log_lines: {0}".format(treatment_log_lines)) - - while counter < len(treatment_log_lines): - line = treatment_log_lines[counter].strip() - - if line.startswith(SECTION_START_CHARACTER) and line.endswith(SECTION_STOP_CHARACTER) and counter < ( - len(treatment_log_lines) - 2): - section = line - section_lines = [] - counter += 1 - section_start_counter = counter - line = treatment_log_lines[counter].strip() - while not (line.startswith(SECTION_START_CHARACTER) and line.endswith( - SECTION_STOP_CHARACTER)) and counter < len(treatment_log_lines) - 1: - section_lines.append(line) - counter += 1 - line = treatment_log_lines[counter].strip() - if len(section_lines) > 0: - if section == TREATMENT_DATA: - for data_line in section_lines: - data_components = data_line.split(',') - data_record = { - "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, - "bloodFlowRate": float(data_components[1]), - "dialysateFlowRate": float(data_components[2]), - "ultrafiltrationRate": float(data_components[3]), - "arterialPressure": float(data_components[4]), - "venousPressure": float(data_components[5]), - "systolic": float(data_components[6]), - "diastolic": float(data_components[7]), - "heartRate": float(data_components[8]) - } - treatment_data['data']['treatment']['data'].append(data_record) - elif section == TREATMENT_ALARMS: - for data_line in section_lines: - data_components = data_line.split(',') - parameters = [] - if len(data_components) > 2: - parameters = data_components[2:(len(data_components) - 2)] - data_record = { - "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, - "title": data_components[1], - "parameters": parameters - } - treatment_data['data']['treatment']['alarms'].append(data_record) - elif section == TREATMENT_EVENTS: - for data_line in section_lines: - data_components = data_line.split(',') - parameters = [] - if len(data_components) > 2: - parameters = data_components[2:(len(data_components) - 2)] - data_record = { - "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, - "title": data_components[1], - "parameters": parameters 
- } - treatment_data['data']['treatment']['events'].append(data_record) - else: - counter = section_start_counter - - else: - # print('regular line') - if line.startswith(TREATMENT_CODE): - treatment_data['data']['treatment']['treatmentCode'] = line.split(',')[1] - elif line.startswith(PATIENT_ID): - treatment_data['data']['patient']['id'] = line.split(',')[1] - elif line.startswith(TREATMENT_DURATION): - treatment_data['data']['treatment']['parameters']['treatmentDuration'] = int(line.split(',')[1]) - elif line.startswith(BLOOD_FLOW_RATE): - treatment_data['data']['treatment']['parameters']['bloodFlowRate'] = int(line.split(',')[1]) - elif line.startswith(DIALYSATE_FLOW_RATE): - treatment_data['data']['treatment']['parameters']['dialysateFlowRate'] = int(line.split(',')[1]) - elif line.startswith(ACID_CONCENTRATE_TYPE): - treatment_data['data']['treatment']['parameters']['dialysate']['acidCode'] = line.split(',')[1] - elif line.startswith(BICARBONATE_CONCENTRATE_TYPE): - treatment_data['data']['treatment']['parameters']['dialysate']['bicarbCode'] = line.split(',')[1] - elif line.startswith(POTASSIUM_CONCENTRATION): - treatment_data['data']['treatment']['parameters']['dialysate']['K'] = float(line.split(',')[1]) - elif line.startswith(CALCIUM_CONCENTRATION): - treatment_data['data']['treatment']['parameters']['dialysate']['Ca'] = float(line.split(',')[1]) - elif line.startswith(BICARBONATE_CONCENTRATION): - treatment_data['data']['treatment']['parameters']['dialysate']['HCO3'] = float(line.split(',')[1]) - elif line.startswith(SODIUM_CONCENTRATION): - treatment_data['data']['treatment']['parameters']['dialysate']['Na'] = float(line.split(',')[1]) - elif line.startswith(DIALYSATE_TEMPERATURE): - treatment_data['data']['treatment']['parameters']['dialysateTemp'] = float(line.split(',')[1]) - elif line.startswith(DIALYZER_TYPE): - treatment_data['data']['treatment']['parameters']['dialyzerModel'] = line.split(',')[1] - elif line.startswith(HEPARIN_TYPE): - treatment_data['data']['treatment']['parameters']['heparinType'] = line.split(',')[1] - elif line.startswith(HEPARIN_CONCENTRATION): - treatment_data['data']['treatment']['parameters']['heparinConcentration'] = line.split(',')[1] - elif line.startswith(HEPARIN_BOLUS_VOLUME): - treatment_data['data']['treatment']['parameters']['heparinBolus'] = line.split(',')[1] - elif line.startswith(HEPARIN_DISPENSE_RATE): - treatment_data['data']['treatment']['parameters']['heparinRate'] = line.split(',')[1] - elif line.startswith(HEPARIN_STOP): - treatment_data['data']['treatment']['parameters']['heparinStopBeforeTreatmentEnd'] = line.split(',')[1] - elif line.startswith(TREATMENT_START_DATE_TIME): - element = datetime.datetime.strptime(line.split(',')[1], "%Y/%m/%d %H:%M") - timestamp = datetime.datetime.timestamp(element) - treatment_data['data']['treatment']['time']['start'] = int(timestamp) * S_MS_CONVERSION_FACTOR - elif line.startswith(TREATMENT_END_DATE_TIME): - element = datetime.datetime.strptime(line.split(',')[1], "%Y/%m/%d %H:%M") - timestamp = datetime.datetime.timestamp(element) - treatment_data['data']['treatment']['time']['end'] = int(timestamp) * S_MS_CONVERSION_FACTOR - elif line.startswith(ACTUAL_TREATMENT_DURATION): - treatment_data['data']['treatment']['time']['treatmentDuration'] = int(line.split(',')[1]) - elif line.startswith(DIALYSATE_VOLUME_USED): - treatment_data['data']['deviceTreatmentData']['dialysateVolumeUsed'] = float(line.split(',')[1]) - elif line.startswith(PRESCRIBED_UF_VOLUME): - 
treatment_data['data']['deviceTreatmentData']['prescribedUltrafiltrationVolume'] = float( - line.split(',')[1]) - elif line.startswith(TARGET_UF_VOLUME): - treatment_data['data']['deviceTreatmentData']['finalTargetUltrafiltrationVolume'] = float( - line.split(',')[1]) - elif line.startswith(ACTUAL_UF_VOLUME): - treatment_data['data']['deviceTreatmentData']['actualUltrafiltrationVolume'] = float( - line.split(',')[1]) - elif line.startswith(PRESCRIBED_UF_RATE): - treatment_data['data']['deviceTreatmentData']['prescribedUltrafiltrationRate'] = float( - line.split(',')[1]) - elif line.startswith(TARGET_UF_RATE): - treatment_data['data']['deviceTreatmentData']['finalTargetUltrafiltrationRate'] = float( - line.split(',')[1]) - elif line.startswith(ACTUAL_UF_RATE): - treatment_data['data']['deviceTreatmentData']['actualUltrafiltrationRate'] = float( - line.split(',')[1]) - elif line.startswith(SALINE_BOLUS_VOLUME): - treatment_data['data']['deviceTreatmentData']['salineBolusVolumeGiven'] = float(line.split(',')[1]) - elif line.startswith(HEPARIN_DELIVERED_VOLUME): - treatment_data['data']['deviceTreatmentData']['heparinDelivered'] = line.split(',')[1] - elif line.startswith(WATER_SAMPLE_TEST_RESULT): - treatment_data['data']['diagnostics']['waterSampleTestResult'] = line.split(',')[1] - counter += 1 - - return treatment_data - except IOError as er: - g_utils.logger.error('Opening treatment log file error: {0}'.format(' '.join(er.args))) - return None - except Exception as e: - g_utils.logger.error('Error parsing treatment file: {0}'.format(' '.join(e.args))) - return None - - -def helpers_add_to_network_queue(network_request_handler, request_type, url, payload, method, g_config, - success_message): - cycle_duration = REACHABILITY_CYCLE_PAUSE - total_cycles = REACHABILITY_CYCLES - - cycle = 0 - - while (not g_utils.reachability_provider.reachability) and (cycle < total_cycles): - sleep(cycle_duration) - cycle += 1 - - if g_utils.reachability_provider.reachability: - r = NetworkRequest(request_type=request_type, - url=url, - payload=payload, - method=method, - g_config=g_config) - request_added_to_queue = False - while not request_added_to_queue: - request_added_to_queue = network_request_handler.enqueue_request(r) - - g_utils.logger.info(success_message) - else: - g_utils.logger.warning("Internet DOWN: Network request {0} couldn't be processed".format(request_type.name)) - - -def helpers_add_to_output_channel(output_channel, message_body, success_message): - message_added_to_queue = False - while not message_added_to_queue: - message_added_to_queue = output_channel.enqueue_message(message_body) - - g_utils.logger.info(success_message) - - -def helpers_sha256_checksum(data: str) -> str: - """ - Returns the calculated checksum (SHA256) for input data - @param data: input data - @return:: checksum - """ - - checksum = hashlib.sha256(data.encode()).hexdigest() - return checksum - - -def helpers_crc8(message_list): - """ - Returns the calculated crc from a message list - @param message_list: is a list of integer numbers containing the message - @return:: integer containing an unsigned byte - """ - crc = 0 - for byte in message_list: - unsigned_byte = byte ^ crc - crc = CRC_LIST[unsigned_byte] - return crc - - -def helpers_get_ip_address(): - hostname = socket.gethostname() - ip_address = socket.gethostbyname(hostname) - return ip_address - - -def helpers_decommission_device(): - parent_folder = DECOMMISSION_CS_PATH - subfolders_to_delete = DECOMMISSION_FOLDERS - - for subfolder in 
subfolders_to_delete:
-        path_to_delete = os.path.join(parent_folder, subfolder)
-        if os.path.exists(path_to_delete) and os.path.isdir(path_to_delete):
-            shutil.rmtree(path_to_delete)
-        else:
-            raise FileNotFoundError(f"{path_to_delete} not found or not a directory!")
-
-
-def helpers_log_rotation(rotation_pct):
-    cs_log_pct = rotation_pct / 100
-    num_files_deleted = 0
-    # Total sd stats
-    sd_total_bytes, sd_used_bytes, sd_free_bytes = shutil.disk_usage(UI2CS_FILE_CHANNELS_PATH)
-    # Cloudsync log stats
-    cs_used_bytes = int(subprocess.check_output(['du', '-bsx', UI2CS_FILE_CHANNELS_PATH]).split()[0].decode('utf-8'))
-    total_deleted_size = cs_used_bytes
-    # Retrieve file list
-    f = []
-    for subdir, dir, files in os.walk(UI2CS_FILE_LOG_PATH):
-        for file in files:
-            f.append(os.path.join(UI2CS_FILE_LOG_PATH, file))
-
-    # Sorted with oldest modified first.
-    f.sort(key=lambda x: os.path.getmtime(x))
-
-    if len(f) > 1:
-        while (cs_used_bytes / sd_used_bytes) <= cs_log_pct:
-            file_to_remove = f[0]
-            os.remove(file_to_remove)
-            num_files_deleted += 1
-            f.pop(0)
-            cs_used_bytes = int(
-                subprocess.check_output(['du', '-bsx', UI2CS_FILE_CHANNELS_PATH]).split()[0].decode('utf-8'))
-        total_deleted_size = total_deleted_size - cs_used_bytes
-    else:
-        total_deleted_size = 0
-        g_utils.logger.info("No files Removed.")
-
-    return num_files_deleted, total_deleted_size
+"""Implementation of helper methods"""
+
+import os
+import shutil
+import json
+import hashlib
+import socket
+import re
+import base64
+import subprocess
+
+from datetime import datetime, timezone
+from time import time, sleep
+from typing import Union, Any
+from logging import Logger
+
+from cloudsync.utils.singleton import SingletonMeta
+from cloudsync.common.enums import *
+from cloudsync.handlers.network_request import NetworkRequest
+from cloudsync.utils.reachability import *
+from cloudsync.utils.globals import *
+
+
+class GUtils(metaclass=SingletonMeta):
+
+    def __init__(self):
+        self.logger = None
+        self.reachability_provider = None
+
+    def add_logger(self, logger: Logger):
+        self.logger = logger
+
+    def add_reachability_provider(self, reachability_provider: ReachabilityProvider):
+        self.reachability_provider = reachability_provider
+
+
+g_utils = GUtils()
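Note: GUtils is a process-wide singleton (via SingletonMeta), so the module-level g_utils instance is shared by every importer once the application attaches a logger at startup. A minimal sketch of that wiring, with the logger name chosen only for illustration:

    import logging
    from cloudsync.utils.helpers import g_utils

    # Every later import of g_utils sees this same configured instance.
    g_utils.add_logger(logging.getLogger("cloudsync"))
    g_utils.logger.debug("g_utils ready")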
+
+CRC_LIST = [
+    0, 49, 98, 83, 196, 245, 166, 151, 185, 136, 219, 234, 125, 76, 31, 46,
+    67, 114, 33, 16, 135, 182, 229, 212, 250, 203, 152, 169, 62, 15, 92, 109,
+    134, 183, 228, 213, 66, 115, 32, 17, 63, 14, 93, 108, 251, 202, 153, 168,
+    197, 244, 167, 150, 1, 48, 99, 82, 124, 77, 30, 47, 184, 137, 218, 235,
+    61, 12, 95, 110, 249, 200, 155, 170, 132, 181, 230, 215, 64, 113, 34, 19,
+    126, 79, 28, 45, 186, 139, 216, 233, 199, 246, 165, 148, 3, 50, 97, 80,
+    187, 138, 217, 232, 127, 78, 29, 44, 2, 51, 96, 81, 198, 247, 164, 149,
+    248, 201, 154, 171, 60, 13, 94, 111, 65, 112, 35, 18, 133, 180, 231, 214,
+    122, 75, 24, 41, 190, 143, 220, 237, 195, 242, 161, 144, 7, 54, 101, 84,
+    57, 8, 91, 106, 253, 204, 159, 174, 128, 177, 226, 211, 68, 117, 38, 23,
+    252, 205, 158, 175, 56, 9, 90, 107, 69, 116, 39, 22, 129, 176, 227, 210,
+    191, 142, 221, 236, 123, 74, 25, 40, 6, 55, 100, 85, 194, 243, 160, 145,
+    71, 118, 37, 20, 131, 178, 225, 208, 254, 207, 156, 173, 58, 11, 88, 105,
+    4, 53, 102, 87, 192, 241, 162, 147, 189, 140, 223, 238, 121, 72, 27, 42,
+    193, 240, 163, 146, 5, 52, 103, 86, 120, 73, 26, 43, 188, 141, 222, 239,
+    130, 179, 224, 209, 70, 119, 36, 21, 59, 10, 89, 104, 255, 206, 157, 172
+]
+
+
+def helpers_is_int(val: str) -> bool:
+    """
+    Determines if the value can be converted to an int
+
+    :param val: the value in string form
+    :return: True if can be converted to an int, false otherwise
+    """
+
+    try:
+        int(val)
+        return True
+    except ValueError:
+        return False
+
+
+def helpers_is_float(val: str) -> bool:
+    """
+    Determines if the value can be converted to a float
+
+    :param val: the value in string form
+    :return: True if can be converted to a float, false otherwise
+    """
+    try:
+        float(val)
+    except ValueError:
+        return False
+    return True
+
+
+def helpers_try_numeric(val: str) -> Union[int, float, str]:
+    """
+    Tries to convert the value to numeric. If it's not possible
+    leaves it as a string
+
+    :param val: the value to convert
+    :return: the converted value if possible, otherwise it is returned as is
+    """
+
+    if helpers_is_int(val):
+        return int(val)
+    elif helpers_is_float(val):
+        return float(val)
+    else:
+        return val
+
+
+def helpers_parse_treatment_log(path: str) -> dict:
+    """
+    Converts a treatment.log file to a python dictionary
+    The treatment log needs to be formatted better.
+    Until then, this is a (non-ideal) way to read it.
+
+    :param path: the path to the treatment log file
+    :return: the parsed treatment log file
+    """
+    if not os.path.exists(path):
+        print("Path does not exist.")
+        return {}
+
+    result = {}
+    with open(path, 'r') as f:
+        lines = f.readlines()
+        group = "NoGroup"
+        for line_ in lines:
+            line = line_.replace("\n", "")
+            if "[" in line and "]" in line:
+                group = line.split("[")[1].split("]")[0]
+                if group not in result:
+                    result[group] = {}
+            else:
+                if group in ["Title", "Treatment Parameters", "Post-Treatment Data", "Extra"]:
+                    tokens = line.split(",")
+                    if len(tokens) > 1:
+                        subgroup = tokens[0]
+                        result[group][subgroup] = {}
+                        result[group][subgroup]["value"] = helpers_try_numeric(tokens[1])
+                        if len(tokens) > 2:
+                            if tokens[2] == "":
+                                result[group][subgroup]["units"] = None
+                            else:
+                                result[group][subgroup]["units"] = tokens[2]
+                elif group in ["Treatment Data", "Treatment Alarms", "Treatment Events"]:
+                    tokens = line.split(",")
+                    tokens_converted = []
+                    for token in tokens:
+                        tokens_converted.append(helpers_try_numeric(token))
+                    result[group]["data"] = result[group].get("data", []) + [tokens_converted]
+
+    return result
+
+
+def helpers_device_state_to_cloud_state(hd_mode: HDOpModes, hd_sub_mode: HDOpSubModes) -> DeviceStates:
+    """
+    Inactive Not OK – N/A – HD f/w will not know active vs. inactive – UI or cloud will have to maintain assigned tenant, active/inactive and Ok/Not OK while inactive
+    Inactive OK – N/A
+    Active OK – N/A
+
+    Active Ready – mode is 3 (standby) and sub-mode is 1 (wait for treatment)
+    Active In Treatment – mode between 4 and 7 (treatment params ..
post treatment) + Active Not Ready – mode/sub-mode is anything other than ready, in-treatment, or not OK + Active Not OK – mode is 0 (fault) + + Decommissioned – N/A – HD f/w will not know if system is decommissioned + """ + if hd_mode == HDOpModes.MODE_STAN: + return DeviceStates.ACTIVE_READY + if (hd_mode == HDOpModes.MODE_TPAR) or (hd_mode == HDOpModes.MODE_PRET) or (hd_mode == HDOpModes.MODE_TREA) or ( + hd_mode == HDOpModes.MODE_POST): + return DeviceStates.ACTIVE_IN_TREATMENT + if hd_mode == HDOpModes.MODE_FAUL: + return DeviceStates.ACTIVE_NOT_OK + + return DeviceStates.ACTIVE_NOT_READY + + +def helpers_get_stored_token() -> Union[str, None]: + """ + Returns the stored token + :return: The token if found, otherwise returns None + """ + data = None + # print('token path: {0}'.format(DEVICE_KEBORMED_ACCESS_TOKEN_PATH)) + if not os.path.exists(DEVICE_KEBORMED_ACCESS_TOKEN_PATH): + return None + with open(DEVICE_KEBORMED_ACCESS_TOKEN_PATH, 'r') as f: + try: + data = json.load(f) + except json.decoder.JSONDecodeError: + return None + + if data is None: + return None + + return data.get("access_token", None) + + +def helpers_read_config(path: str) -> dict: + """ + Read the configuration + :param path: the path to the configuration + :return: the loaded configuration + """ + if os.path.exists(path): + with open(path, 'r') as f: + config = json.load(f) + return config + else: + g_utils.logger.error("Operation configuration file does not exist: {0}".format(path)) + raise FileNotFoundError(f"Operation configuration file does not exist: {path}") + + +def helpers_write_config(folder_path: str, file_path: str, config: dict) -> None: + """ + Writes the config to the provided path + If folder_path is provided, it first checks if the folder exists and creates it if it doesn't + + :param folder_path: the path for the config folder + :param file_path: the path where the config json will be written + :param config: the config dictionary + :return: None + """ + if folder_path is not None: + if not os.path.exists(folder_path): + os.makedirs(folder_path) + + with open(file_path, 'w') as f: + json.dump(config, f, indent=4) + + +def helpers_read_access_token(path: str) -> dict: + """ + Reads the access token json file, returns it as a dict + :param path: The path to the access token json file + :return: + """ + data = {} + if os.path.exists(path): + with open(path, 'r', encoding="utf-8-sig") as f: + data = json.load(f) + return data + + +def helpers_read_treatment_report_template(path: str) -> dict: + """ + Read the treatment report template + :param path: the path to the template + :return: the loaded template + """ + if os.path.exists(path): + with open(path, 'r') as f: + template = json.load(f) + return template + else: + g_utils.logger.error("Configuration file does not exist: {0}".format(path)) + return {} + + +def helpers_read_device_log_template(path: str) -> dict: + """ + Read the device log template + :param path: the path to the template + :return: the loaded template + """ + if os.path.exists(path): + with open(path, 'r') as f: + template = json.load(f) + return template + else: + g_utils.logger.error( + "Configuration file does not exist: {0}".format(path)) + return {} + + +def helpers_read_cs_log_template(path: str) -> dict: + """ + Read the cs log template + :param path: the path to the template + :return: the loaded template + """ + if os.path.exists(path): + with open(path, 'r') as f: + template = json.load(f) + return template + else: + g_utils.logger.error( + "Configuration file does not 
exist: {0}".format(path)) + return {} + + +def helpers_read_log_state(path: str) -> dict: + """ + Read the cs log state file + :param path: the path to the file + :return: the contents of the log state file + """ + if os.path.exists(path): + with open(path, 'r') as f: + template = json.load(f) + return template + else: + g_utils.logger.error( + "Configuration file does not exist: {0}".format(path)) + return {} + + +def log_func(func): + """ + Log the function and the parameters passed to it + @param func: The decorated function + @return: The wrapper function + """ + + def _wrapper(*args, **kwargs): + g_utils.logger.debug("Calling {0} args: {1} kwargs: {2}".format(func.__name__, + tuple(args), + kwargs)) + return func(*args, **kwargs) + + return _wrapper + + +def helpers_extract_device_log_metadata(log_file_name:str) -> Union[dict,None]: + local_date_pattern = r"^\d{8}(?=_)" + local_time_pattern = r"^(?:^.*?)_(\d{6})_" + serial_number_pattern = r"^[a-zA-Z0-9]+_[a-zA-Z0-9]+_([a-zA-Z0-9]+)(?=_)" + device_subtype_pattern = r"^[a-zA-Z0-9]+_[a-zA-Z0-9]+_[a-zA-Z0-9]+_([a-zA-Z0-9]+)(?=)" + log_type_pattern = r"[a-zA-Z0-9]+\.u\.(\w+)(?=)" + + local_date_match = re.search(local_date_pattern, log_file_name) + local_time_match = re.search(local_time_pattern, log_file_name) + serial_number_match = re.search(serial_number_pattern, log_file_name) + device_subtype_match = re.search(device_subtype_pattern, log_file_name) + log_type_match = re.search(log_type_pattern, log_file_name) + + return { + "local_date": local_date_match.group(0) if local_date_match else 'unknown', + "local_time": local_time_match.group(1) if local_time_match else 'unknown', + "serial_number": serial_number_match.group(1) if serial_number_match else 'unknown', + "device_sub_type": device_subtype_match.group(1) if device_subtype_match else 'unknown', + "device_log_type": log_type_match.group(1) if log_type_match else 'unknown' + } + + +def helpers_file_to_byte_array(file_path: str) -> bytearray: + try: + with open(file_path, "rb") as f: + # Read the entire file in binary mode + file_bytes = f.read() + return base64.b64encode(file_bytes).decode('utf-8') + except FileNotFoundError as e: + g_utils.logger.error(f"Device log file not found: {e}") + return None + except Exception as e: + g_utils.logger.error(f"Error reading device log file: {e}") + return None + + +def helpers_construct_cs_log_json(path:str): + """ + Constructs the payload for cs log file uploading + :param path: the path to the log file + :returns: the json payload to be uploaded + """ + cs_log_data_template = helpers_read_cs_log_template(CS_LOG_TEMPLATE_PATH) + + # Convert the file into byte array + logs_byte_array = helpers_file_to_byte_array(path) + + # Get the filename + file_name = os.path.basename(path) + + # Completion and generation timestamp from CS (in miliseconds) + local_timestamp = int(datetime.now(timezone.utc).timestamp()*1000) + + # Start and End timestamps calculation + today = datetime.now(timezone.utc) + start_of_day = int(today.replace(hour=0, minute=0, second=0, microsecond=0).timestamp()*1000) + end_of_day = int(today.replace(hour=23, minute=59, second=59, microsecond=999999).timestamp()*1000) + + # Populate JSON object + cs_log_data_template['metadata']['dataType'] = 'cloud-sync-log-category' + cs_log_data_template['metadata']['deviceFileName'] = file_name + cs_log_data_template['metadata']['startTimestamp'] = start_of_day + cs_log_data_template['metadata']['endTimestamp'] = end_of_day + cs_log_data_template['completedAt'] = local_timestamp + 
+
+
+def helpers_file_to_byte_array(file_path: str) -> Union[str, None]:
+    """
+    Read a file and return its contents base64-encoded.
+    :param file_path: the path to the file
+    :return: the base64-encoded contents as a string, or None on failure
+    """
+    try:
+        with open(file_path, "rb") as f:
+            # Read the entire file in binary mode
+            file_bytes = f.read()
+        return base64.b64encode(file_bytes).decode('utf-8')
+    except FileNotFoundError as e:
+        g_utils.logger.error(f"Device log file not found: {e}")
+        return None
+    except Exception as e:
+        g_utils.logger.error(f"Error reading device log file: {e}")
+        return None
+
+
+def helpers_construct_cs_log_json(path: str):
+    """
+    Constructs the payload for cs log file uploading
+    :param path: the path to the log file
+    :returns: the json payload to be uploaded
+    """
+    cs_log_data_template = helpers_read_cs_log_template(CS_LOG_TEMPLATE_PATH)
+
+    # Base64-encode the file contents
+    logs_byte_array = helpers_file_to_byte_array(path)
+
+    # Get the filename
+    file_name = os.path.basename(path)
+
+    # Completion and generation timestamp from CS (in milliseconds)
+    local_timestamp = int(datetime.now(timezone.utc).timestamp() * 1000)
+
+    # Start and End timestamps calculation
+    today = datetime.now(timezone.utc)
+    start_of_day = int(today.replace(hour=0, minute=0, second=0, microsecond=0).timestamp() * 1000)
+    end_of_day = int(today.replace(hour=23, minute=59, second=59, microsecond=999999).timestamp() * 1000)
+
+    # Populate JSON object
+    cs_log_data_template['metadata']['dataType'] = 'cloud-sync-log-category'
+    cs_log_data_template['metadata']['deviceFileName'] = file_name
+    cs_log_data_template['metadata']['startTimestamp'] = start_of_day
+    cs_log_data_template['metadata']['endTimestamp'] = end_of_day
+    cs_log_data_template['completedAt'] = local_timestamp
+    cs_log_data_template['generatedAt'] = local_timestamp
+    cs_log_data_template['data'] = logs_byte_array
+    cs_log_data_template['checksum'] = helpers_sha256_checksum(logs_byte_array)
+
+    return cs_log_data_template
+
+
+def helpers_construct_device_log_json(path: str):
+    """
+    Constructs the payload for device log file uploading
+    :param path: the path to the log file
+    :returns: the json payload to be uploaded
+    """
+    device_log_data = helpers_read_device_log_template(DEVICE_LOG_TEMPLATE_PATH)
+
+    # Base64-encode the file contents
+    logs_byte_array = helpers_file_to_byte_array(path)
+
+    # Extract metadata from the filename
+    file_name = os.path.basename(path)
+    extracted_metadata = helpers_extract_device_log_metadata(file_name)
+
+    # Completion and generation timestamp from CS (in milliseconds)
+    local_timestamp = int(datetime.now(timezone.utc).timestamp() * 1000)
+
+    # Calculate the UTC timestamp based on the local date and time the UI provided (in milliseconds) (NEED BETTER SOLUTION HERE)
+    # Both parts must be known, otherwise strptime would fail on 'unknown'
+    if extracted_metadata['local_date'] != 'unknown' and extracted_metadata['local_time'] != 'unknown':
+        datetime_obj = datetime.strptime(f"{extracted_metadata['local_date']}{extracted_metadata['local_time']}", "%Y%m%d%H%M%S")
+        # Convert to UTC timestamp
+        ui_utc_timestamp = int(datetime_obj.timestamp() * 1000)
+    else:
+        ui_utc_timestamp = local_timestamp
+
+    # Populate JSON object
+    if extracted_metadata is not None:
+        device_log_data['metadata']['dataType'] = 'device-log-category'
+        device_log_data['metadata']['deviceLogType'] = extracted_metadata['device_log_type']
+        device_log_data['metadata']['deviceSubType'] = extracted_metadata['device_sub_type']
+        device_log_data['metadata']['deviceFileName'] = file_name
+        device_log_data['metadata']['startTimestamp'] = ui_utc_timestamp
+        device_log_data['metadata']['endTimestamp'] = ui_utc_timestamp
+        device_log_data['completedAt'] = local_timestamp
+        device_log_data['generatedAt'] = local_timestamp
+        device_log_data['data'] = logs_byte_array
+        device_log_data['checksum'] = helpers_sha256_checksum(logs_byte_array)
+
+        return device_log_data
+
+    else:
+        g_utils.logger.error('Device log file name does not match the pattern')
+        return None
+
+
+def helpers_read_treatment_log_file(path: str):
+    treatment_data = helpers_read_treatment_report_template(TREATMENT_REPORT_TEMPLATE_PATH)
+
+    try:
+        f = open(path)
+
+        counter = 0
+        treatment_log_lines = f.readlines()
+
+        # print("log_lines: {0}".format(treatment_log_lines))
+
+        while counter < len(treatment_log_lines):
+            line = treatment_log_lines[counter].strip()
+
+            if line.startswith(SECTION_START_CHARACTER) and line.endswith(SECTION_STOP_CHARACTER) and counter < (
+                    len(treatment_log_lines) - 2):
+                section = line
+                section_lines = []
+                counter += 1
+                section_start_counter = counter
+                line = treatment_log_lines[counter].strip()
+                while not (line.startswith(SECTION_START_CHARACTER) and line.endswith(
+                        SECTION_STOP_CHARACTER)) and counter < len(treatment_log_lines) - 1:
+                    section_lines.append(line)
+                    counter += 1
+                    line = treatment_log_lines[counter].strip()
+                if len(section_lines) > 0:
+                    if section == TREATMENT_DATA:
+                        for data_line in section_lines:
+                            data_components = data_line.split(',')
+                            data_record = {
+                                "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR,
+                                "bloodFlowRate": float(data_components[1]),
+                                "dialysateFlowRate": float(data_components[2]),
+                                "ultrafiltrationRate": float(data_components[3]),
+                                "arterialPressure": float(data_components[4]),
+                                "venousPressure": float(data_components[5]),
+                                "systolic": 
float(data_components[6]), + "diastolic": float(data_components[7]), + "heartRate": float(data_components[8]) + } + treatment_data['data']['treatment']['data'].append(data_record) + elif section == TREATMENT_ALARMS: + for data_line in section_lines: + data_components = data_line.split(',') + parameters = [] + if len(data_components) > 2: + parameters = data_components[2:len(data_components)] + data_record = { + "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, + "title": data_components[1], + "parameters": parameters + } + treatment_data['data']['treatment']['alarms'].append(data_record) + elif section == TREATMENT_EVENTS: + for data_line in section_lines: + data_components = data_line.split(',') + parameters = [] + if len(data_components) > 2: + parameters = data_components[2:len(data_components)] + data_record = { + "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, + "title": data_components[1], + "parameters": parameters + } + treatment_data['data']['treatment']['events'].append(data_record) + else: + counter = section_start_counter + + else: + # print('regular line') + if line.startswith(TREATMENT_CODE): + treatment_data['data']['treatment']['treatmentCode'] = line.split(',')[1] + elif line.startswith(PATIENT_ID): + treatment_data['data']['patient']['id'] = line.split(',')[1] + elif line.startswith(TREATMENT_DURATION): + treatment_data['data']['treatment']['parameters']['treatmentDuration'] = int(line.split(',')[1]) + elif line.startswith(BLOOD_FLOW_RATE): + treatment_data['data']['treatment']['parameters']['bloodFlowRate'] = int(line.split(',')[1]) + elif line.startswith(DIALYSATE_FLOW_RATE): + treatment_data['data']['treatment']['parameters']['dialysateFlowRate'] = int(line.split(',')[1]) + elif line.startswith(ACID_CONCENTRATE_TYPE): + treatment_data['data']['treatment']['parameters']['dialysate']['acidCode'] = line.split(',')[1] + elif line.startswith(BICARBONATE_CONCENTRATE_TYPE): + treatment_data['data']['treatment']['parameters']['dialysate']['bicarbCode'] = line.split(',')[1] + elif line.startswith(POTASSIUM_CONCENTRATION): + treatment_data['data']['treatment']['parameters']['dialysate']['K'] = float(line.split(',')[1]) + elif line.startswith(CALCIUM_CONCENTRATION): + treatment_data['data']['treatment']['parameters']['dialysate']['Ca'] = float(line.split(',')[1]) + elif line.startswith(BICARBONATE_CONCENTRATION): + treatment_data['data']['treatment']['parameters']['dialysate']['HCO3'] = float(line.split(',')[1]) + elif line.startswith(SODIUM_CONCENTRATION): + treatment_data['data']['treatment']['parameters']['dialysate']['Na'] = float(line.split(',')[1]) + elif line.startswith(DIALYSATE_TEMPERATURE): + treatment_data['data']['treatment']['parameters']['dialysateTemp'] = float(line.split(',')[1]) + elif line.startswith(DIALYZER_TYPE): + treatment_data['data']['treatment']['parameters']['dialyzerModel'] = line.split(',')[1] + elif line.startswith(HEPARIN_TYPE): + treatment_data['data']['treatment']['parameters']['heparinType'] = line.split(',')[1] + elif line.startswith(HEPARIN_CONCENTRATION): + treatment_data['data']['treatment']['parameters']['heparinConcentration'] = line.split(',')[1] + elif line.startswith(HEPARIN_BOLUS_VOLUME): + treatment_data['data']['treatment']['parameters']['heparinBolus'] = line.split(',')[1] + elif line.startswith(HEPARIN_DISPENSE_RATE): + treatment_data['data']['treatment']['parameters']['heparinRate'] = line.split(',')[1] + elif line.startswith(HEPARIN_STOP): + 
+
+
+def helpers_add_to_network_queue(network_request_handler, request_type, url, payload, method, g_config,
+                                 success_message):
+    cycle_duration = REACHABILITY_CYCLE_PAUSE
+    total_cycles = REACHABILITY_CYCLES
+
+    cycle = 0
+
+    # Poll for connectivity for up to REACHABILITY_CYCLES cycles
+    while (not g_utils.reachability_provider.reachability) and (cycle < total_cycles):
+        sleep(cycle_duration)
+        cycle += 1
+
+    if g_utils.reachability_provider.reachability:
+        r = NetworkRequest(request_type=request_type,
+                           url=url,
+                           payload=payload,
+                           method=method,
+                           g_config=g_config)
+        request_added_to_queue = False
+        while not request_added_to_queue:
+            request_added_to_queue = network_request_handler.enqueue_request(r)
+
+        g_utils.logger.info(success_message)
+    else:
+        g_utils.logger.warning("Internet DOWN: network request {0} could not be processed".format(request_type.name))
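+
+# A typical call (sketch; dcs_url, the payload and the success message are
+# illustrative, the request type comes from NetworkRequestType):
+#
+#   helpers_add_to_network_queue(network_request_handler,
+#                                NetworkRequestType.CS2DCS_REQ_SEND_TREATMENT_REPORT,
+#                                dcs_url, treatment_report, 'POST', g_config,
+#                                'Treatment report queued')
+#
+# Note that the enqueue loop above retries without pausing, so the call
+# blocks the current thread until the queue accepts the request.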
+
+
+def helpers_add_to_output_channel(output_channel, message_body, success_message):
+    message_added_to_queue = False
+    while not message_added_to_queue:
+        message_added_to_queue = output_channel.enqueue_message(message_body)
+
+    g_utils.logger.info(success_message)
+
+
+def helpers_sha256_checksum(data: str) -> str:
+    """
+    Returns the calculated SHA-256 checksum for the input data
+    @param data: input data; either a plain string or the path to a file
+    @return: the checksum as a hex digest string
+    """
+    is_file = os.path.isfile(data)
+
+    if is_file:
+        checksum = hashlib.sha256()
+        with open(data, "rb") as f:
+            # Read and update the hash in blocks of 4K
+            for byte_block in iter(lambda: f.read(4096), b""):
+                checksum.update(byte_block)
+    else:
+        checksum = hashlib.sha256(data.encode())
+
+    return checksum.hexdigest()
+
+
+def helpers_crc8(message_list):
+    """
+    Returns the calculated CRC from a message list
+    @param message_list: a list of integers containing the message
+    @return: an integer containing an unsigned byte
+    """
+    crc = 0
+    for byte in message_list:
+        unsigned_byte = byte ^ crc
+        crc = CRC_LIST[unsigned_byte]
+    return crc
+
+
+def helpers_get_ip_address():
+    hostname = socket.gethostname()
+    ip_address = socket.gethostbyname(hostname)
+    return ip_address
+
+
+def helpers_decommission_device():
+    parent_folder = DECOMMISSION_CS_PATH
+    subfolders_to_delete = DECOMMISSION_FOLDERS
+
+    for subfolder in subfolders_to_delete:
+        path_to_delete = os.path.join(parent_folder, subfolder)
+        if os.path.exists(path_to_delete) and os.path.isdir(path_to_delete):
+            shutil.rmtree(path_to_delete)
+        else:
+            raise FileNotFoundError(f"{path_to_delete} not found or not a directory!")
+
+
+def helpers_should_update_dcs_log_level() -> bool:
+    """
+    Returns True if the state of the log level should be communicated
+    to the DCS, else False. Controlled by the update_dcs_flag in
+    log_state.json:
+    * 1 --> True
+    * 0 --> False
+    """
+    log_state = helpers_read_log_state(CS_LOG_STATE_FILE_PATH)
+    return int(log_state['update_dcs_flag']) == 1
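+
+# Quick sanity check for the checksum helper (sketch; the path is
+# illustrative): the same function hashes either the string itself or,
+# when the string names an existing file, the file's contents:
+#
+#   helpers_sha256_checksum('hello')             # digest of the string
+#   helpers_sha256_checksum('/logs/device.log')  # digest of the file contents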
+
+
+def helpers_should_update_cs_log_level(response: dict) -> bool:
+    current_log_state = helpers_read_log_state(CS_LOG_STATE_FILE_PATH)
+    current_log_level: str = current_log_state['current_log_level']
+    requested_log_level: str = response['logLevel']
+    if requested_log_level.upper() != current_log_level.upper():
+        g_utils.logger.debug(f"DCS requested to change the log level from {current_log_level} to {requested_log_level}")
+        return True
+    return False
+
+
+def helpers_write_log_state(file_path: str, config: dict) -> None:
+    with open(file_path, 'w') as f:
+        json.dump(config, f, indent=4)
+
+
+def helpers_calculate_log_level_duration(response):
+    """
+    Returns a tuple (duration_in_seconds, start_timestamp, end_timestamp)
+    if a valid log level duration was given, otherwise None. The incoming
+    duration and both timestamps are in milliseconds.
+    """
+    duration = int(response['logLevelDuration']) if response['logLevelDuration'] else 0
+    start_timestamp = int(datetime.now(timezone.utc).timestamp() * 1000)
+    end_timestamp = start_timestamp + duration
+
+    if duration > 0:
+        duration_in_seconds = int(duration / 1000)
+        return duration_in_seconds, start_timestamp, end_timestamp
+    return None
+
+
+def helpers_log_rotation(rotation_pct):
+    cs_log_pct = rotation_pct / 100
+    num_files_deleted = 0
+    # Total SD card stats
+    sd_total_bytes, sd_used_bytes, sd_free_bytes = shutil.disk_usage(UI2CS_FILE_CHANNELS_PATH)
+    # CloudSync log stats
+    cs_used_bytes = int(subprocess.check_output(['du', '-bsx', UI2CS_FILE_CHANNELS_PATH]).split()[0].decode('utf-8'))
+    total_deleted_size = cs_used_bytes
+    # Retrieve the file list
+    f = []
+    for subdir, dirs, files in os.walk(UI2CS_FILE_LOG_PATH):
+        for file in files:
+            f.append(os.path.join(subdir, file))
+
+    # Sorted with the oldest modified first
+    f.sort(key=lambda x: os.path.getmtime(x))
+
+    if len(f) > 1:
+        # Delete the oldest files while the logs still occupy more than the
+        # allowed share of the used SD space, always keeping at least one file
+        while (cs_used_bytes / sd_used_bytes) >= cs_log_pct and len(f) > 1:
+            file_to_remove = f.pop(0)
+            os.remove(file_to_remove)
+            num_files_deleted += 1
+            cs_used_bytes = int(
+                subprocess.check_output(['du', '-bsx', UI2CS_FILE_CHANNELS_PATH]).split()[0].decode('utf-8'))
+        total_deleted_size = total_deleted_size - cs_used_bytes
+    else:
+        total_deleted_size = 0
+        g_utils.logger.info("No files removed.")
+
+    return num_files_deleted, total_deleted_size
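+
+# Example: trim CloudSync logs down to at most 10% of the used SD space
+# (sketch; the threshold is illustrative):
+#
+#   deleted_count, freed_bytes = helpers_log_rotation(10)
+#   g_utils.logger.info(f"Rotated {deleted_count} files, freed {freed_bytes} bytes")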
+ """ + + DEFAULT_LOG_LEVEL = 'ERROR' + LOG_LEVELS = { + 0: "NOTSET", + 10: "DEBUG", + 20: "INFO", + 30: "WARN", + 40: "ERROR", + 50: "CRITICAL" + } + + def __init__(self): + self.flask_app = None + self.log_level = self.DEFAULT_LOG_LEVEL + self.log_level_name = None + self.log_level_duration = None + self.log_handler = None + + def initiate(self, app): + self.flask_app = app + + # Genereate the Logs Dir + if not os.path.exists(CS_LOG_PATH): + os.makedirs(CS_LOG_PATH) + + # Remove existing handlers form the Flask app + for handler in self.flask_app.logger.handlers: + self.flask_app.logger.removeHandler(handler) + + # Initiate the main configuration of the logger(s) + self.__configure_logging() + + def set_log_level(self, log_level) -> None: + try: + self.log_level = log_level if isinstance(log_level, int) else getattr(logging, log_level.upper()) + self.log_level_name = self.LOG_LEVELS[self.log_level] + + # Set the log level to the custom log handler + self.log_handler.setLevel(self.log_level) + # Set the log level the the flask logger + self.flask_app.logger.setLevel(self.log_level) + # Set the log level ot the root logger + root_logger = logging.getLogger() + root_logger.setLevel(self.log_level) + + # Update the log state file + update_obj = { + 'attribute': 'current_log_level', + 'value': self.log_level_name + } + self.__update_log_state_file(update_object=update_obj) + + g_utils.logger.info(f'Log level set at {self.log_level_name} ({self.log_level})') + + except Exception as e: + g_utils.logger.error(f'Could not update the log level: {e}') + + def set_log_handler(self): + # Add the handler to the Flask app logger + self.flask_app.logger.addHandler(self.log_handler) + # Add the handler to the root logger + root_logger = logging.getLogger() + root_logger.addHandler(self.log_handler) + + def set_dcs_flag(self, flag_state:int): + update_obj = { + 'attribute': 'update_dcs_flag', + 'value': flag_state + } + self.__update_log_state_file(update_object=update_obj) + + def set_log_duration(self, duration: Union[int, tuple]): + + if isinstance(duration, tuple): + self.log_level_duration = int(duration[0]) + # Update the duration at log state file + update_obj = { + 'attribute': 'log_level_duration', + 'value': duration[0] + } + self.__update_log_state_file(update_object=update_obj) + # Update the start timestamp at log state file + update_obj = { + 'attribute': 'log_level_start_timestamp', + 'value': duration[1] + } + self.__update_log_state_file(update_object=update_obj) + # Update the stop timestamp at log state file + update_obj = { + 'attribute': 'log_level_stop_timestamp', + 'value': duration[2] + } + self.__update_log_state_file(update_object=update_obj) + else: + self.log_level_duration = int(duration) + # Update the duration at log state file + update_obj = { + 'attribute': 'log_level_duration', + 'value': duration + } + self.__update_log_state_file(update_object=update_obj) + + g_utils.logger.debug(f"Log level duration set at {self.log_level_duration}") + + def get_log_level(self) -> tuple: + """ + Returns a tuple with both the numeric and text + value of the log level. The first element of the tuple + is the numeric value and the second one is the text. + """ + return self.log_level, self.log_level_name + + def set_network_provider(self, network_request_handler) -> None: + """ + Passes the network provider to the custom log handler so it can + be able to send uploading log requests to the dcs for CS logs. 
+ """ + self.log_handler.set_network_provider(network_request_handler=network_request_handler) + + def set_error_provider(self, error_handler) -> None: + """ + Passes the error provider to the custom log handler so it can + be able to send uploading log requests to the dcs for CS logs. + """ + self.log_handler.set_error_provider(error_handler=error_handler) + + def set_configuration(self, g_config) -> None: + """ + Passes the configuration tothe custom log handler so it can + be able to send uploading log requests to the dcs for CS logs. + """ + self.log_handler.set_configuration(g_config=g_config) + + def clear_prelogger(self) -> None: + """ + Clears the prelogger after the app has been initiated successfully + """ + #TODO: Implement this logic + + def start_countdown(self) -> None: + if self.log_level and self.log_level_duration: + try: + # Initiate the log level reset thread + Thread(target=self.__reset_log_level, args=(self.log_level_duration,)).start() + except Exception as e: + g_utils.logger.error(f"Failed starting the countdown thread: {e}") + + def __configure_logging(cls): + """ + * Sets up the file rotation Handler + * Sets the initial log level to both root and app loggers + * Sets the g_utils logger + """ + + # Create the "prelogger" for handling logs before the main logger is initialized + LOG_FORMAT = ("%(asctime)s [%(levelname)s]: %(message)s in %(pathname)s:%(lineno)d") + prelogger = logging.getLogger("prelogger") + prelogger.setLevel('DEBUG') + prelogger_file_handler = logging.FileHandler(os.path.join(CS_LOG_PATH, 'logs.init')) + prelogger_file_handler.setLevel('DEBUG') + prelogger_file_handler.setFormatter(logging.Formatter(LOG_FORMAT)) + prelogger.addHandler(prelogger_file_handler) + + # Create a custom TimedRotatingFileHandler that writes logs to a new file and uploads them to DCS + # every midnight, in UTC time + cls.log_handler = CustomTimedRotatingFileHandlerHandler( + filename=CS_LOG_FILE, + prelogger=prelogger, + when="midnight", + interval=1, + utc=True + ) + cls.log_handler.suffix = "%m-%d-%Y" + + # Add a formatter + default_formatter = logging.Formatter( + '[%(asctime)s] %(levelname)s in %(module)s: %(message)s | {%(pathname)s:%(lineno)d}') + cls.log_handler.setFormatter(default_formatter) + + # Set the custom handler as global handler + cls.set_log_handler() + # Set the g_utils logger + g_utils.add_logger(cls.flask_app.logger) + # Set the log level globally + cls.set_log_level(cls.log_level) + + def __update_log_state_file(cls, update_object:dict): + log_state_file = helpers_read_log_state(CS_LOG_STATE_FILE_PATH) + log_state_file[update_object['attribute']] = update_object['value'] + helpers_write_log_state(CS_LOG_STATE_FILE_PATH, log_state_file) + + def __reset_log_level(cls, duration:int): + """ + Resets the logger level to the specified default level after a given time interval. + """ + g_utils.logger.info(f"Logger level set to: {cls.log_level_name} for {duration} seconds") + sleep(duration) + cls.set_log_level(cls.DEFAULT_LOG_LEVEL) + cls.set_dcs_flag(1) + g_utils.logger.info(f"Logger level reverted to: {cls.DEFAULT_LOG_LEVEL}")