Index: cloud_sync.py
===================================================================
diff -u -rb02f69db02eb8157d85f8d5c5b4e571784a6c212 -rc770c4ec771bb6175179584363076194cd880b9c
--- cloud_sync.py	(.../cloud_sync.py)	(revision b02f69db02eb8157d85f8d5c5b4e571784a6c212)
+++ cloud_sync.py	(.../cloud_sync.py)	(revision c770c4ec771bb6175179584363076194cd880b9c)
@@ -19,7 +19,7 @@
 import sys
 
-VERSION = "0.4.15"
+VERSION = "0.4.16"
 
 arguments = sys.argv
Index: cloudsync/config/config_STAGING.json
===================================================================
diff -u
--- cloudsync/config/config_STAGING.json	(revision 0)
+++ cloudsync/config/config_STAGING.json	(revision c770c4ec771bb6175179584363076194cd880b9c)
@@ -0,0 +1,29 @@
+{
+    "kebormed_paas": {
+        "idp_client_secret": "NL2cn6eMyg2WLSB0nhfvbxvM79dvo3ta",
+        "url_mft": "",
+        "url_dcs": "https://device-api.diality.staging.kebormed.com",
+        "url_device_identity": "https://device-identity.diality.staging.kebormed.com/auth/realms/Main/protocol/openid-connect/token",
+        "url_reachability": "https://healthcheck.diality.staging.kebormed.com/",
+        "dia_org_id": 1
+    },
+    "device": {
+        "ip": "",
+        "port": 80,
+        "name": "",
+        "hd_serial": "",
+        "dg_serial": "",
+        "sw_version": "",
+        "mode": "registration",
+        "device_state": "INACTIVE_NOT_OK"
+    },
+    "logs": {
+        "default_log_level": "ERROR",
+        "default_log_level_duration": "86400000",
+        "current_log_level": "",
+        "log_level_duration": 0,
+        "log_level_start_timestamp": 0,
+        "log_level_stop_timestamp": 0,
+        "update_dcs_flag": 0
+    }
+}
\ No newline at end of file
Index: cloudsync/config/config_integration.json
===================================================================
diff -u -r19f09f19af5ac851ab78be77c33d09af26a36f84 -rc770c4ec771bb6175179584363076194cd880b9c
--- cloudsync/config/config_integration.json	(.../config_integration.json)	(revision 19f09f19af5ac851ab78be77c33d09af26a36f84)
+++ cloudsync/config/config_integration.json	(.../config_integration.json)	(revision c770c4ec771bb6175179584363076194cd880b9c)
@@ -1,29 +1,29 @@
 {
     "kebormed_paas": {
-        "idp_client_secret": "ocnnWhkfImztoJShV6GV4uxeAOM0GhwT",
-        "url_mft": "",
-        "url_dcs": "https://device-api.diality.integration.kebormed.com",
-        "url_device_identity": "https://device-identity.diality.integration.kebormed.com/auth/realms/Main/protocol/openid-connect/token",
-        "url_reachability": "https://healthcheck.diality.integration.kebormed.com/",
+        "idp_client_secret": "mock-client-secret",
+        "url_mft": "http://mock-drt:9090",
+        "url_dcs": "http://mock-dcs:8080",
+        "url_device_identity": "http://mock-dcs:8080/auth/realms/Main/protocol/openid-connect/token",
+        "url_reachability": "http://mock-dcs:8080/health",
         "dia_org_id": 1
     },
     "device": {
-        "ip": "",
-        "port": 80,
-        "name": "",
-        "hd_serial": "",
-        "dg_serial": "",
-        "sw_version": "",
+        "ip": "172.18.0.3",
+        "port": 5000,
+        "name": "HD_TEST_1770675061356",
+        "hd_serial": "HD_TEST_1770675061356",
+        "dg_serial": "DG_TEST_1770675061356",
+        "sw_version": "0.5.0_test",
         "mode": "registration",
-        "device_state": "INACTIVE_NOT_OK"
+        "device_state": 4
     },
     "logs": {
-        "default_log_level": "ERROR",
+        "default_log_level": "DEBUG",
         "default_log_level_duration": "86400000",
-        "current_log_level": "",
+        "current_log_level": "DEBUG",
         "log_level_duration": 0,
         "log_level_start_timestamp": 0,
         "log_level_stop_timestamp": 0,
         "update_dcs_flag": 0
     }
-}
+}
\ No newline at end of file
Fisheye: Tag c770c4ec771bb6175179584363076194cd880b9c refers to a dead (removed) revision in file `cloudsync/config/config_production.json'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag c770c4ec771bb6175179584363076194cd880b9c refers to a dead (removed) revision in file `cloudsync/config/config_quality2.json'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag c770c4ec771bb6175179584363076194cd880b9c refers to a dead (removed) revision in file `cloudsync/config/config_staging.json'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag c770c4ec771bb6175179584363076194cd880b9c refers to a dead (removed) revision in file `cloudsync/config/device_log_template.json'.
Fisheye: No comparison available. Pass `N' to diff?
Fisheye: Tag c770c4ec771bb6175179584363076194cd880b9c refers to a dead (removed) revision in file `cloudsync/config/log_state.json'.
Fisheye: No comparison available. Pass `N' to diff?
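The integration config above now points Cloud Sync at local mock services (mock-dcs on 8080, mock-drt on 9090) instead of the shared integration environment. A quick reachability check of those endpoints before running the integration suite can save debugging time. The sketch below is illustrative only: the script name, config path, and use of requests are assumptions and not part of this changeset.

# smoke_check_config.py - illustrative sketch, not part of this changeset.
# Loads config_integration.json and pings the endpoints it declares.
import json
import requests

CONFIG_PATH = "cloudsync/config/config_integration.json"  # assumed relative path

with open(CONFIG_PATH) as fh:
    paas = json.load(fh)["kebormed_paas"]

for key in ("url_dcs", "url_mft", "url_reachability"):
    url = paas.get(key)
    if not url:
        print(f"{key}: not set, skipping")
        continue
    try:
        status = requests.get(url, timeout=5).status_code
        print(f"{key}: {url} -> HTTP {status}")
    except requests.exceptions.RequestException as exc:
        print(f"{key}: {url} -> unreachable ({exc})")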
Index: cloudsync/handlers/cs_mft_dcs_request_handler.py
===================================================================
diff -u -r8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65 -rc770c4ec771bb6175179584363076194cd880b9c
--- cloudsync/handlers/cs_mft_dcs_request_handler.py	(.../cs_mft_dcs_request_handler.py)	(revision 8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65)
+++ cloudsync/handlers/cs_mft_dcs_request_handler.py	(.../cs_mft_dcs_request_handler.py)	(revision c770c4ec771bb6175179584363076194cd880b9c)
@@ -301,14 +301,18 @@
 
         self.logger.debug("treatment log: {0}".format(treatment_log_json))
 
+        self.logger.info("Sending treatment report to DCS")
         response = cmd_outgoing_send_treatment_report(url=data_submission_url,
                                                       access_token=access_token,
                                                       treatment_log=treatment_log_json,
                                                       error_handler=self.error_handler)
 
         if response is not None:
+            self.logger.info(f"Treatment upload response: {response.status_code}")
+            if response.status_code != OK:
+                self.logger.error(f"Treatment upload failed: {response.status_code} - {response.text[:500]}")
             self.logger.debug(
-                "Treatment upload response: {0}".format(response.json()))
+                "Treatment upload response body: {0}".format(response.json()))
 
             if response.status_code == OK:
                 # Send TX code to UI app
@@ -386,7 +390,11 @@
                                                              access_token=access_token,
                                                              file_json=device_log_json,
                                                              error_handler=self.error_handler,
-                                                             log_file_origin='device')
+                                                             log_file_origin='device',
+                                                             token_refresher=lambda: self.get_valid_token(
+                                                                 identity_url=identity_url,
+                                                                 token_verification_url=token_verification_url,
+                                                                 client_secret=client_secret))
 
         if device_log_filename is not None:
             self.logger.debug("Device log file uploaded: {device_log_filename}")
@@ -459,7 +467,11 @@
                                                          access_token=access_token,
                                                          file_json=cs_log_json,
                                                          error_handler=self.error_handler,
-                                                         log_file_origin='cs')
+                                                         log_file_origin='cs',
+                                                         token_refresher=lambda: self.get_valid_token(
+                                                             identity_url=identity_url,
+                                                             token_verification_url=token_verification_url,
+                                                             client_secret=client_secret))
 
         if cs_log_filename is not None:
             self.logger.debug("CS log file uploaded: {cs_log_filename}")
@@ -482,6 +494,7 @@
         access_token = helpers_get_stored_token()
         if access_token is None:
+            self.logger.info("No stored token found, requesting new token")
             access_token = cmd_outgoing_get_new_token_with_cert(path_certificate=CREDENTIALS_CERTIFICATE_X509,
                                                                 path_private_key=CREDENTIALS_PRIVATE_KEY,
                                                                 save=True,
@@ -492,13 +505,16 @@
         response = cmd_outgoing_verify_token(url=token_verification_url,
                                              access_token=access_token,
                                              error_handler=self.error_handler)
-        if response.status_code == UNAUTHORIZED:
+        if response is None or response.status_code != OK:
+            self.logger.warning(f"Token verification failed (status={response.status_code if response else 'None'}), refreshing token")
             access_token = cmd_outgoing_get_new_token_with_cert(path_certificate=CREDENTIALS_CERTIFICATE_X509,
                                                                 path_private_key=CREDENTIALS_PRIVATE_KEY,
                                                                 save=True,
                                                                 url=identity_url,
                                                                 client_secret=client_secret,
                                                                 error_handler=self.error_handler)
+        else:
+            self.logger.info("Token verification succeeded")
 
         return access_token
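Two related things are visible in this handler: get_valid_token() now refreshes the token on any verification outcome other than OK (including a missing response), and the chunked log uploads pass a token_refresher callback so the uploader can obtain a fresh token late in a long upload. The lambda simply re-enters get_valid_token() with the identity_url, token_verification_url, and client_secret captured from the enclosing scope. Below is a condensed, standalone sketch of the verify-or-refresh flow; verify_token() and request_new_token() are hypothetical stand-ins for cmd_outgoing_verify_token() and cmd_outgoing_get_new_token_with_cert(), and the stored-token lookup that the real method does via helpers_get_stored_token() is passed in as a value here.

# Condensed sketch of the verify-or-refresh token flow; helper names are stand-ins.
OK = 200

def get_valid_token(stored_token, verify_token, request_new_token, logger):
    access_token = stored_token
    if access_token is None:
        logger.info("No stored token found, requesting new token")
        access_token = request_new_token()

    response = verify_token(access_token)
    if response is None or response.status_code != OK:
        status = response.status_code if response else "None"
        logger.warning(f"Token verification failed (status={status}), refreshing token")
        access_token = request_new_token()
    else:
        logger.info("Token verification succeeded")
    return access_token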
token") access_token = cmd_outgoing_get_new_token_with_cert(path_certificate=CREDENTIALS_CERTIFICATE_X509, path_private_key=CREDENTIALS_PRIVATE_KEY, save=True, url=identity_url, client_secret=client_secret, error_handler=self.error_handler) + else: + self.logger.info("Token verification succeeded") return access_token Index: cloudsync/handlers/outgoing/handler_cs_to_dcs.py =================================================================== diff -u -r827b6a2d23a7e61dc5ffe316b2cecebc9b310501 -rc770c4ec771bb6175179584363076194cd880b9c --- cloudsync/handlers/outgoing/handler_cs_to_dcs.py (.../handler_cs_to_dcs.py) (revision 827b6a2d23a7e61dc5ffe316b2cecebc9b310501) +++ cloudsync/handlers/outgoing/handler_cs_to_dcs.py (.../handler_cs_to_dcs.py) (revision c770c4ec771bb6175179584363076194cd880b9c) @@ -102,8 +102,9 @@ } resp = requests.post(url=url, - data=data, + data=json.dumps(data), headers=headers) + g_utils.logger.info(f"Token verification response: {resp.status_code}") return resp except requests.exceptions.Timeout: error = Error(REGISTRATION_TIMEOUT_HOLDER.format(OutboundMessageIDs.CS2UI_ERROR.value, @@ -426,7 +427,8 @@ error_handler: ErrorHandler, log_file_origin: str, chunk_size: int=2 * 1024 * 1024, - retries: int=3 ) -> Union[str, None]: + retries: int=3, + token_refresher: callable=None ) -> Union[str, None]: """ Uploads a large file in chunks using sessions and retries. @@ -464,6 +466,7 @@ "X-Api-Version": API_VERSION } + g_utils.logger.info(f"Starting upload session for {log_file_origin} log") g_utils.logger.debug(f"File upload payload (start-session): {start_session_payload}") try: @@ -472,7 +475,9 @@ headers=headers, data=start_session_payload) + g_utils.logger.info(f"Start-session response: {response.status_code}") if response.status_code != 200: + g_utils.logger.error(f"Start-session failed: {response.status_code} - {response.text[:500]}") raise Exception(f"Error while starting upload session: {response.status_code} - {response.text}") except Exception as e: @@ -545,8 +550,9 @@ g_utils.logger.info(f"Uploaded chunk {chunk_number} of {num_chunks}") break # Successful upload, break retry loop + g_utils.logger.warning(f"Chunk {chunk_number}/{num_chunks} upload failed: {response.status_code} - {response.text[:500]}") if retry_count < retries: - g_utils.logger.info(f"Retrying chunk upload in 5 seconds...") + g_utils.logger.info(f"Retrying chunk upload in 5 seconds (attempt {retry_count + 1}/{retries})...") sleep(5) # Implement backoff time between retries retry_count += 1 @@ -563,6 +569,16 @@ # End upload session # + # Refresh token before end-session to prevent expiry after long chunk uploads + if token_refresher is not None: + g_utils.logger.info("Refreshing token before end-session phase") + refreshed_token = token_refresher() + if refreshed_token is not None: + access_token = refreshed_token + g_utils.logger.info("Token refreshed successfully before end-session") + else: + g_utils.logger.warning("Token refresh returned None, using existing token for end-session") + end_session_url = os.path.join(base_url, "api/device/data/end-session") end_session_payload = file_json['end_session'] end_session_payload['sessionId'] = session_id @@ -575,12 +591,15 @@ } try: end_session_payload = json.dumps(end_session_payload) + g_utils.logger.info(f"Ending upload session (sessionId={session_id})") g_utils.logger.debug(f"Device log upload payload (end-session): {end_session_payload}") response = requests.post(end_session_url, headers=headers, data=end_session_payload) + 
g_utils.logger.info(f"End-session response: {response.status_code}") if response.status_code != 200: + g_utils.logger.error(f"End-session failed: {response.status_code} - {response.text[:500]}") raise Exception(f"Error while ending upload session: {response.status_code} - {response.text}") except Exception as e: Index: cloudsync/utils/helpers.py =================================================================== diff -u -rcc36b7572e8d4e67ea514e038f9e07665d6e336f -rc770c4ec771bb6175179584363076194cd880b9c --- cloudsync/utils/helpers.py (.../helpers.py) (revision cc36b7572e8d4e67ea514e038f9e07665d6e336f) +++ cloudsync/utils/helpers.py (.../helpers.py) (revision c770c4ec771bb6175179584363076194cd880b9c) @@ -9,6 +9,7 @@ import base64 import uuid import subprocess +import math from datetime import * from time import time, sleep @@ -98,7 +99,11 @@ if helpers_is_int(val): return int(val) elif helpers_is_float(val): - return float(val) + f = float(val) + if math.isinf(f) or math.isnan(f): + g_utils.logger.warning(f"Non-finite float value encountered: '{val}' — replacing with 0") + return 0 + return f else: return val @@ -325,14 +330,14 @@ data_components = data_line.split(',') data_record = { "time": int(data_components[0]) * S_MS_CONVERSION_FACTOR, - "bloodFlowRate": float(data_components[1]), - "dialysateFlowRate": float(data_components[2]), - "ultrafiltrationRate": float(data_components[3]), - "arterialPressure": float(data_components[4]), - "venousPressure": float(data_components[5]), - "systolic": float(data_components[6]), - "diastolic": float(data_components[7]), - "heartRate": float(data_components[8]) + "bloodFlowRate": helpers_try_numeric(data_components[1]), + "dialysateFlowRate": helpers_try_numeric(data_components[2]), + "ultrafiltrationRate": helpers_try_numeric(data_components[3]), + "arterialPressure": helpers_try_numeric(data_components[4]), + "venousPressure": helpers_try_numeric(data_components[5]), + "systolic": helpers_try_numeric(data_components[6]), + "diastolic": helpers_try_numeric(data_components[7]), + "heartRate": helpers_try_numeric(data_components[8]) } treatment_data['data']['treatment']['data'].append(data_record) elif section == TREATMENT_ALARMS: