Index: cloud_sync.py
===================================================================
diff -u -r011ffd902df4c112d14412bccc7e850519ad0ee9 -r8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65
--- cloud_sync.py	(.../cloud_sync.py)	(revision 011ffd902df4c112d14412bccc7e850519ad0ee9)
+++ cloud_sync.py	(.../cloud_sync.py)	(revision 8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65)
@@ -50,7 +50,7 @@
     # Update the $HOME config from /var/configurations/ and store the merged result in both for later use
     oldConfig = helpers_read_config(OPERATION_CONFIG_FILE_PATH)
     newConfig = helpers_read_config(CONFIG_PATH)
-    newConfig . update (oldConfig)
+    newConfig.update(oldConfig)
     helpers_write_config(None , CONFIG_PATH , newConfig)
     helpers_write_config(OPERATION_CONFIG_PATH, OPERATION_CONFIG_FILE_PATH, newConfig)
     print("CloudSync update config done.")
Index: cloudsync/handlers/cs_mft_dcs_request_handler.py
===================================================================
diff -u -re868961eccbbbef0154180df14d7432ff78cf476 -r8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65
--- cloudsync/handlers/cs_mft_dcs_request_handler.py	(.../cs_mft_dcs_request_handler.py)	(revision e868961eccbbbef0154180df14d7432ff78cf476)
+++ cloudsync/handlers/cs_mft_dcs_request_handler.py	(.../cs_mft_dcs_request_handler.py)	(revision 8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65)
@@ -467,12 +467,12 @@
             else:
                 error = Error(
                     "{0},2,{1}, Missing access token".format(OutboundMessageIDs.CS2UI_ERROR.value,
-                                                             ErrorIDs.CS_DEVICE_LOG_ERROR.value))
+                                                             ErrorIDs.CS_LOG_ERROR.value))
                 self.error_handler.enqueue_error(error=error)
 
         except Exception as e:
             error = Error(ERROR_STRING.format(OutboundMessageIDs.CS2UI_ERROR.value,
-                                              ErrorIDs.CS_DEVICE_LOG_ERROR.value,
+                                              ErrorIDs.CS_LOG_ERROR.value,
                                               e))
             self.error_handler.enqueue_error(error=error)
         else:
Index: cloudsync/handlers/outgoing/handler_cs_to_dcs.py
===================================================================
diff -u -r78fd392821abf65d46d47982d391808a02c34249 -r8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65
--- cloudsync/handlers/outgoing/handler_cs_to_dcs.py	(.../handler_cs_to_dcs.py)	(revision 78fd392821abf65d46d47982d391808a02c34249)
+++ cloudsync/handlers/outgoing/handler_cs_to_dcs.py	(.../handler_cs_to_dcs.py)	(revision 8819ea1a48e9bb0014e9bb90ebb1e0d16eda7d65)
@@ -417,7 +417,7 @@
                       file_json: dict,
                       error_handler: ErrorHandler,
                       log_file_origin: str,
-                      chunk_size: int=50 * 1024 * 1024,
+                      chunk_size: int=2 * 1024 * 1024,
                       retries: int=3
                       ) -> Union[str, None]:
     """ Uploads a large file in chunks using sessions and retries.
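[Note on the chunk_size default above] The default drops from 50 MiB to 2 MiB because the rewritten loop in the next hunk slices the base64-encoded string rather than the raw file bytes, and each slice is wrapped in a JSON payload, so a smaller chunk keeps the request body bounded. Since base64 emits 4 output characters per 3 input bytes, a 2 MiB chunk carries roughly 1.5 MiB of file data. A minimal sketch of the size arithmetic; the 100 MiB file size is a made-up example, not taken from the diff:

    # How the new 2 MiB chunk_size relates to raw file bytes once the
    # payload is base64-encoded (4 output chars per 3 input bytes).
    chunk_size = 2 * 1024 * 1024                 # default, counted in base64 characters
    raw_bytes_per_chunk = chunk_size * 3 // 4    # ~1.5 MiB of file data per chunk

    file_size = 100 * 1024 * 1024                # hypothetical 100 MiB file
    encoded_size = (file_size + 2) // 3 * 4      # exact base64 length (no newlines)
    num_chunks = -(-encoded_size // chunk_size)  # ceiling division -> 67 chunks

    print(raw_bytes_per_chunk, encoded_size, num_chunks)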
@@ -496,41 +496,54 @@
             }
 
             with open(target_file, "rb") as f:
-                # Upload chunks with retry logic
-                for chunk_start in range(0, file_size, chunk_size):
-                    chunk_end = min(chunk_start + chunk_size, file_size)
-                    chunk_data = f.read(chunk_end - chunk_start)
+                file_content = f.read()
 
-                    # Retry logic with counter and backoff time
-                    retry_count = 0
-                    while retry_count < retries:
-                        try:
+            # Encode the bytes using base64
+            base64_string = base64.b64encode(file_content).decode("utf8")
 
-                            chunk_data_b64 = base64.b64encode(chunk_data).decode('utf8')
-                            upload_chunk_payload['chunkNo'] = chunk_number
-                            upload_chunk_payload['data'] = chunk_data_b64
-                            upload_chunk_payload = json.dumps(upload_chunk_payload)
+            # Get the total size of the base64 string (in bytes)
+            total_size = len(base64_string)
 
-                            g_utils.logger.debug(f"File upload payload (upload-chunk) - chunk No {chunk_number}: {upload_chunk_payload}")
+            # Calculate the number of chunks
+            num_chunks = total_size // chunk_size + (total_size % chunk_size > 0)
 
-                            response = requests.post(upload_chunk_url,
-                                                     headers=headers,
-                                                     data=upload_chunk_payload)
+            for i in range(num_chunks):
+                start_index = i * chunk_size
+                end_index = min(start_index + chunk_size, total_size)
+                chunk = base64_string[start_index:end_index]
 
-                            if response.status_code == 200:
-                                chunk_number += 1
-                                g_utils.logger.info(f"Uploaded chunk {chunk_start // chunk_size + 1} of {(file_size // chunk_size) + 1}")
-                                break  # Successful upload, break retry loop
+                # Retry logic with counter and backoff time
+                retry_count = 0
+                while retry_count < retries:
+                    try:
+                        if type(upload_chunk_payload) is str:
+                            upload_chunk_payload = json.loads(upload_chunk_payload)
 
-                            if retry_count < retries:
-                                g_utils.logger.info(f"Retrying chunk upload in 5 seconds...")
-                                sleep(5)  # Implement backoff time between retries
-                            retry_count += 1
+                        upload_chunk_payload['chunkNo'] = chunk_number
+                        upload_chunk_payload['data'] = chunk
+                        upload_chunk_payload = json.dumps(upload_chunk_payload)
 
-                        except Exception as e:
-                            error = Error(GENERAL_EXCEPTION_HOLDER.format(OutboundMessageIDs.CS2UI_ERROR.value,ERROR_ID,str(e)))
-                            error_handler.enqueue_error(error=error)
+                        g_utils.logger.debug(f"File upload payload (upload-chunk) - chunk No {chunk_number}: {upload_chunk_payload}")
+
+                        response = requests.post(upload_chunk_url,
+                                                 headers=headers,
+                                                 data=upload_chunk_payload)
+
+                        if response.status_code == 200:
+                            chunk_number += 1
+                            g_utils.logger.info(f"Uploaded chunk {chunk_number} of {num_chunks}")
+                            break  # Successful upload, break retry loop
+
+                        if retry_count < retries:
+                            g_utils.logger.info(f"Retrying chunk upload in 5 seconds...")
+                            sleep(5)  # Implement backoff time between retries
+                        retry_count += 1
+
+                    except Exception as e:
+                        error = Error(GENERAL_EXCEPTION_HOLDER.format(OutboundMessageIDs.CS2UI_ERROR.value,ERROR_ID,str(e)))
+                        error_handler.enqueue_error(error=error)
+
         except Exception as e:
             error = Error(GENERAL_EXCEPTION_HOLDER.format(OutboundMessageIDs.CS2UI_ERROR.value,ERROR_ID,str(e)))
             error_handler.enqueue_error(error=error)
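[Note on the rewritten upload loop above] For reference, a condensed, standalone sketch of the new flow under stated assumptions: the function name upload_file_in_chunks and its signature are hypothetical; the chunkNo/data payload keys, the 2 MiB default, the 5-second flat backoff, and the three retries come from the diff; error reporting is reduced to a boolean return, whereas the real handler routes failures through error_handler.enqueue_error. One design note: rebuilding the payload dict on every iteration sidesteps the dict-became-string problem that the patch guards against with its type(upload_chunk_payload) is str check, since json.dumps is never fed its own output.

    import base64
    import json
    from time import sleep

    import requests


    def upload_file_in_chunks(path: str,
                              upload_chunk_url: str,
                              headers: dict,
                              chunk_size: int = 2 * 1024 * 1024,
                              retries: int = 3) -> bool:
        """Read the whole file, base64-encode it, and POST it in fixed-size
        slices of the encoded string, retrying each slice with a flat backoff."""
        with open(path, "rb") as f:
            file_content = f.read()

        # Chunking happens on the base64 text, as in the patched handler above.
        base64_string = base64.b64encode(file_content).decode("utf8")
        total_size = len(base64_string)
        num_chunks = -(-total_size // chunk_size)  # ceiling division

        for chunk_no in range(1, num_chunks + 1):
            start = (chunk_no - 1) * chunk_size
            chunk = base64_string[start:start + chunk_size]

            # Rebuild the payload each iteration so json.dumps never sees an
            # already-serialized string.
            payload = json.dumps({"chunkNo": chunk_no, "data": chunk})

            for attempt in range(retries):
                try:
                    response = requests.post(upload_chunk_url,
                                             headers=headers,
                                             data=payload)
                    if response.status_code == 200:
                        break  # chunk accepted, move on to the next one
                except requests.RequestException:
                    pass  # network error: fall through to the retry sleep
                sleep(5)  # flat backoff between attempts
            else:
                return False  # all retries exhausted for this chunk
        return True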