diff --git a/tc/obsw_tc_service23_sdcard.py b/tc/obsw_tc_service23_sdcard.py
index ccd07d808c1b253838c2d5e3e4b9322b88c0fdbf..b89ae1bfb3f10dc9cd95af6f9d92ed503f2e20a0 100644
--- a/tc/obsw_tc_service23_sdcard.py
+++ b/tc/obsw_tc_service23_sdcard.py
@@ -4,9 +4,6 @@
 Created: 21.01.2020 07:48
 @author: Jakob Meier
 """
-import math
-from enum import Enum
-
 import config.obsw_config as g
 from typing import Deque, Union
 
@@ -17,206 +14,6 @@ from tmtc_core.utility.obsw_logger import get_logger
 LOGGER = get_logger()
 
 
-class FileTransferHelper:
-    """
-    This helper class fills the provided TC queue with appropriate PUS telecommands
-    to transfer a large file.
-    There are three modes which determine which telecommands will be generated:
-    1. NORMAL: Generate telecommand to create a new file and append data packets if
-       the file data is too large. This will be the default mode.
-    2. DELETE_OLD: Generate telecommand to delete old file and then perform same steps as the
-       normal mode
-    3. RENAME_OLD: Rename old file and then perform same steps as in normal mode.
-
-    Please note that the setter functions set_data have to be used to assign data, otherwise
-    an empty file will be created. The mode is set with setter commands as well.
-    """
-    class TransferMode(Enum):
-        # Normal mode
-        NORMAL = 1
-        # Generate a command to delete the old file first
-        DELETE_OLD = 2
-        # Generate a command to rename the old file first.
-        RENAME_OLD = 3
-
-    def __init__(self, tc_queue: TcQueueT, max_size_of_app_data: int,
-                 target_repository: str, target_filename: str,
-                 object_id=g.SD_CARD_HANDLER_ID):
-        """
-        @param tc_queue: TC queue which will be filled
-        @param max_size_of_app_data: Maximum allowed app data size. Number of generated packets
-        will depend on this value
-        @param target_repository: Repository path on target.
-        @param target_filename: Filename on target
-        @param object_id:
-        """
-        self.__transfer_mode = self.TransferMode.NORMAL
-        self.max_size_of_app_data = max_size_of_app_data
-        self.__max_file_data_size = 0
-        self.allowed_file_data_size = calculate_allowed_file_data_size(
-            max_size_of_app_data, target_filename, target_repository)
-        self.target_filename = target_filename
-        self.target_repository = target_repository
-        self.__renamed_name = self.target_filename + "old"
-        self.object_id = object_id
-        self.tc_queue = tc_queue
-
-        self.__number_of_packets = 0
-        self.__number_of_append_packets = 0
-        self.__number_of_create_packets = 1
-        self.__number_of_delete_packets = 0
-        self.__number_of_finish_packets = 1
-
-        self.__lock_file = True
-        self.__local_filename = ""
-        self.__file_data = bytearray()
-        # This will generate a telecommand to delete the old file, if it exists
-        self.delete_old_file = False
-        # This will generater a telecommand to rename the old file, if it exists
-        self.rename_old_file = False
-
-    def set_data_from_file(self, local_filename: str):
-        with open(local_filename, 'rb') as file:
-            self.__file_data = file.read()
-
-    def set_data_raw(self, tc_data: bytearray):
-        self.__file_data = tc_data
-
-    def set_to_delete_old_file(self):
-        self.__transfer_mode = self.TransferMode.DELETE_OLD
-
-    def set_to_rename_old_file(self, renamed_name: str):
-        self.__transfer_mode = self.TransferMode.RENAME_OLD
-        self.__renamed_name = renamed_name
-
-    def set_to_lock_file(self, lock_file: bool):
-        self.__lock_file = lock_file
-
-    def get_number_of_packets_generated(self):
-        return self.__number_of_packets
-
-    def set_max_file_data_size(self, max_file_data_size: int):
-        """
-        If this value is specified and the source file is large (larger than the maximum allowed
-        app data!), the file data size will be set to this value.
-        @param max_file_data_size:
-        @return:
-        """
-        self.__max_file_data_size = max_file_data_size
-
-    def file_size(self):
-        return len(self.__file_data)
-
-    def generate_packets(self, ssc: int):
-        if self.__transfer_mode == self.TransferMode.DELETE_OLD:
-            command = generate_rm_file_srv23_2_packet(self.target_filename, self.target_repository,
-                                                      ssc, self.object_id)
-            self.__number_of_delete_packets = 1
-            ssc += 1
-            self.tc_queue.appendleft(command.pack_command_tuple())
-        elif self.__transfer_mode == self.TransferMode.RENAME_OLD:
-            # not implemented yet
-            pass
-        if len(self.__file_data) > self.allowed_file_data_size:
-            # Large file, create file with init_data
-            if self.__max_file_data_size > 0:
-                init_data = self.__file_data[0:self.__max_file_data_size]
-            else:
-                init_data = self.__file_data[0:self.allowed_file_data_size]
-        else:
-            # Small file, one packet for file creation sufficient
-            command = generate_create_file_srv23_1_packet(
-                self.target_filename, self.target_repository, ssc, self.max_size_of_app_data,
-                self.__file_data)
-            ssc += 1
-            self.tc_queue.appendleft(command.pack_command_tuple())
-            return
-
-        # Create large file.
-        command = generate_create_file_srv23_1_packet(
-            self.target_filename, self.target_repository, ssc, self.max_size_of_app_data,
-            init_data)
-        ssc += 1
-        self.tc_queue.appendleft(command.pack_command_tuple())
-        rest_of_data = self.__file_data[self.allowed_file_data_size:]
-        # Generate the rest of the packets to write to large file
-        if self.__max_file_data_size > 0:
-            self.__generate_append_to_file_packets_automatically(
-                data=rest_of_data, target_repository=self.target_repository,
-                target_filename=self.target_filename, size_of_data_blocks=self.__max_file_data_size,
-                init_ssc=ssc)
-        else:
-            self.__generate_append_to_file_packets_automatically(
-                data=rest_of_data, target_repository=self.target_repository,
-                target_filename=self.target_filename, size_of_data_blocks=self.max_size_of_app_data,
-                init_ssc=ssc)
-        ssc += 1
-        last_command = generate_finish_append_to_file_srv23_131_packet(
-            filename=self.target_filename, repository_path=self.target_repository,
-            ssc=ssc, lock_file=self.__lock_file)
-        self.tc_queue.appendleft(last_command.pack_command_tuple())
-        self.__number_of_packets = \
-            self.__number_of_append_packets + self.__number_of_create_packets + \
-            self.__number_of_delete_packets + 1
-
-    def __generate_append_to_file_packets_automatically(
-            self, data: bytearray, target_repository: str, target_filename: str,
-            size_of_data_blocks: int, init_ssc: int):
-        """
-        This function generates PUS packets which is used to write data in a file.
-        A new file will be created if not already existing. If the file already exists, this might
-        lead to
-
-        If the file data is larger than the maximum allowed size of application data, this function
-        will split the data into multiple packets and increment the initial SSC number by one for
-        each packet.
-        @param data: Data which will be split up.
-        @param init_ssc: First SSC, which will be incremented for each packet.
-        """
-        header = bytearray(self.object_id)
-        header += target_repository.encode('utf-8')
-        # Add string terminator of repository path
-        header.append(0)
-        header += target_filename.encode('utf-8')
-        # Add string terminator of filename
-        header.append(0)
-        self.__split_large_file(header, size_of_data_blocks, data, init_ssc)
-
-    def __split_large_file(self, header: bytearray, size_of_data_blocks: int,
-                           data: bytearray, init_ssc: int):
-        """
-        This function splits a large file in multiple packets and packs the generated packets
-        into the member deque. This is necessary because the packet size is limited.
-        @param header: Repository and file name which will always stay the same
-        @param size_of_data_blocks: The file data blocks will have this size
-        @param data: The data to pack in multiple packets
-        @param init_ssc: The ssc of the first command, will be incremented by one for each packet.
- """ - number_of_packets = math.floor(len(data) / size_of_data_blocks) - packet_sequence_number = 0 - - for i in range(number_of_packets): - header.append(packet_sequence_number >> 8) - header.append(0xFF & packet_sequence_number) - header += data[i * size_of_data_blocks:(i + 1) * size_of_data_blocks] - - commands = PusTelecommand(service=23, subservice=130, ssc=init_ssc + i, - app_data=header) - self.tc_queue.appendleft(commands.pack_command_tuple()) - - # Remove everything except the header - header = header[:len(header) - size_of_data_blocks - 2] - packet_sequence_number = packet_sequence_number + 1 - # Last packet will be subservice 131 to finish the append operation - header.append(packet_sequence_number >> 8) - header.append(0xFF & packet_sequence_number) - self.__number_of_append_packets += number_of_packets - header += data[number_of_packets * size_of_data_blocks:len(data)] - commands = PusTelecommand(service=23, subservice=130, ssc=init_ssc + packet_sequence_number, - app_data=header) - self.tc_queue.appendleft(commands.pack_command_tuple()) - - def generate_print_sd_card_packet( ssc: int, object_id: bytearray = g.SD_CARD_HANDLER_ID) -> PusTelecommand: app_data = bytearray(object_id) diff --git a/utility/obsw_file_transfer_helper.py b/utility/obsw_file_transfer_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..fbfa2337882548ae2161fa50bf8f24fa6b258128 --- /dev/null +++ b/utility/obsw_file_transfer_helper.py @@ -0,0 +1,241 @@ +from enum import Enum +import math + +from config.obsw_config import SD_CARD_HANDLER_ID +from tmtc_core.tc.obsw_pus_tc_base import TcQueueT, PusTelecommand +from tc.obsw_tc_service23_sdcard import \ + calculate_allowed_file_data_size, generate_rm_file_srv23_2_packet, \ + generate_create_file_srv23_1_packet, generate_finish_append_to_file_srv23_131_packet, \ + generate_lock_file_srv23_5_6_packet + + +class FileTransferHelper: + """ + This helper class fills the provided TC queue with appropriate PUS telecommands + to transfer a large file. + There are three modes which determine which telecommands will be generated: + 1. NORMAL: Generate telecommand to create a new file and append data packets if + the file data is too large. This will be the default mode. + 2. DELETE_OLD: Generate telecommand to delete old file and then perform same steps as the + normal mode + 3. RENAME_OLD: Rename old file and then perform same steps as in normal mode. + + Please note that the setter functions set_data have to be used to assign data, otherwise + an empty file will be created. The mode is set with setter commands as well. + """ + class TransferMode(Enum): + # Normal mode + NORMAL = 1 + # Generate a command to delete the old file first + DELETE_OLD = 2 + # Generate a command to rename the old file first. + RENAME_OLD = 3 + + def __init__(self, tc_queue: TcQueueT, max_size_of_app_data: int, + target_repository: str, target_filename: str, + object_id=SD_CARD_HANDLER_ID): + """ + @param tc_queue: TC queue which will be filled + @param max_size_of_app_data: Maximum allowed app data size. Number of generated packets + will depend on this value + @param target_repository: Repository path on target. 
+        @param target_filename: Filename on target
+        @param object_id:
+        """
+        self.object_id = object_id
+        self.max_size_of_app_data = max_size_of_app_data
+        self.allowed_file_data_size = calculate_allowed_file_data_size(
+            max_size_of_app_data, target_filename, target_repository)
+
+        self.target_filename = target_filename
+        self.target_repository = target_repository
+
+        self.tc_queue = tc_queue
+
+        self.__transfer_mode = self.TransferMode.NORMAL
+        self.__max_file_data_size = 0
+        self.__renamed_name = self.target_filename + "old"
+
+        self.__large_file = False
+        self.__number_of_packets = 0
+        self.__number_of_append_packets = 0
+        self.__number_of_create_packets = 1
+        self.__number_of_delete_packets = 0
+        self.__number_of_finish_packets = 1
+
+        self.__current_ssc = 0
+        self.__lock_file = True
+        self.__local_filename = ""
+        self.__file_data = bytearray()
+        # This will generate a telecommand to delete the old file, if it exists
+        self.delete_old_file = False
+        # This will generate a telecommand to rename the old file, if it exists
+        self.rename_old_file = False
+
+    def set_data_from_file(self, local_filename: str):
+        with open(local_filename, 'rb') as file:
+            self.__file_data = file.read()
+
+    def set_data_raw(self, tc_data: bytearray):
+        self.__file_data = tc_data
+
+    def set_to_delete_old_file(self):
+        self.__transfer_mode = self.TransferMode.DELETE_OLD
+
+    def set_to_rename_old_file(self, renamed_name: str):
+        self.__transfer_mode = self.TransferMode.RENAME_OLD
+        self.__renamed_name = renamed_name
+
+    def set_to_lock_file(self, lock_file: bool):
+        """
+        A command will be sent to lock the file after a successful transfer.
+        @param lock_file:
+        @return:
+        """
+        self.__lock_file = lock_file
+
+    def get_number_of_packets_generated(self):
+        return self.__number_of_packets
+
+    def set_max_file_data_size(self, max_file_data_size: int):
+        """
+        If this value is specified and the source file is large (larger than the maximum allowed
+        app data!), the file data size will be set to this value.
+        @param max_file_data_size:
+        @return:
+        """
+        self.__max_file_data_size = max_file_data_size
+
+    def file_size(self):
+        return len(self.__file_data)
+
+    def generate_packets(self, ssc: int):
+        """
+        Main function to generate all packets and fill them into the provided deque.
+        @param ssc:
+        @return:
+        """
+        self.__current_ssc = ssc
+        self.__handle_delete_packet_generation()
+        if self.__transfer_mode == self.TransferMode.RENAME_OLD:
+            # not implemented yet
+            pass
+        self.__handle_create_file_packet_generation()
+        self.__handle_finish_and_lock_packet_generation()
+        self.__number_of_packets = \
+            self.__number_of_create_packets + self.__number_of_append_packets + \
+            self.__number_of_delete_packets + self.__number_of_finish_packets
+
+    def __handle_delete_packet_generation(self):
+        if self.__transfer_mode == self.TransferMode.DELETE_OLD:
+            command = generate_rm_file_srv23_2_packet(
+                filename=self.target_filename, repository_path=self.target_repository,
+                ssc=self.__current_ssc, object_id=self.object_id)
+            self.__number_of_delete_packets = 1
+            self.__current_ssc += 1
+            self.tc_queue.appendleft(command.pack_command_tuple())
+
+    def __handle_create_file_packet_generation(self):
+        if len(self.__file_data) > self.allowed_file_data_size:
+            # Large file, create file with init_data
+            if self.__max_file_data_size > 0:
+                init_data = self.__file_data[0:self.__max_file_data_size]
+            else:
+                init_data = self.__file_data[0:self.allowed_file_data_size]
+            self.__large_file = True
+        else:
+            init_data = self.__file_data
+
+        # Create file.
+        command = generate_create_file_srv23_1_packet(
+            self.target_filename, self.target_repository, ssc=self.__current_ssc,
+            max_size_of_app_data=self.max_size_of_app_data, initial_data=init_data)
+        self.__current_ssc += 1
+        self.tc_queue.appendleft(command.pack_command_tuple())
+        if not self.__large_file:
+            return
+        rest_of_data = self.__file_data[self.allowed_file_data_size:]
+        # Generate the rest of the packets to write to large file
+        if self.__max_file_data_size > 0:
+            self.__generate_append_to_file_packets_automatically(
+                data=rest_of_data, target_repository=self.target_repository,
+                target_filename=self.target_filename, size_of_data_blocks=self.__max_file_data_size,
+                init_ssc=self.__current_ssc)
+        else:
+            self.__generate_append_to_file_packets_automatically(
+                data=rest_of_data, target_repository=self.target_repository,
+                target_filename=self.target_filename, size_of_data_blocks=self.max_size_of_app_data,
+                init_ssc=self.__current_ssc)
+        self.__current_ssc += 1
+
+    def __generate_append_to_file_packets_automatically(
+            self, data: bytearray, target_repository: str, target_filename: str,
+            size_of_data_blocks: int, init_ssc: int):
+        """
+        This function generates the PUS packets which are used to write data into a file.
+        A new file will be created if not already existing. If the file already exists, this might
+        lead to
+
+        If the file data is larger than the maximum allowed size of application data, this function
+        will split the data into multiple packets and increment the initial SSC number by one for
+        each packet.
+        @param data: Data which will be split up.
+        @param init_ssc: First SSC, which will be incremented for each packet.
+        """
+        header = bytearray(self.object_id)
+        header += target_repository.encode('utf-8')
+        # Add string terminator of repository path
+        header.append(0)
+        header += target_filename.encode('utf-8')
+        # Add string terminator of filename
+        header.append(0)
+        self.__split_large_file(header, size_of_data_blocks, data, init_ssc)
+
+    def __split_large_file(self, header: bytearray, size_of_data_blocks: int,
+                           data: bytearray, init_ssc: int):
+        """
+        This function splits a large file into multiple packets and packs the generated packets
+        into the member deque. This is necessary because the packet size is limited.
+        @param header: Repository and file name which will always stay the same
+        @param size_of_data_blocks: The file data blocks will have this size
+        @param data: The data to pack in multiple packets
+        @param init_ssc: The ssc of the first command, will be incremented by one for each packet.
+ """ + number_of_packets = math.floor(len(data) / size_of_data_blocks) + packet_sequence_number = 0 + + for i in range(number_of_packets): + header.append(packet_sequence_number >> 8) + header.append(0xFF & packet_sequence_number) + header += data[i * size_of_data_blocks:(i + 1) * size_of_data_blocks] + + commands = PusTelecommand(service=23, subservice=130, ssc=init_ssc + i, + app_data=header) + self.tc_queue.appendleft(commands.pack_command_tuple()) + + # Remove everything except the header + header = header[:len(header) - size_of_data_blocks - 2] + packet_sequence_number = packet_sequence_number + 1 + # Last packet will be subservice 131 to finish the append operation + header.append(packet_sequence_number >> 8) + header.append(0xFF & packet_sequence_number) + self.__number_of_append_packets += number_of_packets + header += data[number_of_packets * size_of_data_blocks:len(data)] + commands = PusTelecommand(service=23, subservice=130, ssc=init_ssc + packet_sequence_number, + app_data=header) + self.tc_queue.appendleft(commands.pack_command_tuple()) + + def __handle_finish_and_lock_packet_generation(self): + if self.__large_file: + last_command = generate_finish_append_to_file_srv23_131_packet( + filename=self.target_filename, repository_path=self.target_repository, + ssc=self.__current_ssc, lock_file=self.__lock_file) + else: + if self.__lock_file: + last_command = generate_lock_file_srv23_5_6_packet( + filename=self.target_filename, repository_path=self.target_repository, + object_id=self.object_id, lock=True, ssc=self.__current_ssc) + else: + self.__number_of_finish_packets = 0 + return + self.tc_queue.appendleft(last_command.pack_command_tuple()) \ No newline at end of file