diff --git a/docker-compose.yml b/docker-compose.yml
index 00d4e10..60f80ff 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,13 +3,13 @@ services:
   mosquitto:
     container_name: mosquitto_broker
     build:
-      context: ./src/mosquitto_broker
+      context: ./src/server/mosquitto_broker
       dockerfile: ./dockerfiles/Dockerfile
     hostname: mosquitto_broker
     volumes:
-      - ./src/mosquitto_broker/conf:/mosquitto/config
-      - ./src/mosquitto_broker/log:/mosquitto/log
-      - ./src/mosquitto_broker/data:/mosquitto/data
+      - ./src/server/mosquitto_broker/conf:/mosquitto/config
+      - ./src/server/mosquitto_broker/log:/mosquitto/log
+      - ./src/server/mosquitto_broker/data:/mosquitto/data
     restart: always
     ports:
       - "1883:1883"
diff --git a/src/server/commons.py b/src/server/commons.py
index 89e760d..14ef337 100644
--- a/src/server/commons.py
+++ b/src/server/commons.py
@@ -1,20 +1,31 @@
+from pathlib import Path
 import struct
 
 import numpy as np
 from tensorflow.keras.preprocessing.image import load_img, img_to_array
 
 
+BASE_DIR = Path(__file__).resolve().parent
+
+
 class OffloadingDataFiles:
-    data_file_path_device: str = "../device_inference_times.json"
-    data_file_path_edge: str = "../edge_inference_times.json"
-    data_file_path_sizes: str = "../layer_sizes.json"
-    evaluation_file_path: str = "../evaluations/evaluations.csv"
-    web_file_path: str = "../evaluations/web.csv"
+    data_file_path_device: str = str(BASE_DIR / "device_inference_times.json")
+    data_file_path_edge: str = str(BASE_DIR / "edge_inference_times.json")
+    data_file_path_sizes: str = str(BASE_DIR / "layer_sizes.json")
+
+
+class EvaluationFiles:
+    evaluation_file_path: str = str(BASE_DIR / "evaluations/evaluations.csv")
+    web_file_path: str = str(BASE_DIR / "evaluations/web.csv")
+
+
+class ModelFiles:
+    model_save_path: str = str(BASE_DIR / "models")
 
 
 class InputDataFiles:
-    test_data_file_path = "src/server/models/test/test_model/pred_data/input_data.png"  # Path to test image
-    input_data_file_path = "../input_data.png"  # Input image save path
+    test_data_file_path: str = str(BASE_DIR / "models/test/test_model/pred_data/input_data.png")  # Path to test image
+    input_data_file_path: str = str(BASE_DIR / "input_data.png")  # Input image save path
 
 
 class InputData:
diff --git a/src/server/models/model_manager.py b/src/server/models/model_manager.py
index 7460a5f..fddf2fb 100644
--- a/src/server/models/model_manager.py
+++ b/src/server/models/model_manager.py
@@ -5,6 +5,7 @@ import tensorflow as tf
 
 from server.commons import OffloadingDataFiles
+from server.commons import ModelFiles
 from server.logger.log import logger
 from server.models.model_manager_config import ModelManagerConfig
 
 
@@ -66,7 +67,7 @@ def load_model(self, model_path: str = ModelManagerConfig.MODEL_PATH):
         logger.debug(f"Loading model from path: {model_path}")
         try:
             self.model_path = model_path
-            self.model = tf.keras.models.load_model(f'src/server/models/test/{model_path}')
+            self.model = tf.keras.models.load_model(f'{ModelFiles.model_save_path}/test/{model_path}')
             self.num_layers = len(self.model.layers)
         except Exception as e:
             print(f"Error loading model: {e}")
@@ -127,7 +128,7 @@ def predict_single_layer(self, layer_id: int, layer_offset: int, layer_input_dat
 
         # initialize interepreter with layer tflite model
         interpreter = tf.lite.Interpreter(
-            model_path=f'src/server/models/test/test_model/layers/tflite/submodel_{layer_id - layer_offset}.tflite')
+            model_path=f'{ModelFiles.model_save_path}/test/{ModelManagerConfig.MODEL_DIR_PATH}/layers/tflite/submodel_{layer_id - layer_offset}.tflite')
         interpreter.allocate_tensors()
         input_details = interpreter.get_input_details()
         output_details = interpreter.get_output_details()
@@ -153,6 +154,6 @@ def save_inference_times(self, save_path: str | None = None):
             self.save_path = save_path
         self.save_path = self.save_path[:-1] if self.save_path[-1] == "/" else self.save_path
         inference_times = self.inference_times
-        with open(f"{self.save_path}/{OffloadingDataFiles.data_file_path_edge}", "w") as f:
+        with open(OffloadingDataFiles.data_file_path_edge, "w") as f:
             json.dump(inference_times, f, indent=4)
         logger.debug(f"Inference times saved")
diff --git a/src/server/mqtt_client/mqtt_client.py b/src/server/mqtt_client/mqtt_client.py
index 376a81b..636efe1 100644
--- a/src/server/mqtt_client/mqtt_client.py
+++ b/src/server/mqtt_client/mqtt_client.py
@@ -13,6 +13,7 @@ import queue
 
 from server.commons import OffloadingDataFiles
+from server.commons import EvaluationFiles
 from server.commons import InputData
 from server.commons import InputDataFiles
 from server.logger.log import logger
 
@@ -143,7 +144,7 @@ def handle_message_task(self, message, received_timestamp):
                 message_content="InputImage",
                 timestamp=None,
             )
-            MqttMessageData.save_to_file(OffloadingDataFiles.evaluation_file_path, message_data.to_dict())
+            MqttMessageData.save_to_file(EvaluationFiles.evaluation_file_path, message_data.to_dict())
             logger.debug("Input image saved")
             return
 
@@ -162,7 +163,7 @@ def handle_message_task(self, message, received_timestamp):
         # Extend message data
         message_data = self.extend_message_data(message_data, received_timestamp, message.payload)
         # Save message data to file
-        MqttMessageData.save_to_file(OffloadingDataFiles.evaluation_file_path, message_data.to_dict())
+        MqttMessageData.save_to_file(EvaluationFiles.evaluation_file_path, message_data.to_dict())
 
         # run offloading algorithm and ask for prediction after the device sends the registration message
         if message_data.topic == Topics.registration.value:
@@ -195,7 +196,7 @@ def handle_message_task(self, message, received_timestamp):
             # finish inference
             prediction = Edge.run_inference(message_data.offloading_layer_index, np.array(message_data.layer_output, dtype=np.float32))
             logger.debug(f"Prediction: {prediction.tolist()}")
-            MqttMessageData.save_to_file(OffloadingDataFiles.web_file_path, message_data.to_dict())
+            MqttMessageData.save_to_file(EvaluationFiles.web_file_path, message_data.to_dict())
             # run offloading algorithm
             offloading_algo = OffloadingAlgo(
                 avg_speed=message_data.avg_speed,
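
Reviewer note: a minimal sketch of the path-resolution idea this patch adopts in src/server/commons.py, assuming commons.py lives at src/server/ in the repository checkout; the variable names below are illustrative, not part of the patch. Because BASE_DIR is derived from __file__, the data and evaluation file paths no longer depend on the process working directory, which is why the self.save_path prefix could be dropped in model_manager.py.

from pathlib import Path

# BASE_DIR resolves to the directory containing commons.py
# (e.g. <repo>/src/server), regardless of where the interpreter was started.
BASE_DIR = Path(__file__).resolve().parent

# Paths anchored to BASE_DIR stay valid whether the server runs from the repo
# root, from src/, or inside a container image.
edge_times_path = str(BASE_DIR / "edge_inference_times.json")
models_dir = str(BASE_DIR / "models")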