Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .github/workflows/smoke.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Smoke-test workflow: on every pull request, install runtime dependencies
# and run the clean-base verification script to catch import/startup breakage.
name: smoke

on:
  pull_request:

jobs:
  smoke:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install runtime dependencies
        run: python -m pip install -r requirements.txt
      - name: Run clean-base verification
        run: python scripts/verify_clean_base.py
21 changes: 21 additions & 0 deletions API/Classes/Base/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,23 @@ def validate_path(base_dir, user_input):

SYSTEM = platform.system()


def _default_runtime_dir():
    """Return the base directory for MUIOGO runtime state (logs, etc.).

    Resolution order:

    1. The ``MUIOGO_RUNTIME_DIR`` environment variable, if set. A relative
       value is anchored at the user's home directory so the resulting
       runtime/log paths never depend on the current working directory.
    2. Platform convention: ``%LOCALAPPDATA%`` on Windows,
       ``~/Library/Logs`` on macOS, and ``$XDG_STATE_HOME``
       (default ``~/.local/state``) elsewhere, each suffixed with
       a ``MUIOGO`` directory.
    """
    override = os.environ.get("MUIOGO_RUNTIME_DIR", "").strip()
    if override:
        override_path = Path(override).expanduser()
        # Anchor relative overrides at $HOME: returning them as-is would
        # make the runtime/log locations cwd-dependent.
        if not override_path.is_absolute():
            override_path = Path.home() / override_path
        return override_path

    if SYSTEM == "Windows":
        base_dir = Path(os.environ.get("LOCALAPPDATA", str(Path.home())))
    elif SYSTEM == "Darwin":
        base_dir = Path.home() / "Library" / "Logs"
    else:
        # XDG Base Directory spec: state data belongs under $XDG_STATE_HOME.
        base_dir = Path(
            os.environ.get("XDG_STATE_HOME", str(Path.home() / ".local" / "state"))
        )

    return base_dir / "MUIOGO"

# S3_BUCKET = os.environ.get("S3_BUCKET")
# S3_KEY = os.environ.get("S3_KEY")
# S3_SECRET = os.environ.get("S3_SECRET")
Expand All @@ -62,6 +79,10 @@ def validate_path(base_dir, user_input):
CLASS_FOLDER = WEBAPP_PATH / "Classes"
SOLVERs_FOLDER = WEBAPP_PATH / "SOLVERs"
EXTRACT_FOLDER = BASE_DIR
RUNTIME_DIR = _default_runtime_dir()
LOG_DIR = RUNTIME_DIR / "logs"
APP_LOG_FILE = LOG_DIR / "app.log"
SOLVER_MODEL_FILE = SOLVERs_FOLDER / "model.v.5.4.txt"

# Ensure DataStorage exists
DATA_STORAGE.mkdir(parents=True, exist_ok=True)
Expand Down
191 changes: 126 additions & 65 deletions API/Classes/Case/DataFileClass.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from pathlib import Path
import logging
import pandas as pd
import traceback
import json, shutil, os, time, subprocess
Expand All @@ -9,6 +10,11 @@
from Classes.Case.OsemosysClass import Osemosys
from Classes.Base.FileClass import File
from Classes.Base.CustomThreadClass import CustomThread


logger = logging.getLogger(__name__)


class DataFile(Osemosys):
# def __init__(self, case):
# Osemosys.__init__(self, case)
Expand Down Expand Up @@ -790,19 +796,16 @@ def generateDatafile( self, caserunname ):

def createCaseRun(self, caserunname, data):
try:
caseRunPath = Path(Config.DATA_STORAGE,self.case,'res', caserunname)
csvPath = Path(Config.DATA_STORAGE,self.case,'res', caserunname, 'csv')
resDataPath = Path(Config.DATA_STORAGE,self.case,'view', 'resData.json')

if not os.path.exists(caseRunPath):
os.makedirs(caseRunPath)
os.makedirs(csvPath)
if not os.path.exists(resDataPath):
File.writeFile( data, resDataPath)
else:
resData = File.readFile(resDataPath)
resData['osy-cases'].append(data)
File.writeFile( resData, resDataPath)
caseRunPath = self.resultsPath / caserunname
csvPath = caseRunPath / "csv"

if not caseRunPath.exists():
caseRunPath.mkdir(parents=True, exist_ok=False)
csvPath.mkdir(parents=True, exist_ok=True)
case_runs = self._case_run_entries()
case_runs.append(data)
self.resDataPath.parent.mkdir(parents=True, exist_ok=True)
File.writeFile(self.resData, self.resDataPath)
response = {
"message": "You have created a case run!",
"status_code": "success"
Expand All @@ -822,16 +825,15 @@ def createCaseRun(self, caserunname, data):

def deleteScenarioCaseRuns(self, scenarioId):
try:
resData = File.readFile(self.resDataPath)
cases = resData['osy-cases']
cases = self._case_run_entries()

for cs in cases:
for sc in cs['Scenarios']:
if sc['ScenarioId'] == scenarioId:
cs['Scenarios'].remove(sc)


File.writeFile(resData, self.resDataPath)
File.writeFile(self.resData, self.resDataPath)
response = {
"message": "You have deleted scenario from caseruns!",
"status_code": "success"
Expand All @@ -847,41 +849,36 @@ def deleteScenarioCaseRuns(self, scenarioId):

def updateCaseRun(self, caserunname, oldcaserunname, data):
try:
caseRunPath = Path(Config.DATA_STORAGE,self.case,'res', oldcaserunname)
newcaseRunPath = Path(Config.DATA_STORAGE,self.case,'res', caserunname)
csvPath = Path(Config.DATA_STORAGE,self.case,'res', caserunname, 'csv')
resDataPath = Path(Config.DATA_STORAGE,self.case,'view', 'resData.json')
caseRunPath = self.resultsPath / oldcaserunname
newcaseRunPath = self.resultsPath / caserunname
csvPath = newcaseRunPath / "csv"

if not os.path.exists(newcaseRunPath):
if not newcaseRunPath.exists():
os.rename(caseRunPath, newcaseRunPath)

if not os.path.exists(csvPath):
os.makedirs(csvPath)
if not csvPath.exists():
csvPath.mkdir(parents=True, exist_ok=True)

resData = File.readFile(resDataPath)

resdata = resData['osy-cases']
resdata = self._case_run_entries()
for i, case in enumerate(resdata):
if case['Case'] == oldcaserunname:
resData['osy-cases'][i] = data
self.resData['osy-cases'][i] = data

File.writeFile( resData, resDataPath)
File.writeFile(self.resData, self.resDataPath)
response = {
"message": "You have updated a case run!",
"status_code": "success"
}
elif os.path.exists(newcaseRunPath) and caserunname==oldcaserunname:
if not os.path.exists(csvPath):
os.makedirs(csvPath)

resData = File.readFile(resDataPath)
elif newcaseRunPath.exists() and caserunname == oldcaserunname:
if not csvPath.exists():
csvPath.mkdir(parents=True, exist_ok=True)

resdata = resData['osy-cases']
resdata = self._case_run_entries()
for i, case in enumerate(resdata):
if case['Case'] == oldcaserunname:
resData['osy-cases'][i] = data
self.resData['osy-cases'][i] = data

File.writeFile( resData, resDataPath)
File.writeFile(self.resData, self.resDataPath)
response = {
"message": "You have updated a case run!",
"status_code": "success"
Expand All @@ -899,6 +896,24 @@ def updateCaseRun(self, caserunname, oldcaserunname, data):
except OSError:
raise OSError

def _case_run_entries(self):
    """Return the mutable list of case-run entries from ``self.resData``.

    Repairs the structure in place when it is missing or malformed:
    a non-dict ``resData`` is replaced with ``{"osy-cases": []}``, and a
    non-list ``"osy-cases"`` value is replaced with a fresh empty list.
    The returned list is the same object stored in ``resData``, so
    callers may append to it directly.
    """
    data = self.resData
    if not isinstance(data, dict):
        data = {"osy-cases": []}
        self.resData = data

    entries = data.get("osy-cases")
    if isinstance(entries, list):
        return entries

    entries = []
    data["osy-cases"] = entries
    return entries

def _default_view_data(self):
    """Build an empty view-definitions payload.

    Produces ``{"osy-views": {...}}`` mapping every variable id declared
    in ``self.VARIABLES`` (across all groups) to an empty list.
    """
    empty_views = {
        entry["id"]: []
        for variable_group in self.VARIABLES.values()
        for entry in variable_group
    }
    return {"osy-views": empty_views}

def deleteCaseResultsJSON(self, caserunname):
try:
csvPath = Path(self.resultsPath, caserunname, "csv")
Expand All @@ -923,18 +938,24 @@ def deleteCaseResultsJSON(self, caserunname):

def deleteCaseRun(self, caserunname, resultsOnly):
try:
#caseRunPath = Path(Config.DATA_STORAGE,self.case,'res', caserunname)
#resDataPath = Path(Config.DATA_STORAGE,self.case,'view', 'resData.json')

caseRunPath = self.resultsPath / caserunname
if caseRunPath.exists():
if not resultsOnly:
shutil.rmtree(caseRunPath)
else:
for item in caseRunPath.iterdir():
if item.is_file() or item.is_symlink():
item.unlink()
elif item.is_dir():
shutil.rmtree(item)

if not resultsOnly:
resData = File.readFile(self.resDataPath)

for obj in resData['osy-cases']:
if obj['Case'] == caserunname:
resData['osy-cases'].remove(obj)

File.writeFile( resData, self.resDataPath)
self.resData["osy-cases"] = [
case
for case in self._case_run_entries()
if case.get("Case") != caserunname
]
File.writeFile(self.resData, self.resDataPath)

#delete from view folder
for group, array in self.VARIABLES.items():
Expand All @@ -960,8 +981,9 @@ def deleteCaseRun(self, caserunname, resultsOnly):
except OSError:
raise OSError

def cleanUp(self):
def _legacy_cleanUp_unused(self):
try:
return self.cleanUp()

#delete from view folder
# moramo izbrisati res i view folder ostaviti samo resData.json i viewDefinitions.json
Expand All @@ -970,11 +992,10 @@ def cleanUp(self):
# self.viewFolderPath = Path(Config.DATA_STORAGE,case,'view')
# folder_path = "C:/putanja/do/foldera"

for caserunname in os.listdir( self.resultsPath):
caserunname_path = os.path.join(self.resultsPath, caserunname)
# Skip files such as .DS_Store that can appear on macOS.
if not os.path.isdir(caserunname_path):
continue
if self.resultsPath.exists() and self.resultsPath.is_dir():
for case_run_path in self.resultsPath.iterdir():
if not case_run_path.is_dir():
continue
for carerunData in os.listdir( caserunname_path):
Comment on lines 984 to 999
Copy link

Copilot AI Apr 2, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_legacy_cleanUp_unused currently immediately calls return self.cleanUp(), leaving the rest of the method as dead code. That dead code also references undefined names (e.g., caserunname_path) and has confusing indentation, which makes future maintenance risky if someone tries to resurrect it. Suggest removing this method entirely (or keep a minimal comment in git history) to avoid carrying broken unreachable code.

Copilot uses AI. Check for mistakes.
file_path = os.path.join(caserunname_path, carerunData)
try:
Expand Down Expand Up @@ -1022,6 +1043,54 @@ def cleanUp(self):
raise IndexError
except OSError:
raise OSError

def cleanUp(self):
    """Recycle result artifacts for this case.

    Steps, in order:

    1. Empty every case-run directory under ``self.resultsPath``
       (files, symlinks and sub-directories), logging and continuing on
       per-item ``OSError`` so one bad entry does not abort the cleanup.
    2. Remove everything from ``self.viewFolderPath`` except the two
       canonical metadata files ``resData.json`` and
       ``viewDefinitions.json``.
    3. Recreate ``viewDefinitions.json`` from defaults when missing.
    4. Ensure an (empty) directory exists for every case run recorded in
       ``resData.json``.

    Returns a success-message dict. Raises ``IndexError`` on IO/index
    failures (kept as-is for caller compatibility), chained to the
    original exception for debuggability.
    """
    try:
        if self.resultsPath.exists() and self.resultsPath.is_dir():
            for case_run_path in self.resultsPath.iterdir():
                if not case_run_path.is_dir():
                    # Skip stray files such as .DS_Store.
                    continue
                for result_item in case_run_path.iterdir():
                    try:
                        if result_item.is_file() or result_item.is_symlink():
                            result_item.unlink()
                        elif result_item.is_dir():
                            shutil.rmtree(result_item)
                    except OSError as exc:
                        # Best-effort cleanup: log and keep going.
                        logger.warning("Failed to clean result item %s: %s", result_item, exc)

        self.viewFolderPath.mkdir(parents=True, exist_ok=True)
        for item in self.viewFolderPath.iterdir():
            # Preserve the two canonical metadata files.
            if item.name in {"resData.json", "viewDefinitions.json"}:
                continue
            try:
                if item.is_file() or item.is_symlink():
                    item.unlink()
                elif item.is_dir():
                    shutil.rmtree(item)
            except OSError as exc:
                logger.warning("Failed to clean view item %s: %s", item, exc)

        viewDefPath = self.viewFolderPath / "viewDefinitions.json"
        if not viewDefPath.exists():
            File.writeFile(self._default_view_data(), viewDefPath)

        self.resultsPath.mkdir(parents=True, exist_ok=True)
        for case_run in self._case_run_entries():
            case_name = case_run.get("Case")
            if case_name:
                (self.resultsPath / case_name).mkdir(parents=True, exist_ok=True)

        response = {
            "message": "You have recycled results!",
            "status_code": "success"
        }

        return response
    except (IOError, IndexError) as exc:
        # NOTE: IOError is an alias of OSError in Python 3, so this clause
        # also captures OSError and the branch below is unreachable; both
        # are kept so callers continue to see IndexError as before. Chain
        # the cause instead of discarding it.
        raise IndexError from exc
    except OSError:
        raise

def saveView(self, data, param):
try:
Expand Down Expand Up @@ -1069,20 +1138,12 @@ def updateViews(self, data, param):

def readDataFile( self, caserunname ):
try:

#f = open(self.dataFile, mode="r")
dataFilePath = Path(Config.DATA_STORAGE, self.case, 'res',caserunname,'data.txt')
if os.path.exists(dataFilePath):
f = open(dataFilePath, mode="r", encoding='utf-8-sig')
data = f.read()
f.close
else:
data = None
dataFilePath = self.resultsPath / caserunname / "data.txt"
if not dataFilePath.exists():
return None

# f = open(self.dataFile, 'r')
# file_contents = f.read()
# f.close()
return data
with dataFilePath.open(mode="r", encoding="utf-8-sig") as handle:
return handle.read()
except(IOError, IndexError):
raise IndexError
except OSError:
Expand Down
Loading
Loading