33import shutil
44import stat
55import tempfile
6+ import threading
67from ftplib import FTP , all_errors as ftp_errors
78from datetime import datetime
89from pathlib import Path
10+ from typing import Optional
911
1012import paramiko
1113from apscheduler .schedulers .background import BackgroundScheduler
3840
app = Flask(__name__)
scheduler = BackgroundScheduler()

# backup_status maps a device key -> its latest status entry.  Entries are
# written from scheduler/worker threads and read from request handlers, so
# every access goes through backup_status_lock.
backup_status_lock = threading.Lock()
backup_status = {}
4145
4246
4347def load_devices ():
@@ -179,7 +183,26 @@ def ftp_download_tree(ftp: FTP, remote_dir: str, local_dir: Path):
179183 print (f"[FTP] Read failed: { remote_path } ({ e } )" )
180184
181185
182- def create_backup (device : dict ):
def _device_key(device: dict) -> str:
    """Return the stable identity key for *device*.

    Reuses the scheduler job id so that status entries and scheduled jobs
    share a single key.
    """
    return _job_id_for_device(device)
188+
189+
def set_backup_status(device: dict, state: str, detail: Optional[str] = None):
    """Record the latest backup state for *device* (thread-safe).

    Overwrites any previous entry; `updated_at` is stamped with the local
    time in ISO format so the browser can parse it directly.
    """
    key = _device_key(device)
    with backup_status_lock:
        backup_status[key] = {
            "state": state,
            "detail": detail or "",
            "updated_at": datetime.now().isoformat(),
        }
198+
199+
def get_backup_status(device: dict):
    """Return the most recent status entry for *device*, or None."""
    key = _device_key(device)
    with backup_status_lock:
        return backup_status.get(key)
203+
204+
205+ def create_backup (device : dict , status_callback = None ):
183206 label = device ["label" ]
184207 ip_address = device ["ip" ]
185208 username = device ["username" ]
@@ -188,6 +211,9 @@ def create_backup(device: dict):
188211 port_default = FTP_PORT_DEFAULT if protocol == "ftp" else SFTP_PORT_DEFAULT
189212 port = int (device .get ("port" ) or port_default )
190213
214+ if status_callback :
215+ status_callback (device , "connecting" , f"{ protocol .upper ()} session" )
216+
191217 timestamp = datetime .now ().strftime ("%Y-%m-%d_%H-%M-%S" )
192218 folder_name = f"{ label } -{ timestamp } "
193219
@@ -196,6 +222,8 @@ def create_backup(device: dict):
196222 temp_path .mkdir (parents = True , exist_ok = True )
197223
198224 directories = device .get ("paths" ) or PLC_DIRECTORIES
225+ if status_callback :
226+ status_callback (device , "downloading" , f"{ len (directories )} paths" )
199227 if protocol == "ftp" :
200228 ftp = _open_ftp (ip_address , username , password , port )
201229 try :
@@ -225,6 +253,9 @@ def create_backup(device: dict):
225253 finally :
226254 ssh .close ()
227255
256+ if status_callback :
257+ status_callback (device , "archiving" , "Creating zip archive" )
258+
228259 base_name = str ((BACKUP_OUTPUT_DIR / folder_name ).with_suffix ("" ))
229260 shutil .make_archive (base_name , "zip" , temp_path )
230261
@@ -234,33 +265,86 @@ def _job_id_for_device(device: dict) -> str:
234265 return f"backup-{ device ['label' ]} -{ device ['ip' ]} "
235266
236267
def _get_job_next_run_time(job):
    """Best-effort read of an APScheduler job's ``next_run_time``.

    Returns None when the job is missing, has no such attribute, or the
    attribute access fails for any reason.
    """
    if not job:
        return None
    try:
        return getattr(job, "next_run_time", None)
    except Exception:
        # Deliberately broad: a status read must never crash the caller.
        return None
275+
276+
def get_next_run_time_for_device(device: dict):
    """Return the device's next scheduled run as an ISO string, or None."""
    job = scheduler.get_job(_job_id_for_device(device))
    next_run = _get_job_next_run_time(job)
    # ISO format is easy for the browser to parse & display
    return next_run.isoformat() if next_run else None
284+
285+
def _parse_iso_datetime(value: Optional[str]):
    """Parse an ISO-8601 string into a datetime, or None on failure.

    Values come from the persisted device JSON (``next_run_at``), so they
    may be missing, empty, malformed, or — if the file was hand-edited —
    not a string at all.
    """
    if not value:
        return None
    try:
        return datetime.fromisoformat(value)
    except (TypeError, ValueError):
        # ValueError: malformed string; TypeError: truthy non-string value
        # (fromisoformat only accepts str) — either way treat as "no date".
        return None
293+
294+
def _update_saved_next_run_time(device: dict):
    """Persist the device's current scheduler next-run time to disk.

    No-op when the device is no longer present in the saved list (e.g. it
    was deleted while a backup was running).
    """
    next_run = get_next_run_time_for_device(device)
    target_key = _device_key(device)
    devices_list = load_devices()
    saved = next(
        (entry for entry in devices_list if _device_key(entry) == target_key),
        None,
    )
    if saved is not None:
        saved["next_run_at"] = next_run
        save_devices(devices_list)
307+
308+
def run_backup_and_record(device: dict):
    """Run one backup, mirroring progress and outcome into backup_status.

    Failures are re-raised (so APScheduler / thread logging still sees
    them); the persisted next-run time is refreshed either way.
    """
    set_backup_status(device, "starting", "Preparing backup")
    try:
        create_backup(device, status_callback=set_backup_status)
        set_backup_status(device, "completed", "Backup complete")
    except Exception as err:
        set_backup_status(device, "failed", str(err))
        raise
    finally:
        _update_saved_next_run_time(device)
243319
244320
def schedule_device(device: dict):
    """(Re)register the interval backup job for one device.

    Uses the persisted ``next_run_at`` (when still in the future) as the
    trigger's start_date so a process restart does not reset the schedule,
    then writes the job's actual next run time back onto *device*.
    Devices with an unknown/absent interval are left unscheduled.
    """
    seconds = INTERVAL_SECONDS.get(device.get("interval"))
    if not seconds:
        return

    start_date = _parse_iso_datetime(device.get("next_run_at"))
    if start_date:
        # Compare with matching awareness: the stored value round-trips
        # APScheduler's next_run_time, which is typically tz-aware, and
        # comparing aware vs naive datetimes raises TypeError.
        now = datetime.now(start_date.tzinfo) if start_date.tzinfo else datetime.now()
        if start_date <= now:
            # Stale start time — let APScheduler pick the next slot.
            start_date = None

    job = scheduler.add_job(
        run_backup_and_record,
        trigger=IntervalTrigger(seconds=seconds, start_date=start_date),
        args=[device],
        id=_job_id_for_device(device),
        replace_existing=True,
    )
    next_run_time = _get_job_next_run_time(job)
    if next_run_time:
        device["next_run_at"] = next_run_time.isoformat()
258340
259341
def refresh_schedule():
    """Rebuild every backup job from the saved device list.

    schedule_device() updates each device's ``next_run_at`` in place, so
    the list is saved again afterwards to keep the file in sync with the
    live scheduler.
    """
    scheduler.remove_all_jobs()
    devices_list = load_devices()
    for entry in devices_list:
        schedule_device(entry)
    save_devices(devices_list)
264348
265349
266350@app .route ("/" )
@@ -273,12 +357,16 @@ def devices():
273357 if request .method == "GET" :
274358 devices_list = load_devices ()
275359 for d in devices_list :
276- d ["next_backup" ] = get_next_run_time_for_device (d )
360+ d ["next_backup" ] = get_next_run_time_for_device (d ) or d .get ("next_run_at" )
361+ d ["backup_status" ] = get_backup_status (d )
277362 return jsonify (devices_list )
278363
279364 payload = request .get_json (silent = True ) or {}
280365 devices_payload = payload .get ("devices" , [])
281366 cleaned_devices = []
367+ existing_devices = {
368+ _device_key (device ): device for device in load_devices ()
369+ }
282370
283371 for device in devices_payload :
284372 label = str (device .get ("label" , "" )).strip ()
@@ -302,18 +390,22 @@ def devices():
302390 paths = device .get ("paths" ) or []
303391 cleaned_paths = [str (path ).strip () for path in paths if str (path ).strip ()]
304392
305- cleaned_devices .append (
306- {
307- "label" : label ,
308- "ip" : ip_address ,
309- "interval" : interval ,
310- "username" : username ,
311- "password" : password ,
312- "protocol" : protocol ,
313- "port" : port ,
314- "paths" : cleaned_paths ,
315- }
316- )
393+ cleaned_device = {
394+ "label" : label ,
395+ "ip" : ip_address ,
396+ "interval" : interval ,
397+ "username" : username ,
398+ "password" : password ,
399+ "protocol" : protocol ,
400+ "port" : port ,
401+ "paths" : cleaned_paths ,
402+ }
403+
404+ existing = existing_devices .get (_device_key (cleaned_device ))
405+ if existing and existing .get ("interval" ) == interval :
406+ cleaned_device ["next_run_at" ] = existing .get ("next_run_at" )
407+
408+ cleaned_devices .append (cleaned_device )
317409
318410 save_devices (cleaned_devices )
319411 refresh_schedule ()
@@ -336,7 +428,10 @@ def backup_device(device_index: int):
336428 devices_list = load_devices ()
337429 if device_index < 0 or device_index >= len (devices_list ):
338430 return jsonify ({"error" : "Device not found" }), 404
339- create_backup (devices_list [device_index ])
431+ device = devices_list [device_index ]
432+ set_backup_status (device , "queued" , "Backup queued" )
433+ thread = threading .Thread (target = run_backup_and_record , args = (device ,), daemon = True )
434+ thread .start ()
340435 return jsonify ({"status" : "backup_started" })
341436
342437
0 commit comments