-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathflexbuffContentsDatabase.py
More file actions
executable file
·83 lines (76 loc) · 3.83 KB
/
flexbuffContentsDatabase.py
File metadata and controls
executable file
·83 lines (76 loc) · 3.83 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
#!/usr/bin/python3
from paramiko import SSHClient
from scp import SCPClient
import MySQLdb as mariadb
import os
import sys
dirname = os.path.dirname(__file__)
def flexbuffContentsPull(flexbuff_tag): # function for scp of disk_used.txt file from a given flexbuff machine to cwd.
    """Fetch /tmp/disk_used.txt from <flexbuff_tag>.phys.utas.edu.au via SCP.

    The file is saved next to this script as disk_used_<flexbuff_tag>.csv.
    Authentication relies on system host keys / SSH agent for user 'observer'.

    Raises:
        Whatever paramiko/scp raise on connection or transfer failure;
        callers (see main()) catch and report per-machine.
    """
    flexbuff_tag = str(flexbuff_tag)
    ssh = SSHClient()
    ssh.load_system_host_keys()
    ssh.connect(hostname=flexbuff_tag + '.phys.utas.edu.au',
                username='observer',
                #password=''
                )
    # BUG FIX: the original leaked the SSH socket (and SCP channel) whenever
    # the transfer raised; ensure both are released on every path.
    try:
        scp = SCPClient(ssh.get_transport())
        try:
            scp.get('/tmp/disk_used.txt', dirname + '/disk_used_' + flexbuff_tag + '.csv')
        finally:
            scp.close()
    finally:
        ssh.close()
def updateFlexbuffContents(flexbuff_tag, db_name): # add data from disk_used file into the SQL database.
    """Load disk_used_<flexbuff_tag>.csv into table <flexbuff_tag> of db_name.

    Uses a swap scheme so the live table is never empty mid-update:
    load into <tag>_NEW, rename current table to <tag>_OLD, then rename
    <tag>_NEW into place. This is intentional best practice: the old data
    is only displaced after the new data has loaded without errors.

    Raises:
        ValueError: if either identifier contains characters unsafe to
            interpolate into SQL.
        MySQLdb errors: on connection/query failure (caller reports).
    """
    flexbuff_tag = str(flexbuff_tag)
    db_name = str(db_name)
    # SECURITY: table/database names cannot be bound as query parameters, so
    # they are interpolated into the SQL text below. Restrict them to plain
    # identifier characters first to prevent SQL injection.
    for ident in (flexbuff_tag, db_name):
        if not ident or not ident.replace('_', '').isalnum():
            raise ValueError('unsafe SQL identifier: ' + repr(ident))
    # connect to the database (local_infile enables LOAD DATA LOCAL INFILE)
    conn = mariadb.connect(user='auscope', passwd='password', db=db_name, local_infile=1)
    # BUG FIX: the original never closed the connection; guarantee it here.
    try:
        cursor = conn.cursor()
        # Create a table for the new inbound data named flexbuffXX_NEW
        query = "CREATE TABLE IF NOT EXISTS " + flexbuff_tag + "_NEW (ExpID VARCHAR(10) NOT NULL PRIMARY KEY, DataUsage BIGINT, TimeStamp DATETIME);"
        cursor.execute(query)
        conn.commit()
        # Create the 'current' flexbuffXX table; this only creates on a first run
        query = "CREATE TABLE IF NOT EXISTS " + flexbuff_tag + " (ExpID VARCHAR(10) NOT NULL PRIMARY KEY, DataUsage BIGINT, TimeStamp DATETIME);"
        cursor.execute(query)
        conn.commit()
        # Load CSV data into the newly created flexbuffXX_NEW table
        query = "LOAD DATA LOCAL INFILE '" + dirname + "/disk_used_" + flexbuff_tag + ".csv' REPLACE INTO TABLE " + flexbuff_tag + "_NEW FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' (ExpID, DataUsage, @TimeStamp) SET TimeStamp = STR_TO_DATE(@TimeStamp,'%d %b %Y');"
        cursor.execute(query)
        conn.commit()
        # Drop any existing flexbuffXX_OLD table (about to be replaced)
        query = "DROP TABLE IF EXISTS " + flexbuff_tag + "_OLD;"
        cursor.execute(query)
        conn.commit()
        # Rename the current flexbuffXX table aside to flexbuffXX_OLD
        query = "ALTER TABLE " + flexbuff_tag + " RENAME TO " + flexbuff_tag + "_OLD;"
        cursor.execute(query)
        conn.commit()
        # Rename the freshly-loaded flexbuffXX_NEW into place as flexbuffXX
        query = "ALTER TABLE " + flexbuff_tag + "_NEW RENAME TO " + flexbuff_tag + ";"
        cursor.execute(query)
        conn.commit()
    finally:
        conn.close()
def main(db_name):
    """Ensure db_name exists, then pull and ingest disk-usage data for every
    known flexbuff machine.

    Failures on one machine are reported and do not stop the others.
    """
    db_name = str(db_name)
    # create/connect to mariaDB flexbuff database
    print('Connecting to mariaDB database ' + db_name + '.')
    conn = mariadb.connect(user='auscope', passwd='password')
    # BUG FIX: close the bootstrap connection (original leaked it).
    try:
        cursor = conn.cursor()
        query = "CREATE DATABASE IF NOT EXISTS " + db_name + ";"
        cursor.execute(query)
        conn.commit()
    finally:
        conn.close()
    # tags for the relevant flexbuff machines
    flexbuff_id = ['flexbuffhb', 'flexbuffke', 'flexbuffyg', 'flexbuffcd']
    # for each flexbuff machine, pull relevant disk usage data and add to the database
    for flexbuff in flexbuff_id:
        print('Attempting to pull disk contents data file from ' + flexbuff + '.')
        try:
            flexbuffContentsPull(flexbuff)
        except Exception:
            # BUG FIX: the original fell through and loaded a stale/missing
            # CSV even when the pull failed; skip this machine instead.
            print("No disk usage data on " + flexbuff + ".")
            continue
        print('Adding disk contents to the ' + flexbuff + ' table of the ' + db_name + ' mariaDB database.')
        try:
            updateFlexbuffContents(flexbuff, db_name)
        except Exception:
            # BUG FIX: message previously read "...contents to<db> ." with a
            # missing space and stray space before the period.
            print("Failed to add " + flexbuff + " contents to " + db_name + ".")
if __name__ == '__main__':
    # flexbuffContentsDatabase.py executed as a script: the single required
    # CLI argument is the target database name.
    # BUG FIX: the original raised a bare IndexError when run without an
    # argument; print a usage message instead.
    if len(sys.argv) < 2:
        sys.exit('Usage: ' + sys.argv[0] + ' <db_name>')
    main(sys.argv[1])