-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathret_script.R
More file actions
58 lines (42 loc) · 1.16 KB
/
ret_script.R
File metadata and controls
58 lines (42 loc) · 1.16 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
# --- Data preparation: drive the Python MCMC graph model from R ----
library(reticulate)

# Import the project's Python module (model/graph_mcmc.py).
# Bound to `graph_mcmc` rather than `model`: later in this script a Keras
# model is assigned to `model`, which would silently shadow the module handle.
print("Importing python module...")
graph_mcmc <- import("model.graph_mcmc")

# Construct an empty graph object, then populate it from a GML file.
print("Instantiating graph...")
graph <- graph_mcmc$Graph_MCMC(vector())
print("Reading from file...")
graph$read_from_file("generated.gml")

# Infer a block partition, constraining the block count to [2, 5].
print("Detecting partition...")
graph$partition(B_min = 2, B_max = 5)

# Run 100 MCMC iterations over the posterior.
# B is presumably the number of blocks — it is used as the output width of
# the classifier below; confirm against Graph_MCMC.mcmc()'s return value.
print("Sampling from posterior...")
B <- graph$mcmc(100, verbose = TRUE)

# Training data: feature matrix X and posterior target matrix Y.
print("Generating features...")
feature_names <- graph$get_feature_names()
X <- graph$generate_feature_matrix()
Y <- graph$generate_posterior()
cat("X dimension: ", dim(X), "\n")
cat("Y dimension: ", dim(Y), "\n")

# D = number of input features (columns of X), the classifier's input size.
D <- dim(X)[2]
# --- Classifier: single softmax layer mapping D features to B blocks ----
library(keras)

# Keras layer functions mutate the sequential model in place and return it,
# so the definition can be written as one chained assignment.
print("Defining model...")
model <- keras_model_sequential() %>%
  layer_dense(
    units = B,
    input_shape = c(D),
    activation = "softmax",
    name = "BlockProbs"
  )

# Optimiser, loss, and reported metrics.
model %>% compile(
  optimizer = "adam",
  loss = "categorical_crossentropy",
  metrics = c("accuracy")
)

# Fit on the full data set, then evaluate on that same data.
model %>% fit(X, Y, epochs = 10, verbose = 2)
score <- model %>% evaluate(X, Y, verbose = 0)
cat("Training loss:", score$loss, "\n")

# Class-probability predictions for every row of X.
predictions <- model %>% predict(X)