-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy path: model_config.json
More file actions
108 lines (108 loc) · 1.34 KB
/
model_config.json
File metadata and controls
108 lines (108 loc) · 1.34 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
{
  "architectures": [
    "ORCADLModel"
  ],
  "atmo_dims": 2,
  "atmo_embed_dims": 64,
  "attn_drop": 0.0,
  "drop_path_rate": 0.2,
  "drop_rate": 0.0,
  "embed_dim": 96,
  "enc_depths": [2, 2, 2],
  "enc_heads": [3, 6, 12],
  "hidden_act": "gelu",
  "in_chans": [16, 16, 1, 16, 16, 1],
  "in_steps": 1,
  "input_shape": [128, 360],
  "is_moe": true,
  "is_moe_atmo": false,
  "is_moe_decoder": true,
  "is_moe_encoder": true,
  "lat_space": [-63.5, 63.5, 128],
  "layer_norm_eps": 1e-05,
  "lg_depths": [2, 2],
  "lg_heads": [12, 12],
  "lg_hidden_dim": 1152,
  "lon_space": [0.5, 359.5, 360],
  "loss_type": "rmse",
  "mask_patch_size": [8, 12],
  "mask_ratio": 0.8,
  "max_t": 6,
  "mlp_ratio": 4.0,
  "out_chans": [16, 16, 1, 16, 16, 1],
  "patch_norm": true,
  "patch_size": [2, 3],
  "predict_time_steps": 1,
  "qk_scale": null,
  "qkv_bias": true,
  "torch_dtype": "float32",
  "transformers_version": "4.28.1",
  "use_absolute_embeddings": true,
  "use_land_mask": true,
  "use_mask_token": false,
  "var_index": [0, 1, 0, 2, 3, 1],
  "var_list": ["so", "thetao", "tos", "uo", "vo", "zos"],
  "window_size": [8, 15]
}