Skip to content

Commit 8bcd4b3

Browse files
authored
Merge pull request #50 from JGCRI/demeter_1.3.1
Updates to demeter for v2
2 parents f4ac94d + 2d632a8 commit 8bcd4b3

File tree

9 files changed

+406
-270
lines changed

9 files changed

+406
-270
lines changed

.github/workflows/build.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ jobs:
1212

1313
env:
1414
OS: ${{ matrix.os }}
15-
PYTHON: '3.9'
15+
PYTHON: '3.10.4'
1616

1717
steps:
1818

@@ -21,7 +21,7 @@ jobs:
2121
- name: Set up Python
2222
uses: actions/setup-python@master
2323
with:
24-
python-version: 3.9
24+
python-version: 3.10.4
2525

2626
- name: Install dependencies
2727
run: |
@@ -30,6 +30,6 @@ jobs:
3030
3131
- name: Test and generate coverage report
3232
run: |
33-
python -m pip install pytest
33+
python -m pip install pytest==6.2.5
3434
python -m pip install pytest-cov
3535
pytest --cov=./ --cov-report=xml

demeter/change/expansion.py

Lines changed: 33 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -15,21 +15,22 @@
1515
from multiprocessing.dummy import Pool as ThreadPool
1616
from itertools import repeat
1717

18-
def extense_parallel_helper(regix_metix,log, c, allregnumber, allregmet, spat_ludataharm, spat_region, spat_met, kernel_vector, cons_data,
19-
order_rules, final_landclasses, constraint_rules, transition_rules, land_mismatch,
20-
spat_ludataharm_orig_steps, target_change, yr,diag_file):
2118

19+
def extense_parallel_helper(regix_metix, log, c, allregnumber, allregmet, spat_ludataharm, spat_region, spat_met,
20+
kernel_vector, cons_data,
21+
order_rules, final_landclasses, constraint_rules, transition_rules, land_mismatch,
22+
spat_ludataharm_orig_steps, target_change, yr, diag_file, transitions):
2223
reg_idx, met_idx = regix_metix
23-
#print("processing region " + str(reg_idx))
24+
# print("processing region " + str(reg_idx))
2425

25-
# set previous region index to current
26-
#prev_reg = reg_idx
26+
# set previous region index to current
27+
# prev_reg = reg_idx
2728

2829
# update user per region change
2930
regnumber = allregnumber[reg_idx]
3031
metnumber = allregmet[reg_idx][met_idx]
3132
metnum_idx = metnumber - 1
32-
# update user per region change
33+
# update user per region change
3334
reg_met_mask = (spat_region == regnumber) & (spat_met == metnumber)
3435
spat_ludataharm_sub = spat_ludataharm[reg_met_mask]
3536
kernel_vector_sub = kernel_vector[reg_met_mask]
@@ -45,32 +46,33 @@ def extense_parallel_helper(regix_metix,log, c, allregnumber, allregmet, spat_lu
4546
# transitions[reg_met_mask, :, :] += trans_mat
4647

4748
# calculate non-achieved change
48-
49+
transitions[reg_met_mask, :, :] += trans_mat
4950

5051
non_chg = np.sum(abs(target_change[:, :, :])) / 2.
5152

5253
if non_chg > 0:
53-
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
54+
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(
55+
abs(land_mismatch[:, :, :].flatten())) * 100
5456

5557
else:
5658
non_chg_per = 0
5759

58-
#log.info("Total non-achieved expansion change for time step {0}: {1} km2 ({2} %)".format(yr, non_chg, non_chg_per))
60+
# log.info("Total non-achieved expansion change for time step {0}: {1} km2 ({2} %)".format(yr, non_chg, non_chg_per))
5961

60-
# close file if diagnostic
6162

63+
# close file if diagnostic
6264

6365

6466
def _convert_pft(notdone, exp_target, met_idx, pft_toconv, spat_ludataharm_sub, pft, cons_data_subpft, reg,
65-
trans_mat, non_exist_cells, stochastic_expansion, selection_threshold, target_change, errortol,
66-
diag_file, diagnostic):
67+
trans_mat, non_exist_cells, stochastic_expansion, selection_threshold, target_change, errortol,
68+
diag_file, diagnostic):
6769
"""
6870
Apply conversion to every qualifying PFT.
6971
7072
:return: Array of PFTs
7173
"""
7274
if diagnostic == 1:
73-
diag_file.write('{},{},{},{},{}\n'.format(reg + 1, met_idx+1, pft, pft_toconv, exp_target))
75+
diag_file.write('{},{},{},{},{}\n'.format(reg + 1, met_idx + 1, pft, pft_toconv, exp_target))
7476

7577
while notdone:
7678
# grid cells with both the expanding and to-convert PFT
@@ -140,15 +142,15 @@ def _convert_pft(notdone, exp_target, met_idx, pft_toconv, spat_ludataharm_sub,
140142
& (np.sum(mean_cons_cells) != len(mean_cons_cells))
141143

142144
if diagnostic == 1:
143-
diag_file.write('{},{},{},{},{}\n'.format(reg + 1, met_idx+1, pft, pft_toconv, exp_target))
145+
diag_file.write('{},{},{},{},{}\n'.format(reg + 1, met_idx + 1, pft, pft_toconv, exp_target))
144146

145147
return exp_target, target_change, trans_mat
146148

147149

148-
def _expansion(diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, cons_data_sub_o, reg_idx, met_idx, order_rules, final_landclasses,
150+
def _expansion(diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, cons_data_sub_o, reg_idx, met_idx,
151+
order_rules, final_landclasses,
149152
errortol, constraint_rules, transition_rules, stochastic_expansion, selection_threshold, land_mismatch,
150153
target_change):
151-
152154
# get lengths for array creation
153155
l_shs = len(spat_ludataharm_sub[:, 0])
154156
l_ord = len(order_rules)
@@ -186,7 +188,7 @@ def _expansion(diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, co
186188
cons_data_sub[:, -1] = kdc
187189

188190
# create index order for constraints array where kernel density will be position 0
189-
cons_idx_order = [0 if i == cons_data_sub.shape[1]-1 else i+1 for i in range(cons_data_sub.shape[1])]
191+
cons_idx_order = [0 if i == cons_data_sub.shape[1] - 1 else i + 1 for i in range(cons_data_sub.shape[1])]
190192

191193
# reorder constraint weights array
192194
c_arg = np.argsort(cons_idx_order)
@@ -229,10 +231,11 @@ def _expansion(diagnostic, diag_file, spat_ludataharm_sub, kernel_vector_sub, co
229231

230232
# apply conversion to every qualifying PFT
231233
if len(exist_cells) > 0:
232-
233234
exp_target, target_change, trans_mat = _convert_pft(notdone, exp_target, met_idx, pft_toconv,
234-
spat_ludataharm_sub, pft, cons_data_subpft, reg_idx,
235-
trans_mat, non_exist_cells, stochastic_expansion,
235+
spat_ludataharm_sub, pft, cons_data_subpft,
236+
reg_idx,
237+
trans_mat, non_exist_cells,
238+
stochastic_expansion,
236239
selection_threshold, target_change, errortol,
237240
diag_file, diagnostic)
238241

@@ -266,8 +269,7 @@ def _reg_metric_iter(allregnumber, allregmet):
266269

267270
def apply_expansion(log, c, allregnumber, allregmet, spat_ludataharm, spat_region, spat_met, kernel_vector, cons_data,
268271
order_rules, final_landclasses, constraint_rules, transition_rules, land_mismatch,
269-
spat_ludataharm_orig_steps, target_change, yr):
270-
272+
spat_ludataharm_orig_steps, target_change, yr, transitions):
271273
# open diagnostic file if user-selected
272274
if c.diagnostic == 1:
273275
diag_fn, diag_ext = os.path.splitext(c.expansion_diag)
@@ -283,9 +285,14 @@ def apply_expansion(log, c, allregnumber, allregmet, spat_ludataharm, spat_regio
283285

284286
pool = ThreadPool(len(np.unique(regix_metix)))
285287

286-
pool.starmap(extense_parallel_helper,zip(regix_metix,repeat(log), repeat(c), repeat(allregnumber), repeat(allregmet), repeat(spat_ludataharm), repeat(spat_region), repeat(spat_met), repeat(kernel_vector), repeat(cons_data),
287-
repeat(order_rules), repeat(final_landclasses), repeat(constraint_rules), repeat(transition_rules), repeat(land_mismatch),
288-
repeat(spat_ludataharm_orig_steps), repeat(target_change), repeat(yr),repeat(diag_file)))
288+
pool.starmap(extense_parallel_helper,
289+
zip(regix_metix, repeat(log), repeat(c), repeat(allregnumber), repeat(allregmet),
290+
repeat(spat_ludataharm), repeat(spat_region), repeat(spat_met), repeat(kernel_vector),
291+
repeat(cons_data),
292+
repeat(order_rules), repeat(final_landclasses), repeat(constraint_rules), repeat(transition_rules),
293+
repeat(land_mismatch),
294+
repeat(spat_ludataharm_orig_steps), repeat(target_change), repeat(yr), repeat(diag_file),
295+
repeat(transitions)))
289296

290297
pool.terminate()
291298

demeter/change/intensification.py

Lines changed: 46 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -13,26 +13,30 @@
1313
import threading
1414
from multiprocessing.dummy import Pool as ThreadPool
1515
from itertools import repeat
16+
17+
import pandas as pd
18+
1619
import demeter.demeter_io.writer as wdr
1720

18-
def intense_parallel_helper(regix_metix,spat_region, order_rules, allregnumber, allregmet, spat_ludata,
19-
spat_landmatrix, gcam_landmatrix, yr_idx, d_regid_nm, target_change, spat_ludataharm,
20-
spat_met, kernel_vector, cons_data, final_landclasses,spat_ludataharm_orig_steps, yr,
21-
land_mismatch, constraint_rules, transition_rules,log, pass_number, c,diag_file):
2221

22+
def intense_parallel_helper(regix_metix, spat_region, order_rules, allregnumber, allregmet, spat_ludata,
23+
spat_landmatrix, gcam_landmatrix, yr_idx, d_regid_nm, target_change, spat_ludataharm,
24+
spat_met, kernel_vector, cons_data, final_landclasses, spat_ludataharm_orig_steps, yr,
25+
land_mismatch, constraint_rules, transition_rules, log, pass_number, c, diag_file,
26+
transitions):
2327
reg_idx, met_idx = regix_metix
24-
# print("processing region " + str(reg_idx))
28+
# print("processing region " + str(reg_idx))
2529

26-
# set previous region index to current
27-
#prev_reg = reg_idx
30+
# set previous region index to current
31+
# prev_reg = reg_idx
2832

2933
# update user per region change
3034

31-
# update user per region change
35+
# update user per region change
3236
regnumber, reg_idx, target_intensification = _create_summary(reg_idx, allregnumber, spat_ludata,
33-
spat_landmatrix, gcam_landmatrix, d_regid_nm,
34-
log, spat_region, yr_idx, target_change,
35-
pass_number, c)
37+
spat_landmatrix, gcam_landmatrix, d_regid_nm,
38+
log, spat_region, yr_idx, target_change,
39+
pass_number, c)
3640

3741
# calculate and write area diagnostic
3842
# diff_diagnostic(c.diag_dir, d_regid_nm, gcam_landmatrix, spat_landmatrix, reg_idx, yr, yr_idx)
@@ -53,25 +57,25 @@ def intense_parallel_helper(regix_metix,spat_region, order_rules, allregnumber,
5357

5458
# apply intensification
5559
spat_ludataharm[reg_met_mask], trans_mat, target_change, target_intensification = citz
56-
60+
# print("Number of dimensions:", trans_mat.ndim)
61+
# print("Shape:", trans_mat.shape)
62+
# wdr.write_transitions(s,c.step, transitions=trans_mat)
63+
# arr_reshaped = trans_mat.reshape(trans_mat.shape[0], -1)
64+
# np.savetxt("test.csv", arr_reshaped, delimiter=",")
5765
# log transition
58-
# transitions[reg_met_mask, :, :] += trans_mat
66+
transitions[reg_met_mask, :, :] += trans_mat
5967

6068
# calculate non-achieved change
6169

62-
6370
non_chg = np.sum(abs(target_change[:, :, :])) / 2.0
6471

6572
if non_chg > 0:
66-
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(abs(land_mismatch[:, :, :].flatten())) * 100
73+
non_chg_per = np.sum(abs(target_change[:, :, :].flatten())) / np.sum(
74+
abs(land_mismatch[:, :, :].flatten())) * 100
6775
else:
68-
non_chg_per = 0
69-
70-
#log.info("Total non-achieved intensification change for pass {0} time step {1}: {2} km2 ({3} %)".format(pass_number, yr, non_chg, non_chg_per))
71-
72-
73-
76+
non_chg_per = 0
7477

78+
# log.info("Total non-achieved intensification change for pass {0} time step {1}: {2} km2 ({3} %)".format(pass_number, yr, non_chg, non_chg_per))
7579

7680

7781
def diff_diagnostic(diag_outdir, d_regid_nm, gcam_landmatrix, spat_landmatrix, reg, yr, yr_idx):
@@ -82,17 +86,14 @@ def diff_diagnostic(diag_outdir, d_regid_nm, gcam_landmatrix, spat_landmatrix, r
8286
:return:
8387
"""
8488
# set outfile names
85-
gcam_out = os.path.join(diag_outdir, "{0}_{1}_gcam_landmatrix.csv".format(d_regid_nm[str(reg+1)], yr))
86-
base_out = os.path.join(diag_outdir, "{0}_{1}_spat_landmatrix.csv".format(d_regid_nm[str(reg+1)], yr))
89+
gcam_out = os.path.join(diag_outdir, "{0}_{1}_gcam_landmatrix.csv".format(d_regid_nm[str(reg + 1)], yr))
90+
base_out = os.path.join(diag_outdir, "{0}_{1}_spat_landmatrix.csv".format(d_regid_nm[str(reg + 1)], yr))
8791

8892
# write files
8993
wdr.array_to_csv(gcam_landmatrix[yr_idx, reg, :, :], gcam_out)
9094
wdr.array_to_csv(spat_landmatrix[reg, :, :], base_out)
9195

9296

93-
94-
95-
9697
def reg_metric_iter(allregnumber, allregmet):
9798
"""
9899
Create region, metric iterator.
@@ -108,14 +109,14 @@ def reg_metric_iter(allregnumber, allregmet):
108109

109110

110111
def _convert_pft(notdone, int_target, metnumber, pft_toconv, spat_ludataharm_sub, pft, cons_data_subpft, reg,
111-
target_intensification, trans_mat, target_change, errortol, diag_file, diagnostic):
112+
target_intensification, trans_mat, target_change, errortol, diag_file, diagnostic):
112113
"""
113114
Apply conversion to every qualifying PFT.
114115
115116
:return: Array of PFTs
116117
"""
117118
if diagnostic == 1:
118-
diag_file.write('{},{},{},{},{}\n'.format(reg+1, metnumber, pft, pft_toconv, int_target))
119+
diag_file.write('{},{},{},{},{}\n'.format(reg + 1, metnumber, pft, pft_toconv, int_target))
119120

120121
while notdone:
121122
# grid cells with both the expanding and to-convert PFT
@@ -223,7 +224,7 @@ def _intensification(diagnostic, diag_file, spat_ludataharm_sub, target_intensif
223224
cons_data_sub[:, -1] = kdc
224225

225226
# create index order for constraints array where kernel density will be position 0
226-
cons_idx_order = [0 if i == cons_data_sub.shape[1]-1 else i+1 for i in range(cons_data_sub.shape[1])]
227+
cons_idx_order = [0 if i == cons_data_sub.shape[1] - 1 else i + 1 for i in range(cons_data_sub.shape[1])]
227228

228229
# reorder constraint weights array
229230
c_arg = np.argsort(cons_idx_order)
@@ -233,7 +234,8 @@ def _intensification(diagnostic, diag_file, spat_ludataharm_sub, target_intensif
233234
cons_data_subpft = cons_data_sub
234235

235236
# invert negative constraints
236-
arr = np.ones(shape=np.shape(cons_data_sub[:, cons_rules_pft < 0])) + cons_data_subpft[:, cons_rules_pft < 0]
237+
arr = np.ones(shape=np.shape(cons_data_sub[:, cons_rules_pft < 0])) + cons_data_subpft[:,
238+
cons_rules_pft < 0]
237239
cons_data_subpft[:, cons_rules_pft < 0] = arr
238240

239241
# multiply negative constraints weight by -1 to turn it positive
@@ -316,15 +318,13 @@ def _create_summary(reg_idx, allregnumber, spat_ludata, spat_landmatrix, gcam_la
316318
if pass_number == 1:
317319
target_intensification[target_intensification > 0] *= c.intensification_ratio
318320

319-
320-
321321
return regnumber, prev_reg, target_intensification
322322

323323

324324
def apply_intensification(log, pass_number, c, spat_region, order_rules, allregnumber, allregmet, spat_ludata,
325325
spat_landmatrix, gcam_landmatrix, yr_idx, d_regid_nm, target_change, spat_ludataharm,
326-
spat_met, kernel_vector, cons_data, final_landclasses,spat_ludataharm_orig_steps, yr,
327-
land_mismatch, constraint_rules, transition_rules):
326+
spat_met, kernel_vector, cons_data, final_landclasses, spat_ludataharm_orig_steps, yr,
327+
land_mismatch, constraint_rules, transition_rules, transitions):
328328
"""
329329
There are two ways to expand land covers:
330330
1) on grid-cells where they do exist (intensification, at the expense of contracting land covers)
@@ -354,14 +354,19 @@ def apply_intensification(log, pass_number, c, spat_region, order_rules, allregn
354354

355355
pool = ThreadPool(len(np.unique(regix_metix)))
356356

357-
pool.starmap(intense_parallel_helper,zip(regix_metix,repeat(spat_region), repeat(order_rules), repeat(allregnumber), repeat(allregmet), repeat(spat_ludata),
358-
repeat(spat_landmatrix), repeat(gcam_landmatrix), repeat(yr_idx), repeat(d_regid_nm), repeat(target_change), repeat(spat_ludataharm),
359-
repeat(spat_met), repeat(kernel_vector), repeat(cons_data), repeat(final_landclasses),repeat(spat_ludataharm_orig_steps), repeat(yr),
360-
repeat(land_mismatch), repeat(constraint_rules), repeat(transition_rules),repeat(log), repeat(pass_number), repeat(c),repeat(diag_file)))
357+
pool.starmap(intense_parallel_helper,
358+
zip(regix_metix, repeat(spat_region), repeat(order_rules), repeat(allregnumber), repeat(allregmet),
359+
repeat(spat_ludata),
360+
repeat(spat_landmatrix), repeat(gcam_landmatrix), repeat(yr_idx), repeat(d_regid_nm),
361+
repeat(target_change), repeat(spat_ludataharm),
362+
repeat(spat_met), repeat(kernel_vector), repeat(cons_data), repeat(final_landclasses),
363+
repeat(spat_ludataharm_orig_steps), repeat(yr),
364+
repeat(land_mismatch), repeat(constraint_rules), repeat(transition_rules), repeat(log),
365+
repeat(pass_number), repeat(c), repeat(diag_file), repeat(transitions)))
361366
# for each region
362-
#for index, pkg in enumerate(regix_metix):
363-
364-
# unpack index vars
367+
# for index, pkg in enumerate(regix_metix):
365368

369+
# unpack index vars
370+
# wdr.write_transitions(self, c.step, transitions=transitions)
366371
pool.terminate()
367372
return [spat_ludataharm, spat_ludataharm_orig_steps, land_mismatch, cons_data, target_change]

0 commit comments

Comments (0)