-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathMakefile
More file actions
366 lines (305 loc) · 18.3 KB
/
Makefile
File metadata and controls
366 lines (305 loc) · 18.3 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
# Export every Make variable to recipe sub-shells, and keep recipes sequential:
# many targets share scratch paths under $(DIST_PATH), so parallel runs would race.
.EXPORT_ALL_VARIABLES:
.NOTPARALLEL:
# NOTE(review): .PHONY prerequisites undergo wildcard expansion, so `*` marks
# files in the repo root phony rather than "every target" — confirm intent.
.PHONY: *
# Append rather than clobber so flags inherited from a parent make survive.
MAKEFLAGS += --no-print-directory
SHELL := /bin/bash
DIST_PATH ?= ./dist
TEST_ARGS ?= --cov --cov-report=term-missing --cov-report=xml:$(DIST_PATH)/test-coverage.xml
SMOKE_TEST_ARGS ?=
FEATURE_TEST_ARGS ?= ./tests/features --format progress2
# Resolve the Terraform workspace ONCE at parse time. A plain `?=` keeps the
# variable recursive, re-running `terraform workspace show` on every expansion
# (HOST, recipes, …). The origin check preserves env/CLI overrides like `?=`.
ifeq ($(origin TF_WORKSPACE_NAME),undefined)
TF_WORKSPACE_NAME := $(shell terraform -chdir=terraform/infrastructure workspace show)
endif
ENV ?= dev
ACCOUNT ?= dev
APP_ALIAS ?= default
HOST ?= $(TF_WORKSPACE_NAME).api.record-locator.$(ENV).national.nhs.uk
ENV_TYPE ?= $(ENV)
# Performance-test knobs (all overridable from the command line / environment).
PERFTEST_TABLE_NAME ?= perftest
PERFTEST_HOST ?= perftest-1.perftest.record-locator.national.nhs.uk
PERFTEST_PATIENTS_WITH_POINTERS ?= 0
PERFTEST_POINTERS_PER_PATIENT ?= 0
PERFTEST_TYPE_DIST_PROFILE ?= default
PERFTEST_CUSTODIAN_DIST_PROFILE ?= default
PERFTEST_TOKEN_REFRESH_PORT ?= 8765
# Make the project virtualenv's tools visible to recipes.
export PATH := $(PATH):$(PWD)/.venv/bin
# Evaluated once per make invocation (runs poetry/python at parse time).
export USE_SHARED_RESOURCES := $(shell poetry run python scripts/are_resources_shared_for_stack.py $(TF_WORKSPACE_NAME))
# `make` with no target builds the project.
default: build

# Self-documenting help: lists every target carrying a trailing "## description".
help: ## Show this help message
	@echo "Usage: make [target]"
	@echo
	@echo "where [target] can be:"
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf " \033[36m%-30s\033[0m %s\n", $$1, $$2}'
# Install the tools pinned in .tool-versions via asdf. Each line of that file
# is "<tool> <version>"; `$${tool_version% *}` strips the version, leaving the
# plugin name. `asdf plugin add` exits non-zero when the plugin is already
# installed, which would fail the recipe — tolerate it so the target stays
# idempotent, then install the pinned versions.
asdf-install: ## Install the required tools via ASDF
	@cat .tool-versions | while read tool_version; do \
		tool="$${tool_version% *}"; \
		asdf plugin add "$${tool}" || true; \
	done
	asdf install
# One-time repo setup: install pinned tools, warn about environment issues,
# wire the commit-message hook into .git/hooks, then install Python
# dependencies and pre-commit hooks.
configure: asdf-install check-warn ## Configure this project repo, including install dependencies
	cp scripts/commit-msg.py .git/hooks/prepare-commit-msg && chmod ug+x .git/hooks/*
	poetry install
	poetry run pre-commit install
# Environment validation wrappers. The `-warn` variants set SHOULD_WARN_ONLY,
# which the scripts read to report problems without failing the build.
check: ## Check the build environment is setup correctly
	@./scripts/check-build-environment.sh

check-warn:
	@SHOULD_WARN_ONLY=true ./scripts/check-build-environment.sh

check-deploy: ## check the deploy environment is setup correctly
	@./scripts/check-deploy-environment.sh

check-deploy-warn:
	@SHOULD_WARN_ONLY=true ./scripts/check-deploy-environment.sh
# Top-level build: all artifacts land in $(DIST_PATH).
build: check-warn build-api-packages build-layers build-dependency-layer build-seed-sandbox-lambda ## Build the project

# Delegate to the Lambda's own Makefile. Use $(MAKE) -C (not a literal `make`)
# so -j/-n and other flags propagate to the sub-make.
build-seed-sandbox-lambda:
	@echo "Building seed_sandbox Lambda"
	@$(MAKE) -C lambdas/seed_sandbox build

# Build the shared third-party dependency layer into $(DIST_PATH).
build-dependency-layer:
	@echo "Building Lambda dependency layer"
	@mkdir -p $(DIST_PATH)
	./scripts/build-lambda-dependency-layer.sh $(DIST_PATH)
# Build each directory under ./layer into a Lambda layer zip. The `./layer/*`
# prerequisite list is glob-expanded by make; non-directories are skipped in
# the loop. `|| exit 1` makes a failed layer build abort the loop — previously
# a mid-loop failure was silently ignored unless it was the last iteration.
build-layers: ./layer/*
	@echo "Building Lambda layers"
	@mkdir -p $(DIST_PATH)
	for layer in $^; do \
		[ ! -d "$$layer" ] && continue; \
		./scripts/build-lambda-layer.sh $${layer} $(DIST_PATH) || exit 1; \
	done
# Package every consumer/producer API directory into a Lambda zip. Mirrors
# build-layers: skip non-directories, and abort on the first failed package
# (`|| exit 1`) instead of silently continuing.
build-api-packages: ./api/consumer/* ./api/producer/*
	@echo "Building API packages"
	@mkdir -p $(DIST_PATH)
	for api in $^; do \
		[ ! -d "$$api" ] && continue; \
		./scripts/build-lambda-package.sh $${api} $(DIST_PATH) || exit 1; \
	done
# Build the CI docker image from Dockerfile.ci-build. `docker build` requires
# exactly one build-context argument; the trailing `.` was missing, so this
# target could not run at all.
build-ci-image: ## Build the CI image
	@echo "Building the CI image"
	docker build \
		-t nhsd-nrlf-ci-build:latest \
		-f Dockerfile.ci-build \
		.
# Log docker in to this AWS account's ECR registry. Region and account id are
# resolved via $(eval …) at recipe-expansion time so they reflect whatever AWS
# profile is active when the target runs.
ecr-login: ## Login to NRLF ECR repo
	@echo "Logging into ECR"
	$(eval AWS_REGION := $(shell aws configure get region))
	$(eval AWS_ACCOUNT_ID := $(shell aws sts get-caller-identity | jq -r .Account))
	@aws ecr get-login-password --region "$(AWS_REGION)" \
		| docker login --username AWS --password-stdin \
		$(AWS_ACCOUNT_ID).dkr.ecr.$(AWS_REGION).amazonaws.com
# Tag the locally built CI image for this account's ECR registry and push it.
# Assumes `build-ci-image` and `ecr-login` have already been run — TODO confirm
# whether these should be prerequisites.
publish-ci-image: ## Publish the CI image
	@echo "Publishing the CI image"
	$(eval AWS_REGION := $(shell aws configure get region))
	$(eval AWS_ACCOUNT_ID := $(shell aws sts get-caller-identity | jq -r .Account))
	@docker tag nhsd-nrlf-ci-build:latest \
		$(AWS_ACCOUNT_ID).dkr.ecr.$(AWS_REGION).amazonaws.com/nhsd-nrlf-ci-build:latest
	@docker push $(AWS_ACCOUNT_ID).dkr.ecr.$(AWS_REGION).amazonaws.com/nhsd-nrlf-ci-build:latest
# Run the unit test suite with coverage (see TEST_ARGS). Smoke tests are
# excluded here — they have dedicated test-smoke-* targets below.
test: check-warn ## Run the unit tests
	@echo "Running unit tests"
	PYTHONPATH=. poetry run pytest --ignore tests/smoke $(TEST_ARGS)
# Run the BDD feature suite against the currently selected Terraform workspace.
# All the `--define` values are exposed to behave step implementations.
test-features-integration: check-warn ## Run the BDD feature tests in the integration environment
	@echo "Running feature tests in the integration environment ${TF_WORKSPACE_NAME}"
	behave --define="integration_test=true" \
		--define="env=$(TF_WORKSPACE_NAME)" \
		--define="account_name=$(ENV)" \
		--define="use_shared_resources=${USE_SHARED_RESOURCES}" \
		$(FEATURE_TEST_ARGS)
# Same as test-features-integration but restricted to scenarios tagged
# @custom_tag. NOTE(review): target name mixes kebab- and snake-case — confirm
# before renaming, as CI may reference it.
integration-test-with-custom_tag:
	@echo "Running feature tests in the integration environment ${TF_WORKSPACE_NAME}"
	behave --define="integration_test=true" --tags=@custom_tag \
		--define="env=$(TF_WORKSPACE_NAME)" \
		--define="account_name=$(ENV)" \
		--define="use_shared_resources=${USE_SHARED_RESOURCES}" \
		$(FEATURE_TEST_ARGS)
# Run the BDD feature suite, then build and open an Allure HTML report.
# Assumes behave is configured (e.g. via behave.ini/allure formatter) to write
# ./allure-results — TODO confirm.
test-features-integration-report: check-warn ## Run the BDD feature tests in the integration environment and generate allure report thereafter
	@echo "Cleaning previous Allure results and reports"
	rm -rf ./allure-results
	rm -rf ./allure-report
	@echo "Running feature tests in the integration environment"
	behave --define="integration_test=true" \
		--define="env=$(TF_WORKSPACE_NAME)" \
		--define="account_name=$(ENV)" \
		--define="use_shared_resources=${USE_SHARED_RESOURCES}" \
		$(FEATURE_TEST_ARGS)
	@echo "Generating Allure report"
	allure generate ./allure-results -o ./allure-report --clean
	@echo "Opening Allure report"
	allure open ./allure-report
# Smoke tests hitting the stack's internal endpoints. TEST_* variables are the
# contract with tests/smoke; the stack domain comes from terraform output
# (empty if terraform state is unavailable — stderr is suppressed).
test-smoke-internal: check-warn ## Run the smoke tests against the internal environment
	@echo "Running smoke tests against the internal environment ${TF_WORKSPACE_NAME}"
	TEST_ENVIRONMENT_NAME=$(ENV) \
	TEST_STACK_NAME=$(TF_WORKSPACE_NAME) \
	TEST_STACK_DOMAIN=$(shell terraform -chdir=terraform/infrastructure output -raw domain 2>/dev/null) \
	TEST_CONNECT_MODE="internal" \
	pytest ./tests/smoke/scenarios/* $(SMOKE_TEST_ARGS)
# Smoke tests against the public (external) access points; same harness as
# test-smoke-internal, switched via TEST_CONNECT_MODE.
test-smoke-public: check-warn ## Run the smoke tests for the external access points
	@echo "Running smoke tests for the public endpoints ${ENV}"
	TEST_ENVIRONMENT_NAME=$(ENV) \
	TEST_STACK_NAME=$(TF_WORKSPACE_NAME) \
	TEST_CONNECT_MODE="public" \
	pytest ./tests/smoke/scenarios/* $(SMOKE_TEST_ARGS)
# Set up the performance-test environment for the current workspace.
test-performance-prepare:
	mkdir -p $(DIST_PATH)
	PYTHONPATH=. poetry run python tests/performance/environment.py setup $(TF_WORKSPACE_NAME)
# Aggregate: baseline then stress against internal access points.
test-performance-internal: check-warn test-performance-baseline-internal test-performance-stress-internal ## Run the performance tests against the internal access points

# k6 baseline run against the internal consumer API; raw results land in
# $(DIST_PATH)/consumer-baseline.csv for test-performance-output to process.
test-performance-baseline-internal: check-warn ## Run the performance baseline tests for the internal access points
	@echo "Running internal consumer performance baseline test"
	TEST_CONNECT_MODE=internal \
	TEST_STACK_DOMAIN=$(shell terraform -chdir=terraform/infrastructure output -raw domain 2>/dev/null) \
	k6 run --out csv=$(DIST_PATH)/consumer-baseline.csv tests/performance/consumer/baseline.js -e HOST=$(HOST) -e ENV_TYPE=$(ENV_TYPE)
# k6 baseline run against the public access points. The whole recipe is one
# shell so the temp config file and its cleanup trap share state. The config
# script's LAST stdout line is the JSON config (stderr is folded in, so only
# the final line is kept). Run it via `poetry run python` — consistent with
# every other target — so it sees the project virtualenv's dependencies.
test-performance-baseline-public: check-warn ## Run the baseline performance tests for the external access points
	@echo "Fetching public mode configuration and bearer token..."
	@CONFIG_FILE=$$(mktemp /tmp/perf_config_XXXXXX); \
	trap "rm -f $$CONFIG_FILE" EXIT; \
	PYTHONPATH=. poetry run python tests/performance/get_test_config.py $(ENV_TYPE) 2>&1 | tail -n 1 > $$CONFIG_FILE; \
	PUBLIC_BASE_URL=$$(jq -r '.public_base_url' $$CONFIG_FILE); \
	echo "Running consumer performance baseline test against the external access points"; \
	TEST_CONNECT_MODE=public \
	TEST_PUBLIC_BASE_URL=$$PUBLIC_BASE_URL \
	TEST_CONFIG_FILE=$$CONFIG_FILE \
	k6 run --out csv=$(DIST_PATH)/consumer-baseline-public.csv tests/performance/consumer/baseline.js -e ENV_TYPE=$(ENV_TYPE)
# k6 stress run against the internal consumer API. NOTE(review): unlike the
# baseline target this has no check-warn prerequisite — confirm whether that
# is intentional.
test-performance-stress-internal: ## Run the performance stress tests for the internal access points
	@echo "Running internal consumer performance stress test"
	k6 run --out csv=$(DIST_PATH)/consumer-stress.csv tests/performance/consumer/stress.js -e HOST=$(HOST) -e ENV_TYPE=$(ENV_TYPE)
# k6 stress run against the public access points; same single-shell temp-file
# pattern as the baseline-public target. Config script runs via
# `poetry run python` for consistency with the rest of the file (virtualenv
# dependencies are required).
test-performance-stress-public: check-warn ## Run the stress performance tests for the external access points
	@echo "Fetching public mode configuration and bearer token..."
	@CONFIG_FILE=$$(mktemp /tmp/perf_config_XXXXXX); \
	trap "rm -f $$CONFIG_FILE" EXIT; \
	PYTHONPATH=. poetry run python tests/performance/get_test_config.py $(ENV_TYPE) 2>&1 | tail -n 1 > $$CONFIG_FILE; \
	PUBLIC_BASE_URL=$$(jq -r '.public_base_url' $$CONFIG_FILE); \
	echo "Running consumer performance stress test against the external access points"; \
	TEST_CONNECT_MODE=public \
	TEST_PUBLIC_BASE_URL=$$PUBLIC_BASE_URL \
	TEST_CONFIG_FILE=$$CONFIG_FILE \
	k6 run --out csv=$(DIST_PATH)/consumer-stress-public.csv tests/performance/consumer/stress.js -e ENV_TYPE=$(ENV_TYPE)
# k6 soak (long-duration) run against the internal consumer API.
test-performance-soak-internal:
	@echo "Running internal consumer performance soak test"
	k6 run --out csv=$(DIST_PATH)/consumer-soak.csv tests/performance/consumer/soak.js -e HOST=$(HOST) -e ENV_TYPE=$(ENV_TYPE)
# k6 soak run against the public access points; same single-shell temp-file
# pattern as baseline/stress-public. Config script runs via `poetry run python`
# for consistency (virtualenv dependencies are required).
test-performance-soak-public: check-warn ## Run the soak performance tests for the external access points
	@echo "Fetching public mode configuration and bearer token..."
	@CONFIG_FILE=$$(mktemp /tmp/perf_config_XXXXXX); \
	trap "rm -f $$CONFIG_FILE" EXIT; \
	PYTHONPATH=. poetry run python tests/performance/get_test_config.py $(ENV_TYPE) 2>&1 | tail -n 1 > $$CONFIG_FILE; \
	PUBLIC_BASE_URL=$$(jq -r '.public_base_url' $$CONFIG_FILE); \
	echo "Running consumer performance soak test against the external access points"; \
	TEST_CONNECT_MODE=public \
	TEST_PUBLIC_BASE_URL=$$PUBLIC_BASE_URL \
	TEST_CONFIG_FILE=$$CONFIG_FILE \
	k6 run --out csv=$(DIST_PATH)/consumer-soak-public.csv tests/performance/consumer/soak.js -e ENV_TYPE=$(ENV_TYPE)
# Post-process the k6 CSV outputs. NOTE(review): only baseline and stress are
# processed here, not soak — confirm whether soak results need processing.
test-performance-output: ## Process outputs from the performance tests
	@echo "Processing performance test outputs"
	poetry run python tests/performance/process_results.py baseline $(DIST_PATH)/consumer-baseline.csv
	poetry run python tests/performance/process_results.py stress $(DIST_PATH)/consumer-stress.csv
# Tear down the performance-test environment for the current workspace.
test-performance-cleanup:
	PYTHONPATH=. poetry run python tests/performance/environment.py cleanup $(TF_WORKSPACE_NAME)

# Run all pre-commit linters; the branch-protection hook is skipped so lint
# works on any branch.
lint: check-warn ## Lint the project
	SKIP="no-commit-to-branch" pre-commit run --all-files
# Remove built zip artifacts and the dist directory if it ends up empty.
# The leading [ -n … ] guard prevents an `rm -rf /*.zip`-style accident if
# DIST_PATH were ever empty; `|| true` keeps clean from failing when the
# directory is absent or non-empty.
clean: ## Remove all generated and temporary files
	[ -n "$(DIST_PATH)" ] && \
	rm -rf $(DIST_PATH)/*.zip && \
	rmdir $(DIST_PATH) 2>/dev/null || true
# Fetch an OAuth access token for ENV/APP_ALIAS (printed by the script).
get-access-token: check-warn ## Get an access token for an environment
	@poetry run python tests/utilities/get_access_token.py $(ENV) $(APP_ALIAS)

# Download S3 permissions for the stack, then repackage them into the Lambda
# NRLF permissions layer zip under $(DIST_PATH).
get-s3-perms: check-warn ## Get s3 permissions for an environment
	poetry run python scripts/get_s3_permissions.py ${USE_SHARED_RESOURCES} $(ENV) $(TF_WORKSPACE_NAME) "$(DIST_PATH)"
	@echo "Creating new Lambda NRLF permissions layer zip"
	./scripts/add-perms-to-lambda.sh $(DIST_PATH)
# Grant the smoke-test identities their required permissions for this stack.
# NOTE(review): $(ENV) is passed as both the first and third argument — verify
# against set_smoketest_permissions.py's expected CLI signature.
set-smoketest-perms: check-warn ## Set the permissions for the smoke tests
	@echo "Setting permissions for smoke tests of env=$(ENV) stack=$(TF_WORKSPACE_NAME)...."
	poetry run python scripts/set_smoketest_permissions.py $(ENV) $(TF_WORKSPACE_NAME) $(ENV)
# Truststore management — thin wrappers around scripts/truststore.sh.
# CA_NAME/CA_SUBJECT/CERT_NAME/CERT_SUBJECT are caller-supplied make variables
# (empty unless set on the command line).
truststore-build-all: check-warn ## Build all truststore resources
	@./scripts/truststore.sh build-all

truststore-build-ca: check-warn ## Build a CA (Certificate Authority)
	@./scripts/truststore.sh build-ca "$(CA_NAME)" "$(CA_SUBJECT)"

truststore-build-cert: check-warn ## Build a certificate
	@./scripts/truststore.sh build-cert "$(CA_NAME)" "$(CERT_NAME)" "$(CERT_SUBJECT)"

truststore-pull-all-for-account: check-warn ## Pull all certificates for each environment in a given account
	@./scripts/truststore.sh pull-all-for-account "$(ACCOUNT)"

truststore-pull-all: check-warn ## Pull all certificates
	@./scripts/truststore.sh pull-all "$(ENV)"

truststore-pull-server: check-warn ## Pull a server certificate
	@./scripts/truststore.sh pull-server "$(ENV)"

truststore-pull-client: check-warn ## Pull a client certificate
	@./scripts/truststore.sh pull-client "$(ENV)"

truststore-pull-ca: check-warn ## Pull a CA certificate
	@./scripts/truststore.sh pull-ca "$(ENV)"
# Merge swagger fragments for the given TYPE (caller-supplied make variable).
swagger-merge: check-warn ## Generate Swagger Documentation
	@./scripts/swagger.sh merge "$(TYPE)"
# Regenerate Pydantic v2 models from the producer/consumer OpenAPI specs.
# The producer gets two variants: a permissive model.py and a strict_model.py
# built with --strict-types (no implicit coercions); the consumer gets only
# the permissive variant. All models inherit from nrlf.core.parent_model.Parent.
generate-models: check-warn ## Generate Pydantic Models
	@echo "Generating producer models"
	mkdir -p ./layer/nrlf/producer/fhir/r4
	poetry run datamodel-codegen \
		--input ./api/producer/swagger.yaml \
		--input-file-type openapi \
		--output ./layer/nrlf/producer/fhir/r4/model.py \
		--output-model-type "pydantic_v2.BaseModel" \
		--base-class nrlf.core.parent_model.Parent
	poetry run datamodel-codegen \
		--strict-types {str,bytes,int,float,bool} \
		--input ./api/producer/swagger.yaml \
		--input-file-type openapi \
		--output ./layer/nrlf/producer/fhir/r4/strict_model.py \
		--base-class nrlf.core.parent_model.Parent \
		--output-model-type "pydantic_v2.BaseModel"
	@echo "Generating consumer model"
	mkdir -p ./layer/nrlf/consumer/fhir/r4
	poetry run datamodel-codegen \
		--input ./api/consumer/swagger.yaml \
		--input-file-type openapi \
		--output ./layer/nrlf/consumer/fhir/r4/model.py \
		--base-class nrlf.core.parent_model.Parent \
		--output-model-type "pydantic_v2.BaseModel"
# Generate the K6PerformanceTest permission set into the nrlf_permissions tree.
perftest-generate-permissions: ## Generate perftest permissions and add to nrlf_permissions
	@echo "Generating permissions for performance tests with DIST_PATH=$(DIST_PATH)"
	PYTHONPATH=. poetry run python tests/performance/producer/generate_permissions.py --output_dir="$(DIST_PATH)/nrlf_permissions/K6PerformanceTest"

# Seed the perf-test DynamoDB tables, then archive the generated input files
# under $(DIST_PATH)/nft and upload the archive to the environment's metadata
# bucket so perftest-prepare can pull it down later.
perftest-seed-tables: ## Seed tables and upload generated perftest input files to s3
	@echo "Seeding performance test pointer tables with ENV=$(ENV) and PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and PERFTEST_PATIENTS_WITH_POINTERS=$(PERFTEST_PATIENTS_WITH_POINTERS) and PERFTEST_POINTERS_PER_PATIENT=$(PERFTEST_POINTERS_PER_PATIENT) and PERFTEST_TYPE_DIST_PROFILE=$(PERFTEST_TYPE_DIST_PROFILE) and PERFTEST_CUSTODIAN_DIST_PROFILE=$(PERFTEST_CUSTODIAN_DIST_PROFILE)"
	rm -rf "${DIST_PATH}/nft"
	mkdir -p "${DIST_PATH}/nft"
	PYTHONPATH=. poetry run python ./scripts/seed_nft_tables.py --table_name=$(PERFTEST_TABLE_NAME) --patients_with_pointers=$(PERFTEST_PATIENTS_WITH_POINTERS) --pointers_per_patient=$(PERFTEST_POINTERS_PER_PATIENT) --type_dist_profile=$(PERFTEST_TYPE_DIST_PROFILE) --custodian_dist_profile=$(PERFTEST_CUSTODIAN_DIST_PROFILE)
	zip -r "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip" "${DIST_PATH}/nft"
	aws s3 cp "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip" "s3://nhsd-nrlf--${ENV}-metadata/performance/seed-pointers-extract-${PERFTEST_TABLE_NAME}.zip"
# Pull the seeded pointer extract from S3 and regenerate producer input
# distributions. NOTE(review): `unzip` extracts relative to the CWD and the
# archive stores paths under $(DIST_PATH) (./dist by default) — confirm this
# still lands in $(DIST_PATH)/nft when DIST_PATH is overridden.
perftest-prepare: ## Prepare input files for producer & consumer perf tests
	@echo "Preparing performance tests with ENV=$(ENV) and PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and DIST_PATH=$(DIST_PATH)"
	rm -rf "${DIST_PATH}/nft"
	mkdir -p "${DIST_PATH}/nft"
	aws s3 cp "s3://nhsd-nrlf--${ENV}-metadata/performance/seed-pointers-extract-${PERFTEST_TABLE_NAME}.zip" "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip"
	unzip "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip"
	PYTHONPATH=. poetry run python ./tests/performance/generate_producer_distributions.py
# k6 producer perf test against the internal perftest host; JSON results are
# timestamped under $(DIST_PATH).
perftest-producer-internal: ## Run producer perf tests
	@echo "Running producer performance tests with HOST=$(PERFTEST_HOST) and ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)"
	k6 run tests/performance/producer/perftest.js --summary-mode=full --out json=$(DIST_PATH)/producer-internal-$$(date +%Y%m%d%H%M%S).json -e HOST=$(PERFTEST_HOST) -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH)
# Run the public producer perf tests. The background token refresher, its
# cleanup, the temp config file, and the k6 run all execute in ONE shell:
# each recipe line normally gets its own shell, so the previous version's
# stand-alone `trap … EXIT` line fired the moment that line's shell exited —
# killing the freshly started refresher before k6 ran — and the final `kill`
# only executed on success. A single combined EXIT trap (single-quoted so it
# is evaluated at exit time, not at trap-set time) now cleans up the config
# file and the refresher on every exit path.
perftest-producer-public: check-warn ## Run the producer perftests for the external access points
	@echo "Starting token refresher in background with ENV=$(ENV) PERFTEST_TOKEN_REFRESH_PORT=$(PERFTEST_TOKEN_REFRESH_PORT)"
	@ENV=$(ENV) TOKEN_REFRESH_PORT=$(PERFTEST_TOKEN_REFRESH_PORT) PYTHONPATH=. poetry run python ./tests/performance/token_refresher.py & \
	echo "Fetching public mode configuration..."; \
	CONFIG_FILE=$$(mktemp /tmp/perf_config_XXXXXX); \
	trap 'rm -f $$CONFIG_FILE; kill $$(lsof -t -i :$(PERFTEST_TOKEN_REFRESH_PORT)) 2>/dev/null || true' EXIT; \
	PYTHONPATH=. poetry run python tests/performance/get_test_config.py $(ENV_TYPE) 2>&1 | tail -n 1 > $$CONFIG_FILE; \
	PUBLIC_BASE_URL=$$(jq -r '.public_base_url' $$CONFIG_FILE); \
	echo "Running public producer perftests with ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)"; \
	TEST_CONNECT_MODE=public \
	TEST_PUBLIC_BASE_URL=$$PUBLIC_BASE_URL \
	TEST_CONFIG_FILE=$$CONFIG_FILE \
	k6 run tests/performance/producer/perftest.js --summary-mode=full --out json=$(DIST_PATH)/producer-public-$$(date +%Y%m%d%H%M%S).json -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH)
# k6 consumer perf test against the internal perftest host; JSON results are
# timestamped under $(DIST_PATH).
perftest-consumer-internal:
	@echo "Running consumer performance tests with HOST=$(PERFTEST_HOST) and ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)"
	k6 run tests/performance/consumer/perftest.js --summary-mode=full --out json=$(DIST_PATH)/consumer-internal-$$(date +%Y%m%d%H%M%S).json -e HOST=$(PERFTEST_HOST) -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH)
# Run the public consumer perf tests. Same single-shell structure as
# perftest-producer-public: the previous stand-alone `trap … EXIT` recipe line
# ran in its own shell and fired immediately, killing the token refresher
# before k6 ran, and the trailing `kill` only executed on success. One
# combined, single-quoted EXIT trap now removes the temp config file and stops
# the refresher on every exit path.
perftest-consumer-public: check-warn ## Run the consumer perftests for the external access points
	@echo "Starting token refresher in background with ENV=$(ENV) PERFTEST_TOKEN_REFRESH_PORT=$(PERFTEST_TOKEN_REFRESH_PORT)"
	@ENV=$(ENV) TOKEN_REFRESH_PORT=$(PERFTEST_TOKEN_REFRESH_PORT) PYTHONPATH=. poetry run python ./tests/performance/token_refresher.py & \
	echo "Fetching public mode configuration..."; \
	CONFIG_FILE=$$(mktemp /tmp/perf_config_XXXXXX); \
	trap 'rm -f $$CONFIG_FILE; kill $$(lsof -t -i :$(PERFTEST_TOKEN_REFRESH_PORT)) 2>/dev/null || true' EXIT; \
	PYTHONPATH=. poetry run python tests/performance/get_test_config.py $(ENV_TYPE) 2>&1 | tail -n 1 > $$CONFIG_FILE; \
	PUBLIC_BASE_URL=$$(jq -r '.public_base_url' $$CONFIG_FILE); \
	echo "Running public consumer perftests with ENV_TYPE=$(ENV_TYPE) and DIST_PATH=$(DIST_PATH)"; \
	TEST_CONNECT_MODE=public \
	TEST_PUBLIC_BASE_URL=$$PUBLIC_BASE_URL \
	TEST_CONFIG_FILE=$$CONFIG_FILE \
	k6 run tests/performance/consumer/perftest.js --summary-mode=full --out json=$(DIST_PATH)/consumer-public-$$(date +%Y%m%d%H%M%S).json -e ENV_TYPE=$(ENV_TYPE) -e DIST_PATH=$(DIST_PATH)
# Extract up to 2,000,000 pointers from the perftest table, record the current
# environment info alongside them, and upload the archive to the metadata
# bucket (same S3 key that perftest-prepare downloads).
perftest-generate-pointer-table-extract:
	@echo "Generating pointer table extract with PERFTEST_TABLE_NAME=$(PERFTEST_TABLE_NAME) and ENV=$(ENV) and DIST_PATH=$(DIST_PATH)"
	rm -rf "${DIST_PATH}/nft"
	mkdir -p "${DIST_PATH}/nft"
	PYTHONPATH=. poetry run python tests/performance/perftest_environment.py generate_pointer_table_extract --output_dir="${DIST_PATH}/nft" --extract-size=2000000
	./scripts/get-current-info.sh > "${DIST_PATH}/nft/info.json"
	zip -r "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip" "${DIST_PATH}/nft"
	aws s3 cp "${DIST_PATH}/pointer_extract-${PERFTEST_TABLE_NAME}.zip" "s3://nhsd-nrlf--${ENV}-metadata/performance/seed-pointers-extract-${PERFTEST_TABLE_NAME}.zip"