ARGO_WORKFLOWS_VERSION="3.5.5"
OPENAPI_SPEC_URL="https://raw.githubusercontent.com/argoproj/argo-workflows/v$(ARGO_WORKFLOWS_VERSION)/api/openapi-spec/swagger.json"
SPEC_PATH="$(shell pwd)/argo-workflows-$(ARGO_WORKFLOWS_VERSION).json"
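
# `help` prints each target's `## ` description; the grep/awk one-liner below parses them out of this Makefile.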
.PHONY: help
help: ## Show the help text for all the available `make` commands
	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'

.PHONY: install
install: ## Run poetry install with all extras for development
	@poetry env use system
	@poetry install --all-extras

.PHONY: install-3.9
install-3.9: ## Install and use Python 3.9 for generating test data
	@poetry env use 3.9
	@poetry install --all-extras

.PHONY: ci
ci: ## Run all the CI checks
ci: CI=1
ci: lint test test-type-hints check-codegen

.PHONY: codegen
codegen: ## Generate all models, services, examples, and init files
codegen: models services examples init-files
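
# check-codegen re-runs codegen and fails if `git diff` reports changes, i.e. the committed generated code is stale.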
.PHONY: check-codegen
check-codegen: ## Check that the generated code is up to date
check-codegen:
	@$(MAKE) codegen
	@git diff --exit-code || (echo "Code is not up-to-date. Please run 'make codegen'" && exit 1)

.PHONY: format
format: ## Format and sort imports for source, tests, examples, etc.
	@poetry run ruff format .
	@poetry run ruff check . --fix

.PHONY: lint
lint: ## Run linting on Hera and report problems
	@poetry run ruff check .
	@poetry run ruff format . --check
	@poetry run mypy -p hera

.PHONY: test
test: ## Run tests for Hera
	@poetry run python -m pytest --cov-report=term-missing -m "not on_cluster" -k "not typehints"

.PHONY: test-type-hints
test-type-hints: ## Run type hint tests for Hera
	@poetry run python -m pytest --cov-append -k "typehints"
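
# workflows-models downloads the pinned OpenAPI spec via scripts/spec.py, generates Pydantic models with
# datamodel-codegen, rewrites `Field` imports to hera.shared._pydantic, then post-processes with scripts/models.py.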
.PHONY: workflows-models
workflows-models: ## Generate the Workflows models portion of Argo Workflows
	@touch $(SPEC_PATH)
	@poetry run python scripts/spec.py $(OPENAPI_SPEC_URL) $(SPEC_PATH)
	@poetry run datamodel-codegen \
		--input $(SPEC_PATH) \
		--snake-case-field \
		--target-python-version 3.9 \
		--output src/hera/workflows/models \
		--output-model-type pydantic.BaseModel \
		--base-class hera.shared._pydantic.BaseModel \
		--input-file-type jsonschema \
		--wrap-string-literal \
		--disable-appending-item-suffix \
		--disable-timestamp \
		--use-annotated \
		--use-default-kwarg
	@find src/hera/workflows/models/ -name '*.py' -exec sed -i.bak 's/from pydantic import Field/from hera.shared._pydantic import Field/' {} +
	@find src/hera/workflows/models/ -name '*.bak' -delete
	@poetry run python scripts/models.py $(OPENAPI_SPEC_URL) workflows
	@rm $(SPEC_PATH)
	@$(MAKE) format
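
# events-models runs the same pipeline as workflows-models, generating into src/hera/events/models instead.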
.PHONY: events-models
events-models: ## Generate the Events models portion of Argo Workflows
	@touch $(SPEC_PATH)
	@poetry run python scripts/spec.py $(OPENAPI_SPEC_URL) $(SPEC_PATH)
	@poetry run datamodel-codegen \
		--input $(SPEC_PATH) \
		--snake-case-field \
		--target-python-version 3.9 \
		--output src/hera/events/models \
		--output-model-type pydantic.BaseModel \
		--base-class hera.shared._pydantic.BaseModel \
		--input-file-type jsonschema \
		--wrap-string-literal \
		--disable-appending-item-suffix \
		--disable-timestamp \
		--use-annotated \
		--use-default-kwarg
	@find src/hera/events/models/ -name '*.py' -exec sed -i.bak 's/from pydantic import Field/from hera.shared._pydantic import Field/' {} +
	@find src/hera/events/models/ -name '*.bak' -delete
	@poetry run python scripts/models.py $(OPENAPI_SPEC_URL) events
	@rm $(SPEC_PATH)
	@$(MAKE) format

.PHONY: models
models: ## Generate all the Argo Workflows models
models: workflows-models events-models
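
# The service targets generate Hera's service layer from the same OpenAPI spec via scripts/service.py.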
.PHONY: workflows-service
workflows-service: ## Generate the Workflows service of Hera
	@poetry run python scripts/service.py $(OPENAPI_SPEC_URL) workflows
	$(MAKE) format

.PHONY: events-service
events-service: ## Generate the Events service of Hera
	@poetry run python scripts/service.py $(OPENAPI_SPEC_URL) events
	$(MAKE) format

.PHONY: services
services: ## Generate the services of Hera
services: workflows-service events-service

.PHONY: init-files
init-files: ## Generate the init files of Hera
init-files:
	@poetry run python scripts/init_files.py
	$(MAKE) format
.PHONY: examples
examples: ## Generate documentation files for examples
	@(cd docs && poetry run python generate.py)

.PHONY: build-docs
build-docs: ## Generate documentation locally
	@python -m pip install --exists-action=w --no-cache-dir -r docs/requirements.txt
	@python -m mkdocs build --clean --site-dir build/docs/html --config-file mkdocs.yml

# If you run this target, mkdocs will watch the `docs` folder, so any changes
# will be reflected in your browser momentarily (without refreshing!)
.PHONY: host-docs
host-docs: ## Host and open the documentation locally (and rebuild automatically)
	@python -m mkdocs serve --open --clean --config-file mkdocs.yml
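
# Usage (substitute a real example test name for the placeholder):
#   make regenerate-example EXAMPLE_FILENAME=<example_test_name>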
.PHONY: regenerate-example
regenerate-example: ## Regenerate the YAML for a single example, using the EXAMPLE_FILENAME envvar
regenerate-example: install
	@HERA_REGENERATE=1 poetry run python -m pytest -k $(EXAMPLE_FILENAME)
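
# regenerate-test-data deletes every generated examples/*.yaml, regenerates them under Python 3.9,
# rebuilds the example docs, and reports upstream examples that are still missing from Hera.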
.PHONY: regenerate-test-data
regenerate-test-data: ## Regenerate the test data from upstream examples, run the tests, and report missing examples
regenerate-test-data: install-3.9
	find examples -name "*.yaml" -type f -delete
	HERA_REGENERATE=1 poetry run python -m pytest -k test_examples
	make examples
	@poetry run python -m pytest -k test_for_missing_examples --runxfail
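
# The targets below set up a local k3d cluster with Argo Workflows for the on-cluster tests:
# install-k3d / install-argo, then set-up-cluster, set-up-argo, set-up-artifacts, and finally test-on-cluster.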
.PHONY: install-k3d
install-k3d: ## Install k3d client
	curl -s https://raw.githubusercontent.com/k3d-io/k3d/main/install.sh | bash

.PHONY: install-argo
install-argo: ## Install argo CLI client
	# Download the binary
	curl -sLO https://github.com/argoproj/argo-workflows/releases/download/v$(ARGO_WORKFLOWS_VERSION)/argo-linux-amd64.gz
	# Unzip
	gunzip argo-linux-amd64.gz
	# Make binary executable
	chmod +x argo-linux-amd64
	# Move binary to path
	sudo mv ./argo-linux-amd64 /usr/local/bin/argo
	# Test installation
	argo version

.PHONY: set-up-cluster
set-up-cluster: ## Create the cluster and argo namespace
	k3d cluster list | grep test-cluster || k3d cluster create test-cluster
	k3d kubeconfig merge test-cluster --kubeconfig-switch-context
	kubectl get namespace argo || kubectl create namespace argo
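
# argo-server is patched to `--auth-mode=server` so the tests can reach the API without supplying a client auth token.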
.PHONY: set-up-argo
set-up-argo: ## Start the argo service
	kubectl apply -n argo -f https://github.com/argoproj/argo-workflows/releases/download/v$(ARGO_WORKFLOWS_VERSION)/install.yaml
	kubectl patch deployment argo-server --namespace argo --type='json' -p='[{"op": "replace", "path": "/spec/template/spec/containers/0/args", "value": ["server", "--auth-mode=server"]}]'
	kubectl create rolebinding default-admin --clusterrole=admin --serviceaccount=argo:default --namespace=argo
	kubectl rollout status -n argo deployment/argo-server --timeout=120s --watch=true

.PHONY: set-up-artifacts
set-up-artifacts: ## Add MinIO for running examples with artifact storage
	kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj-labs/training-material/main/config/minio/minio.yaml
	kubectl apply -n argo -f https://raw.githubusercontent.com/argoproj-labs/training-material/main/config/argo-workflows/workflows-controller-configmap.yaml
	kubectl apply -n argo -f tests/submissions/roles.yaml

.PHONY: stop-cluster
stop-cluster: ## Stop the cluster
	k3d cluster stop test-cluster
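
# Port-forwards the argo-server API to localhost:2746 in the background, then runs the `on_cluster`-marked submission tests.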
.PHONY: test-on-cluster
test-on-cluster: ## Run workflow tests (requires local argo cluster)
	@(kubectl -n argo port-forward deployment/argo-server 2746:2746 &)
	@poetry run python -m pytest tests/submissions -m on_cluster