-
Notifications
You must be signed in to change notification settings - Fork 14
Expand file tree
/
Copy pathjustfile
More file actions
296 lines (207 loc) · 8.55 KB
/
justfile
File metadata and controls
296 lines (207 loc) · 8.55 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
# Load environment variables from a local .env file into every recipe.
set dotenv-load := true
# Path to the Python virtualenv; honours an already-activated venv, else ".venv".
export VIRTUAL_ENV := env_var_or_default("VIRTUAL_ENV", ".venv")
# Platform-specific executables directory inside the venv (bin/ on unix, Scripts/ on Windows).
export BIN := VIRTUAL_ENV + if os_family() == "unix" { "/bin" } else { "/Scripts" }
# list available commands
default:
    @{{ just_executable() }} --list

# clean up temporary files
clean:
    rm -rf .venv
# create a default .env file
_dotenv:
    #!/usr/bin/env bash
    set -euo pipefail
    # Seed a local .env from the checked-in sample if one doesn't exist yet
    if [[ ! -f .env ]]; then
        echo "No '.env' file found; creating a default '.env' from 'dotenv-sample'"
        cp dotenv-sample .env
    fi
# create the virtualenv (and a default .env) if not already present
virtualenv: _dotenv
    #!/usr/bin/env bash
    set -euo pipefail
    # create venv; path is quoted in case it contains spaces
    test -d "$VIRTUAL_ENV" || uv venv "$VIRTUAL_ENV"
# Install production requirements into venv.
prodenv:
    #!/usr/bin/env bash
    set -euxo pipefail
    # Ensure all project dependencies are installed and up-to-date with
    # the lockfile. The project is re-locked before syncing, so any
    # changes to pyproject.toml are reflected in the environment
    # (https://docs.astral.sh/uv/concepts/projects/sync/#locking-and-syncing).
    # Disable the dev dependency group (--no-dev) and remove any
    # extraneous packages (default uv sync behaviour)
    # (https://docs.astral.sh/uv/reference/cli/#uv-sync)
    uv sync --no-dev
# && dependencies are run after the recipe has run. Needs just>=0.9.9.
# This is a killer feature over Makefiles.

# Install dev requirements into venv.
devenv: _dotenv && install-precommit
    #!/usr/bin/env bash
    set -euo pipefail
    # Ensure all project dependencies are installed and up-to-date with
    # the lockfile. The project is re-locked before syncing, so any
    # changes to pyproject.toml are reflected in the environment
    # (https://docs.astral.sh/uv/concepts/projects/sync/#locking-and-syncing).
    # Do not remove extraneous packages (--inexact)
    # (https://docs.astral.sh/uv/reference/cli/#uv-sync--inexact)
    uv sync --inexact
# ensure precommit is installed
install-precommit:
    #!/usr/bin/env bash
    set -euo pipefail
    BASE_DIR=$(git rev-parse --show-toplevel)
    # install the hook only if it isn't already there; paths quoted in case
    # the checkout path contains spaces
    test -f "$BASE_DIR/.git/hooks/pre-commit" || "$BIN/pre-commit" install
# Upgrade a single package to the latest version per pyproject.toml
upgrade-package package: && devenv
    uv lock --upgrade-package {{ package }}

# Upgrade all packages to the latest version per pyproject.toml, then
# update the local venv. NOTE: This does not upgrade the opensafely-pipeline;
# to upgrade, run the upgrade-pipeline recipe
# devenv runs *after* the lock step (&&) so the venv reflects the new
# lockfile, matching the comment above and the upgrade-package recipe.
update-dependencies: && devenv
    uv lock --upgrade

# upgrade our internal pipeline library
upgrade-pipeline: && prodenv
    ./scripts/upgrade-pipeline.sh pyproject.toml
# Run the dev project with telemetry
run-telemetry: devenv
    $BIN/opentelemetry-instrument $BIN/python manage.py runserver --noreload

# run a Django management command
manage command *args:
    $BIN/python manage.py {{ command }} {{ args }}
# run the test suite with coverage as CI does (verification/functional excluded)
test-ci *args: assets
    #!/usr/bin/env bash
    set -euo pipefail
    export COVERAGE_PROCESS_START="pyproject.toml"
    export COVERAGE_REPORT_ARGS="--omit=jobserver/github.py,jobserver/opencodelists.py,tests/fakes.py,tests/verification/*,tests/functional/*"
    ./scripts/test-coverage.sh -m "not verification and not functional" {{ args }}
# Run the Python functional tests, using Playwright.
test-functional *ARGS: devenv
    $BIN/python manage.py collectstatic --no-input && \
    $BIN/python -m pytest \
        -m "functional" {{ ARGS }}
# run the verification tests with coverage limited to the verified modules
test-verification *args: devenv
    #!/usr/bin/env bash
    set -euo pipefail
    export COVERAGE_PROCESS_START="pyproject.toml"
    export COVERAGE_REPORT_ARGS="--include=jobserver/github.py,jobserver/opencodelists.py,tests/fakes.py,tests/verification/*"
    ./scripts/test-coverage.sh -m "verification" {{ args }}
# run the fast test suite in parallel (verification/slow/functional excluded)
test *args: assets
    $BIN/pytest -n auto -m "not verification and not slow_test and not functional" {{ args }}

# check formatting with ruff (check only; `just fix` rewrites files)
format *args=".": devenv
    $BIN/ruff format --check {{ args }}

# NOTE(review): django-upgrade edits files in place — confirm it belongs in the
# read-only `check` recipe.
# run django-upgrade over the project's Python sources
django-upgrade *args="$(find applications jobserver redirects services staff tests -name '*.py' -type f)": devenv
    $BIN/django-upgrade --target-version=5.0 {{ args }}

# lint Python with ruff and templates with djhtml (check only)
lint *args=".": devenv
    $BIN/ruff check --output-format=full {{ args }}
    $BIN/djhtml --tabwidth 2 --check templates/
# validate uv.lock
check-lockfile:
    #!/usr/bin/env bash
    set -euo pipefail
    # Make sure dates in pyproject.toml and uv.lock are in sync; unset any
    # ambient cutoff so the check reflects the committed configuration only
    unset UV_EXCLUDE_NEWER
    rc=0
    uv lock --check || rc=$?
    if test "$rc" != "0" ; then
        echo "Timestamp cutoffs in uv.lock must match those in pyproject.toml. See DEVELOPERS.md for details and hints." >&2
        exit $rc
    fi
# NOTE(review): django-upgrade rewrites files in place — confirm it is check-only here.
# run the various dev checks but does not change any files
check: check-lockfile format django-upgrade lint
# NOTE: the backticks in the message are escaped; unescaped, the shell would
# *execute* `just migrate` via command substitution instead of printing it.
# check for model changes not captured by the existing migrations
check-migrations: devenv
    $BIN/python manage.py makemigrations --dry-run --check \
        || echo "There is model state unaccounted for in the migrations, run \`just migrate\` to fix."
# generate migrations for any model changes
make-migrations: devenv
    $BIN/python manage.py makemigrations

# apply any unapplied migrations
apply-migrations: devenv
    $BIN/python manage.py migrate

# generate migrations and apply unapplied ones
migrate: make-migrations apply-migrations
# fix the things we can automate: linting, formatting, import sorting
fix: devenv
    $BIN/ruff check --fix .
    $BIN/ruff format .
    $BIN/djhtml --tabwidth 2 templates/

# load the dev fixture data (backends) into the database
load-dev-data: devenv
    $BIN/python manage.py loaddata backends
# Run the dev project
run bind="localhost:8000": devenv
    $BIN/python manage.py migrate
    DJANGO_DEBUG_TOOLBAR=True $BIN/python manage.py runserver {{ bind }}

# Run the rap status service to fetch job updates from the RAP API
run-rapstatus: devenv
    $BIN/python manage.py rap_status_service

# NOTE(review): `run` is backgrounded in this group — confirm it is cleaned up
# as expected when run-rapstatus exits.
# Run the dev server and the rap_status_service together
run-all:
    { {{ just_executable() }} run & {{ just_executable() }} run-rapstatus; }

# Run the production server locally
run-prod: prodenv
    $BIN/gunicorn -c gunicorn.conf.py jobserver.wsgi
# Remove built assets and collected static files
assets-clean:
    rm -rf assets/dist
    rm -rf staticfiles

# Install the Node.js dependencies
assets-install *args="":
    #!/usr/bin/env bash
    set -euo pipefail
    # exit if lock file has not changed since we installed them. -nt == "newer than",
    # but we negate with || to avoid error exit code
    test package-lock.json -nt node_modules/.written || exit 0
    npm ci {{ args }}
    touch node_modules/.written
# Build the Node.js assets
assets-build:
    #!/usr/bin/env bash
    set -euo pipefail
    # find files which are newer than dist/.written in the src directory. grep
    # will exit with 1 if there are no files in the result. We negate this
    # with || to avoid error exit code
    # we wrap the find in an if in case dist/.written is missing so we don't
    # trigger a failure prematurely
    if test -f assets/dist/.written; then
        find assets/src -type f -newer assets/dist/.written | grep -q . || exit 0
    fi
    npm run build
    touch assets/dist/.written
# Ensure django's collectstatic is run if needed
collectstatic: devenv
    ./scripts/collect-me-maybe.sh $BIN/python

# install npm toolchain, build and collect assets
assets: assets-install assets-build collectstatic

# rebuild all npm/static assets
assets-rebuild: assets-clean assets
# Run the npm asset watcher in dev mode (requires ASSETS_DEV_MODE to be truthy)
assets-run: assets-install
    #!/usr/bin/env bash
    set -euo pipefail
    # Default to "False" when unset so `set -u` doesn't abort with an
    # "unbound variable" error; an unset ASSETS_DEV_MODE means dev mode is
    # not enabled and the user should see the guidance message below.
    if [ "${ASSETS_DEV_MODE:-False}" == "False" ]; then
        echo "Set ASSETS_DEV_MODE to a truthy value to run this command"
        exit 1
    fi
    npm run dev

# run the Node.js tests with coverage
assets-test: assets-install
    npm run test:coverage
# dump data for co-pilot reporting to a compressed SQLite database
dump-co-pilot-reporting-data:
    ./scripts/dump-co-pilot-reporting-data.sh
# The docker-* commands are simply aliases for the docker/justfile commands.
# We add them for autocompletion from the root dir.

# build docker image env=dev|prod
docker-build env="dev":
    {{ just_executable() }} docker/build {{ env }}

# run python non-functional tests in the dev docker container
docker-test-py *args="":
    {{ just_executable() }} docker/test-py {{ args }}

# run functional tests in docker container
docker-test-functional *args="":
    {{ just_executable() }} docker/test-functional {{ args }}

# run server in dev or prod docker container
docker-serve env="dev" *args="":
    {{ just_executable() }} docker/serve {{ env }} {{ args }}

# run cmd in dev or prod docker container
docker-run env="dev" *args="":
    {{ just_executable() }} docker/run {{ env }} {{ args }}

# exec command in an existing dev docker container
docker-exec env="dev" *args="bash":
    {{ just_executable() }} docker/exec {{ env }} {{ args }}

# run basic smoke test against a running job-server
docker-smoke-test host="http://localhost:8000":
    {{ just_executable() }} docker/smoke-test {{ host }}