-
Notifications
You must be signed in to change notification settings - Fork 541
Expand file tree
/
Copy path container_integration_tests.yml
More file actions
332 lines (301 loc) · 10.7 KB
/
container_integration_tests.yml
File metadata and controls
332 lines (301 loc) · 10.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
name: Container Integration Tests Workflow

on:
  workflow_dispatch:
    inputs:
      # NOTE(review): this input is declared but never referenced by the job
      # below (no `if: inputs.force_run` guard) — confirm whether the
      # ENABLE_DOCKER_TESTS gate was removed or is still planned.
      force_run:
        description: 'Force run even if ENABLE_DOCKER_TESTS is false'
        required: false
        type: boolean
        default: false
  push:
    branches:
      - develop
    # Skip runs for docs-only changes.
    paths-ignore:
      - "doc/**"
      - "**/*.md"
      - "**/*.txt"
      - ".github/ISSUE_TEMPLATE/**"
      - ".github/*.md"
  pull_request:
    branches:
      - develop
    paths-ignore:
      - "doc/**"
      - "**/*.md"
      - "**/*.txt"
      - ".github/ISSUE_TEMPLATE/**"
      - ".github/*.md"

# Only one run at a time; a newer push cancels the in-flight run.
concurrency:
  group: "container-integration-tests-group"
  cancel-in-progress: true
jobs:
  main-integration-tests-workflow:
    runs-on: ubuntu-latest
    timeout-minutes: 120
    defaults:
      run:
        shell: bash
    permissions:
      checks: write        # needed by the test-results dashboard action
      pull-requests: write # needed to comment results on PRs
    steps:
      # ---------------------------
      # CHECKOUT
      # ---------------------------
      - name: Checkout repository
        uses: actions/checkout@v6
      # ---------------------------
      # VERIFY DOCKER
      # ---------------------------
      - name: Verify Docker
        run: |
          set -euo pipefail
          docker version
      # ---------------------------
      # SETUP JAVA + MAVEN
      # ---------------------------
      - name: Setup Java
        uses: actions/setup-java@v5
        with:
          distribution: "temurin"
          java-version: "21"
          cache: "maven"
      - name: Verify Maven
        run: |
          set -euo pipefail
          mvn -version
      # ---------------------------
      # CLEAN PREVIOUS VOLUMES
      # ---------------------------
      - name: Clean docker-dev-volumes
        run: |
          set -euo pipefail
          rm -rf docker-dev-volumes || true
      # ---------------------------
      # BUILD IMAGES (Dataverse-native)
      # ---------------------------
      - name: Build Dataverse containers via Maven
        run: |
          set -euo pipefail
          # Force unit tests to run, generate coverage, and ignore failures
          # so the build continues (failures are re-checked later).
          mvn clean package -Pct -P all-unit-tests \
            -DskipUnitTests=false \
            -Djacoco.skip=false \
            -Dmaven.test.failure.ignore=true
      # ---------------------------
      # START CONTAINERS (BACKGROUND)
      # ---------------------------
      - name: Start Dataverse stack
        run: |
          set -euo pipefail
          # The origin=* argument is quoted so bash cannot glob-expand the `*`
          # against files in the working directory.
          mvn -Pct docker:start \
            "-Ddataverse.cors.origin=*" \
            -Ddataverse.cors.methods=GET,POST,PUT,DELETE,OPTIONS \
            -Ddataverse.cors.headers.allow=range,content-type,x-dataverse-key,accept \
            -Ddataverse.cors.headers.expose=content-encoding,content-range,accept-ranges \
            -Ddataverse.feature.index-harvested-metadata-source=true \
            -Ddataverse.oai.server.maxidentifiers=2 \
            -Ddataverse.oai.server.maxrecords=2
# ---------------------------
# WAIT FOR API READINESS
# ---------------------------
- name: Wait for Dataverse API readiness
run: |
set -euo pipefail
URL="http://localhost:8080/api/info/version"
MAX_ATTEMPTS=10
SLEEP_TIME=15
echo "Waiting for Dataverse readiness..."
for attempt in $(seq 1 $MAX_ATTEMPTS); do
echo "Attempt $attempt..."
RESPONSE=$(curl -s --max-time 15 "$URL" || true)
STATUS=$(echo "$RESPONSE" | jq -r '.status' 2>/dev/null || echo "NOT_READY")
if [ "$STATUS" = "OK" ]; then
echo "Dataverse endpoint is READY."
echo "Dataverse waiting for full readiness. Waiting 30 more seconds."
sleep 30
echo "Response: $RESPONSE"
exit 0
fi
echo "Not ready. Sleeping ${SLEEP_TIME}s..."
sleep $SLEEP_TIME
if [ $SLEEP_TIME -lt 60 ]; then
SLEEP_TIME=$((SLEEP_TIME * 2))
if [ $SLEEP_TIME -gt 60 ]; then
SLEEP_TIME=60
fi
fi
done
echo "Dataverse failed to become ready."
docker ps
CONTAINERS="$(docker ps -aq)"
if [ -n "$CONTAINERS" ]; then
for cid in $CONTAINERS; do
echo "===== Logs for container $cid ====="
docker logs "$cid" || true
done
else
echo "No running containers to show logs for."
fi
exit 1
# ---------------------------
# MAP LOCALSTACK TO LOCALHOST
# ---------------------------
- name: Map localstack to localhost for Maven tests
run: echo "127.0.0.1 localstack" | sudo tee -a /etc/hosts
# ---------------------------
# CONFIGURE DATAVERSE FOR TESTS
# ---------------------------
- name: Configure Dataverse API Settings
run: |
set -euo pipefail
echo "Setting API Database Settings via internal container curl..."
# We define the settings in an array
declare -A settings=(
[":BuiltinUsersKey"]="burrito"
[":ProvCollectionEnabled"]="true"
[":AllowApiTokenLookupViaApi"]="true"
[":AllowSignUp"]="true"
)
# We run curl INSIDE the container so the source IP is 127.0.0.1
for key in "${!settings[@]}"; do
echo "Setting $key..."
docker exec dev_dataverse curl -s -X PUT -d "${settings[$key]}" "http://localhost:8080/api/admin/settings/$key"
echo ""
done
# ---------------------------
# PRE-TEST INJECTIONS (FROM YOUR ALTERNATE WORKFLOWS)
# ---------------------------
- name: Put SUSHI config file in place
run: |
set -euo pipefail
# Fixes MakeDataCountApiIT
docker exec dev_dataverse sh -c 'curl https://raw.githubusercontent.com/IQSS/dataverse/develop/src/test/java/edu/harvard/iq/dataverse/makedatacount/sushi_sample_logs.json > /tmp/sushi_sample_logs.json && head /tmp/sushi_sample_logs.json'
# ---------------------------
# RUN MAVEN TESTS
# ---------------------------
- name: Run Maven Integration Tests
env:
DVAPIKEY: "burrito"
DV_APIKEY: "burrito"
DV_API_KEY: "burrito"
run: |
set -euo pipefail
TEST_SUITE=$(cat tests/integration-tests.txt)
echo "Running suite: $TEST_SUITE"
# -Dmaven.test.failure.ignore=true forces the JVM to shut down cleanly
# and write the jacoco.exec file, even when tests fail.
mvn test \
-Dtest="$TEST_SUITE" \
-Dmaven.test.failure.ignore=true \
-Ddataverse.test.baseurl=http://localhost:8080 \
-DcompilerArgument=-Xlint:unchecked
# ---------------------------
# GENERATE REPORTS & DEPOSIT TO COVERALLS
# ---------------------------
- name: Deposit to Coveralls
if: always()
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
continue-on-error: true
run: |
set -euo pipefail
# The pom.xml automatically merged the files and put them here:
mvn coveralls:report \
-P all-unit-tests \
-DrepoToken=$COVERALLS_REPO_TOKEN \
-DjacocoReports=target/site/jacoco-merged-test-coverage-report/jacoco.xml
# ---------------------------
# UPLOAD SUREFIRE/FAILSAFE REPORTS
# ---------------------------
- name: Upload Test Failure Reports
if: always()
uses: actions/upload-artifact@v4
with:
name: maven-test-reports
path: |
target/surefire-reports/
target/failsafe-reports/
target/site/jacoco/
retention-days: 14
# ---------------------------
# 2. PUBLISH TEST DASHBOARD IN GITHUB PR (Optional but highly recommended)
# ---------------------------
- name: Publish Test Results Dashboard
uses: EnricoMi/publish-unit-test-result-action@v2
if: always()
with:
files: |
target/failsafe-reports/TEST-*.xml
target/surefire-reports/TEST-*.xml
# ---------------------------
# FAIL WORKFLOW IF TESTS FAILED
# ---------------------------
- name: Check for Test Failures
if: always()
run: |
echo "Checking Surefire/Failsafe reports for failures..."
if grep -q "<failure" target/surefire-reports/*.xml target/failsafe-reports/*.xml 2>/dev/null; then
echo "Tests failed! Failing the workflow."
exit 1
fi
echo "All tests passed."
# ---------------------------
# COLLECT DOCKER LOGS (ALWAYS, WITH MAPPING)
# ---------------------------
- name: Collect Docker logs (mapped)
if: always()
run: |
mkdir -p docker-logs
echo "Gathering container metadata..."
docker ps -a --format '{{.Names}}|{{.Image}}|{{.Status}}' > docker-logs/container-summary.txt
while IFS='|' read -r name image status; do
# Create a readable label
label="$name"
case "$name" in
*dataverse*)
label="dataverse-app"
;;
*postgres*)
label="postgres-db"
;;
*solr*)
label="solr-index"
;;
*localstack*)
label="localstack-s3"
;;
esac
echo "Collecting logs for $name ($label)"
{
echo "===== CONTAINER: $name ====="
echo "Label: $label"
echo "Image: $image"
echo "Status: $status"
echo ""
echo "===== LOGS ====="
docker logs --timestamps "$name" 2>&1 || true
} > "docker-logs/${label}__${name}.log"
done < docker-logs/container-summary.txt
# ---------------------------
# UPLOAD DOCKER LOGS (ALWAYS)
# ---------------------------
- name: Upload Docker logs
if: always()
uses: actions/upload-artifact@v4
with:
name: docker-logs
path: docker-logs/
retention-days: 7
# ---------------------------
# SHUTDOWN STACK
# ---------------------------
- name: Stop Dataverse stack
if: always()
run: |
set -euo pipefail
mvn -Pct docker:stop || true
- name: Final Docker cleanup
if: always()
run: |
docker system prune -af || true