[Fix] remove transient failureConditions and fix AOSS pipeline cluster deploy #11557
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: CI

on:
  push:
    branches:
      - main
  pull_request:
  merge_group:

env:
  python-version: '3.11'
  java-version: '17'
  gradle-version: '8.12.1'
  node-version: '22.x'
  # Number of stripes the Gradle test suite is split across (see gradle-tests).
  gradle-test-parallelization: '30'

# Prevent multiple simultaneous runs for the same ref, except for pushes to
# main on the upstream repo, which must always run to completion so caches
# and published reports stay current.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}
  cancel-in-progress: ${{ !(github.event_name == 'push' && github.repository == 'opensearch-project/opensearch-migrations' && github.ref == 'refs/heads/main') }}
jobs:
  # Hashes all Dockerfiles and docker-compose files into a single key used to
  # share the Docker image cache between gradle-tests and python-e2e-tests.
  generate-cache-key:
    runs-on: ubuntu-22.04
    outputs:
      docker_cache_key: ${{ steps.generate_docker_cache_key.outputs.key }}
    steps:
      - uses: actions/checkout@v6
      - name: Generate Docker Cache Key
        id: generate_docker_cache_key
        run: |
          # Sort the file list: 'find' emits entries in filesystem order,
          # which is not stable across runners, so an unsorted list can hash
          # identical content to different keys and cause spurious cache misses.
          files=$(find . -type f \( -name 'docker-compose.yml' -o -name 'Dockerfile' \) | sort)
          file_contents=$(cat $files)
          key=$(echo "${file_contents}" | sha1sum | awk '{print $1}')
          echo "key=${key}" >> "$GITHUB_OUTPUT"
| gradle-extended-check: | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| tasks: | |
| - spotlessCheck | |
| - publishToMavenLocal | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| with: | |
| gradle-cache-enabled: true | |
| - name: Run Gradle Build | |
| run: ./gradlew ${{matrix.tasks}} | |
| env: | |
| OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: '' | |
| gradle-build-macos: | |
| runs-on: macos-14 | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| with: | |
| gradle-cache-enabled: false | |
| - name: Run Gradle build on macOS | |
| run: ./gradlew build | |
| env: | |
| OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: '' | |
| python-lint: | |
| runs-on: ubuntu-22.04 | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| - name: Install dependencies | |
| run: | | |
| python3 -m pip install --upgrade pip | |
| python3 -m pip install flake8 | |
| - name: Analysing the code with flake8 | |
| run: | | |
| flake8 $(git ls-files '*.py') | |
| python-tests: | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| py-project: | |
| - migrationConsole/lib/console_link | |
| - migrationConsole/cluster_tools | |
| - k8sConfigMapUtilScripts | |
| - AIAdvisor/skills/solr-opensearch-migration-advisor | |
| - solrMigrationDevSandbox | |
| - libraries/testAutomation | |
| env: | |
| WORKING_DIR: ${{ startsWith(matrix.py-project, 'migrationConsole/') && format('./{0}', matrix.py-project) || startsWith(matrix.py-project, 'AIAdvisor/') && format('./{0}', matrix.py-project) || startsWith(matrix.py-project, 'libraries/') && format('./{0}', matrix.py-project) || matrix.py-project == 'solrMigrationDevSandbox' && format('./{0}', matrix.py-project) || format('./TrafficCapture/dockerSolution/src/main/docker/{0}', matrix.py-project) }} | |
| defaults: | |
| run: | |
| working-directory: ${{ env.WORKING_DIR }} | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| # Set up Kind cluster for workflow integration tests (only for console_link project) | |
| - name: Set up Kind cluster | |
| if: matrix.py-project == 'migrationConsole/lib/console_link' | |
| uses: helm/kind-action@v1.14.0 | |
| with: | |
| cluster_name: kind-test-cluster | |
| # Verify Kind cluster is ready (only for console_link project) | |
| - name: Verify Kind cluster | |
| if: matrix.py-project == 'migrationConsole/lib/console_link' | |
| run: | | |
| kubectl cluster-info | |
| kubectl get nodes | |
| kubectl get namespaces | |
| - name: Install dependencies (Pipfile) | |
| if: matrix.py-project != 'AIAdvisor/skills/solr-opensearch-migration-advisor' | |
| run: | | |
| python3 -m pip install pipenv==2026.5.0 | |
| pipenv install --deploy --dev | |
| pipenv graph | |
| # Install dependencies for pyproject.toml-based projects | |
| - name: Install dependencies (pyproject.toml) | |
| if: matrix.py-project == 'AIAdvisor/skills/solr-opensearch-migration-advisor' | |
| run: | | |
| python3 -m pip install --upgrade pip | |
| python3 -m pip install -e ".[dev]" | |
| python3 -m pip list | |
| # Run tests for Pipfile-based projects | |
| - name: Run tests (Pipfile) | |
| if: matrix.py-project != 'AIAdvisor/skills/solr-opensearch-migration-advisor' | |
| run: | | |
| pipenv run test | |
| pipenv run coverage xml | |
| # Run tests for pyproject.toml-based projects | |
| - name: Run tests (pyproject.toml) | |
| if: matrix.py-project == 'AIAdvisor/skills/solr-opensearch-migration-advisor' | |
| run: | | |
| python3 -m pytest --cov=scripts --cov-report=xml | |
| - name: Get Sanitized Name | |
| env: | |
| PY_PROJECT: ${{ matrix.py-project }} | |
| run: echo "SANITIZED_PY_PROJECT=${PY_PROJECT//\//-}" >> $GITHUB_ENV | |
| - name: Upload coverage to Codecov | |
| if: always() | |
| uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 | |
| with: | |
| fail_ci_if_error: true | |
| flags: python | |
| skip_validation: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| verbose: true | |
| - name: List Docker images | |
| if: always() | |
| run: | | |
| echo "Recording Docker images. These include standard images pre-cached by GitHub Actions when Docker is run as root." | |
| docker image list --format '{{ if ne .Repository "<none>" }}{{ .Repository }}{{ if ne .Tag "<none>" }}:{{ .Tag }}{{ end }}{{ else }}{{ .ID }}{{ end }}' | |
| generate-test-matrix: | |
| runs-on: ubuntu-22.04 | |
| outputs: | |
| matrix: ${{ steps.set-matrix.outputs.matrix }} | |
| steps: | |
| - id: set-matrix | |
| run: | | |
| # Generate a JSON array from 0 to gradle-test-parallelization-1 | |
| indices=() | |
| for i in $(seq 0 $((${{ env.gradle-test-parallelization }}-1))); do | |
| indices+=($i) | |
| done | |
| echo "matrix=$(IFS=,; echo "[${indices[*]}]")" >> $GITHUB_OUTPUT | |
| gradle-tests: | |
| needs: [generate-test-matrix, generate-cache-key] | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| index: ${{ fromJson(needs.generate-test-matrix.outputs.matrix) }} | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| with: | |
| gradle-cache-enabled: true | |
| gradle-cache-read-only: ${{ !(github.event_name == 'push' && github.ref == 'refs/heads/main') }} | |
| - name: Free disk space | |
| run: | | |
| sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc /usr/share/swift | |
| sudo docker system prune -af | |
| df -h | |
| - name: Restore Docker Cache | |
| uses: AndreKurait/docker-cache@0.6.0 | |
| with: | |
| key: docker-${{ runner.os }}-${{ needs.generate-cache-key.outputs.docker_cache_key }} | |
| # Delegate cache saving to python-e2e-tests | |
| read-only: true | |
| - name: Run Gradle tests with striping | |
| run: | | |
| MAX_WORKERS=$(( $(nproc) - 1 )) | |
| ./gradlew allTests mergeJacocoReports \ | |
| --max-workers $MAX_WORKERS \ | |
| -Dtest.striping.total=${{ env.gradle-test-parallelization }} \ | |
| -Dtest.striping.index=${{ matrix.index }} \ | |
| -x spotlessCheck --stacktrace --continue | |
| env: | |
| OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: '' | |
| - name: Detect Memory Dumps | |
| if: failure() | |
| run: | | |
| if find . -type f -name "*.hprof" | grep -q '.'; then | |
| echo "::group::Memory Dumps Detected" | |
| echo "::warning::Memory dumps were found and uploaded as artifacts. Review these files to diagnose OOM issues." | |
| echo "To download and inspect these files, navigate to 'Actions' -> 'Artifacts'." | |
| echo "::endgroup::" | |
| fi | |
| - name: Upload memory dump | |
| if: failure() | |
| uses: actions/upload-artifact@v7 | |
| with: | |
| if-no-files-found: warn | |
| name: memory-dumps-gradle-tests-stripe-${{ matrix.index }} | |
| path: ./**/*.hprof | |
| - name: Upload test reports for stripe ${{ matrix.index }} | |
| if: always() | |
| uses: actions/upload-artifact@v7 | |
| with: | |
| if-no-files-found: error | |
| name: test-reports-gradle-tests-stripe-${{ matrix.index }} | |
| path: | | |
| **/build/reports/tests/ | |
| - name: Upload coverage to Codecov | |
| if: always() | |
| uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 | |
| with: | |
| fail_ci_if_error: true | |
| flags: gradle | |
| skip_validation: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| verbose: true | |
| - name: List Docker images | |
| if: always() | |
| run: | | |
| echo "Recording Docker images. These include standard images pre-cached by GitHub Actions when Docker is run as root." | |
| docker image list --format '{{ if ne .Repository "<none>" }}{{ .Repository }}{{ if ne .Tag "<none>" }}:{{ .Tag }}{{ end }}{{ else }}{{ .ID }}{{ end }}' | |
| - name: Show disk usage | |
| if: always() | |
| run: | | |
| df -h | |
| docker system df -v | |
| python-e2e-tests: | |
| needs: [ generate-cache-key ] | |
| runs-on: ubuntu-22.04 | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| with: | |
| gradle-cache-enabled: true | |
| - name: Cache Docker Images | |
| uses: AndreKurait/docker-cache@0.6.0 | |
| with: | |
| key: docker-${{ runner.os }}-${{ needs.generate-cache-key.outputs.docker_cache_key }} | |
| # Only save cache on push events | |
| read-only: ${{ github.event_name != 'push' }} | |
| - name: Build custom ES images | |
| run: ./gradlew :custom-es-images:buildImage_es_7_10 --info | |
| - name: Start Docker Solution | |
| run: ./gradlew -p TrafficCapture dockerSolution:ComposeUp -x test -x spotlessCheck --info --stacktrace | |
| env: | |
| OS_MIGRATIONS_GRADLE_SCAN_TOS_AGREE_AND_ENABLED: '' | |
| - name: Run E2E test script | |
| run: | | |
| docker exec $(docker ps --filter "name=migration-console" -q) pipenv run pytest /root/lib/integ_test/integ_test/replayer_tests.py --unique_id="testindex" -s | |
| - name: List Docker images | |
| if: always() | |
| run: | | |
| echo "Recording Docker images. These include standard images pre-cached by GitHub Actions when Docker is run as root." | |
| docker image list --format '{{ if ne .Repository "<none>" }}{{ .Repository }}{{ if ne .Tag "<none>" }}:{{ .Tag }}{{ end }}{{ else }}{{ .ID }}{{ end }}' | |
| - name: Show disk usage | |
| if: always() | |
| run: | | |
| df -h | |
| docker system df -v | |
| - name: Collect Docker, OpenSearch Benchmark, and Shared Logs | |
| if: always() | |
| run: | | |
| mkdir -p logs/docker logs/opensearch_benchmark_logs logs/shared_logs_output | |
| for container in $(docker ps -aq); do | |
| container_name=$(docker inspect --format '{{.Name}}' $container | sed 's/\///') | |
| docker logs $container > logs/docker/${container_name}_logs.txt 2>&1 | |
| done | |
| docker cp $(docker ps --filter "name=migration-console" -q):/root/.benchmark/logs logs/opensearch_benchmark_logs | |
| docker cp $(docker ps --filter "name=migration-console" -q):/shared-logs-output logs/shared_logs_output | |
| - name: Upload Logs | |
| if: always() | |
| uses: actions/upload-artifact@v7 | |
| with: | |
| if-no-files-found: error | |
| name: e2e-test-logs | |
| path: | | |
| logs/docker | |
| logs/opensearch_benchmark_logs | |
| logs/shared_logs_output | |
| - name: Clean up migrations docker images before caching | |
| run: | | |
| docker stop $(docker ps -q) && docker system prune --volumes -f | |
| docker image ls --format '{{.Repository}}:{{.Tag}}' | grep '^migrations/' | xargs -I {} docker image rm {} | |
| node-tests: | |
| runs-on: ubuntu-22.04 | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| npm-project: | |
| - ./deployment/cdk/opensearch-service-migration | |
| - ./deployment/migration-assistant-solution | |
| - ./TrafficCapture/SolrTransformations/transforms | |
| # TODO - get these enabled | |
| # - ./orchestrationSpecs/packages/argo-workflow-builders | |
| # - ./orchestrationSpecs/packages/config-processor | |
| # - ./orchestrationSpecs/packages/migration-workflow-templates | |
| # - ./orchestrationSpecs/packages/schemas | |
| defaults: | |
| run: | |
| working-directory: ${{ matrix.npm-project }} | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| - name: Install NPM dependencies | |
| run: npm ci | |
| - name: Run CDK Jest Tests (using mocked images) | |
| run: npm run test | |
| - name: Upload coverage to Codecov | |
| if: always() | |
| uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 | |
| with: | |
| fail_ci_if_error: true | |
| flags: node | |
| skip_validation: true | |
| token: ${{ secrets.CODECOV_TOKEN }} | |
| verbose: true | |
| link-checker: | |
| runs-on: ubuntu-22.04 | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - name: lychee Link Checker | |
| id: lychee | |
| uses: lycheeverse/lychee-action@v2 | |
| with: | |
| args: --verbose --accept=200,403,429 "**/*.html" "**/*.md" "**/*.txt" "**/*.json" | |
| --offline | |
| --exclude "file:///github/workspace/*" | |
| --exclude "file://.kiro/*" | |
| --exclude "http://localhost*" | |
| --exclude "https://localhost*" | |
| --exclude "http://capture-proxy*" | |
| --exclude "https://capture-proxy*" | |
| --exclude-path "TrafficCapture/dockerSolution/src/main/docker/k8sConfigMapUtilScripts/tests/data" | |
| --exclude-path "orchestrationSpecs/packages/argo-workflow-builders/tests/integ/artifacts" | |
| --exclude "file://.*artifacts/parity-catalog.md" | |
| fail: true | |
| env: | |
| GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} | |
| codeql-analyze: | |
| name: CodeQL (${{ matrix.language }}) | |
| runs-on: ubuntu-22.04 | |
| permissions: | |
| security-events: write | |
| packages: read | |
| strategy: | |
| fail-fast: false | |
| matrix: | |
| include: | |
| - language: java-kotlin | |
| - language: javascript-typescript | |
| - language: python | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: github/codeql-action/init@v4 | |
| with: | |
| languages: ${{ matrix.language }} | |
| build-mode: none | |
| - uses: github/codeql-action/analyze@v4 | |
| with: | |
| category: "/language:${{matrix.language}}" | |
| sonarqube-analysis: | |
| name: SonarQube Analysis | |
| runs-on: ubuntu-22.04 | |
| services: | |
| sonarqube: | |
| image: sonarqube:25.10.0.114319-community | |
| ports: | |
| - 9000:9000 | |
| options: >- | |
| --health-cmd="curl -s -u admin:admin http://localhost:9000/api/system/health | grep -o GREEN" | |
| --health-interval=10s | |
| --health-timeout=10s | |
| --health-retries=60 | |
| env: | |
| SONAR_ES_BOOTSTRAP_CHECKS_DISABLE: "true" | |
| steps: | |
| - uses: actions/checkout@v6 | |
| - uses: ./.github/actions/setup-env | |
| with: | |
| gradle-cache-enabled: true | |
| - name: Cache SonarQube Scanner | |
| uses: actions/cache@v5 | |
| with: | |
| path: ~/.sonar/cache | |
| key: sonar-cache | |
| restore-keys: sonar-cache | |
| - name: Generate SonarQube Token | |
| run: | | |
| echo "Generating a SonarQube token..." | |
| RESPONSE=$(curl -s -X POST -u "admin:admin" "http://localhost:9000/api/user_tokens/generate" \ | |
| -d "name=github-action-token" -d "type=GLOBAL_ANALYSIS_TOKEN") | |
| SONAR_TOKEN=$(echo "$RESPONSE" | jq -r '.token') | |
| if [ "$SONAR_TOKEN" == "null" ]; then | |
| echo "❌ Failed to generate SonarQube token!" | |
| exit 1 | |
| fi | |
| echo "::add-mask::$SONAR_TOKEN" | |
| echo "SONAR_TOKEN=$SONAR_TOKEN" >> $GITHUB_ENV | |
| - name: Run SonarQube Scanner | |
| run: | | |
| curl -sLo sonar-scanner-cli.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-6.2.1.4610-linux-x64.zip | |
| unzip -q sonar-scanner-cli.zip -d $HOME | |
| export PATH="$HOME/sonar-scanner-6.2.1.4610-linux-x64/bin:$PATH" | |
| sonar-scanner \ | |
| -Dsonar.projectKey=local_project \ | |
| -Dsonar.host.url=http://localhost:9000 \ | |
| -Dsonar.login=$SONAR_TOKEN | |
| - name: Wait for issues to be processed | |
| run: sleep 60 | |
| - name: Collect issues from the server | |
| run: | | |
| curl -s -u "$SONAR_TOKEN:" "http://localhost:9000/api/issues/search?componentKeys=local_project" -o issues.json | |
| echo "::group::SonarQube Issues" | |
| jq -r '.issues[] | "File: \(.component):\(.line), Rule: \(.rule), Message: \(.message)"' issues.json | sort | |
| echo "::endgroup::" | |
| jq -c '.issues | sort_by(.creationDate) | reverse | .[]' issues.json | while read -r issue; do | |
| FILE=$(echo "$issue" | jq -r '.component | split(":")[1]') | |
| LINE=$(echo "$issue" | jq -r '.line') | |
| MESSAGE=$(echo "$issue" | jq -r '.message') | |
| RULE=$(echo "$issue" | jq -r '.rule') | |
| echo "::error file=$FILE,line=$LINE,title=$RULE::$MESSAGE" | |
| done | |
| ISSUE_COUNT=$(jq '.issues | length' issues.json) | |
| BASELINE_ISSUE_COUNT=0 | |
| if [ "$ISSUE_COUNT" -gt "$BASELINE_ISSUE_COUNT" ]; then | |
| echo "❌ Build failed: Found $ISSUE_COUNT issues, which is more than the baseline of $BASELINE_ISSUE_COUNT." | |
| exit 1 | |
| else | |
| echo "✅ Build passed: Found $ISSUE_COUNT issues, which is within the baseline of $BASELINE_ISSUE_COUNT." | |
| fi | |
| - name: Upload SonarQube Artifacts | |
| if: always() | |
| uses: actions/upload-artifact@v7 | |
| with: | |
| name: sonar-reports | |
| path: issues.json | |
| all-ci-checks-pass: | |
| needs: | |
| - python-tests | |
| - gradle-tests | |
| - link-checker | |
| - node-tests | |
| - python-e2e-tests | |
| - python-lint | |
| - gradle-extended-check | |
| - gradle-build-macos | |
| - codeql-analyze | |
| - sonarqube-analysis | |
| if: always() | |
| runs-on: ubuntu-22.04 | |
| steps: | |
| - if: ${{ contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'failure') }} | |
| run: | | |
| echo "One or more job cancelled or failed" && exit 1 | |
| - run: | | |
| echo '## :heavy_check_mark: All continuous integration checks pass' >> $GITHUB_STEP_SUMMARY |