
Commit

Merge pull request #207 from unity-sds/develop
SPS release 24.3
drewm-jpl authored Sep 19, 2024
2 parents e80d390 + 8542f8b commit 12c2c70
Showing 104 changed files with 2,940 additions and 1,497 deletions.
39 changes: 22 additions & 17 deletions .github/workflows/integration_tests.yml
@@ -10,9 +10,9 @@ on:
MCP_VENUE_DEV_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue Dev (i.e. http://abc.def.ghi:port-number)"
type: string
# MCP_VENUE_TEST_AIRFLOW_ENDPOINT:
# description: "Base URL for the Airflow endpoint in MCP Venue Test (i.e. http://abc.def.ghi:port-number)"
# type: string
MCP_VENUE_TEST_AIRFLOW_ENDPOINT:
description: "Base URL for the Airflow endpoint in MCP Venue Test (i.e. http://abc.def.ghi:port-number)"
type: string
jobs:
integration-tests:
runs-on: ubuntu-latest
@@ -29,6 +29,7 @@ jobs:
python -m pip install --upgrade pip
pip install -e ".[test]"
- name: MCP Venue Dev - Integration tests
id: mcp_venue_dev_integration_tests
continue-on-error: true
@@ -37,31 +38,35 @@
run: |
pytest -vv --gherkin-terminal-reporter \
unity-test/system/integration \
--venue="dev" \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }}
# - name: MCP Venue Test - Integration tests
# id: mcp_venue_test_integration_tests
# continue-on-error: true
# run: |
# pytest -vv --gherkin-terminal-reporter \
# unity-test/system/integration \
# --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }}
- name: MCP Venue Test - Integration tests
id: mcp_venue_test_integration_tests
continue-on-error: true
env:
AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }}
run: |
pytest -vv --gherkin-terminal-reporter \
unity-test/system/integration \
--venue="test" \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }}
- name: Check Integration Tests Results
if: always()
run: |
dev_status=${{ steps.mcp_venue_dev_integration_tests.outcome }}
# test_status=${{ steps.mcp_venue_test_integration_tests.outcome }}
echo "Dev Integration Tests: $dev_status"
# echo "Test Integration Tests: $test_status"
test_status=${{ steps.mcp_venue_test_integration_tests.outcome }}
echo "Dev Venue Integration Tests status: $dev_status"
echo "Test Venue Integration Tests status: $test_status"
if [ "$dev_status" != "success" ]; then
echo "MCP Venue Dev Integration Tests failed."
exit 1
fi
# Uncomment this block when MCP Venue Test Integration tests are re-enabled
# if [ "$test_status" != "success" ]; then
# echo "MCP Venue Test Integration Tests failed."
# exit 1
# fi
if [ "$test_status" != "success" ]; then
echo "MCP Venue Test Integration Tests failed."
exit 1
fi
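
The Dev and Test steps above drive the same pytest suite with `--venue` and `--airflow-endpoint` options. Those options are registered inside the test suite itself, not in this workflow; the sketch below shows one plausible way a `conftest.py` could expose them, purely to illustrate the pytest pattern (the fixture names and the `admin` username are assumptions, not taken from this repository).

```python
# Hypothetical conftest.py sketch: how the --venue and --airflow-endpoint
# options passed by the workflow could be registered and consumed. The real
# conftest under unity-test/ may differ.
import os

import pytest


def pytest_addoption(parser):
    parser.addoption("--venue", action="store", default="dev",
                     help="Target venue, e.g. 'dev' or 'test'")
    parser.addoption("--airflow-endpoint", action="store", default=None,
                     help="Base URL of the Airflow webserver")


@pytest.fixture(scope="session")
def airflow_api_url(request):
    # Build the Airflow REST API base URL from the command-line option.
    endpoint = request.config.getoption("--airflow-endpoint")
    return endpoint.rstrip("/") + "/api/v1"


@pytest.fixture(scope="session")
def airflow_auth():
    # The workflow exports AIRFLOW_WEBSERVER_PASSWORD as a secret;
    # the "admin" username here is an assumption.
    return ("admin", os.environ["AIRFLOW_WEBSERVER_PASSWORD"])
```
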
41 changes: 23 additions & 18 deletions .github/workflows/smoke_tests.yml
@@ -57,17 +57,16 @@ jobs:
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_AIRFLOW_ENDPOINT || vars.MCP_VENUE_DEV_AIRFLOW_ENDPOINT }} \
--ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_DEV_OGC_PROCESSES_ENDPOINT }}
# Temporary: comment out checks on MCP venue test until the SPS is redeployed
# - name: MCP Venue Test - Smoke tests
# id: mcp_venue_test_smoke_tests
# env:
# AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }}
# continue-on-error: true
# run: |
# pytest -vv --gherkin-terminal-reporter \
# unity-test/system/smoke \
# --airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} \
# --ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT }}
- name: MCP Venue Test - Smoke tests
id: mcp_venue_test_smoke_tests
env:
AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.MCP_VENUE_TEST_AIRFLOW_WEBSERVER_PASSWORD }}
continue-on-error: true
run: |
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_AIRFLOW_ENDPOINT || vars.MCP_VENUE_TEST_AIRFLOW_ENDPOINT }} \
--ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_TEST_OGC_PROCESSES_ENDPOINT }}
- name: MCP Venue Ops - Smoke tests
id: mcp_venue_ops_smoke_tests
@@ -76,7 +75,7 @@ jobs:
continue-on-error: true
run: |
pytest -vv --gherkin-terminal-reporter \
unity-test/system/smoke/step_defs/test_airflow_api_health.py \
unity-test/system/smoke/ \
--airflow-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_AIRFLOW_ENDPOINT || vars.MCP_VENUE_OPS_AIRFLOW_ENDPOINT }} \
--ogc-processes-endpoint=${{ github.event.inputs.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT || vars.MCP_VENUE_OPS_OGC_PROCESSES_ENDPOINT }}
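
The ops step previously exercised only `test_airflow_api_health.py` and now runs the whole `unity-test/system/smoke` directory against both the Airflow and OGC Processes endpoints. For orientation, a minimal standalone health probe might look like the sketch below; it assumes the standard Airflow webserver `/health` route and the OGC API - Processes `/processes` route and uses the `requests` library — the project's actual smoke tests may be implemented differently.

```python
# Minimal, hypothetical health probe for an SPS deployment; not the project's
# actual smoke test code.
import sys

import requests


def check_airflow(airflow_endpoint: str) -> bool:
    # Airflow's webserver exposes an unauthenticated /health route reporting
    # metadatabase and scheduler status.
    resp = requests.get(f"{airflow_endpoint.rstrip('/')}/health", timeout=30)
    resp.raise_for_status()
    health = resp.json()
    return all(c.get("status") == "healthy" for c in health.values())


def check_ogc_processes(ogc_endpoint: str) -> bool:
    # OGC API - Processes servers list deployed processes at /processes.
    resp = requests.get(f"{ogc_endpoint.rstrip('/')}/processes", timeout=30)
    resp.raise_for_status()
    return "processes" in resp.json()


if __name__ == "__main__":
    airflow_url, ogc_url = sys.argv[1], sys.argv[2]
    ok = check_airflow(airflow_url) and check_ogc_processes(ogc_url)
    print("SPS health:", "OK" if ok else "DEGRADED")
    sys.exit(0 if ok else 1)
```

A command like `python health_probe.py <airflow-endpoint> <ogc-processes-endpoint>` would then exit non-zero when either service is unhealthy.
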
@@ -96,24 +95,30 @@ jobs:
if: always()
run: |
dev_status=${{ steps.mcp_venue_dev_smoke_tests.outcome }}
ops_status=${{ steps.mcp_venue_ops_smoke_tests.outcome }}
test_status=${{ steps.mcp_venue_test_smoke_tests.outcome }}
sbg_dev_status=${{ steps.mcp_sbg_dev_smoke_tests.outcome }}
ops_status=${{ steps.mcp_venue_ops_smoke_tests.outcome }}
echo "Dev Smoke Tests: $dev_status"
echo "Ops Smoke Tests: $ops_status"
echo "Test Smoke Tests: $test_status"
echo "SBG Dev Smoke Tests: $sbg_dev_status"
echo "Ops Smoke Tests: $ops_status"
# FIXME: must re-enable [ "$test_status" != "success" ]
if [ "$dev_status" != "success" ] || [ "$ops_status" != "success" ] || [ "$sbg_dev_status" != "success" ]; then
if [ "$dev_status" != "success" ] || [ "$test_status" != "success" ] \
|| [ "$sbg_dev_status" != "success" ] \
|| [ "$ops_status" != "success" ]; then
echo "One or more smoke tests failed."
if [ "$dev_status" != "success" ]; then
echo "MCP Venue Dev Smoke Tests failed."
fi
if [ "$ops_status" != "success" ]; then
echo "MCP Venue Ops Smoke Tests failed."
if [ "test_status" != "success" ]; then
echo "MCP Venue Test Smoke Tests failed."
fi
if [ "$sbg_dev_status" != "success" ]; then
echo "MCP Venue SBG Dev Smoke Tests failed."
fi
if [ "$ops_status" != "success" ]; then
echo "MCP Venue Ops Smoke Tests failed."
fi
exit 1
else
echo "All smoke tests passed."
4 changes: 3 additions & 1 deletion .github/workflows/static_analysis.yml
@@ -1,6 +1,8 @@
name: Static Analysis

on: [pull_request]
on:
pull_request:
workflow_dispatch:

jobs:
pre-commit:
5 changes: 4 additions & 1 deletion .gitignore
@@ -212,9 +212,10 @@ $RECYCLE.BIN/
# Local .terraform directories
**/.terraform/*

# .tfstate files
# Terraform files
*.tfstate
*.tfstate.*
**.hcl

# Crash log files
crash.log
@@ -247,3 +248,5 @@ terraform.rc

/lambda/deployment_packages/*
!/lambda/deployment_packages/.gitkeep

**/*.cfg
3 changes: 1 addition & 2 deletions .pre-commit-config-ci.yaml
@@ -8,8 +8,7 @@ repos:
- id: check-yaml
- id: check-xml
- id: check-added-large-files
args:
- --maxkb=50000
args: [--maxkb=50000]
- id: check-json # Checks json files for parsable syntax.
- id: pretty-format-json # Sets a standard for formatting json files.
args:
3 changes: 1 addition & 2 deletions .pre-commit-config.yaml
@@ -8,8 +8,7 @@ repos:
- id: check-yaml
- id: check-xml
- id: check-added-large-files
args:
- --maxkb=50000
args: ["--maxkb=50000"]
- id: check-json # Checks json files for parsable syntax.
- id: pretty-format-json # Sets a standard for formatting json files.
args:
58 changes: 57 additions & 1 deletion CHANGELOG.md
@@ -5,6 +5,59 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

# [Unity Release 24.3] - 2024-09-22

## Tags

- SPS Version 2.2.0
- OGC API Version 2.0.0
- OGC Python Client Version 2.0.0

## Repositories

- unity-sps: <https://github.com/unity-sds/unity-sps/releases/tag/2.2.0>
- unity-sps-ogc-processes-api: <https://github.com/unity-sds/unity-sps-ogc-processes-api/releases/tag/2.0.0>
- unity-sps-ogc-processes-api-client-python: <https://github.com/unity-sds/unity-sps-ogc-processes-api-client-python/releases/tag/2.0.0>

## Epics

- EPIC: `Security`
- [[Bug]: Upgrade EKS 1.27 AMIs](https://github.com/unity-sds/unity-sps/issues/159)
- EPIC: `Scaling`
- [[New Feature]: Increase ephemeral disk space for Airflow workers](https://github.com/unity-sds/unity-sps/issues/152)
- [[New Feature]: Enable users to select the EC2 type to execute a workload](https://github.com/unity-sds/unity-sps/issues/153)
- [[New Feature]: Set the DAG run status to "failed" if the main worker task failed](https://github.com/unity-sds/unity-sps/issues/189)
- [[New Feature]: Demonstrate use of ECR within an Airflow DAG](https://github.com/unity-sds/unity-sps/issues/186)
- EPIC: `Airflow/WPS-T Integration`
- [[New Feature]: Create test to deploy, execute and undeploy the CWL DAG](https://github.com/unity-sds/unity-sps/issues/131)
- [[New Feature]: Enable execution of OGC data processing requests with arbitrary parameter values](https://github.com/unity-sds/unity-sps/issues/129)
- EPIC: `Production Venue Deployments`
- [[New Feature]: Airflow HTTPD Proxy development and configuration](https://github.com/unity-sds/unity-sps/issues/125)
- [[New Feature]: Expose SPS health check endpoints](https://github.com/unity-sds/unity-sps/issues/127)
- EPIC: `SPS Infrastructure`
- [[New Feature]: Update documentation for SPS deployment](https://github.com/unity-sds/unity-sps/issues/116)
- [[New Feature]: Review the SPS GitBook documentation](https://github.com/unity-sds/unity-sps/issues/118)
- [[New Feature]: Store SPS Terraform state on S3](https://github.com/unity-sds/unity-sps/issues/132)
- [[New Feature]: Parametrize the SPS Integration Tests](https://github.com/unity-sds/unity-sps/issues/155)
- [[New Feature] Upgrade SPS to latest version of Airflow 2.10.0](https://github.com/unity-sds/unity-sps/issues/195)

## Docker Containers

- ghcr.io/unity-sds/unity-sps/sps-airflow:2.2.0
- ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.2.0
- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:2.0.0

## Documentation

- For Administrators:
- [SPS Deployment with Terraform](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-deployment-with-terraform)
- [Interacting with an Existing SPS Deployment](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/interacting-with-an-existing-sps-deployment)
- [SPS Airflow Custom Docker Image Build Instructions](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-airflow-custom-docker-image-build-instructions)
- [SPS Post Deployment Operations](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/admin-guide/sps-post-deployment-operations)
- For Developers:
- [Tutorial: Deploy, Execute, and Undeploy a Process using the OGC API - Processes](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/developers-guide/tutorial-deploy-execute-and-undeploy-a-process-using-the-ogc-api-processes)
- For Users:
- [Tutorial: Register and Execute a CWL Workflow](https://app.gitbook.com/o/xZRqGQeQXJ0RP4VMj7Lq/s/UMIRhLdbRQTvMWop8Il9/developer-docs/science-processing/docs/users-guide/tutorial-register-and-execute-a-cwl-workflow)

# [Unity Release 24.2] - 2024-07-01

@@ -38,12 +91,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
- [[Task] Add TESTING.md file to SPS repo](https://github.com/unity-sds/unity-sps/issues/99)
- EPIC: `SPS Infrastructure`
- [[New Feature] Store SPS Terraform state on S3](https://github.com/unity-sds/unity-sps/issues/132)
- EPIC: `SPS Security`
- [[Bug]: Upgrade EKS 1.27 AMIs](https://github.com/unity-sds/unity-sps/issues/159)
- [[Bug]: Upgrade to EKS 1.29 AMIs](https://github.com/unity-sds/unity-sps/issues/206)

## Docker Containers

- ghcr.io/unity-sds/unity-sps/sps-airflow:2.1.0
- ghcr.io/unity-sds/unity-sps/sps-docker-cwl:2.1.0
- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:2.1.0
- ghcr.io/unity-sds/unity-sps-ogc-processes-api/unity-sps-ogc-processes-api:1.0.0

## Documentation

File renamed without changes.
2 changes: 1 addition & 1 deletion unity-test/TESTING.md → TESTING.md
@@ -181,7 +181,7 @@ The below list of test categories are included in our testing setup. Further det

### Security Tests

- Location: `/unity`
- Location: `/.github/workflows`
- Purpose: Identify potential security vulnerabilities.
- Running Tests:
- Manually: N/A
2 changes: 1 addition & 1 deletion airflow/dags/busybox.py
@@ -16,7 +16,7 @@
POD_TEMPLATE_FILE = "/opt/airflow/dags/docker_cwl_pod.yaml"

# The Kubernetes namespace within which the Pod is run (it must already exist)
POD_NAMESPACE = "airflow"
POD_NAMESPACE = "sps"

# The path of the working directory where the CWL workflow is executed
# (aka the starting directory for cwl-runner).
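
The namespace change above affects every DAG that launches pods from this template file. For context, a minimal sketch of how such a DAG might hand `POD_TEMPLATE_FILE` and `POD_NAMESPACE` to a `KubernetesPodOperator` is shown below — this is an illustrative reconstruction, not the actual contents of `busybox.py`, and the task names, image, and commands are assumptions.

```python
# Illustrative sketch only: how POD_TEMPLATE_FILE and POD_NAMESPACE defined in
# busybox.py could be consumed by a KubernetesPodOperator task. The real DAG
# may use different task names, images, and arguments.
from datetime import datetime

from airflow import DAG
from airflow.providers.cncf.kubernetes.operators.pod import KubernetesPodOperator

POD_TEMPLATE_FILE = "/opt/airflow/dags/docker_cwl_pod.yaml"
POD_NAMESPACE = "sps"  # must match the namespace created by the SPS deployment

with DAG(
    dag_id="busybox_sketch",
    start_date=datetime(2024, 1, 1),
    schedule=None,
    catchup=False,
) as dag:
    run_busybox = KubernetesPodOperator(
        task_id="run_busybox",
        namespace=POD_NAMESPACE,
        pod_template_file=POD_TEMPLATE_FILE,
        image="busybox",
        cmds=["sh", "-c", "echo hello from busybox"],
        get_logs=True,
    )
```
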