diff --git a/.github/workflows/check_pr.yml b/.github/workflows/check_pr.yml
index 4f392d8..0242498 100644
--- a/.github/workflows/check_pr.yml
+++ b/.github/workflows/check_pr.yml
@@ -9,8 +9,7 @@ env:
   DBT_PROFILES_DIR: ./
   BIGQUERY_PROJECT_ID: ${{ vars.DBT_BIGQUERY_PROJECT_ID }}
   BIGQUERY_DATASET_ID: ${{ vars.DBT_BIGQUERY_DATASET_ID }}
-  BIGQUERY_SERVICE_FILE: ./dbt-service-account.jso
-  KEYFILE_CONTENTS: ${{secrets.KEYFILE_CONTENTS}}
+  BIGQUERY_OAUTH_TOKEN: ${{ secrets.BIGQUERY_OAUTH_TOKEN }}
 
   SNOWFLAKE_ACCOUNT: ${{ vars.SNOWFLAKE_ACCOUNT }}
   SNOWFLAKE_DATABASE: ${{ vars.SNOWFLAKE_DATABASE }}
@@ -47,7 +46,6 @@ jobs:
       - name: Set up BigQuery environment
         if: ${{ github.base_ref == 'main' }}
         run: |
-          echo "$KEYFILE_CONTENTS" > ${{ env.BIGQUERY_SERVICE_FILE }}
           pip install dbt-snowflake pandas
           dbt --version
           dbt deps
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..102db98
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,58 @@
+name: Dev build image & push
+
+on:
+  push:
+    branches: [ main ]
+
+jobs:
+  build:
+    name: Dockerize dbt
+    runs-on: ubuntu-latest
+
+    steps:
+      # actions/checkout MUST come before auth
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - id: auth
+        name: Authenticate with Google Cloud
+        uses: "google-github-actions/auth@v1"
+        with:
+          token_format: access_token
+          project_id: ${{ vars.DBT_BIGQUERY_PROJECT_ID }}
+          credentials_json: ${{ secrets.KEYFILE_CONTENTS }}
+          access_token_lifetime: 300s
+
+      - name: Login to Artifact Registry
+        uses: docker/login-action@v3
+        with:
+          registry: us-central1-docker.pkg.dev
+          username: oauth2accesstoken
+          password: ${{ steps.auth.outputs.access_token }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Get tag
+        id: get-tag
+        run: echo "short_ref=${GITHUB_REF#refs/*/}" >> "$GITHUB_OUTPUT"
+
+      - name: Build & Push Image
+        uses: docker/build-push-action@v4
+        with:
+          context: ./silverwork_dbt
+          platforms: linux/amd64,linux/arm64
+          push: true
+          tags: |
+            ${{ secrets.DBT_REGISTRY_ENDPOINT }}:${{ steps.get-tag.outputs.short_ref }}
+            ${{ secrets.DBT_REGISTRY_ENDPOINT }}:latest
+
+      - name: Trigger Airflow init dag
+        env:
+          AIRFLOW_WEBSERVER_ENDPOINT: ${{ secrets.AIRFLOW_WEBSERVER_ENDPOINT }}
+          AIRFLOW_WEBSERVER_ID: ${{ secrets.AIRFLOW_WEBSERVER_ID }}
+          AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.AIRFLOW_WEBSERVER_PASSWORD }}
+          AIRFLOW_DBT_INIT_DAG_ID: ${{ secrets.AIRFLOW_DBT_INIT_DAG_ID }}
+        shell: bash
+        run: |
+          curl "$AIRFLOW_WEBSERVER_ENDPOINT/api/v1/dags/$AIRFLOW_DBT_INIT_DAG_ID/dagRuns" --user "$AIRFLOW_WEBSERVER_ID:$AIRFLOW_WEBSERVER_PASSWORD" -H 'Content-Type: application/json' -d '{}'
diff --git a/silverwork_dbt/Dockerfile b/silverwork_dbt/Dockerfile
new file mode 100644
index 0000000..0e42fdd
--- /dev/null
+++ b/silverwork_dbt/Dockerfile
@@ -0,0 +1,9 @@
+# Select the base image
+FROM --platform=linux/amd64 python:3.9-slim
+
+# Set the working directory
+WORKDIR /app
+
+COPY . /app
+
+RUN pip install dbt-snowflake dbt-bigquery pandas
diff --git a/silverwork_dbt/macros/get_custom_schema.sql b/silverwork_dbt/macros/get_custom_schema.sql
new file mode 100644
index 0000000..11a47de
--- /dev/null
+++ b/silverwork_dbt/macros/get_custom_schema.sql
@@ -0,0 +1,14 @@
+{% macro generate_schema_name(custom_schema_name, node) -%}
+
+    {%- set default_schema = target.schema -%}
+    {%- if custom_schema_name is none -%}
+
+        {{ default_schema }}
+
+    {%- else -%}
+
+        {{ custom_schema_name | trim }}
+
+    {%- endif -%}
+
+{%- endmacro %}
diff --git a/silverwork_dbt/profiles.yml b/silverwork_dbt/profiles.yml
index 491dc6d..90e562e 100644
--- a/silverwork_dbt/profiles.yml
+++ b/silverwork_dbt/profiles.yml
@@ -13,8 +13,8 @@ silverwork_dbt:
       threads: 10
     stage:
      type: bigquery
-      method: service-account
-      keyfile: "{{ env_var('BIGQUERY_SERVICE_FILE') }}"
+      method: oauth-secrets
+      token: "{{ env_var('BIGQUERY_OAUTH_TOKEN') }}"
       project: "{{ env_var('BIGQUERY_PROJECT_ID') }}"
       dataset: "{{ env_var('BIGQUERY_DATASET_ID') }}"
       dataproc_region: us-central1