diff --git a/.github/workflows/check_pr.yml b/.github/workflows/check_pr.yml
index 5bd0686..f068b46 100644
--- a/.github/workflows/check_pr.yml
+++ b/.github/workflows/check_pr.yml
@@ -9,7 +9,7 @@ env:
   DBT_PROFILES_DIR: ./
   BIGQUERY_PROJECT_ID: ${{ vars.DBT_BIGQUERY_PROJECT_ID }}
   BIGQUERY_DATASET_ID: ${{ vars.DBT_BIGQUERY_DATASET_ID }}
-  BIGQUERY_SERVICE_FILE: ./dbt-service-account.jso
+  BIGQUERY_SERVICE_FILE: ./dbt-service-account.json
   KEYFILE_CONTENTS: ${{secrets.KEYFILE_CONTENTS}}
 
   SNOWFLAKE_ACCOUNT: ${{ vars.SNOWFLAKE_ACCOUNT }}
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 0000000..3bc6a5b
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,57 @@
+name: Dev build image & push
+
+on:
+  push:
+    branches: [ test ]
+
+env:
+  working-directory: silverwork_dbt
+
+jobs:
+  build:
+    name: Dockerize dbt
+    runs-on: ubuntu-latest
+
+    steps:
+      # actions/checkout MUST come before auth
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - id: auth
+        name: Authenticate with Google Cloud
+        uses: google-github-actions/auth@v2
+        with:
+          token_format: access_token
+          project_id: ${{ vars.DBT_BIGQUERY_PROJECT_ID }}
+          credentials_json: ${{ secrets.KEYFILE_CONTENTS }}
+          access_token_lifetime: 300s
+
+      - name: Login to Artifact Registry
+        uses: docker/login-action@v3
+        with:
+          registry: us-central1-docker.pkg.dev
+          username: oauth2accesstoken
+          password: ${{ steps.auth.outputs.access_token }}
+
+      - name: Get tag
+        id: get-tag
+        run: echo "short_ref=${GITHUB_REF#refs/*/}" >> "$GITHUB_OUTPUT"
+
+      - name: Build & Push Image
+        uses: docker/build-push-action@v5
+        with:
+          context: ${{ github.workspace }}
+          push: true
+          tags: |
+            ${{ secrets.DBT_REGISTRY_ENDPOINT }}:${{ steps.get-tag.outputs.short_ref }}
+            ${{ secrets.DBT_REGISTRY_ENDPOINT }}:latest
+
+#      - name: Trigger Airflow init dag
+#        env:
+#          AIRFLOW_WEBSERVER_ENDPOINT: ${{ secrets.AIRFLOW_WEBSERVER_ENDPOINT }}
+#          AIRFLOW_WEBSERVER_ID: ${{ secrets.AIRFLOW_WEBSERVER_ID }}
+#          AIRFLOW_WEBSERVER_PASSWORD: ${{ secrets.AIRFLOW_WEBSERVER_PASSWORD }}
+#          AIRFLOW_DBT_INIT_DAG_ID: ${{ secrets.AIRFLOW_DBT_INIT_DAG_ID }}
+#        shell: bash
+#        run: |
+#          curl "$AIRFLOW_WEBSERVER_ENDPOINT/api/v1/dags/$AIRFLOW_DBT_INIT_DAG_ID/dagRuns" --user "$AIRFLOW_WEBSERVER_ID:$AIRFLOW_WEBSERVER_PASSWORD" -H 'Content-Type: application/json' -d '{}'
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..9246b31
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,11 @@
+# Base image
+FROM python:3.9-slim-buster
+
+# Set the working directory
+WORKDIR /app
+
+# Copy required files
+COPY silverwork_dbt/ /app/silverwork_dbt/
+
+# Install dependencies
+RUN pip install dbt-snowflake pandas
diff --git a/silverwork_dbt/macros/get_custom_schema.sql b/silverwork_dbt/macros/get_custom_schema.sql
new file mode 100644
index 0000000..11a47de
--- /dev/null
+++ b/silverwork_dbt/macros/get_custom_schema.sql
@@ -0,0 +1,14 @@
+{% macro generate_schema_name(custom_schema_name, node) -%}
+
+    {%- set default_schema = target.schema -%}
+    {%- if custom_schema_name is none -%}
+
+        {{ default_schema }}
+
+    {%- else -%}
+
+        {{ custom_schema_name | trim }}
+
+    {%- endif -%}
+
+{%- endmacro %}