PROD-2654 - MySQL on RDS as a detection/discovery source #5275

Open
wants to merge 15 commits into base: main
1 change: 1 addition & 0 deletions clients/admin-ui/src/types/api/models/ConnectionType.ts
@@ -22,6 +22,7 @@ export enum ConnectionType {
MSSQL = "mssql",
MYSQL = "mysql",
POSTGRES = "postgres",
RDS_MYSQL = "rds_mysql",
REDSHIFT = "redshift",
S3 = "s3",
SAAS = "saas",
@@ -0,0 +1,102 @@
"""add_rds_mysql_to_connector_type

Revision ID: 33b8a0f79b30
Revises: 9de4bb76307a
Create Date: 2024-09-24 12:51:49.384117

"""

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "33b8a0f79b30"
down_revision = "9de4bb76307a"
branch_labels = None
depends_on = None


def upgrade():
# Add 'rds_mysql' to ConnectionType enum
op.execute("ALTER TYPE connectiontype RENAME TO connectiontype_old")
op.execute(
"""
CREATE TYPE connectiontype AS ENUM (
'mongodb',
'mysql',
'https',
'snowflake',
'redshift',
'mssql',
'mariadb',
'bigquery',
'saas',
'manual',
'manual_webhook',
'timescale',
'fides',
'sovrn',
'attentive',
'dynamodb',
'postgres',
'generic_consent_email',
'generic_erasure_email',
'scylla',
's3',
'google_cloud_sql_mysql',
'google_cloud_sql_postgres',
'dynamic_erasure_email',
'rds_mysql'
)
"""
)
op.execute(
"""
ALTER TABLE connectionconfig ALTER COLUMN connection_type TYPE connectiontype USING
connection_type::text::connectiontype
"""
)
op.execute("DROP TYPE connectiontype_old")


def downgrade():
# Remove 'rds_mysql' from ConnectionType enum
op.execute("DELETE FROM connectionconfig WHERE connection_type IN ('rds_mysql')")
op.execute("ALTER TYPE connectiontype RENAME TO connectiontype_old")
op.execute(
"""
CREATE TYPE connectiontype AS ENUM (
'mongodb',
'mysql',
'https',
'snowflake',
'redshift',
'mssql',
'mariadb',
'bigquery',
'saas',
'manual',
'manual_webhook',
'timescale',
'fides',
'sovrn',
'attentive',
'dynamodb',
'postgres',
'generic_consent_email',
'generic_erasure_email',
'scylla',
's3',
'google_cloud_sql_mysql',
'google_cloud_sql_postgres',
'dynamic_erasure_email'
)
"""
)
op.execute(
"""
ALTER TABLE connectionconfig ALTER COLUMN connection_type TYPE connectiontype USING
connection_type::text::connectiontype
"""
)
op.execute("DROP TYPE connectiontype_old")
2 changes: 2 additions & 0 deletions src/fides/api/models/connectionconfig.py
@@ -52,6 +52,7 @@ class ConnectionType(enum.Enum):
mssql = "mssql"
mysql = "mysql"
postgres = "postgres"
rds_mysql = "rds_mysql"
redshift = "redshift"
s3 = "s3"
saas = "saas"
@@ -83,6 +84,7 @@ def human_readable(self) -> str:
ConnectionType.mssql.value: "Microsoft SQL Server",
ConnectionType.mysql.value: "MySQL",
ConnectionType.postgres.value: "PostgreSQL",
ConnectionType.rds_mysql.value: "RDS MySQL",
ConnectionType.redshift.value: "Amazon Redshift",
ConnectionType.s3.value: "Amazon S3",
ConnectionType.saas.value: "SaaS",
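For reference, a small sketch of how the new member surfaces on the backend. It assumes `human_readable` is exposed as a property, which the hunk above suggests but does not show:

```python
from fides.api.models.connectionconfig import ConnectionType

ct = ConnectionType.rds_mysql
print(ct.value)           # "rds_mysql"
print(ct.human_readable)  # "RDS MySQL" (call it as a method if it is not a property)
```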
9 changes: 8 additions & 1 deletion src/fides/api/schemas/connection_configuration/__init__.py
@@ -92,6 +92,12 @@
from fides.api.schemas.connection_configuration.connection_secrets_postgres import (
PostgreSQLSchema as PostgreSQLSchema,
)
from fides.api.schemas.connection_configuration.connection_secrets_rds_mysql import (
RDSMySQLDocsSchema as RDSMySQLDocsSchema,
)
from fides.api.schemas.connection_configuration.connection_secrets_rds_mysql import (
RDSMySQLSchema as RDSMySQLSchema,
)
from fides.api.schemas.connection_configuration.connection_secrets_redshift import (
RedshiftDocsSchema as RedshiftDocsSchema,
)
@@ -152,9 +158,10 @@
ConnectionType.mssql.value: MicrosoftSQLServerSchema,
ConnectionType.mysql.value: MySQLSchema,
ConnectionType.postgres.value: PostgreSQLSchema,
ConnectionType.rds_mysql.value: RDSMySQLSchema,
ConnectionType.redshift.value: RedshiftSchema,
ConnectionType.saas.value: SaaSSchema,
ConnectionType.s3.value: S3Schema,
ConnectionType.saas.value: SaaSSchema,
ConnectionType.scylla.value: ScyllaSchema,
ConnectionType.snowflake.value: SnowflakeSchema,
ConnectionType.sovrn.value: SovrnSchema,
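The dict edited at the bottom of this hunk maps each connection type value to its secrets schema class. An illustrative lookup sketch follows; the registry's real variable name is not visible in the hunk, so `secrets_schemas` below is an assumed stand-in:

```python
from fides.api.models.connectionconfig import ConnectionType
from fides.api.schemas.connection_configuration import RDSMySQLSchema

# Assumed stand-in for the registry defined in __init__.py.
secrets_schemas = {
    ConnectionType.rds_mysql.value: RDSMySQLSchema,
    # ... the other connection types registered above
}

schema_cls = secrets_schemas[ConnectionType.rds_mysql.value]
secrets = schema_cls(auth_method="automatic", region="us-east-1")
print(secrets.username)  # "fides_explorer" (the schema default)
```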
@@ -16,7 +16,7 @@ class MySQLSchema(ConnectionConfigSecretsSchema):
description="The hostname or IP address of the server where the database is running.",
)
port: int = Field(
3306,
default=3306,
title="Port",
description="The network port number on which the server is listening for incoming connections (default: 3306).",
)
@@ -32,8 +32,8 @@ class MySQLSchema(ConnectionConfigSecretsSchema):
json_schema_extra={"sensitive": True},
)
dbname: str = Field(
description="The name of the specific database within the database server that you want to connect to.",
title="Database",
description="The name of the specific database within the database server that you want to connect to.",
)
ssh_required: bool = Field(
False,
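This hunk is stylistic: the port default is now passed as the keyword `default=3306` rather than positionally, and `title` is listed before `description`. A hypothetical standalone example of the same Pydantic `Field` pattern (not part of the PR):

```python
from pydantic import BaseModel, Field

class PortExample(BaseModel):  # hypothetical model, for illustration only
    port: int = Field(
        default=3306,  # keyword default, matching the style used in the hunk
        title="Port",
        description="The network port the server listens on (default: 3306).",
    )

print(PortExample().port)  # 3306
```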
@@ -0,0 +1,26 @@
from pydantic import Field

from fides.api.schemas.base_class import NoValidationSchema
from fides.api.schemas.connection_configuration.connection_secrets_base_aws import (
BaseAWSSchema,
)


class RDSMySQLSchema(BaseAWSSchema):
"""
Schema to validate the secrets needed to connect to an RDS MySQL Database
"""

username: str = Field(
default="fides_explorer",
title="Username",
description="The user account used to authenticate and access the databases.",
)
region: str = Field(
title="Region",
description="The AWS region where the RDS instances are located.",
)


class RDSMySQLDocsSchema(RDSMySQLSchema, NoValidationSchema):
"""RDS MySQL Secrets Schema for API Docs"""
90 changes: 87 additions & 3 deletions tests/ops/api/v1/endpoints/test_connection_template_endpoints.py
@@ -354,7 +354,7 @@ def test_search_system_type(self, api_client, generate_auth_header, url):
resp = api_client.get(url + "system_type=database", headers=auth_header)
assert resp.status_code == 200
data = resp.json()["items"]
assert len(data) == 14
assert len(data) == 15

def test_search_system_type_and_connection_type(
self,
@@ -913,7 +913,9 @@ def test_get_connection_secret_schema_dynamodb(
"description": "Determines which type of "
"authentication method to use "
"for connecting to Amazon Web "
"Services.",
"Services. Currently accepted "
"values are: `secret_keys` or "
"`automatic`.",
"title": "Authentication Method",
},
"aws_access_key_id": {
@@ -1261,7 +1263,9 @@ def test_get_connection_secret_schema_s3(
"description": "Determines which type of "
"authentication method to use "
"for connecting to Amazon Web "
"Services.",
"Services. Currently accepted "
"values are: `secret_keys` or "
"`automatic`.",
"title": "Authentication Method",
},
"aws_access_key_id": {
@@ -1294,6 +1298,86 @@ def test_get_connection_secret_schema_s3(
"type": "object",
}

def test_get_connection_secret_schema_rds(
self, api_client: TestClient, generate_auth_header, base_url
) -> None:
auth_header = generate_auth_header(scopes=[CONNECTION_TYPE_READ])
resp = api_client.get(
base_url.format(connection_type="rds_mysql"), headers=auth_header
)
assert resp.json() == {
"definitions": {
"AWSAuthMethod": {
"enum": ["automatic", "secret_keys"],
"title": "AWSAuthMethod",
"type": "string",
}
},
"description": "Schema to validate the secrets needed to connect to a RDS "
"MySQL Database",
"properties": {
"auth_method": {
"allOf": [{"$ref": "#/definitions/AWSAuthMethod"}],
"description": "Determines which type of "
"authentication method to use "
"for connecting to Amazon Web "
"Services. Currently accepted "
"values are: `secret_keys` or "
"`automatic`.",
"title": "Authentication Method",
},
"aws_access_key_id": {
"description": "Part of the credentials "
"that provide access to "
"your AWS account.",
"title": "Access Key ID",
"type": "string",
},
"aws_assume_role_arn": {
"description": "If provided, the ARN "
"of the role that "
"should be assumed to "
"connect to AWS.",
"title": "Assume Role ARN",
"type": "string",
},
"aws_secret_access_key": {
"description": "Part of the "
"credentials that "
"provide access to "
"your AWS account.",
"sensitive": True,
"title": "Secret Access Key",
"type": "string",
},
"ca_cert_url": {
"default": "https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem",
"description": "The URL to the CA certificate "
"used to authenticate the RDS "
"instances.",
"title": "CA Certificate URL",
"type": "string",
},
"region": {
"description": "The AWS region where the RDS "
"instances are located.",
"title": "Region",
"type": "string",
},
"username": {
"default": "fides_explorer",
"description": "The user account used to "
"authenticate and access the "
"databases.",
"title": "Username",
"type": "string",
},
},
"required": ["auth_method", "region"],
"title": "RDSMySQLSchema",
"type": "object",
}
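The new test pins the JSON schema the endpoint returns for `rds_mysql`. A rough sketch for inspecting that schema locally follows; the exact payload shape (e.g. `definitions` vs `$defs`) depends on the Pydantic version and any post-processing the endpoint applies, but the title and required fields should match the expectations above:

```python
from fides.api.schemas.connection_configuration.connection_secrets_rds_mysql import (
    RDSMySQLSchema,
)

schema = RDSMySQLSchema.schema()
print(schema["title"])     # "RDSMySQLSchema"
print(schema["required"])  # ["auth_method", "region"]
```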

def test_get_connection_secret_schema_snowflake(
self, api_client: TestClient, generate_auth_header, base_url
) -> None: