feat: Add OpenLineage support for CloudSQLExecuteQueryOperator
Signed-off-by: Kacper Muda <[email protected]>
kacpermuda committed Dec 24, 2024
1 parent aa30ed9 commit 74aa67e
Showing 2 changed files with 140 additions and 6 deletions.
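
For orientation, here is a minimal sketch of how the operator touched by this commit is used in a DAG; the DAG id, connection id, and SQL below are illustrative assumptions, not part of this change. With apache-airflow-providers-openlineage installed, the OpenLineage listener invokes the new get_openlineage_facets_on_complete() after the task finishes, so no extra DAG code is needed to emit lineage.

import datetime

from airflow import DAG
from airflow.providers.google.cloud.operators.cloud_sql import CloudSQLExecuteQueryOperator

with DAG(dag_id="cloudsql_example", start_date=datetime.datetime(2024, 1, 1), schedule=None) as dag:
    # Connection id and SQL are placeholders for this sketch.
    run_query = CloudSQLExecuteQueryOperator(
        task_id="run_query",
        gcp_cloudsql_conn_id="my_cloudsql_conn",
        sql="SELECT 1;",
    )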
@@ -20,6 +20,7 @@
from __future__ import annotations

from collections.abc import Iterable, Mapping, Sequence
from contextlib import contextmanager
from functools import cached_property
from typing import TYPE_CHECKING, Any

@@ -38,8 +39,7 @@

if TYPE_CHECKING:
    from airflow.models import Connection
    from airflow.providers.mysql.hooks.mysql import MySqlHook
    from airflow.providers.postgres.hooks.postgres import PostgresHook
    from airflow.providers.openlineage.extractors import OperatorLineage
    from airflow.utils.context import Context


@@ -1256,7 +1256,8 @@ def __init__(
        self.ssl_client_key = ssl_client_key
        self.ssl_secret_id = ssl_secret_id

    def _execute_query(self, hook: CloudSQLDatabaseHook, database_hook: PostgresHook | MySqlHook) -> None:
    @contextmanager
    def cloud_sql_proxy_context(self, hook: CloudSQLDatabaseHook):
        cloud_sql_proxy_runner = None
        try:
            if hook.use_proxy:
@@ -1266,8 +1267,7 @@ def _execute_query(self, hook: CloudSQLDatabaseHook, database_hook: PostgresHook | MySqlHook) -> None:
                # be taken over here by another bind(0).
                # It's quite unlikely to happen though!
                cloud_sql_proxy_runner.start_proxy()
            self.log.info('Executing: "%s"', self.sql)
            database_hook.run(self.sql, self.autocommit, parameters=self.parameters)
            yield
        finally:
            if cloud_sql_proxy_runner:
                cloud_sql_proxy_runner.stop_proxy()
@@ -1281,7 +1281,9 @@ def execute(self, context: Context):
        hook.validate_socket_path_length()
        database_hook = hook.get_database_hook(connection=connection)
        try:
            self._execute_query(hook, database_hook)
            with self.cloud_sql_proxy_context(hook):
                self.log.info('Executing: "%s"', self.sql)
                database_hook.run(self.sql, self.autocommit, parameters=self.parameters)
        finally:
            hook.cleanup_database_hook()

@@ -1297,3 +1299,50 @@ def hook(self):
            ssl_key=self.ssl_client_key,
            ssl_secret_id=self.ssl_secret_id,
        )

    def get_openlineage_facets_on_complete(self, _) -> OperatorLineage | None:
        try:
            from airflow.providers.openlineage.sqlparser import SQLParser
        except ImportError:
            return None

        hook = self.hook.db_hook

        try:
            from airflow.providers.openlineage.utils.utils import should_use_external_connection

            use_external_connection = should_use_external_connection(hook)
        except ImportError:
            # OpenLineage provider release < 1.8.0 - we always use the connection
            use_external_connection = True

        connection = hook.get_connection(self.gcp_cloudsql_conn_id)
        try:
            database_info = hook.get_openlineage_database_info(connection)
        except AttributeError:
            self.log.debug("%s has no database info provided", hook)
            database_info = None

        if database_info is None:
            return None

        try:
            sql_parser = SQLParser(
                dialect=hook.get_openlineage_database_dialect(connection),
                default_schema=hook.get_openlineage_default_schema(),
            )
        except AttributeError:
            self.log.debug("%s failed to get database dialect", hook)
            return None

        with self.cloud_sql_proxy_context(self.hook):
            operator_lineage = sql_parser.generate_openlineage_metadata_from_sql(
                sql=self.sql,  # type:ignore[arg-type]  # We pass an Iterable where a list is expected, which is fine here
                hook=hook,
                database_info=database_info,
                database=self.hook.database,
                sqlalchemy_engine=hook.get_sqlalchemy_engine(),
                use_connection=use_external_connection,
            )

        return operator_lineage
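
A note on the refactor above: the proxy start/stop logic formerly inlined in _execute_query now lives in the reusable cloud_sql_proxy_context context manager, so execute() and the new lineage method share one lifecycle. A minimal usage sketch, where op and hook stand in for the operator and its CloudSQLDatabaseHook:

# Sketch only: the proxy starts lazily (when hook.use_proxy is set) and is
# always stopped in the finally block, even if the body raises.
with op.cloud_sql_proxy_context(hook):
    ...  # work that needs the proxy: query execution, or schema introspection for lineage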
providers/tests/google/cloud/operators/test_cloud_sql.py (85 additions, 0 deletions)
@@ -19,11 +19,19 @@

import os
from unittest import mock
from unittest.mock import MagicMock

import pytest

from airflow.exceptions import AirflowException, TaskDeferred
from airflow.models import Connection
from airflow.providers.common.compat.openlineage.facet import (
    Dataset,
    SchemaDatasetFacet,
    SchemaDatasetFacetFields,
    SQLJobFacet,
)
from airflow.providers.common.sql.hooks.sql import DbApiHook
from airflow.providers.google.cloud.operators.cloud_sql import (
    CloudSQLCloneInstanceOperator,
    CloudSQLCreateInstanceDatabaseOperator,
@@ -822,3 +830,80 @@ def test_create_operator_with_too_long_unix_socket_path(self, get_connection):
            operator.execute(None)
        err = ctx.value
        assert "The UNIX socket path length cannot exceed" in str(err)


@pytest.mark.parametrize(
    "connection_port, default_port, expected_port",
    [(None, 4321, 4321), (1234, None, 1234), (1234, 4321, 1234)],
)
def test_execute_openlineage_events(connection_port, default_port, expected_port):
    class DBApiHookForTests(DbApiHook):
        conn_name_attr = "sql_default"
        get_conn = MagicMock(name="conn")
        get_connection = MagicMock()

        def get_openlineage_database_info(self, connection):
            from airflow.providers.openlineage.sqlparser import DatabaseInfo

            return DatabaseInfo(
                scheme="sqlscheme",
                authority=DbApiHook.get_openlineage_authority_part(connection, default_port=default_port),
            )

    dbapi_hook = DBApiHookForTests()

    class CloudSQLExecuteQueryOperatorForTest(CloudSQLExecuteQueryOperator):
        @property
        def hook(self):
            return MagicMock(db_hook=dbapi_hook, database="")

    sql = """CREATE TABLE IF NOT EXISTS popular_orders_day_of_week (
    order_day_of_week VARCHAR(64) NOT NULL,
    order_placed_on TIMESTAMP NOT NULL,
    orders_placed INTEGER NOT NULL
);
FORGOT TO COMMENT"""
    op = CloudSQLExecuteQueryOperatorForTest(task_id="task_id", sql=sql)
    DB_SCHEMA_NAME = "PUBLIC"
    rows = [
        (DB_SCHEMA_NAME, "popular_orders_day_of_week", "order_day_of_week", 1, "varchar"),
        (DB_SCHEMA_NAME, "popular_orders_day_of_week", "order_placed_on", 2, "timestamp"),
        (DB_SCHEMA_NAME, "popular_orders_day_of_week", "orders_placed", 3, "int4"),
    ]
    dbapi_hook.get_connection.return_value = Connection(
        conn_id="sql_default", conn_type="postgresql", host="host", port=connection_port
    )
    dbapi_hook.get_conn.return_value.cursor.return_value.fetchall.side_effect = [rows, []]

    lineage = op.get_openlineage_facets_on_complete(None)
    assert len(lineage.inputs) == 0
    assert lineage.job_facets == {"sql": SQLJobFacet(query=sql)}
    assert lineage.run_facets["extractionError"].failedTasks == 1
    assert lineage.outputs == [
        Dataset(
            namespace=f"sqlscheme://host:{expected_port}",
            name="PUBLIC.popular_orders_day_of_week",
            facets={
                "schema": SchemaDatasetFacet(
                    fields=[
                        SchemaDatasetFacetFields(name="order_day_of_week", type="varchar"),
                        SchemaDatasetFacetFields(name="order_placed_on", type="timestamp"),
                        SchemaDatasetFacetFields(name="orders_placed", type="int4"),
                    ]
                )
            },
        )
    ]


def test_with_no_openlineage_provider():
    import importlib

    def mock__import__(name, globals_=None, locals_=None, fromlist=(), level=0):
        if level == 0 and name.startswith("airflow.providers.openlineage"):
            raise ImportError("No provider 'apache-airflow-providers-openlineage'")
        return importlib.__import__(name, globals=globals_, locals=locals_, fromlist=fromlist, level=level)

    with mock.patch("builtins.__import__", side_effect=mock__import__):
        op = CloudSQLExecuteQueryOperator(task_id="task_id", sql="SELECT 1;")
        assert op.get_openlineage_facets_on_complete(None) is None
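
The import-blocking mock above intercepts only absolute imports (level == 0), which is enough to simulate a missing OpenLineage provider without breaking other imports. A minimal sketch of the guard it exercises, mirroring the operator code:

# Sketch of the pattern under test: when the OpenLineage provider is absent,
# lineage collection is skipped instead of failing the task.
try:
    from airflow.providers.openlineage.sqlparser import SQLParser  # noqa: F401
except ImportError:
    SQLParser = None  # get_openlineage_facets_on_complete() then returns None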
