Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions apps/report-execution/scripts/replace_table_sql.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os
import sys

# hack to import project functions # noqa: FIX004
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from src.utils import get_env_or_error
from tests.conftest import insert_fake_data


def main() -> None:
    """Replace a database table's contents with the inserts from a sql file.

    Usage: replace_table_sql.py <sql_file_path> <db_table>

    Reads the connection string from the DATABASE_CONN_STRING environment
    variable (errors out if unset, via get_env_or_error).
    """
    # Fail fast with a usage message instead of an IndexError traceback.
    if len(sys.argv) != 3:
        sys.exit(f'Usage: {sys.argv[0]} <sql_file_path> <db_table>')

    sql_file_path = sys.argv[1]
    db_table = sys.argv[2]

    conn_string = get_env_or_error('DATABASE_CONN_STRING')

    with open(sql_file_path) as f:
        sql = f.read()

    # No FK tables passed: the sql file is assumed to only touch db_table.
    insert_fake_data(conn_string, sql, [db_table], [])


if __name__ == '__main__':
    main()
87 changes: 65 additions & 22 deletions apps/report-execution/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import pytest
import tablefaker
import time_machine
import yaml
from testcontainers.compose import ContainerIsNotRunning, DockerCompose

from src import utils
Expand Down Expand Up @@ -135,13 +136,7 @@ def teardown():

def get_faker_sql(schema_name: str) -> str:
"""Process a fakertable schema and return the sql as a string."""
faker_path = os.path.join(
os.path.dirname(__file__),
'integration',
'assets',
'tablefaker_schema',
schema_name,
)
faker_path = _faker_schema_path(schema_name)
target_file_path = os.path.join(os.path.dirname(__file__), 'fake.sql')
tablefaker.to_sql(faker_path, target_file_path=target_file_path)
with open(target_file_path) as f:
Expand All @@ -157,6 +152,29 @@ def get_faker_sql(schema_name: str) -> str:
return result


def get_tables_from_faker(schema_name: str) -> tuple[list[str], list[str]]:
    """Given a faker schema name, parse out the db and fk tables."""
    with open(_faker_schema_path(schema_name)) as schema_file:
        parsed = yaml.safe_load(schema_file)

    # One table name per entry under `tables`; FK tables sit in custom config.
    db_tables = [entry['table_name'] for entry in parsed['tables']]
    fk_tables = parsed['config']['nbs']['fk_tables']

    return (db_tables, fk_tables)


def _faker_schema_path(schema_name: str) -> str:
    """Path to a tablefaker schema file under the integration test assets."""
    assets_dir = os.path.join(
        os.path.dirname(__file__), 'integration', 'assets', 'tablefaker_schema'
    )
    return os.path.join(assets_dir, schema_name)


def temp_name(table_name: str) -> str:
"""Assumes `[schema].[dbo].[table name]` format.

Expand All @@ -181,13 +199,30 @@ def fake_db_table(request):
The table is replaced for the entire module and it is assumed at this point only
one table with one set of fake data is needed per module.
"""
db_table = request.module.db_table
fk_tables = getattr(request.module, 'db_fk_tables', [])
faker_schema = request.module.faker_schema
(db_tables, fk_tables) = get_tables_from_faker(faker_schema)
faker_sql = get_faker_sql(faker_schema)

conn_string = utils.get_env_or_error('DATABASE_CONN_STRING')

# swap out original data for fake data
insert_fake_data(conn_string, faker_sql, db_tables, fk_tables)

# avoid connection inside connection
yield

# restore the original data
restore_original_data(conn_string, db_tables, fk_tables)


def insert_fake_data(
conn_string: str, sql: str, db_tables: list[str], fk_tables: list[str]
):
"""Run sql (inserts expected) into the database pointed to by the connection string.

Clears out the db_tables with contents to be replaced and the fk_tables that rely on
the current data in those db tables and saves the to temp tables
"""
# swap out original data for fake data
with db_transaction(conn_string) as trx:
# Tables with foreign keys pointing to the table we want to replace need to
Expand All @@ -202,25 +237,33 @@ def fake_db_table(request):
trx.execute(f'DELETE {fk_table}')
logging.info(f'cleared FK table: {fk_table}')

temp_db_table = temp_name(db_table)
trx.execute(
f"IF OBJECT_ID('{temp_db_table}') IS NOT NULL DROP TABLE {temp_db_table}"
)
trx.execute(f'SELECT * INTO {temp_db_table} FROM {db_table}')
trx.execute(f'DELETE {db_table}')
logging.info(f'cleared table: {db_table}')
trx.execute(faker_sql)
for db_table in db_tables:
tmp_db_table = temp_name(db_table)
trx.execute(
f"IF OBJECT_ID('{tmp_db_table}') IS NOT NULL DROP TABLE {tmp_db_table}"
)
trx.execute(f'SELECT * INTO {tmp_db_table} FROM {db_table}')
trx.execute(f'DELETE {db_table}')
logging.info(f'cleared table: {db_table}')

trx.execute(sql)
logging.info(f'Inserted fake data: {db_table}')

# avoid connection inside connection
yield

def restore_original_data(conn_string: str, db_tables: list[str], fk_tables: list[str]):
    """Restore the original data temporarily stored while the fake data was inserted.

    Intended to be run after `insert_fake_data`.
    """
    with db_transaction(conn_string) as trx:
        # Replaced tables: wipe the fake rows, copy the saved originals back,
        # then drop the temp copy.
        for table in db_tables:
            saved = temp_name(table)
            trx.execute(f'DELETE {table}')
            trx.execute(f'INSERT INTO {table} SELECT * FROM {saved}')
            trx.execute(f'DROP TABLE {saved}')
            logging.info(f'Restored table: {table}')

        # FK tables were only cleared, so re-inserting the saved rows suffices.
        for table in fk_tables:
            saved = temp_name(table)
            trx.execute(f'INSERT INTO {table} SELECT * FROM {saved}')
            trx.execute(f'DROP TABLE {saved}')
            logging.info(f'Restored FK table: {table}')
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
config:
seed: 4
nbs:
fk_tables:
- '[NBS_ODSE].[dbo].[SubjectRaceInfo]'

tables:
# Table backing PHCDemographic view
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,6 @@
from src.execute_report import execute_report
from src.models import ReportSpec

db_table = '[NBS_ODSE].[dbo].[PublicHealthCaseFact]'
db_fk_tables = ['[NBS_ODSE].[dbo].[SubjectRaceInfo]']
faker_schema = 'phc_demographic.yaml'


Expand All @@ -17,7 +15,7 @@ class TestIntegrationNbsSr05Library:
"""Integration tests for the nbs_sr_05 library.

This library looks at the past five years of data and the date on the sql server
is not readily hardcoded, so the tests here are largely probabalistic.
is not readily hardcoded, so the tests here are largely probabilistic.
"""

@pytest.fixture(autouse=True)
Expand Down
Loading