-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* add run method * update tests / add rollback arg * simplify call * raise exception regardless * PR review - add bulk json insert method * remove unneeded vars * lint * devops context permissioning * slack / aws orbs * lint * lint * lint * lint * module import * fix test creds * fix test creds * fix test creds Co-authored-by: ncgl-syngenta <[email protected]>
- Loading branch information
1 parent
cecac1d
commit 26b7aff
Showing
7 changed files
with
307 additions
and
26 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,7 +1,5 @@ | ||
version: 2.1 | ||
orbs: | ||
aws-cli: circleci/[email protected] | ||
slack: circleci/[email protected] | ||
sonarcloud: sonarsource/[email protected] | ||
commands: | ||
pipenv-install-dev: | ||
|
@@ -20,13 +18,23 @@ commands: | |
- store_artifacts: | ||
path: ./coverage/lint/report.html | ||
python-test: | ||
steps: | ||
- run: sudo rm -f /etc/boto.cfg | ||
- run: pipenv run test | ||
steps: | ||
- run: sudo rm -f /etc/boto.cfg | ||
- run: | ||
command: pipenv run test | ||
environment: | ||
AWS_ACCESS_KEY_ID: 0 | ||
AWS_SECRET_ACCESS_KEY: 0 | ||
AWS_DEFAULT_REGION: us-east-2 | ||
python-report: | ||
steps: | ||
- run: sudo rm -f /etc/boto.cfg | ||
- run: pipenv run coverage | ||
- run: | ||
command: pipenv run coverage | ||
environment: | ||
AWS_ACCESS_KEY_ID: 0 | ||
AWS_SECRET_ACCESS_KEY: 0 | ||
AWS_DEFAULT_REGION: us-east-2 | ||
- store_test_results: | ||
path: ./coverage/reports | ||
- store_artifacts: | ||
|
@@ -88,13 +96,11 @@ jobs: | |
- DATA_DIR=/tmp/localstack/data | ||
steps: | ||
- checkout | ||
- aws-cli/setup | ||
- pipenv-install-dev | ||
- python-lint | ||
- python-test | ||
- python-report | ||
- sonarcloud/scan | ||
- slack-error | ||
install-build-deploy: | ||
docker: | ||
- image: cimg/python:3.8 | ||
|
@@ -103,13 +109,11 @@ jobs: | |
- pipenv-install | ||
- pypi-setup | ||
- pypi-deploy | ||
- slack-status | ||
workflows: | ||
install-build-deploy: | ||
jobs: | ||
- install-build-deploy: | ||
context: | ||
- tools-cicd | ||
- pypi-token | ||
- sonarcloud-token | ||
filters: | ||
|
@@ -121,7 +125,6 @@ workflows: | |
jobs: | ||
- install-build-test: | ||
context: | ||
- tools-cicd | ||
- pypi-token | ||
- sonarcloud-token | ||
filters: | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
def insert_json_into_table(
    json,
    table_name,
    column_map,
    json_column_map,
    function_map=None,
):
    """Build an INSERT ... SELECT statement that loads rows from a JSON payload.

    Args:
        json: JSON string whose single top-level key holds an array of row
            objects. NOTE(review): the payload is interpolated directly into
            the SQL text — only safe for trusted input; confirm callers never
            pass untrusted data, or switch to a bind parameter.
        table_name: name of the target table.
        column_map: mapping of output alias -> static column definition.
        json_column_map: mapping of output alias -> dotted JSON path
            (e.g. "payload.field" or "payload.obj.field"); every path must
            share the same top-level key.
        function_map: optional mapping of alias -> str.format template
            (e.g. "UPPER({})") applied to the extracted expression.

    Returns:
        The complete SQL statement as a string.

    Raises:
        ValueError: if the json_column_map paths reference zero or more than
            one distinct top-level key (the single-element unpack fails).
    """
    # Single-element unpack enforces that every dotted path starts with the
    # same top-level key; anything else raises rather than emitting bad SQL.
    [target_key] = {v.split(".")[0] for v in json_column_map.values()}

    # Bug fix: the FROM-clause subquery was previously left unclosed and
    # unaliased, producing invalid SQL — PostgreSQL requires both a closing
    # parenthesis and an alias on a subquery in FROM.
    return (
        f"""{_build_json_cte(json)}
        {_build_insert_statement(table_name, column_map, json_column_map)}
        {_build_select_statement(column_map, json_column_map, function_map or {})}
        FROM ({_build_json_array_subquery(target_key)}) AS _json_rows
        """
    )
|
||
|
||
def _build_json_cte(json): | ||
return f"WITH _json_cte AS (SELECT '{json}'::json AS _json)" | ||
|
||
|
||
def _build_insert_statement(table, column_map, json_column_map): | ||
return f"INSERT INTO {table} ({', '.join(_get_column_order(column_map, json_column_map))})" | ||
|
||
|
||
def _build_json_array_subquery(target_key): | ||
return f"SELECT json_array_elements(_json->'{target_key}') AS _jsondict FROM _json_cte" | ||
|
||
|
||
def _build_select_statement(column_map, json_column_map, function_map): | ||
lines = [] | ||
|
||
for alias, definition in column_map.items(): | ||
if alias in function_map: | ||
result = _apply_function(alias, definition, function_map) | ||
result = f"{result} AS {alias}" | ||
|
||
else: | ||
result = f"{definition} AS {alias}" | ||
|
||
lines.append(result) | ||
|
||
for alias, definition in json_column_map.items(): | ||
result = _parse_json_line(alias, definition) | ||
|
||
if alias in function_map: | ||
result = _apply_function(alias, result, function_map) | ||
|
||
lines.append(result) | ||
|
||
return f"SELECT {', '.join(lines)}" | ||
|
||
|
||
def _get_column_order(column_map, json_column_map): | ||
return [ | ||
*column_map.keys(), | ||
*json_column_map.keys() | ||
] | ||
|
||
|
||
def _parse_json_line(k, v): | ||
parts = v.split('.') | ||
|
||
if len(parts) == 2: | ||
statement = f"_jsondict -> '{parts[1]}' AS {k}" | ||
|
||
elif len(parts) == 3: | ||
statement = f"_jsondict -> '{parts[1]}' ->> '{parts[2]}' AS {k}" | ||
|
||
return statement | ||
|
||
|
||
def _apply_function(k, statement, function_map): | ||
statement, alias = statement.split(' AS ') | ||
return f"{function_map[k].format(statement)} as {alias}" |
Oops, something went wrong.