Skip to content

Commit

Permalink
Merge branch 'main' into latest
Browse files Browse the repository at this point in the history
  • Loading branch information
chandra-tacc authored Jan 7, 2025
2 parents 56f912a + 756a76d commit 84e2523
Show file tree
Hide file tree
Showing 6 changed files with 111 additions and 12 deletions.
41 changes: 41 additions & 0 deletions .github/workflows/build-WI-67.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
name: Build on push to bug/WI-67-connection-issues
on:
  push:
    branches: [ bug/WI-67-connection-issues ]

jobs:
  build_commit:
    runs-on: ubuntu-latest
    environment: docker
    steps:
      - uses: actions/checkout@v2
      - name: Get shortsha
        id: vars
        run: |
          # Prefer the SHA delivered in a repository_dispatch payload; fall back
          # to the SHA of the push that triggered this run.
          if [ -z "$EVENT_SHA" ]; then SHORT_SHA=${GITHUB_SHA::8}; else SHORT_SHA=${EVENT_SHA::8}; fi
          # ::set-output is deprecated and disabled by GitHub; write the step
          # output to $GITHUB_OUTPUT instead.
          echo "sha_short=${SHORT_SHA}" >> "$GITHUB_OUTPUT"
        env:
          EVENT_SHA: ${{ github.event.client_payload.sha }}
      - name: Print shortsha
        run: |
          echo $SHORTSHA
        env:
          SHORTSHA: ${{ steps.vars.outputs.sha_short }}
      # Build once, push the image twice: first tagged with the short commit
      # SHA, then tagged "latest". Steps given distinct names so run logs are
      # unambiguous (they previously shared the same name).
      - uses: mr-smithers-excellent/docker-build-push@v3
        name: Build & push commit tagged Docker image
        with:
          image: ${{ secrets.DOCKERHUB_REPO }}
          tag: ${{ steps.vars.outputs.sha_short }}
          registry: docker.io
          dockerfile: Dockerfile
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - uses: mr-smithers-excellent/docker-build-push@v3
        name: Build & push latest tagged Docker image
        with:
          image: ${{ secrets.DOCKERHUB_REPO }}
          tag: latest
          registry: docker.io
          dockerfile: Dockerfile
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name: Build on push to auth-check
name: Build on push to experiment
on:
push:
branches: [ auth-check ]
branches: [ experiment ]

jobs:
build_commit:
Expand Down
9 changes: 7 additions & 2 deletions src/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
# from data_processing import *
from data_loading import *


# ----------------------------------------------------------------------------
# ENV Variables & DATA PARAMETERS
# ----------------------------------------------------------------------------
Expand Down Expand Up @@ -43,6 +44,7 @@
subjects2_filepath = None
monitoring_data_filepath = None


# ----------------------------------------------------------------------------
# LOAD ASSETS FILES
# ----------------------------------------------------------------------------
Expand Down Expand Up @@ -189,12 +191,14 @@ def api_imaging():
if imaging_data:
app.logger.info(f"Caching imaging report data. Date: {data_date}")
api_data_index['imaging'] = data_date

api_data_cache['imaging'] = imaging_data

return jsonify({'date': api_data_index['imaging'], 'data': api_data_cache['imaging']})
except Exception as e:
return handle_exception(e, "Imaging API")


@app.route("/api/consort")
def api_consort():
global datetime_format
Expand All @@ -213,6 +217,7 @@ def api_consort():
return jsonify({'date': api_data_index['consort'], 'data': api_data_cache['consort']})
except Exception as e:
app.logger.error(("Error in consort API request: {0}").format(str(e)))

return handle_exception(e, "Consort API")

# get_api_consort_data
Expand Down Expand Up @@ -274,7 +279,7 @@ def api_subjects():
app.logger.info(f"Caching subjects api response data. Date: {data_date}")
api_data_index['subjects'] = data_date
api_data_cache['subjects'] = latest_data


return jsonify({'date': api_data_index['subjects'], 'data': api_data_cache['subjects']})
except Exception as e:
Expand Down Expand Up @@ -335,7 +340,6 @@ def api_subjects_debug():
api_data_index['subjects'] = data_date
api_data_cache['subjects'] = latest_data


return jsonify({'date': api_data_index['subjects'], 'data': api_data_cache['subjects']})
except Exception as e:
traceback.print_exc()
Expand All @@ -356,5 +360,6 @@ def api_simple():
return jsonify({'date':'20231221', 'data':{'test-data':'test-data'}})



if __name__ == "__main__":
app.run(host='0.0.0.0')
2 changes: 1 addition & 1 deletion src/assets/screening_sites.csv
Original file line number Diff line number Diff line change
Expand Up @@ -13,5 +13,5 @@ MCC2: Corewell,2,Corewell,Thoracic,130000,139999,1/1/2022,1,2022,"8, 8, 8, 16, 1
MCC1: Rush,1,Rush,Thoracic,140000,149999,,,,,
MCC2: HFHS ,2,HFHS,TKA,150000,159999,,,,,
MCC1: University of Chicago,1,University of Chicago,Thoracic,160000,169999,,,,,
MCC2: HFHS West Bloomfield, 2, HFHS West Bloomfield,Thoracic,170000,179999,,,,,
MCC2: Corewell,2,Corewell,TKA,180000,189999,,,,,

4 changes: 2 additions & 2 deletions src/assets/sites.csv
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,5 @@
1,UC, University of Chicago
1,NS, NorthShore
2,UM, University of Michigan
2,WS, Wayne State University (pending)
2,SH, Spectrum Health (pending)
2,WS, Wayne State University
2,SH, Spectrum Health
63 changes: 58 additions & 5 deletions src/data_loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
# ----------------------------------------------------------------------------
# Common utils
# ----------------------------------------------------------------------------

class MissingPortalSessionIdException(Exception):
'''Custom Exception for Missing Session Id'''

Expand All @@ -61,6 +62,7 @@ def handle_exception(ex, api_message):
def _is_local():
return data_access_type == "LOCAL"


# ----------------------------------------------------------------------------
# Updating data checks
# ----------------------------------------------------------------------------
Expand Down Expand Up @@ -261,6 +263,7 @@ def get_local_monitoring_data(monitoring_data_filepath):
# ----------------------------------------------------------------------------
# LOAD DATA FROM API
# ----------------------------------------------------------------------------

def get_api_consort_data(tapis_token,
report='consort',
report_suffix = 'consort-data-[mcc]-latest.csv'):
Expand Down Expand Up @@ -303,9 +306,42 @@ def get_api_consort_data(tapis_token,
logger.warning("Unauthorized attempt to access Consort data")
return None

except Exception as e:
traceback.print_exc()
return None

if tapis_token:
cosort_columns = ['source','target','value', 'mcc']
consort_df = pd.DataFrame(columns=cosort_columns)

# # get list of mcc files
# filename1 = report_suffix.replace('[mcc]',str(1))
# filename2 = report_suffix.replace('[mcc]',str(2))
# files_list = [filename1, filename2]


mcc_list = [1,2]
for mcc in mcc_list:
filename = report_suffix.replace('[mcc]',str(mcc))
csv_url = '/'.join([files_api_root, report, filename])
csv_request = make_report_data_request(csv_url, tapis_token)
csv_content = csv_request.content
try:
csv_df = pd.read_csv(io.StringIO(csv_content.decode('utf-8')), usecols=[0,1,2], header=None)
csv_df['mcc'] = mcc
csv_df.columns = cosort_columns
except:
csv_df = pd.DataFrame(columns=cosort_columns)
consort_df = pd.concat([consort_df,csv_df])

consort_dict = consort_df.to_dict('records')
if not consort_dict:
consort_dict = ['No data found']
# IF DATA LOADS SUCCESSFULLY:
consort_data_json = {
'consort' : consort_df.to_dict('records')
}
return consort_data_json

else:
raise TapisTokenRetrievalException()

## Function to rebuild dataset from apis

Expand Down Expand Up @@ -408,6 +444,12 @@ def get_api_imaging_data(tapis_token):
return {'status':'500', 'source': 'imaging-log-latest.csv'}


# IF DATA LOADS SUCCESSFULLY:
imaging_data_json = {
'imaging' : imaging.to_dict('records'),
'qc' : qc.to_dict('records')
}

qc_filepath = '/'.join([files_api_root,'imaging','qc-log-latest.csv'])
qc_request = make_report_data_request(qc_filepath, tapis_token)
if qc_request.status_code == 200:
Expand All @@ -426,7 +468,8 @@ def get_api_imaging_data(tapis_token):
else:
raise TapisTokenRetrievalException()

except Exception as e:

except Exception:
traceback.print_exc()
return "exception: {}".format(e)

Expand Down Expand Up @@ -511,7 +554,7 @@ def get_api_blood_data(api_request):
except Exception as e:
traceback.print_exc()
return None


def get_api_subjects_json(tapis_token):
''' Load subjects data from api. Note data needs to be cleaned, etc. to create properly formatted data product'''
Expand Down Expand Up @@ -541,6 +584,10 @@ def get_api_subjects_json(tapis_token):
else:
raise TapisTokenRetrievalException()

response.raise_for_status()
tapis_token = response.json()['token']
logger.info("Received tapis token.")
return tapis_token
except Exception as e:
traceback.print_exc()
return None
Expand Down Expand Up @@ -579,6 +626,12 @@ def make_report_data_request(url, tapis_token):
return response


def make_report_data_request(url, tapis_token):
    '''Fetch a report file over HTTP with Tapis authentication.

    Issues a GET to *url*, passing *tapis_token* in the X-Tapis-Token
    header, logs the request URL and response status, and returns the
    raw ``requests`` response object for the caller to inspect.
    '''
    logger.info(f"Sending request to {url}")
    auth_headers = {'X-Tapis-Token': tapis_token}
    resp = requests.get(url, headers=auth_headers)
    logger.info(f'Response status code: {resp.status_code}')
    return resp

# ----------------------------------------------------------------------------
# PROCESS SUBJECTS DATA
# ----------------------------------------------------------------------------
Expand Down

0 comments on commit 84e2523

Please sign in to comment.