diff --git a/dash/.dockerignore b/dash/.dockerignore new file mode 100644 index 00000000..ca5e3f26 --- /dev/null +++ b/dash/.dockerignore @@ -0,0 +1,2 @@ +venv/ +.idea/ \ No newline at end of file diff --git a/dash/.gitignore b/dash/.gitignore new file mode 100644 index 00000000..f7275bbb --- /dev/null +++ b/dash/.gitignore @@ -0,0 +1 @@ +venv/ diff --git a/dash/Dockerfile b/dash/Dockerfile new file mode 100644 index 00000000..8544b996 --- /dev/null +++ b/dash/Dockerfile @@ -0,0 +1,10 @@ +FROM python:3.10 +WORKDIR /code + +COPY requirements.txt / +RUN pip install -r /requirements.txt +COPY ./ ./ + +EXPOSE 8050 + +CMD ["python", "./app.py"] \ No newline at end of file diff --git a/dash/app.py b/dash/app.py new file mode 100644 index 00000000..7684ab2f --- /dev/null +++ b/dash/app.py @@ -0,0 +1,75 @@ +import logging +import os + +import dash +import dash_bootstrap_components as dbc +from dash import Dash, Output, Input, dcc +from dash import html + +from util import get_authz + +external_stylesheets = [dbc.themes.SKETCHY] + +logger = logging.getLogger('dash') + +print('DASH_URL_BASE_PATHNAME', os.environ.get('DASH_URL_BASE_PATHNAME', default="~not set~")) +app = Dash(__name__, external_stylesheets=external_stylesheets, use_pages=True) + + +@app.server.before_request +def check_privileges(): + """Do this before every call""" + get_authz() + + +# Clientside callback: refresh token by calling fence's /user endpoint +app.clientside_callback( + """ + // Call fence's /user endpoint, parse the response and update the profile + async function(n_intervals, data) { + const response = await fetch(location.origin + '/user/user'); + if (!response.ok) { + console.log('error retrieving user', response ) + return 'Profile (unauthorized)'; + } else { + const user = await response.json(); + console.log('clientside_callback you are logged in as:', user.username); + return 'Profile (' + user.username + ')'; + } + } + """, + Output('nav_item-profile', 'children'), + 
Input('clientside-interval', 'n_intervals') +) + + +app.layout = dbc.Container([ + html.H1("ACED", className="display-3"), + html.P( + "A simple dash app.", + className="lead", + ), + + dbc.Nav( + [ + dbc.NavItem( + dbc.NavLink(f"{page['name']}", href=page["relative_path"], id=f'nav_item-{page["name"].lower()}') + ) + for page in dash.page_registry.values() + ] + ), + + html.Hr(className="my-2"), + # define a timed client side action here + dcc.Interval( + id='clientside-interval', + n_intervals=0, + interval=60 * 1000 # in milliseconds check every minute + ), + # other page contents goes here + dash.page_container +]) + + +if __name__ == '__main__': + app.run_server(host="0.0.0.0", debug=True) # diff --git a/dash/assets/gitops-logo.png b/dash/assets/gitops-logo.png new file mode 100644 index 00000000..33f5bb2f Binary files /dev/null and b/dash/assets/gitops-logo.png differ diff --git a/dash/figures/histogram.py b/dash/figures/histogram.py new file mode 100644 index 00000000..78b4e01f --- /dev/null +++ b/dash/figures/histogram.py @@ -0,0 +1,123 @@ +import logging + +import dash_bootstrap_components as dbc +import pandas as pd +import plotly.express as px +from dash import html, dcc + +logger = logging.getLogger('dash') + + +def histogram_figures(histograms): + """An array of histograms figures, dataframes and names. 
+ @return fig[], df[], name[]""" + + dfs = [] + figs = [] + names = [] + + assert len(histograms._aggregation.keys()) == 1 # noqa + entity = list(histograms._aggregation.keys())[0] # noqa + for k, v in histograms._aggregation[entity].items(): # noqa + v.name = k + df = pd.DataFrame(v.histogram) + fig = px.histogram(df, x="key", y="count", title=v.name, log_y=len(v.histogram) > 1, + id={ + 'index': f"{entity}-{k}", + 'type': 'query-parameter' + } + ) + fig.update_layout(legend=dict(orientation="h", title=None), + yaxis_title=None, xaxis_title=None, + plot_bgcolor='rgba(0,0,0,0)' + ) + + figs.append(fig) + dfs.append(df) + names.append(v.name) + + return figs, dfs, names + + +def histogram_selects(histograms): + """An array of histograms checklists dataframes, and names. + @return fig[], df[], name[]""" + + dfs = [] + checklists = [] + names = [] + + assert len(histograms._aggregation.keys()) == 1 # noqa + entity = list(histograms._aggregation.keys())[0] # noqa + for k, v in histograms._aggregation[entity].items(): # noqa + v.name = k + if not any([isinstance(h.key, str) for h in v.histogram]): + continue + df = pd.DataFrame(v.histogram) + checklist = dcc.Checklist( + id={ + 'index': f"{entity}-{k}", + 'type': 'query-parameter' + }, + options=[ + { + 'value': str(h.key), + 'label': html.Div( + [ + html.Div(h.key), + dbc.Badge(h.count, className="ms-1 term-count", color="info", + id={ + 'index': f"{entity}-{k}-{h.key}", + 'type': 'term-count' + }) + ], + style={'display': 'inline-flex', 'paddingLeft': '2em'} + ) + } + for h in v.histogram + ], + labelStyle={'display': 'flex'} + ) + + checklists.append(checklist) + dfs.append(df) + names.append(v.name) + + return checklists, dfs, names + + +def histogram_sliders(histograms): + """An array of histograms sliders dataframes, and names. 
+ @return slider[], df[], name[]""" + + dfs = [] + sliders = [] + names = [] + + assert len(histograms._aggregation.keys()) == 1 # noqa + entity = list(histograms._aggregation.keys())[0] # noqa + for k, v in histograms._aggregation[entity].items(): # noqa + v.name = k + if any([isinstance(h.key, str) for h in v.histogram]): + continue + if not any([isinstance(h.key, list) for h in v.histogram]): + continue + + assert len(v.histogram) == 1 + df = pd.DataFrame(v.histogram) + min_ = int(v.histogram[0].key[0]) + max_ = int(v.histogram[0].key[1]) + # value = max_, + sliders.append( + dcc.RangeSlider(min_, max_, int((max_ - min_) / 10), + id={ + 'index': f"{entity}-{k}", + 'type': 'query-parameter' + } + ) + ) + + dfs.append(df) + names.append(v.name) + + return sliders, dfs, names diff --git a/dash/figures/project.py b/dash/figures/project.py new file mode 100644 index 00000000..38e520a3 --- /dev/null +++ b/dash/figures/project.py @@ -0,0 +1,31 @@ + +import pandas as pd +import plotly.express as px +from inflection import titleize, pluralize +from models.project import project_detail_counts + + +def counts(): + """Horizontal bar graph. 
+ @return fig, df""" + + project_counts = list(project_detail_counts()) + flattened = [] + for p in project_counts: + for k, v in p.items(): + f = {'name': p.project[0].name} + if k == 'project': + continue + f['entity'] = titleize(pluralize(k.replace('_count', ''))) + f['count'] = v + flattened.append(f) + df = pd.DataFrame(flattened) + fig = px.bar(df, x="count", y="entity", color='name', orientation='h', + hover_data=["name", "count"], + height=400, + log_x=True) + fig.update_layout(legend=dict(orientation="h", title=None), yaxis_title=None, xaxis_title=None, + # paper_bgcolor='rgba(0,0,0,0)', + plot_bgcolor='rgba(0,0,0,0)' + ) + return fig, df diff --git a/dash/models/__init__.py b/dash/models/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dash/models/file.py b/dash/models/file.py new file mode 100644 index 00000000..7b9fe657 --- /dev/null +++ b/dash/models/file.py @@ -0,0 +1,126 @@ + +from dotwiz import DotWiz +from util import get_guppy_service + +import logging +logger = logging.getLogger('dash') + + +def get_files(dot_notation=True, variables={"filter": {"AND": []}, "sort": []}): + """Fetch histogram of counts for all projects. + @param variables: a graphql filter and sort + @type dot_notation: bool render results as a lightweight class""" + query = """ + query ($sort: JSON,$filter: JSON,) { + file (accessibility: all, offset: 0, first: 1000, , sort: $sort, filter: $filter,) { + file_id + patient_id + file_category + file_name + file_size + object_id + } + _aggregation { + file (filter: $filter, accessibility: all) { + _totalCount + } + } + } + """ + guppy_service = get_guppy_service() + data = guppy_service.graphql_query(query, variables=variables)['data'] + data = DotWiz(data) + return [f for f in data.file] + + +def get_file_histograms(dot_notation=True, variables={"filter": {"AND": []}}): + """Fetch histogram of counts for all projects. 
+ @param variables: a graphql filter + @type dot_notation: bool render results as a lightweight class""" + + histogram_query = """ + query ($filter: JSON) { + _aggregation { + file (filter: $filter, filterSelf: false, accessibility: all) { + + project_id { + histogram { + key + count + } + }, + file_category { + histogram { + key + count + } + }, + data_type { + histogram { + key + count + } + }, + data_format { + histogram { + key + count + } + }, + patient_id { + histogram { + key + count + } + }, + patient_gender { + histogram { + key + count + } + }, + patient_disability_adjusted_life_years { + histogram { + key + count + } + }, + patient_ombCategory { + histogram { + key + count + } + }, + patient_ombCategory_detail { + histogram { + key + count + } + }, + patient_us_core_birthsex { + histogram { + key + count + } + }, + patient_quality_adjusted_life_years { + histogram { + key + count + } + }, + patient_maritalStatus_coding_0_display { + histogram { + key + count + } + } + } + } + } + """ + guppy_service = get_guppy_service() + data = guppy_service.graphql_query(histogram_query, variables=variables)['data'] + if dot_notation: + return DotWiz(data) + return data diff --git a/dash/models/observation.py b/dash/models/observation.py new file mode 100644 index 00000000..ce7f4b22 --- /dev/null +++ b/dash/models/observation.py @@ -0,0 +1,89 @@ + +from dotwiz import DotWiz +from util import get_guppy_service + +import logging +logger = logging.getLogger('dash') + + +def get_patients(variables={"filter": {"AND": []}}): + """Query histogram on patient_id to get all unique patients + @type variables: object graphql filter object + @return list of patient_id strings + """ + query = """ + query ($filter: JSON) { + _aggregation { + case(filter: $filter) { + patient_id { + histogram { + key + count + } + } + } + } + } + """ + guppy_service = get_guppy_service() + data = guppy_service.graphql_query(query, variables=variables)['data'] + data = DotWiz(data) + return [h.key for 
h in data._aggregation.case.patient_id.histogram] # noqa + + +def get_observation_histograms(dot_notation=True, variables={"filter": {"AND": []}}): + """Fetch histogram of counts for all observations. + @param variables: a graphql filter + @type dot_notation: bool render results as a lightweight class + """ + + histogram_query = """ + query ($filter: JSON) { + _aggregation { + case(filter: $filter, filterSelf: false, accessibility: all) { + category { + histogram { + key + count + } + } + code_display { + histogram { + key + count + } + } + patient_id { + histogram { + key + count + } + } + encounter_type { + histogram { + key + count + } + } + encounter_start { + histogram { + key + count + } + } + project_id { + histogram { + key + count + } + } + } + } + } + + """ + guppy_service = get_guppy_service() + data = guppy_service.graphql_query(histogram_query, variables=variables)['data'] + if dot_notation: + return DotWiz(data) + return data diff --git a/dash/models/project.py b/dash/models/project.py new file mode 100644 index 00000000..9715c959 --- /dev/null +++ b/dash/models/project.py @@ -0,0 +1,61 @@ +import logging + +from dotwiz import DotWiz +from util import get_submission_service + +logger = logging.getLogger('dash') + + +def get_project_summaries(dot_notation=True): + """Fetch summaries for all projects. 
+ @type dot_notation: bool render results as a lightweight class + """ + project_summaries_query = """ + query gqlHelperHomepageQuery { + projects: project(first: 10000) { + name: project_id + code + id + } + research_subject_count: _research_subject_count + specimen_count: _specimen_count + observation_count: _observation_count + document_reference_count: _document_reference_count + } + """ + query_service = get_submission_service() + data = query_service.query(project_summaries_query)['data'] + # logger.debug(data) + if dot_notation: + return DotWiz(data) + return data + + +def project_detail_counts(dot_notation=True): + """Return detailed information about projects. + @type dot_notation: bool render results as a lightweight class + """ + project_detail_counts_query = """ + query gqlHelperProjectDetailQuery( + $name: [String] + ) { + project(project_id: $name) { + name: project_id + code + id + } + research_subject_count: _research_subject_count(project_id: $name) + specimen_count: _specimen_count(project_id: $name) + observation_count: _observation_count(project_id: $name) + document_reference_count: _document_reference_count(project_id: $name) + } + """ + query_service = get_submission_service() + project_summaries = get_project_summaries() + for project in project_summaries.projects: + data = query_service.query(project_detail_counts_query, variables={'name': project.name})['data'] + # logger.debug(data) + if dot_notation: + yield DotWiz(data) + else: + yield data diff --git a/dash/pages/__init__.py b/dash/pages/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/dash/pages/cohorts.py b/dash/pages/cohorts.py new file mode 100644 index 00000000..685a44e7 --- /dev/null +++ b/dash/pages/cohorts.py @@ -0,0 +1,225 @@ +import collections +import json +import logging + +import dash +import dash_bootstrap_components as dbc +from dash import dcc, html, Output, Input, ALL, callback, get_app, dash_table +from figures.histogram import histogram_selects, 
histogram_sliders +from inflection import titleize, pluralize +from models.file import get_file_histograms, get_files +from models.observation import get_observation_histograms, get_patients + +logger = logging.getLogger('dash') + +dash.register_page(__name__) + + +def tree_dict(): + """A recursive default dict.""" + return collections.defaultdict(tree_dict) + + +def build_filters(values, ids): + """Create a filter from selected values and ids.""" + filters = tree_dict() + for value, id_ in zip(values, ids): + if not value: + continue + entity, parameter = id_["index"].split('-') + if 'AND' not in filters[entity]['filter']: + filters[entity]['filter']['AND'] = [] + filters[entity]['filter']['AND'].append({'IN': {parameter: value}}) + return filters + + +@callback( + Output('query-builder', 'children'), + Input({'type': 'query-parameter', 'index': ALL}, 'value'), + Input({'type': 'query-parameter', 'index': ALL}, 'id') +) +def display_filters(values, ids): + """Build graphql filter.""" + filters = build_filters(values, ids) + return json.dumps(filters, indent=4) + + +@callback( + Output('histogram-data', 'data'), + Input({'type': 'query-parameter', 'index': ALL}, 'value'), + Input({'type': 'query-parameter', 'index': ALL}, 'id') +) +def update_counters(values, ids): + """Run a histogram and then update badges.""" + filters = build_filters(values, ids) + histograms = {'_aggregation': {}} + histogram_fetchers = { + 'file': get_file_histograms, + 'case': get_observation_histograms + } + for entity_name in histogram_fetchers.keys(): + fetcher = histogram_fetchers.get(entity_name) + if fetcher: + fetcher_results = fetcher(variables=filters.get(entity_name, {"filter": {"AND": []}})) + for aggregation_name in fetcher_results['_aggregation']: + histograms['_aggregation'][aggregation_name] = fetcher_results['_aggregation'][aggregation_name] + return histograms + + +@callback( + Output('results', 'data'), + Input('query', 'n_clicks'), + Input({'type': 'query-parameter', 
'index': ALL}, 'value'), + Input({'type': 'query-parameter', 'index': ALL}, 'id') +) +def query(n_clicks, values, ids): + """Run a histogram and then update badges.""" + filters = build_filters(values, ids) + patient_ids = get_patients(variables=filters.get('case', {"filter": {"AND": []}})) + file_filters = filters.get('file', {"filter": {"AND": []}}) + file_filters['filter']['AND'].append({"IN": {"patient_id": patient_ids}}) + file_filters['sort'] = [] + files = get_files(variables=file_filters) + return files + + +# Clientside callback: traverse histogram, match DOM with class name 'term-count' update counts in DOM directly +get_app().clientside_callback( + """ + // traverse histogram, match DOM with class name 'term-count' update counts in DOM directly + async function(histograms) { + // console.log('debug: histogram counters',histograms); + if (Object.keys(histograms).length === 0) { + // console.log("debug: empty histogram"); + return window.dash_clientside.no_update + } + // get all our badges into a lookup hash by id + const termCounts = Array.from(document.getElementsByClassName('term-count')); + const termCountLookup = {} + termCounts.forEach((item) => termCountLookup[JSON.parse(item.id)['index']] = item) + // const entity_name = 'file' ; + Object.keys(histograms._aggregation).forEach((entity_name) => { + console.log('Updating badges', entity_name) + const entity = histograms._aggregation[entity_name] ; + for (const property_name in entity) { + const p = `${entity_name}-${property_name}` + entity[property_name].histogram.forEach((h) => { + // TODO - check range sliders + if (termCountLookup[`${p}-${h.key}`]) { + termCountLookup[`${p}-${h.key}`].innerText = h.count + // remove from array + delete termCountLookup[`${p}-${h.key}`] + } + }) ; + // set items no longer in histogram to 0 + Object.keys(termCountLookup).forEach((k) => { + if (k.startsWith(p)) { + termCountLookup[k].innerText = '0'; + } + }) ; + } + }); + // always return no update since we updated 
dom directly + return window.dash_clientside.no_update + } + """, + Output('placeholder-dummy', 'children'), + Input('histogram-data', 'data'), + prevent_initial_call=True +) + +SIDEBAR_STYLE = { + "position": "fixed", + "top": 0, + "left": 0, + "bottom": 0, + "width": "24rem", + # "padding": "2rem 1rem", + "background-color": "#f8f9fa", +} + +CONTENT_STYLE = { + "margin-left": "25rem", + "margin-right": "2rem", + "padding": "2rem 1rem", +} + + +def layout(): + """Render the cohort page.""" + + def accordian(items_, dfs_, names_): + """Create an accordian with items for each facet.""" + return html.Div(dbc.Accordion( + [ + dbc.AccordionItem( + [ + item, + ], + title=f"{titleize(pluralize(name))}" + ) + for item, df, name in zip(items_, dfs_, names_) + ], + start_collapsed=True, + always_open=True + )) + + # get the data from guppy and plot each aggregation + file_histograms = get_file_histograms() + + items = [] + data_frames = [] + names = [] + + for i, d, n in [histogram_selects(file_histograms), histogram_sliders(file_histograms)]: + items.extend(i) + data_frames.extend(d) + names.extend(n) + + file_accordian = accordian(items, data_frames, names) + + observation_histograms = get_observation_histograms() + + items = [] + data_frames = [] + names = [] + + for i, d, n in [histogram_selects(observation_histograms), histogram_sliders(observation_histograms)]: + items.extend(i) + data_frames.extend(d) + names.extend(n) + + observation_accordian = accordian(items, data_frames, names) + + return html.Div([ + dbc.Row([ + dbc.Col([ + html.H2("Cohorts"), + html.Hr(className="my-2"), + html.P( + 'Quis imperdiet massa tincidunt nunc. Convallis tellus id interdum velit. 
Mauris pellentesque pulvinar pellentesque habitant morbi tristique senectus.'), + html.Hr(className="my-2"), + html.Code( + id="query-builder", + children="Selections go here..."), + ], style=CONTENT_STYLE), + ]), + dbc.Row([ + dbc.Col([ + dbc.Tabs([ + dbc.Tab([html.P('Quis imperdiet massa tincidunt nunc...')], label="Conditions"), + dbc.Tab([html.P('Quis imperdiet massa tincidunt nunc...')], label="Medications"), + dbc.Tab([html.P('Quis imperdiet massa tincidunt nunc...')], label="Demographics"), + dbc.Tab(observation_accordian, label="Observations"), + dbc.Tab(file_accordian, label="Files"), + ]) + ], style=SIDEBAR_STYLE), + dbc.Col([ + html.Hr(className="my-2"), + html.Button('Query', id='query', n_clicks=0, style={'display': 'flex', 'float': 'right'}), + dash_table.DataTable(id='results'), + dcc.Store(id='histogram-data', storage_type='local'), + html.P(id='placeholder-dummy', hidden=True), + ], style=CONTENT_STYLE) + ]) + ]) diff --git a/dash/pages/home.py b/dash/pages/home.py new file mode 100644 index 00000000..83954775 --- /dev/null +++ b/dash/pages/home.py @@ -0,0 +1,24 @@ + +import logging + +import dash +from dash import html, dash_table, dcc + +from figures.project import counts as project_counts + +logger = logging.getLogger('dash') + + +dash.register_page(__name__, path='/') + + +def layout(): + """Show the welcome message""" + fig, df = project_counts() + return [ + html.H2("Welcome"), + html.Hr(className="my-2"), + html.P( + "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. 
Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."), + dcc.Graph(figure=fig) + ] diff --git a/dash/pages/profile.py b/dash/pages/profile.py new file mode 100644 index 00000000..dd37b04b --- /dev/null +++ b/dash/pages/profile.py @@ -0,0 +1,60 @@ +import logging + +import dash_bootstrap_components as dbc +import pandas as pd +from util import get_authz, get_submission_service + +import dash +from dash import Input, Output, callback, dash_table, html + +logger = logging.getLogger('dash') + + +dash.register_page(__name__) + + +def layout(): + # query_service = get_submission_service() + # data = query_service.query("{ program(first:0) { id name projects { id code } } }") + # logger.error(data) + + simplified_authz = {k: ','.join([m['method'] for m in v]) for k, v in get_authz().items()} + df = pd.DataFrame([{'path': k, 'methods': v} for k, v in simplified_authz.items()]) + if 'path' in df: + df['id'] = df['path'] + df.set_index('id', inplace=True, drop=False) + + return [ + html.H2("Authorization"), + html.P( + "You have the following authorization profile.", + className="lead", + ), + html.Hr(className="my-2"), + dbc.Label('Select a row:'), + # see https://stackoverflow.com/questions/61905396/dash-datatable-with-select-all-checkbox + # more tricks https://github.com/Coding-with-Adam/Dash-by-Plotly/blob/master/DataTable/Tips-and-tricks/filter-datatable.py + dash_table.DataTable(df.to_dict('records'), + columns=[ + {'name': i, 'id': i, 'deletable': False} for i in df.columns + # omit the id column + if i != 'id' + ], + id='tbl', + row_selectable='multi', + page_current=0, + page_size=10, + sort_action='native', + cell_selectable=False, + ), + dbc.Alert(id='tbl_out'), + ] + + +@callback(Output('tbl_out', 'children'), + Input('tbl', 'derived_virtual_row_ids'), + Input('tbl', 'selected_row_ids') + ) +def update_graphs(row_ids, selected_row_ids): + return str(selected_row_ids) + diff --git a/dash/requirements.txt 
b/dash/requirements.txt new file mode 100644 index 00000000..d4996c55 --- /dev/null +++ b/dash/requirements.txt @@ -0,0 +1,15 @@ +pandas +dash +requests +cachelib +dash_bootstrap_components + +gen3 + +# dotnotation for dicts + +dotwiz + +flatten_json==0.1.13 + +inflection \ No newline at end of file diff --git a/dash/util/__init__.py b/dash/util/__init__.py new file mode 100644 index 00000000..4042c65e --- /dev/null +++ b/dash/util/__init__.py @@ -0,0 +1,193 @@ +import logging + +import flask +import requests +from cachelib import SimpleCache +from gen3.auth import endpoint_from_token +logger = logging.getLogger('dash') +from gen3.query import Gen3Query +from gen3.submission import Gen3Submission + +# Cache the access for 60 seconds so that we don't make multiple requests to +# Arborist when a user accesses a webpage and fetches multiple JS/CSS files. +ACCESS_CACHE = SimpleCache(default_timeout=60) + + +def get_guppy_service(endpoint='http://revproxy-service') -> Gen3Query: + """Construct a Query Class + + See https://uc-cdis.github.io/gen3sdk-python/_build/html/query.html#gen3.query.Gen3Query + """ + return Gen3Query(Gen3SessionAuth(endpoint=endpoint)) + + +def get_submission_service(endpoint='http://revproxy-service') -> Gen3Submission: + """Construct a Query Class + + See https://uc-cdis.github.io/gen3sdk-python/_build/html/submission.html#gen3.submission.Gen3Submission.query + """ + return Gen3Submission(Gen3SessionAuth(endpoint=endpoint)) + + +def get_authz(): + """Returns authorization from arborist.""" + # no request active + if not flask.request: + logger.error('get_authz: No flask request') + return {} + + # get incoming headers + if 'Authorization' not in flask.request.headers: + logger.error('get_authz: No Authorization in flask request') + return {} + + authorization = flask.request.headers['Authorization'] + # hit, in cache + if ACCESS_CACHE.has(authorization): + return ACCESS_CACHE.get(authorization) + # miss, go get it + # logger.debug("Refreshing 
authz") + arborist_response = requests.get('http://revproxy-service/authz/mapping', + headers={'Authorization': authorization}) + arborist_response.raise_for_status() + # update cache + ACCESS_CACHE.set(authorization, arborist_response.json()) + return ACCESS_CACHE.get(authorization) + + +class Gen3SessionAuth(requests.auth.AuthBase): + """ + An Auth helper based on access token. + No attempt is made to refresh. + """ + def __init__(self, access_token=None, endpoint=None): + """ + endpoint + """ + if access_token: + self._access_token = access_token + elif not flask.request: + logger.error('Gen3SessionAuth No flask request') + elif 'Authorization' not in flask.request.headers: + logger.error('Gen3SessionAuth: No Authorization in flask request') + else: + authorization_parts = flask.request.headers['Authorization'].split(' ') + assert len(authorization_parts) == 2, f"Gen3SessionAuth expected bearer token {flask.request.headers['Authorization']}" + self._access_token = authorization_parts[-1] + if endpoint: + self.endpoint = endpoint + else: + self.endpoint = endpoint_from_token(self._access_token) + # logger.debug(f"Gen3SessionAuth _access_token {self._access_token} endpoint {self.endpoint}") + + def __call__(self, request_): + """Adds authorization header to the request + This gets called by the python.requests package on outbound requests + so that authentication can be added. 
+ Args: + request_ (object): The incoming request object + """ + request_.headers["Authorization"] = "bearer " + self._access_token + # logger.debug(f"Gen3SessionAuth request_.headers['Authorization'] {request_.headers['Authorization']}") + return request_ + +# def get_authz_mock(): +# """""" +# return {'/data_file': [{'service': 'fence', 'method': 'file_upload'}, {'service': 'indexd', 'method': '*'}], +# '/open': [{'service': '*', 'method': 'read'}, {'service': '*', 'method': 'read-storage'}], +# '/programs': [{'service': 'indexd', 'method': '*'}], +# '/programs/MyFirstProgram': [{'service': 'indexd', 'method': '*'}], +# '/programs/MyFirstProgram/projects': [{'service': 'indexd', 'method': '*'}], +# '/programs/MyFirstProgram/projects/MyFirstProject': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced': [{'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects': [{'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Alcoholism': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Alzheimers': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Breast_Cancer': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# 
{'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Colon_Cancer': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Diabetes': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Lung_Cancer': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/aced/projects/Prostate_Cancer': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/jnkns': [{'service': '*', 'method': 'create'}, {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/jnkns/projects': [{'service': '*', 'method': 'create'}, {'service': '*', 'method': 'delete'}, +# 
{'service': '*', 'method': 'read'}, {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/jnkns/projects/jenkins': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/program1': [{'service': '*', 'method': 'create'}, {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/program1/projects': [{'service': '*', 'method': 'create'}, {'service': '*', 'method': 'delete'}, +# {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/programs/program1/projects/P1': [{'service': '*', 'method': 'create'}, +# {'service': '*', 'method': 'delete'}, {'service': '*', 'method': 'read'}, +# {'service': '*', 'method': 'read-storage'}, +# {'service': '*', 'method': 'update'}, +# {'service': '*', 'method': 'write-storage'}, +# {'service': 'indexd', 'method': '*'}], +# '/services/sheepdog/submission/program': [{'service': 'sheepdog', 'method': '*'}], +# '/services/sheepdog/submission/project': [{'service': 'sheepdog', 'method': '*'}], +# '/workspace': [{'service': 'jupyterhub', 'method': 'access'}]} diff --git a/docker-compose.override.yml b/docker-compose.override.yml index c406044d..a2b5eac9 100644 --- a/docker-compose.override.yml +++ b/docker-compose.override.yml @@ -123,6 +123,27 @@ services: - minio-manchester - minio-stanford + 
dash-service: + build: dash + container_name: dash-service + ports: + - "8050:8050" # HOST:CONTAINER + environment: + - DASH_URL_BASE_PATHNAME=/dash/proxy/ + networks: + - devnet + + + fhir-service: + image: "hapiproject/hapi:latest" + ports: + - "8090:8080" + volumes: + - ./fhir/data:/data/hapi + environment: + SPRING_CONFIG_LOCATION: 'file:///data/hapi/application.yaml' + networks: + - devnet # expose postgres to host os @@ -142,4 +163,4 @@ volumes: data4-1: data4-2: data5-1: - data5-2: \ No newline at end of file + data5-2: diff --git a/docker-compose.yml b/docker-compose.yml index cb4bbf76..784b0255 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -181,7 +181,9 @@ services: - bootstrap.memory_lock=false # For apple silicon - bootstrap.system_call_filter=false - - "ES_JAVA_OPTS=-Xms2g -Xmx4g" + # For apple silicon + # - "ES_JAVA_OPTS=-Xms6g -Xmx6g -XX:+HeapDumpOnOutOfMemoryError -XX:UseAVX=0" + - "ES_JAVA_OPTS=-Xms6g -Xmx6g" entrypoint: - /bin/bash # mmapfs requires systemctl update - see https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules-store.html#mmapfs @@ -224,7 +226,8 @@ services: depends_on: - peregrine-service portal-service: - image: "quay.io/cdis/data-portal:3.33.0" # 2021.03 + # image: "quay.io/cdis/data-portal:3.33.0" # 2021.03 + build: data-portal container_name: portal-service command: ["bash", "/var/www/data-portal/waitForContainers.sh"] deploy: @@ -276,7 +279,7 @@ services: - devnet volumes: - ./nginx.conf:/etc/nginx/nginx.conf - - ./minio.conf.staging:/etc/nginx/minio.conf + - ./minio.conf:/etc/nginx/minio.conf - ./Secrets/TLS/service.crt:/etc/nginx/ssl/nginx.crt - ./Secrets/TLS/service.key:/etc/nginx/ssl/nginx.key ports: diff --git a/etl/setup-minio.sh b/etl/setup-minio.sh index cff784cd..3bb7e849 100644 --- a/etl/setup-minio.sh +++ b/etl/setup-minio.sh @@ -18,6 +18,14 @@ mc mb ucl/aced-ucl mc mb manchester/aced-manchester mc mb stanford/aced-stanford +## remove all objects from bucket +#mc rm --recursive 
default/aced-default +#mc rm --recursive default/aced-public +#mc rm --recursive ohsu/aced-ohsu +#mc rm --recursive ucl/aced-ucl +#mc rm --recursive manchester/aced-manchester +#mc rm --recursive stanford/aced-stanford + # add users mc admin user add default $MINIO_TEST_USER $MINIO_TEST_PASSWORD diff --git a/etl/tube_lite b/etl/tube_lite index 13c1d9b3..3531f548 100755 --- a/etl/tube_lite +++ b/etl/tube_lite @@ -152,6 +152,9 @@ query ($first: Int!, $offset: Int!) { component_0_valueQuantity_code component_0_valueQuantity_unit component_0_valueQuantity_value + component_0_valueCodeableConcept_coding_0_system + component_0_valueCodeableConcept_coding_0_code + component_0_valueCodeableConcept_coding_0_display component_1_code_coding_0_code component_1_code_coding_0_display component_1_code_coding_0_system @@ -166,6 +169,9 @@ query ($first: Int!, $offset: Int!) { component_1_valueQuantity_code component_1_valueQuantity_unit component_1_valueQuantity_value + component_1_valueCodeableConcept_coding_0_system + component_1_valueCodeableConcept_coding_0_code + component_1_valueCodeableConcept_coding_0_display } medication_requests { medication_id: id diff --git a/fhir/README.md b/fhir/README.md new file mode 100644 index 00000000..65e24977 --- /dev/null +++ b/fhir/README.md @@ -0,0 +1,12 @@ +DROP DATABASE hapi ; CREATE DATABASE hapi owner fhir_user ; + + +\c hapi; +DO $$ DECLARE + r RECORD; +BEGIN + FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP + EXECUTE 'TRUNCATE TABLE ' || quote_ident(r.tablename) || ' CASCADE '; + END LOOP; +END $$; + diff --git a/fhir/data/application.yaml b/fhir/data/application.yaml new file mode 100644 index 00000000..d1a76a2a --- /dev/null +++ b/fhir/data/application.yaml @@ -0,0 +1,205 @@ +#Adds the option to go to eg. 
http://localhost:8080/actuator/health for seeing the running configuration +#see https://docs.spring.io/spring-boot/docs/current/reference/html/actuator.html#actuator.endpoints +management: + endpoints: + web: + exposure: + include: "health,prometheus" +spring: + main: + allow-circular-references: true + #allow-bean-definition-overriding: true + flyway: + enabled: false + check-location: false + baselineOnMigrate: true + datasource: + url: 'jdbc:postgresql://postgres:5432/hapi' + username: 'fhir_user' + password: 'fhir_pass' + driverClassName: org.postgresql.Driver + max-active: 20 + +# url: 'jdbc:h2:file:./target/database/h2' +# #url: jdbc:h2:mem:test_mem +# username: sa +# password: null +# driverClassName: org.h2.Driver +# max-active: 15 + + # database connection pool size + hikari: + maximum-pool-size: 25 + jpa: + properties: + hibernate.format_sql: false + hibernate.show_sql: false + #Hibernate dialect is automatically detected except Postgres and H2. + #If using H2, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirH2Dialect + #If using postgres, then supply the value of ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect + + hibernate.dialect: ca.uhn.fhir.jpa.model.dialect.HapiFhirPostgres94Dialect + # hibernate.hbm2ddl.auto: update + # hibernate.jdbc.batch_size: 20 + # hibernate.cache.use_query_cache: false + # hibernate.cache.use_second_level_cache: false + # hibernate.cache.use_structured_entries: false + # hibernate.cache.use_minimal_puts: false + ### These settings will enable fulltext search with lucene or elastic + hibernate.search.enabled: true + ### lucene parameters +# hibernate.search.backend.type: lucene +# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiLuceneAnalysisConfigurer +# hibernate.search.backend.directory.type: local-filesystem +# hibernate.search.backend.directory.root: target/lucenefiles +# hibernate.search.backend.lucene_version: lucene_current + ### elastic 
parameters ===> see also elasticsearch section below <=== +# hibernate.search.backend.type: elasticsearch +# hibernate.search.backend.analysis.configurer: ca.uhn.fhir.jpa.search.HapiHSearchAnalysisConfigurers$HapiElasticAnalysisConfigurer +hapi: + fhir: + ### This enables the swagger-ui at /fhir/swagger-ui/index.html as well as the /fhir/api-docs (see https://hapifhir.io/hapi-fhir/docs/server_plain/openapi.html) + openapi_enabled: true + ### This is the FHIR version. Choose between, DSTU2, DSTU3, R4 or R5 + fhir_version: R4 + ### enable to use the ApacheProxyAddressStrategy which uses X-Forwarded-* headers + ### to determine the FHIR server address + # use_apache_address_strategy: false + ### forces the use of the https:// protocol for the returned server address. + ### alternatively, it may be set using the X-Forwarded-Proto header. + # use_apache_address_strategy_https: false + ### enable to set the Server URL + # server_address: http://hapi.fhir.org/baseR4 + # defer_indexing_for_codesystems_of_size: 101 + # install_transitive_ig_dependencies: true + # implementationguides: + ### example from registry (packages.fhir.org) + # swiss: + # name: swiss.mednet.fhir + # version: 0.8.0 + # example not from registry + # ips_1_0_0: + # url: https://build.fhir.org/ig/HL7/fhir-ips/package.tgz + # name: hl7.fhir.uv.ips + # version: 1.0.0 + # supported_resource_types: + # - Patient + # - Observation + ################################################## + # Allowed Bundle Types for persistence (defaults are: COLLECTION,DOCUMENT,MESSAGE) + ################################################## + # allowed_bundle_types: COLLECTION,DOCUMENT,MESSAGE,TRANSACTION,TRANSACTIONRESPONSE,BATCH,BATCHRESPONSE,HISTORY,SEARCHSET + # allow_cascading_deletes: true + # allow_contains_searches: true + # allow_external_references: true + # allow_multiple_delete: true + # allow_override_default_search_params: true + # auto_create_placeholder_reference_targets: false + # cql_enabled: true + # 
default_encoding: JSON + # default_pretty_print: true + # default_page_size: 20 + # delete_expunge_enabled: true + # enable_repository_validating_interceptor: true + # enable_index_missing_fields: false + # enable_index_of_type: true + # enable_index_contained_resource: false + ### !!Extended Lucene/Elasticsearch Indexing is still a experimental feature, expect some features (e.g. _total=accurate) to not work as expected!! + ### more information here: https://hapifhir.io/hapi-fhir/docs/server_jpa/elastic.html + advanced_lucene_indexing: false + bulk_export_enabled: true + bulk_import_enabled: true + # enforce_referential_integrity_on_delete: false + # This is an experimental feature, and does not fully support _total and other FHIR features. + # enforce_referential_integrity_on_delete: false + # enforce_referential_integrity_on_write: false + # etag_support_enabled: true + # expunge_enabled: true + client_id_strategy: ANY + # fhirpath_interceptor_enabled: false + # filter_search_enabled: true + # graphql_enabled: true + # narrative_enabled: true + # mdm_enabled: true + # local_base_urls: + # - https://hapi.fhir.org/baseR4 + mdm_enabled: false + # partitioning: + # allow_references_across_partitions: false + # partitioning_include_in_search_hashes: false + cors: + allow_Credentials: true + # These are allowed_origin patterns, see: https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/cors/CorsConfiguration.html#setAllowedOriginPatterns-java.util.List- + allowed_origin: + - '*' + + # Search coordinator thread pool sizes + search-coord-core-pool-size: 20 + search-coord-max-pool-size: 100 + search-coord-queue-capacity: 200 + + # Threadpool size for BATCH'ed GETs in a bundle. 
+ # bundle_batch_pool_size: 10 + # bundle_batch_pool_max_size: 50 + + # logger: + # error_format: 'ERROR - ${requestVerb} ${requestUrl}' + # format: >- + # Path[${servletPath}] Source[${requestHeader.x-forwarded-for}] + # Operation[${operationType} ${operationName} ${idOrResourceName}] + # UA[${requestHeader.user-agent}] Params[${requestParameters}] + # ResponseEncoding[${responseEncodingNoDefault}] + # log_exceptions: true + # name: fhirtest.access + # max_binary_size: 104857600 + # max_page_size: 200 + # retain_cached_searches_mins: 60 + # reuse_cached_search_results_millis: 60000 + tester: + home: + name: Local Tester + server_address: 'http://localhost:8080/fhir' + refuse_to_fetch_third_party_urls: false + fhir_version: R4 + global: + name: Global Tester + server_address: "http://hapi.fhir.org/baseR4" + refuse_to_fetch_third_party_urls: false + fhir_version: R4 + # validation: + # requests_enabled: true + # responses_enabled: true + # binary_storage_enabled: true + inline_resource_storage_below_size: 10000 +# bulk_export_enabled: true +# subscription: +# resthook_enabled: true +# websocket_enabled: false +# email: +# from: some@test.com +# host: google.com +# port: +# username: +# password: +# auth: +# startTlsEnable: +# startTlsRequired: +# quitWait: +# lastn_enabled: true +# store_resource_in_lucene_index_enabled: true +### This is configuration for normalized quantity serach level default is 0 +### 0: NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED - default +### 1: NORMALIZED_QUANTITY_STORAGE_SUPPORTED +### 2: NORMALIZED_QUANTITY_SEARCH_SUPPORTED +# normalized_quantity_search_level: 2 +#elasticsearch: +# debug: +# pretty_print_json_log: false +# refresh_after_write: false +# enabled: false +# password: SomePassword +# required_index_status: YELLOW +# rest_url: 'localhost:9200' +# protocol: 'http' +# schema_management_strategy: CREATE +# username: SomeUsername \ No newline at end of file diff --git a/local_windmill_setup.MD b/local_windmill_setup.MD new file mode 
100644 index 00000000..1d89da0b --- /dev/null +++ b/local_windmill_setup.MD @@ -0,0 +1,62 @@ +These are the steps for setting up a local Windmill/Portal-Service Instance + +1. clone feature/local-data-portal branch and follow the steps to set up in +https://github.com/ACED-IDP/compose-services-training/blob/feature/staging/docs/New_Setup.md + + +OR if you already have a working staging implementation: + + +```sh +dc down; dc rm -f +``` + +change line 230 in docker compose yaml: +https://github.com/ACED-IDP/compose-services-training/blob/feature/local-data-portal/docker-compose.yml#L230 + + +2. Clone data-portal into your compose-services root directory located here: +https://github.com/uc-cdis/data-portal/ + +3. cd into data portal and build the image locally with: +```sh +docker build -t windmill . +``` + +4. cd into the root of compose-services and dc up + +5. If portal-services is running and healthy proceed otherwise dc down dc rm -f dc up -d + +6. Run this command in the data-portal directory: +```sh +npm i +``` + +7. Put your gitops.json file in data/config and title it config.json. This is the file that you will edit to make changes in your local windmill build + +8. The below command is what you run when you want to see changes in a gitops file. The hot mapping only works for changes directly to the source code, +but the command should only take 30 seconds at most and will automatically update the page when it has been initiated. + +```sh +HOSTNAME=aced-training.compbio.ohsu.edu APP=config NODE_ENV=dev bash ./runWebpack.sh +change hostname to whatever your hostname is that you were using before. +``` + +This is the new root url of the website. You know you have succeeded in the previous steps when this works: +https://aced-training.compbio.ohsu.edu/dev.html/ + +Visit this if your webpage isn't loading https://localhost:9443/bundle.js you will probably have to visit it the first time you load the dev server. + +9. 
You might need this line if you get an error about certs or versions when running the runWebpack.sh command: +```sh +export NODE_OPTIONS=--openssl-legacy-provider +``` + +10. To get the query page to load, make this one-line change in data-portal/src/localconf.js:L108 and reload webpack with the above command: + +```txt +const graphqlSchemaUrl = `${hostname}${(basename && basename !== '/' && basename !== '/dev.html') ? basename : ''}/data/schema.json`; +``` + + + diff --git a/minio.conf.staging b/minio.conf.staging index 807cac4c..32e02774 100644 --- a/minio.conf.staging +++ b/minio.conf.staging @@ -1,6 +1,6 @@ # -# MINIO configuration -# +# MINIO configuration +# # For load balancing, cluster support: coordinate with docker-compose.override.yml upstream minio-default { @@ -45,6 +45,10 @@ # MINIO S3 storage host(s) # server { + # + # TODO this is the only change for staging. Can we implement a script & template combination + # so there is not one of these files for every environment? + # server_name minio-default-staging.aced-idp.org; listen 80; diff --git a/nginx.conf b/nginx.conf index 49270bb6..edfa0007 100644 --- a/nginx.conf +++ b/nginx.conf @@ -283,6 +283,28 @@ http { location /lw-workspace/ { return 302 /lw-workspace/proxy; } + + location /dash/proxy { + + if ($saved_set_cookie != "") { + add_header Set-Cookie $saved_set_cookie always; + } + + error_page 403 = @errorworkspace; + + proxy_pass http://dash-service:8050; + proxy_http_version 1.1; + proxy_set_header Host $host; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection $http_connection; + proxy_set_header Authorization "$access_token"; + } + + location /dash/ { + return 302 /dash/proxy; + } + + } diff --git a/scripts/postgres_init.sql b/scripts/postgres_init.sql index 8237ab94..570faa99 100644 --- a/scripts/postgres_init.sql +++ b/scripts/postgres_init.sql @@ -30,3 +30,7 @@ ALTER USER indexd_user WITH SUPERUSER; CREATE USER arborist_user; ALTER USER arborist_user WITH PASSWORD 
'arborist_pass'; ALTER USER arborist_user WITH SUPERUSER; + +CREATE USER fhir_user; +ALTER USER fhir_user WITH PASSWORD 'fhir_pass'; +ALTER USER fhir_user WITH SUPERUSER; \ No newline at end of file