From 09fe62496eb2e5f7886935fd227304a29857823a Mon Sep 17 00:00:00 2001 From: hfields Date: Thu, 5 Dec 2024 21:59:46 -0700 Subject: [PATCH 01/29] set weights values to be numbers --- calliope_app/client/static/js/scenarios.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/calliope_app/client/static/js/scenarios.js b/calliope_app/client/static/js/scenarios.js index 2dad7329..d98a166d 100644 --- a/calliope_app/client/static/js/scenarios.js +++ b/calliope_app/client/static/js/scenarios.js @@ -786,11 +786,11 @@ function activate_scenario_settings() { }); $('#settings_weights_import_data').on('click', function() { - dialogObj["monetary"] = $("#monetary").val(); - dialogObj["co2"] = $("#co2").val(); - dialogObj["ch4"] = $("#ch4").val(); - dialogObj["n2o"] = $("#n2o").val(); - dialogObj["co2e"] = $("#co2e").val(); + dialogObj["monetary"] = !isNaN(parseFloat($("#monetary").val())) ? parseFloat($("#monetary").val()) : $("#monetary").val(); + dialogObj["co2"] = !isNaN(parseFloat($("#co2").val())) ? parseFloat($("#co2").val()) : $("#co2").val(); + dialogObj["ch4"] = !isNaN(parseFloat($("#ch4").val())) ? parseFloat($("#ch4").val()) : $("#ch4").val(); + dialogObj["n2o"] = !isNaN(parseFloat($("#n2o").val())) ? parseFloat($("#n2o").val()) : $("#n2o").val(); + dialogObj["co2e"] = !isNaN(parseFloat($("#co2e").val())) ? parseFloat($("#co2e").val()) : $("#co2e").val(); $('textarea[name="edit' + dialogInputId + '"]').text(JSON.stringify(dialogObj, undefined, 2)); $('#scenario_weights_json_form').hide(); From e4322ceae368d1bd567337074fde8dd0239cb56c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Dec 2024 21:48:48 +0000 Subject: [PATCH 02/29] Bump django from 4.2.16 to 4.2.17 in /calliope_app Bumps [django](https://github.com/django/django) from 4.2.16 to 4.2.17. - [Commits](https://github.com/django/django/compare/4.2.16...4.2.17) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index 491a36d1..87733b50 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -2,7 +2,7 @@ django_ratelimit==4.1.0 git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.16 +django==4.2.17 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From 3a282717e6ad76e1ae292634df39e29957a768b3 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Mon, 9 Dec 2024 15:36:54 -0700 Subject: [PATCH 03/29] Gradient links and storage fix Adding a number of enhancements from HPC work to gradient Transmission (links) gradient support Support for systemwide energy cap constraints on techs Fixing issue where copying the 'energy_cap_per_storage_cap' constraint would cause an infeasible run with rounded capacities --- calliope_app/api/calliope_utils.py | 267 +++++++++++++++-------------- 1 file changed, 138 insertions(+), 129 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 1ab3e3c3..0520730b 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -492,111 +492,125 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): built_techs = {} built_loc_techs = {} - for l in old_model['locations']: - if 'techs' in old_model['locations'][l]: - for t in old_model['locations'][l]['techs']: - old_tech = old_model['techs'][t] - new_tech = new_techs['techs'][t] - new_loc_tech = new_loctechs['locations'][l]['techs'][t] - loc_tech = old_model['locations'][l]['techs'][t] - if ('energy_cap_max' in loc_tech.get('constraints',{}) or 'storage_cap_max' in loc_tech.get('constraints',{})) or\ - ('energy_cap_max' in old_tech.get('constraints',{}) or 'storage_cap_max' in old_tech.get('constraints',{})): - if loc_tech.get('results',{'energy_cap_equals':0}).get('energy_cap_equals',0) != 0 or\ - loc_tech.get('results',{'storage_cap_equals':0}).get('storage_cap_equals',0) != 0: - loc_tech_b = copy.deepcopy(loc_tech) - built_tech_names.append(t) - - if 'constraints' in loc_tech_b: - [loc_tech_b['constraints'].pop(c) for c in ['energy_cap_max', 'storage_cap_max'] if c in loc_tech_b['constraints']] - else: - loc_tech_b['constraints'] = {} - if 'energy_cap_equals' in loc_tech['results']: - loc_tech_b['constraints']['energy_cap_equals'] = loc_tech['results']['energy_cap_equals'] - if 'storage_cap_equals' in loc_tech['results']: - loc_tech_b['constraints']['storage_cap_equals'] = loc_tech['results']['storage_cap_equals'] - cost_classes = [c for c in loc_tech_b.keys() if 'costs.' 
in c] - for cost in cost_classes: - [loc_tech_b[cost].pop(c) for c in ['energy_cap','interest_rate','storage_cap'] if c in loc_tech_b[cost]] - loc_tech_b.pop('results') - - if new_loc_tech and 'constraints' in new_loc_tech: - new_energy_cap_min = new_loc_tech['constraints'].get('energy_cap_min',new_tech.get('constraints',{}).get('energy_cap_min',0)) - new_energy_cap_max = new_loc_tech['constraints'].get('energy_cap_max',new_tech.get('constraints',{}).get('energy_cap_max',0)) - new_storage_cap_min = new_loc_tech['constraints'].get('storage_cap_min',new_tech.get('constraints',{}).get('storage_cap_min',0)) - new_storage_cap_max = new_loc_tech['constraints'].get('storage_cap_max',new_tech.get('constraints',{}).get('storage_cap_max',0)) - else: - new_energy_cap_min = new_tech.get('constraints',{}).get('energy_cap_min',0) - new_energy_cap_max = new_tech.get('constraints',{}).get('energy_cap_max',0) - new_storage_cap_min = new_tech.get('constraints',{}).get('storage_cap_min',0) - new_storage_cap_max = new_tech.get('constraints',{}).get('storage_cap_max',0) + for loc_type in ['locations','links']: + for l in old_model[loc_type]: + if 'techs' in old_model[loc_type][l]: + for t in old_model[loc_type][l]['techs']: + old_tech = old_model['techs'][t] + if t not in new_techs['techs']: + continue + new_tech = new_techs['techs'][t] + new_loc_tech = new_loctechs[loc_type][l]['techs'][t] + loc_tech = old_model[loc_type][l]['techs'][t] + if ('energy_cap_max' in loc_tech.get('constraints',{}) or 'storage_cap_max' in loc_tech.get('constraints',{})) or\ + ('energy_cap_max' in old_tech.get('constraints',{}) or 'storage_cap_max' in old_tech.get('constraints',{})): + if loc_tech.get('results',{'energy_cap_equals':0}).get('energy_cap_equals',0) != 0 or\ + loc_tech.get('results',{'storage_cap_equals':0}).get('storage_cap_equals',0) != 0: + loc_tech_b = copy.deepcopy(loc_tech) + + # Record built techs and the total systemwide capacity of those techs to use with energy_cap_max_systemwide + if t in built_tech_names: + built_tech_names[t] += loc_tech.get('results',{'energy_cap_equals':0}).get('energy_cap_equals',0) + else: + built_tech_names[t] = loc_tech.get('results',{'energy_cap_equals':0}).get('energy_cap_equals',0) - if new_loc_tech == None: + if 'constraints' in loc_tech_b: + [loc_tech_b['constraints'].pop(c) for c in ['energy_cap_max', 'storage_cap_max'] if c in loc_tech_b['constraints']] + else: + loc_tech_b['constraints'] = {} + if 'energy_cap_equals' in loc_tech['results']: + loc_tech_b['constraints']['energy_cap_equals'] = loc_tech['results']['energy_cap_equals'] + if 'storage_cap_equals' in loc_tech['results']: + loc_tech_b['constraints']['storage_cap_equals'] = loc_tech['results']['storage_cap_equals'] + if 'energy_cap_per_storage_cap_equals' in loc_tech_b['constraints']: + loc_tech_b['constraints'].pop('energy_cap_per_storage_cap_equals') + cost_classes = [c for c in loc_tech_b.keys() if 'costs.' 
in c] + for cost in cost_classes: + [loc_tech_b[cost].pop(c) for c in ['energy_cap','interest_rate','storage_cap'] if c in loc_tech_b[cost]] + loc_tech_b.pop('results') + + if new_loc_tech and 'constraints' in new_loc_tech: + new_energy_cap_min = new_loc_tech['constraints'].get('energy_cap_min',new_tech.get('constraints',{}).get('energy_cap_min',0)) + new_energy_cap_max = new_loc_tech['constraints'].get('energy_cap_max',new_tech.get('constraints',{}).get('energy_cap_max',0)) + new_storage_cap_min = new_loc_tech['constraints'].get('storage_cap_min',new_tech.get('constraints',{}).get('storage_cap_min',0)) + new_storage_cap_max = new_loc_tech['constraints'].get('storage_cap_max',new_tech.get('constraints',{}).get('storage_cap_max',0)) + else: + new_energy_cap_min = new_tech.get('constraints',{}).get('energy_cap_min',0) + new_energy_cap_max = new_tech.get('constraints',{}).get('energy_cap_max',0) + new_storage_cap_min = new_tech.get('constraints',{}).get('storage_cap_min',0) + new_storage_cap_max = new_tech.get('constraints',{}).get('storage_cap_max',0) + + if new_loc_tech == None: new_loc_tech = {} - if 'constraints' not in new_loc_tech: + if 'constraints' not in new_loc_tech: new_loc_tech['constraints'] = {} - if new_energy_cap_min > 0 and new_energy_cap_min-loc_tech['results']['energy_cap_equals'] > 0: - new_loc_tech['constraints']['energy_cap_min'] = new_energy_cap_min-loc_tech['results']['energy_cap_equals'] - if new_loc_tech['constraints']['energy_cap_min'] < 0: - new_loc_tech['constraints']['energy_cap_min'] = 0 - if new_energy_cap_max != 'inf' and new_energy_cap_max > 0: - new_loc_tech['constraints']['energy_cap_max'] = new_energy_cap_max-loc_tech['results']['energy_cap_equals'] - if new_loc_tech['constraints']['energy_cap_max'] < 0: - new_loc_tech['constraints']['energy_cap_max'] = 0 - if new_storage_cap_min > 0 and new_storage_cap_min-loc_tech['results']['storage_cap_equals'] > 0: - new_loc_tech['constraints']['storage_cap_min'] = new_storage_cap_min-loc_tech['results']['storage_cap_equals'] - if new_loc_tech['constraints']['storage_cap_min'] < 0: - new_loc_tech['constraints']['storage_cap_min'] = 0 - if new_storage_cap_max != 'inf' and new_storage_cap_max > 0: - new_loc_tech['constraints']['storage_cap_max'] = new_storage_cap_max-loc_tech['results']['storage_cap_equals'] - if new_loc_tech['constraints']['storage_cap_max'] < 0: - new_loc_tech['constraints']['storage_cap_max'] = 0 - - new_loctechs['locations'][l]['techs'][t] = new_loc_tech - for x in loc_tech_b: - for y in loc_tech_b[x].keys(): - # Copy over timeseries files for old techs, updating year to match new year - if 'file=' in str(loc_tech_b[x][y]): - filename=loc_tech_b[x][y].replace('file=','').replace('.csv:value','') - ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') - ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) - freq = pd.infer_freq(ts_df['Unnamed: 0']) - if not calendar.isleap(new_year): - feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) - ts_df = ts_df[~feb_29_mask] - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - elif not calendar.isleap(old_year): - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) - ts_df = ts_df.reindex(idx, fill_value=0) - - # Leap Year Handling (Fill w/ Feb 28th) - feb_28_mask = (ts_df.index.month == 2) & 
(ts_df.index.day == 28) - feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) - feb_28 = ts_df.loc[feb_28_mask, 'value'].values - feb_29 = ts_df.loc[feb_29_mask, 'value'].values - if ((len(feb_29) > 0) & (len(feb_28) > 0)): - ts_df.loc[feb_29_mask, 'value'] = feb_28 - else: - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - ts_df.index.name = None - ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) - loc_tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' - - if l not in built_loc_techs: - built_loc_techs[l] = {} - built_loc_techs[l][t+'_'+str(old_year)] = loc_tech_b - - new_loctechs['locations'][l]['techs'][t+'_'+str(old_year)] = loc_tech_b - for t in built_tech_names: + if new_energy_cap_min > 0 and new_energy_cap_min-loc_tech['results']['energy_cap_equals'] > 0: + new_loc_tech['constraints']['energy_cap_min'] = new_energy_cap_min-loc_tech['results']['energy_cap_equals'] + if new_loc_tech['constraints']['energy_cap_min'] < 0: + new_loc_tech['constraints']['energy_cap_min'] = 0 + if new_energy_cap_max != 'inf' and new_energy_cap_max > 0: + new_loc_tech['constraints']['energy_cap_max'] = new_energy_cap_max-loc_tech['results']['energy_cap_equals'] + if new_loc_tech['constraints']['energy_cap_max'] < 0: + new_loc_tech['constraints']['energy_cap_max'] = 0 + if new_storage_cap_min > 0 and new_storage_cap_min-loc_tech['results']['storage_cap_equals'] > 0: + new_loc_tech['constraints']['storage_cap_min'] = new_storage_cap_min-loc_tech['results']['storage_cap_equals'] + if new_loc_tech['constraints']['storage_cap_min'] < 0: + new_loc_tech['constraints']['storage_cap_min'] = 0 + if new_storage_cap_max != 'inf' and new_storage_cap_max > 0: + new_loc_tech['constraints']['storage_cap_max'] = new_storage_cap_max-loc_tech['results']['storage_cap_equals'] + if new_loc_tech['constraints']['storage_cap_max'] < 0: + new_loc_tech['constraints']['storage_cap_max'] = 0 + + new_loctechs[loc_type][l]['techs'][t] = new_loc_tech + for x in loc_tech_b: + for y in loc_tech_b[x].keys(): + try: + # Copy over timeseries files for old techs, updating year to match new year + if 'file=' in loc_tech_b[x][y]: + filename=loc_tech_b[x][y].replace('file=','').replace('.csv:value','') + ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') + ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) + freq = pd.infer_freq(ts_df['Unnamed: 0']) + if not calendar.isleap(new_year): + feb_29_mask = (ts_df['Unnamed: 0'].month == 2) & (ts_df['Unnamed: 0'].index.day == 29) + ts_df = ts_df[~feb_29_mask] + ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + elif not calendar.isleap(old_year): + ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.index = ts_df['Unnamed: 0'] + + # Leap Year Handling (Fill w/ Feb 28th) + feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) + feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) + feb_28 = ts_df.loc[feb_28_mask, 'value'].values + feb_29 = ts_df.loc[feb_29_mask, 'value'].values + if ((len(feb_29) > 0) & (len(feb_28) > 0)): + ts_df.loc[feb_29_mask, 'value'] = feb_28 + ts_df['Unnamed: 0'] = ts_df.index + ts_df.to_csv(new_inputs+filename+'-'+str(old_year)+'.csv',index=False) + loc_tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' + except TypeError: + continue + + if l not in built_loc_techs: + built_loc_techs[l] = {} + 
built_loc_techs[l][t+'_'+str(old_year)] = loc_tech_b + + new_loctechs[loc_type][l]['techs'][t+'_'+str(old_year)] = loc_tech_b + for t in built_tech_names.keys(): tech = old_model['techs'][t] - tech_b = copy.deepcopy(tech) + + # Handle systemwide energy cap gradient + if 'constraints' in new_techs['techs'][t]: + if 'energy_cap_max_systemwide' in new_techs['techs'][t]['constraints']: + new_techs['techs'][t]['constraints']['energy_cap_max_systemwide'] = max([new_techs['techs'][t]['constraints']['energy_cap_max_systemwide']-built_tech_names[t],0]) + if 'energy_cap_equals_systemwide' in new_techs['techs'][t]['constraints']: + new_techs['techs'][t]['constraints']['energy_cap_max_systemwide'] = max([new_techs['techs'][t]['constraints']['energy_cap_equals_systemwide']-built_tech_names[t],0]) + if 'constraints' in tech_b: - [tech_b['constraints'].pop(c) for c in ['energy_cap_max', 'storage_cap_max'] if c in tech_b['constraints']] + [tech_b['constraints'].pop(c) for c in ['energy_cap_max', 'storage_cap_max','energy_cap_per_storage_cap_equals'] if c in tech_b['constraints']] cost_classes = [c for c in tech_b.keys() if 'costs.' in c] for cost in cost_classes: [tech_b[cost].pop(c) for c in ['energy_cap','interest_rate','storage_cap'] if c in tech_b[cost]] @@ -606,37 +620,32 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): tech_b['essentials']['name'] += ' '+str(old_year) for x in tech_b: - for y in tech_b[x].keys(): - # Copy over timeseries files for old techs, updating year to match new year - if 'file=' in str(tech_b[x][y]): - filename=tech_b[x][y].replace('file=','').replace('.csv:value','') - ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') - ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) - freq = pd.infer_freq(ts_df['Unnamed: 0']) - if not calendar.isleap(new_year): - feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) - ts_df = ts_df[~feb_29_mask] - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - elif not calendar.isleap(old_year): - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) - ts_df = ts_df.reindex(idx, fill_value=0) - - # Leap Year Handling (Fill w/ Feb 28th) - feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) - feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) - feb_28 = ts_df.loc[feb_28_mask, 'value'].values - feb_29 = ts_df.loc[feb_29_mask, 'value'].values - if ((len(feb_29) > 0) & (len(feb_28) > 0)): - ts_df.loc[feb_29_mask, 'value'] = feb_28 - else: - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - ts_df.index.name = None - ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) - tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' + for y in tech_b[x]: + try: + if 'file=' in tech_b[x][y]: + filename=tech_b[x][y].replace('file=','').replace('.csv:value','') + ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') + ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) + if not calendar.isleap(new_year): + feb_29_mask = (ts_df['Unnamed: 0'].month == 2) & (ts_df['Unnamed: 0'].index.day == 29) + ts_df = ts_df[~feb_29_mask] + ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + elif not 
calendar.isleap(old_year): + ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.index = ts_df['Unnamed: 0'] + + # Leap Year Handling (Fill w/ Feb 28th) + feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) + feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) + feb_28 = ts_df.loc[feb_28_mask, 'value'].values + feb_29 = ts_df.loc[feb_29_mask, 'value'].values + if ((len(feb_29) > 0) & (len(feb_28) > 0)): + ts_df.loc[feb_29_mask, 'value'] = feb_28 + ts_df['Unnamed: 0'] = ts_df.index + ts_df.to_csv(new_inputs+filename+'-'+str(old_year)+'.csv',index=False) + tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' + except (TypeError,FileNotFoundError): + continue built_techs[t+'_'+str(old_year)] = tech_b new_techs['techs'][t+'_'+str(old_year)] = tech_b From 9ce6f16948519ebf5a43bb54c06e3e6c5ce43428 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Mon, 9 Dec 2024 15:44:46 -0700 Subject: [PATCH 04/29] Fixing timeseries code --- calliope_app/api/calliope_utils.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 0520730b..0cfd8c2e 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -621,18 +621,23 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): for x in tech_b: for y in tech_b[x]: - try: - if 'file=' in tech_b[x][y]: + for y in tech_b[x].keys(): + # Copy over timeseries files for old techs, updating year to match new year + if 'file=' in str(tech_b[x][y]): filename=tech_b[x][y].replace('file=','').replace('.csv:value','') ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) + freq = pd.infer_freq(ts_df['Unnamed: 0']) if not calendar.isleap(new_year): - feb_29_mask = (ts_df['Unnamed: 0'].month == 2) & (ts_df['Unnamed: 0'].index.day == 29) + feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) ts_df = ts_df[~feb_29_mask] - ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) elif not calendar.isleap(old_year): - ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.index = ts_df['Unnamed: 0'] + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) + ts_df = ts_df.reindex(idx, fill_value=0) # Leap Year Handling (Fill w/ Feb 28th) feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) @@ -641,11 +646,12 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): feb_29 = ts_df.loc[feb_29_mask, 'value'].values if ((len(feb_29) > 0) & (len(feb_28) > 0)): ts_df.loc[feb_29_mask, 'value'] = feb_28 - ts_df['Unnamed: 0'] = ts_df.index - ts_df.to_csv(new_inputs+filename+'-'+str(old_year)+'.csv',index=False) + else: + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + ts_df.index.name = None + ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' - except (TypeError,FileNotFoundError): - continue 
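Aside from the patch itself: the hunks above reduce each capacity limit by the capacity already built in earlier gradient years and clamp the result at zero, both per location and systemwide, so a fully built technology never receives a negative cap. A minimal sketch of that rule; the function and argument names are illustrative, not repository code.

def remaining_cap(new_cap_max, already_built):
    # Capacity still available to the optimizer in the new model year.
    if new_cap_max == 'inf':   # mirrors the != 'inf' checks above for unbounded caps
        return 'inf'
    return max(new_cap_max - already_built, 0)

print(remaining_cap(500, 350))   # 150: part of the limit is still buildable
print(remaining_cap(300, 420))   # 0: limit already exhausted, never negative
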
built_techs[t+'_'+str(old_year)] = tech_b new_techs['techs'][t+'_'+str(old_year)] = tech_b From 8ca376d23870abb960924298a583f4109c28a790 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Mon, 9 Dec 2024 17:22:09 -0700 Subject: [PATCH 05/29] Set calliope logging critical verbosity --- calliope_app/api/calliope_utils.py | 41 ++++++++++++++++++++---------- 1 file changed, 27 insertions(+), 14 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 1ab3e3c3..9ad14a22 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -3,19 +3,20 @@ interfacing with Calliope. """ +import calendar +import copy +import json +import logging import os -import yaml import shutil -from calliope import Model as CalliopeModel + +import calliope import pandas as pd -import json -import copy -import calendar +import yaml +from calliope import Model as CalliopeModel from api.models.configuration import Scenario_Param, Scenario_Loc_Tech, \ - Location, Tech_Param, Loc_Tech_Param, Loc_Tech, Scenario, Carrier -from api.models.outputs import Run -import logging + Location, Tech_Param, Loc_Tech_Param, Loc_Tech, Scenario logger = logging.getLogger(__name__) @@ -36,7 +37,7 @@ def get_model_yaml_set(run, scenario_id, year): # NOTE: deprecated run parameter in the database if unique_param == "run.objective_options": continue - + if unique_param not in unique_params: # If parameter hasn't been set, add to Return List unique_params.append(unique_param) @@ -164,7 +165,7 @@ def get_loc_techs_yaml_set(scenario_id, year): value = float(param.value) / 100 else: value = param.value - + param_list = [parent_type, location, 'techs', param.loc_tech.technology.calliope_name]+\ unique_param.split('.') @@ -173,7 +174,7 @@ def get_loc_techs_yaml_set(scenario_id, year): def get_carriers_yaml_set(scenario_id): model = Scenario.objects.get(id=scenario_id).model - + carriers_yaml_set = {} for carrier in model.carriers.all(): carriers_yaml_set[carrier.name] = {'rate':carrier.rate_unit,'quantity':carrier.quantity_unit} @@ -229,6 +230,12 @@ def run_basic(model_path, logger): model = CalliopeModel(config=model_path) logger.info(model.info()) logger.info(model._model_data.coords.get("techs_non_transmission", [])) + + # NOTE: HiGHS solver has issue via Pyomo + # Weird to bypass by setting calliope log verbosity to 'critical' + # 'info' and 'debug' not working. + calliope.set_log_verbosity("critical") + model.run() _write_outputs(model, model_path) return model.results.termination_condition @@ -241,6 +248,12 @@ def run_clustered(model_path, idx, logger): _set_subset_time(model_path) _set_capacities(model_path) model = CalliopeModel(config=model_path) + + # NOTE: HiGHS solver has issue via Pyomo + # Weird to bypass by setting calliope log verbosity to 'critical' + # 'info' and 'debug' not working. 
+ calliope.set_log_verbosity("critical") + model.run() _write_outputs(model, model_path) if model.results.termination_condition != 'optimal': @@ -552,7 +565,7 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): if new_loc_tech['constraints']['storage_cap_max'] < 0: new_loc_tech['constraints']['storage_cap_max'] = 0 - new_loctechs['locations'][l]['techs'][t] = new_loc_tech + new_loctechs['locations'][l]['techs'][t] = new_loc_tech for x in loc_tech_b: for y in loc_tech_b[x].keys(): # Copy over timeseries files for old techs, updating year to match new year @@ -602,7 +615,7 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): [tech_b[cost].pop(c) for c in ['energy_cap','interest_rate','storage_cap'] if c in tech_b[cost]] if len(tech_b[cost].keys()) == 0: tech_b.pop(cost) - + tech_b['essentials']['name'] += ' '+str(old_year) for x in tech_b: @@ -664,4 +677,4 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): yaml.dump(new_loctechs,outfile,default_flow_style=False) with open(new_inputs+'/model.yaml', 'w') as outfile: - yaml.dump(new_model,outfile,default_flow_style=False) \ No newline at end of file + yaml.dump(new_model,outfile,default_flow_style=False) From c75c5512c8274db54334a75f1da920642f12b9c3 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Tue, 10 Dec 2024 05:38:06 -0700 Subject: [PATCH 06/29] Set Clustered run default False --- calliope_app/client/templates/add_run.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/client/templates/add_run.html b/calliope_app/client/templates/add_run.html index 5c5975e4..4c670a69 100644 --- a/calliope_app/client/templates/add_run.html +++ b/calliope_app/client/templates/add_run.html @@ -89,7 +89,7 @@
{% trans "Run Options" %}:
{% trans "Enable Clustered Run:" %} - +
From 53dfeeff4087c1b101687041a0b059f0c52d9f4c Mon Sep 17 00:00:00 2001 From: jgu2 Date: Wed, 18 Dec 2024 16:21:55 -0700 Subject: [PATCH 07/29] Patch for highs solver running through --- calliope_app/api/calliope_utils.py | 14 ++++++-------- calliope_app/calliope-files/backend/run.py | 8 ++++---- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 9ad14a22..531237d9 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -231,10 +231,9 @@ def run_basic(model_path, logger): logger.info(model.info()) logger.info(model._model_data.coords.get("techs_non_transmission", [])) - # NOTE: HiGHS solver has issue via Pyomo - # Weird to bypass by setting calliope log verbosity to 'critical' - # 'info' and 'debug' not working. - calliope.set_log_verbosity("critical") + # NOTE: with log_to_console=True, the model run would get hanging if error happened. + if model.run_config['solver'] == 'appsi_highs': + model.run_config['solver_options']['log_to_console'] = False model.run() _write_outputs(model, model_path) @@ -249,10 +248,9 @@ def run_clustered(model_path, idx, logger): _set_capacities(model_path) model = CalliopeModel(config=model_path) - # NOTE: HiGHS solver has issue via Pyomo - # Weird to bypass by setting calliope log verbosity to 'critical' - # 'info' and 'debug' not working. - calliope.set_log_verbosity("critical") + # NOTE: with log_to_console=True, the model run would get hanging if error happened. + if model.run_config['solver'] == 'appsi_highs': + model.run_config['solver_options']['log_to_console'] = False model.run() _write_outputs(model, model_path) diff --git a/calliope_app/calliope-files/backend/run.py b/calliope_app/calliope-files/backend/run.py index 0a70e0c1..4ee335d9 100644 --- a/calliope_app/calliope-files/backend/run.py +++ b/calliope_app/calliope-files/backend/run.py @@ -152,10 +152,10 @@ def run_plan( # NOTE: pyomo==6.7.0 opt does not have name attribute # Disable this for use 'appsi_highs' solver, which does not have 'persistent' in name. - # if "persistent" in opt.name and persistent is True: - # results.attrs["objective_function_value"] = opt.get_model_attr("ObjVal") - # else: - results.attrs["objective_function_value"] = backend_model.obj() + if "appsi_highs" not in solver and "persistent" in opt.name and persistent is True: + results.attrs["objective_function_value"] = opt.get_model_attr("ObjVal") + else: + results.attrs["objective_function_value"] = backend_model.obj() else: results = xr.Dataset(attrs={"termination_condition": termination}) From ffa3705bc79a8635a4e346e2fe90defbdd22bbea Mon Sep 17 00:00:00 2001 From: jgu2 Date: Wed, 18 Dec 2024 16:39:04 -0700 Subject: [PATCH 08/29] Update set log_to_console=False --- calliope_app/api/calliope_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 531237d9..7984242a 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -233,7 +233,7 @@ def run_basic(model_path, logger): # NOTE: with log_to_console=True, the model run would get hanging if error happened. 
if model.run_config['solver'] == 'appsi_highs': - model.run_config['solver_options']['log_to_console'] = False + model.run_config['solver_options'] = {'log_to_console': False} model.run() _write_outputs(model, model_path) From 93d914cb3ae885a66f1ae4e34e1d1772f010b800 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Wed, 18 Dec 2024 16:51:10 -0700 Subject: [PATCH 09/29] Update set log_to_console=False --- calliope_app/api/calliope_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 7984242a..f7765f9f 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -250,7 +250,7 @@ def run_clustered(model_path, idx, logger): # NOTE: with log_to_console=True, the model run would get hanging if error happened. if model.run_config['solver'] == 'appsi_highs': - model.run_config['solver_options']['log_to_console'] = False + model.run_config['solver_options'] = {'log_to_console': False} model.run() _write_outputs(model, model_path) From bc7698c793bce268d9d9ad021280edd14f693cb1 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Thu, 19 Dec 2024 19:35:32 -0700 Subject: [PATCH 10/29] Bump the version to 1.1.5 --- calliope_app/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/version.py b/calliope_app/version.py index c72e3798..9b102be7 100644 --- a/calliope_app/version.py +++ b/calliope_app/version.py @@ -1 +1 @@ -__version__ = "1.1.4" +__version__ = "1.1.5" From ee49a42a30242511b77f2c5cb76039f9e137baed Mon Sep 17 00:00:00 2001 From: jgu2 Date: Fri, 10 Jan 2025 10:03:51 -0700 Subject: [PATCH 11/29] Bump pysam from 2.1.5.dev3 to 2.2.0 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index 87733b50..52dd49fb 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -7,7 +7,7 @@ django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 flower>=2.0.1 -nrel-pysam==2.1.5.dev3 +nrel-pysam==2.2.0 pint==0.21 psycopg2-binary==2.9.3 pyyaml==6.0 From eed383d4810bbd2906155153aae81b8b734af59f Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Tue, 14 Jan 2025 11:35:47 -0700 Subject: [PATCH 12/29] Fixing minor issue --- calliope_app/api/calliope_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index c96c6a5d..911742fa 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -499,7 +499,7 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): new_loctechs = yaml.safe_load(open(new_inputs+'/locations.yaml','r')) new_model = yaml.safe_load(open(new_inputs+'/model.yaml','r')) - built_tech_names = [] + built_tech_names = {} built_techs = {} built_loc_techs = {} From b2e3b8b1a751928c3c72a1160cb4b86a9728cb9b Mon Sep 17 00:00:00 2001 From: jgu2 Date: Fri, 4 Apr 2025 08:37:30 -0600 Subject: [PATCH 13/29] Upgrade Django to 4.2.20 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index 52dd49fb..bdd994ae 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -2,7 +2,7 @@ django_ratelimit==4.1.0 git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.17 +django==4.2.20 
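Aside from the patches themselves: patches 07-09 above first set the HiGHS option by mutating run_config['solver_options'] in place and then switch to assigning a whole dict, which also works when no solver_options entry exists yet but discards any options already present. A short illustration of that difference on a plain dict standing in for run_config; not repository code.

run_config = {'solver': 'appsi_highs'}   # no 'solver_options' key yet

try:
    # Patch 07 style: mutate the nested dict; raises KeyError when the key is missing.
    run_config['solver_options']['log_to_console'] = False
except KeyError:
    pass

# Patch 08/09 style: assign a fresh dict; always succeeds, but silently
# replaces any solver options that were already configured.
run_config['solver_options'] = {'log_to_console': False}

# A middle ground, if existing options had to be preserved:
run_config.setdefault('solver_options', {})['log_to_console'] = False
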
django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From d94c2e7993f8662dc415b14c2e877e4a064255cb Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Mon, 28 Apr 2025 13:20:07 -0600 Subject: [PATCH 14/29] Updating logger to show stack trace Updating error logger on model run task to show stack trace to aid debugging errors --- calliope_app/api/tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/calliope_app/api/tasks.py b/calliope_app/api/tasks.py index 20861574..6ab809c1 100644 --- a/calliope_app/api/tasks.py +++ b/calliope_app/api/tasks.py @@ -469,7 +469,8 @@ def on_failure(self, exc, task_id, args, kwargs, einfo): timeout_message = "Run Timeout! TimeLimit=%s seconds." % self.time_limit exc = timeout_message if str(exc) == "SoftTimeLimitExceeded()" else exc - logger.error("{!r}".format(str(exc))) + logger.exception(exc) + #logger.error("{!r}".format(str(exc))) # Add pretty error to the log for key in self.clean_msg_dict: From a8b6cc625483430d585cc73d629e8fa03eb1eaf0 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Mon, 28 Apr 2025 14:29:18 -0600 Subject: [PATCH 15/29] Fixing double loop bug Fixing bug caused by double loop in timeseries processing for gradient techs --- calliope_app/api/calliope_utils.py | 63 +++++++++++++++--------------- 1 file changed, 31 insertions(+), 32 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 911742fa..37f981fa 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -632,38 +632,37 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): tech_b['essentials']['name'] += ' '+str(old_year) for x in tech_b: - for y in tech_b[x]: - for y in tech_b[x].keys(): - # Copy over timeseries files for old techs, updating year to match new year - if 'file=' in str(tech_b[x][y]): - filename=tech_b[x][y].replace('file=','').replace('.csv:value','') - ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') - ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) - freq = pd.infer_freq(ts_df['Unnamed: 0']) - if not calendar.isleap(new_year): - feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) - ts_df = ts_df[~feb_29_mask] - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - elif not calendar.isleap(old_year): - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) - ts_df = ts_df.reindex(idx, fill_value=0) - - # Leap Year Handling (Fill w/ Feb 28th) - feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) - feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) - feb_28 = ts_df.loc[feb_28_mask, 'value'].values - feb_29 = ts_df.loc[feb_29_mask, 'value'].values - if ((len(feb_29) > 0) & (len(feb_28) > 0)): - ts_df.loc[feb_29_mask, 'value'] = feb_28 - else: - ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.drop(columns=['Unnamed: 0'], inplace=True) - ts_df.index.name = None - ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) - tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' + for y in tech_b[x].keys(): + # Copy over timeseries files for old techs, updating year to match new year + if 'file=' in str(tech_b[x][y]): + 
filename=tech_b[x][y].replace('file=','').replace('.csv:value','') + ts_df = pd.read_csv(old_inputs+'/'+filename+'.csv') + ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) + freq = pd.infer_freq(ts_df['Unnamed: 0']) + if not calendar.isleap(new_year): + feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) + ts_df = ts_df[~feb_29_mask] + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + elif not calendar.isleap(old_year): + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) + ts_df = ts_df.reindex(idx, fill_value=0) + + # Leap Year Handling (Fill w/ Feb 28th) + feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) + feb_29_mask = (ts_df.index.month == 2) & (ts_df.index.day == 29) + feb_28 = ts_df.loc[feb_28_mask, 'value'].values + feb_29 = ts_df.loc[feb_29_mask, 'value'].values + if ((len(feb_29) > 0) & (len(feb_28) > 0)): + ts_df.loc[feb_29_mask, 'value'] = feb_28 + else: + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + ts_df.index.name = None + ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) + tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' built_techs[t+'_'+str(old_year)] = tech_b new_techs['techs'][t+'_'+str(old_year)] = tech_b From 104a3680aac5240de6181a271b80e919083ab265 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Mon, 28 Apr 2025 17:28:18 -0600 Subject: [PATCH 16/29] Explicitly adding older version of coolprop Coolprop 6.8.0 seems to break when installing from pip. 
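Aside from the patches themselves: the timeseries blocks repeated in patches 03, 04 and 15 above (and standardized again in patch 17 below) all follow the same re-dating recipe: shift the profile's index to the new model year, drop 29 February when the target year is not a leap year, and, when the target year gains a 29 February, rebuild the full index and copy the 28 February values into it. A condensed pandas sketch of that recipe on synthetic hourly data; column and variable names are illustrative, not repository code.

import calendar
import pandas as pd

old_year, new_year = 2020, 2021   # leap year -> non-leap year
idx = pd.date_range(f'{old_year}-01-01', f'{old_year}-12-31 23:00', freq='h')
ts_df = pd.DataFrame({'value': range(len(idx))}, index=idx)

if not calendar.isleap(new_year):
    # Target year has no Feb 29: drop it before shifting the dates.
    ts_df = ts_df[~((ts_df.index.month == 2) & (ts_df.index.day == 29))]
ts_df.index = ts_df.index.map(lambda t: t.replace(year=new_year))

if calendar.isleap(new_year) and not calendar.isleap(old_year):
    # Target year gained a Feb 29: rebuild the full index and fill it from Feb 28.
    full_idx = pd.date_range(ts_df.index.min(), ts_df.index.max(), freq='h')
    ts_df = ts_df.reindex(full_idx, fill_value=0)
    feb_28 = (ts_df.index.month == 2) & (ts_df.index.day == 28)
    feb_29 = (ts_df.index.month == 2) & (ts_df.index.day == 29)
    ts_df.loc[feb_29, 'value'] = ts_df.loc[feb_28, 'value'].values

print(len(ts_df))   # 8760 hourly rows for the non-leap target year
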
Reverting to 6.7.0 explicitly to resolve --- calliope_app/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index bdd994ae..0ce54b3a 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -1,4 +1,5 @@ django_ratelimit==4.1.0 +coolprop<=6.7.0 git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 From 22686320727ea6f72115db5bdbf5a22ed471243c Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Tue, 29 Apr 2025 09:29:43 -0600 Subject: [PATCH 17/29] Standardizing gradient timeseries code Standardizing the gradient timeseries code between the loc_techs and techs sections and updating all file I/O to use os.path.join() --- calliope_app/api/calliope_utils.py | 32 ++++++++++++++++++------------ 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 37f981fa..186c7363 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -493,11 +493,11 @@ def _yaml_outputs(model_path, outputs_dir): yaml.dump(model, open(os.path.join(outputs_dir,'model_results.yaml'),'w+'), default_flow_style=False) def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): - old_model = yaml.safe_load(open(old_results+'/model_results.yaml')) + old_model = yaml.safe_load(open(os.path.join(old_results,'model_results.yaml'))) - new_techs = yaml.safe_load(open(new_inputs+'/techs.yaml','r')) - new_loctechs = yaml.safe_load(open(new_inputs+'/locations.yaml','r')) - new_model = yaml.safe_load(open(new_inputs+'/model.yaml','r')) + new_techs = yaml.safe_load(open(os.path.join(new_inputs,'techs.yaml','r'))) + new_loctechs = yaml.safe_load(open(os.path.join(new_inputs,'locations.yaml','r'))) + new_model = yaml.safe_load(open(os.path.join(new_inputs,'model.yaml','r'))) built_tech_names = {} built_techs = {} @@ -584,12 +584,15 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): ts_df['Unnamed: 0'] = pd.to_datetime(ts_df['Unnamed: 0']) freq = pd.infer_freq(ts_df['Unnamed: 0']) if not calendar.isleap(new_year): - feb_29_mask = (ts_df['Unnamed: 0'].month == 2) & (ts_df['Unnamed: 0'].index.day == 29) + feb_29_mask = (ts_df['Unnamed: 0'].dt.month == 2) & (ts_df['Unnamed: 0'].dt.day == 29) ts_df = ts_df[~feb_29_mask] - ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) elif not calendar.isleap(old_year): - ts_df['Unnamed: 0'] = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) - ts_df.index = ts_df['Unnamed: 0'] + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + idx = pd.date_range(ts_df.index.min(),ts_df.index.max(),freq=freq) + ts_df = ts_df.reindex(idx, fill_value=0) # Leap Year Handling (Fill w/ Feb 28th) feb_28_mask = (ts_df.index.month == 2) & (ts_df.index.day == 28) @@ -598,8 +601,11 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): feb_29 = ts_df.loc[feb_29_mask, 'value'].values if ((len(feb_29) > 0) & (len(feb_28) > 0)): ts_df.loc[feb_29_mask, 'value'] = feb_28 - ts_df['Unnamed: 0'] = ts_df.index - ts_df.to_csv(new_inputs+filename+'-'+str(old_year)+'.csv',index=False) + else: + ts_df.index = ts_df['Unnamed: 0'].apply(lambda x: 
x.replace(year=new_year)) + ts_df.drop(columns=['Unnamed: 0'], inplace=True) + ts_df.index.name = None + ts_df.to_csv(os.path.join(new_inputs,filename+'-'+str(old_year)+'.csv'),index=True) loc_tech_b[x][y] = 'file='+filename+'-'+str(old_year)+'.csv:value' except TypeError: continue @@ -683,11 +689,11 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): if t in c.get('techs_rhs',[]) and t+'_'+str(old_year) not in c.get('techs',[]): new_model['group_constraints'][g]['techs_rhs'].append(t+'_'+str(old_year)) - with open(new_inputs+'/techs.yaml','w') as outfile: + with open(os.path.join(new_inputs,'techs.yaml','w')) as outfile: yaml.dump(new_techs,outfile,default_flow_style=False) - with open(new_inputs+'/locations.yaml','w') as outfile: + with open(os.path.join(new_inputs,'locations.yaml','w')) as outfile: yaml.dump(new_loctechs,outfile,default_flow_style=False) - with open(new_inputs+'/model.yaml', 'w') as outfile: + with open(os.path.join(new_inputs,'model.yaml', 'w')) as outfile: yaml.dump(new_model,outfile,default_flow_style=False) From c011795dceeb7f9fa3fe1baa89b6fd8a64ad9d8a Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Tue, 29 Apr 2025 10:48:04 -0600 Subject: [PATCH 18/29] Fixing issue with open/path Fixing issue with accidently passing the open type into os.path.join() --- calliope_app/api/calliope_utils.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 186c7363..43a7ddc4 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -452,9 +452,9 @@ def _yaml_outputs(model_path, outputs_dir): results_var = {'energy_cap':'results_energy_cap.csv','storage_cap':'results_storage_cap.csv'} inputs_dir = os.path.join(base_path, 'inputs') - model = yaml.load(open(os.path.join(inputs_dir,'model.yaml')), Loader=yaml.FullLoader) - model.update(yaml.load(open(os.path.join(inputs_dir,'locations.yaml')), Loader=yaml.FullLoader)) - model.update(yaml.load(open(os.path.join(inputs_dir,'techs.yaml')), Loader=yaml.FullLoader)) + model = yaml.safe_load(open(os.path.join(inputs_dir,'model.yaml'),'r')) + model.update(yaml.load(open(os.path.join(inputs_dir,'locations.yaml'),'r'))) + model.update(yaml.load(open(os.path.join(inputs_dir,'techs.yaml'),'r'))) has_outputs = False for v in results_var.keys(): @@ -493,11 +493,11 @@ def _yaml_outputs(model_path, outputs_dir): yaml.dump(model, open(os.path.join(outputs_dir,'model_results.yaml'),'w+'), default_flow_style=False) def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): - old_model = yaml.safe_load(open(os.path.join(old_results,'model_results.yaml'))) + old_model = yaml.safe_load(open(os.path.join(old_results,'model_results.yaml'),'r')) - new_techs = yaml.safe_load(open(os.path.join(new_inputs,'techs.yaml','r'))) - new_loctechs = yaml.safe_load(open(os.path.join(new_inputs,'locations.yaml','r'))) - new_model = yaml.safe_load(open(os.path.join(new_inputs,'model.yaml','r'))) + new_techs = yaml.safe_load(open(os.path.join(new_inputs,'techs.yaml'),'r')) + new_loctechs = yaml.safe_load(open(os.path.join(new_inputs,'locations.yaml'),'r')) + new_model = yaml.safe_load(open(os.path.join(new_inputs,'model.yaml'),'r')) built_tech_names = {} built_techs = {} @@ -689,11 +689,11 @@ def apply_gradient(old_inputs,old_results,new_inputs,old_year,new_year,logger): if t in c.get('techs_rhs',[]) and t+'_'+str(old_year) not in c.get('techs',[]): 
new_model['group_constraints'][g]['techs_rhs'].append(t+'_'+str(old_year)) - with open(os.path.join(new_inputs,'techs.yaml','w')) as outfile: + with open(os.path.join(new_inputs,'techs.yaml'),'w') as outfile: yaml.dump(new_techs,outfile,default_flow_style=False) - with open(os.path.join(new_inputs,'locations.yaml','w')) as outfile: + with open(os.path.join(new_inputs,'locations.yaml'),'w') as outfile: yaml.dump(new_loctechs,outfile,default_flow_style=False) - with open(os.path.join(new_inputs,'model.yaml', 'w')) as outfile: + with open(os.path.join(new_inputs,'model.yaml'), 'w') as outfile: yaml.dump(new_model,outfile,default_flow_style=False) From c635000b5cc14d566326af8d3e81009e3319af03 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Wed, 30 Apr 2025 12:57:42 -0600 Subject: [PATCH 19/29] Finalizing switch to yaml safe_load --- calliope_app/api/calliope_utils.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/calliope_app/api/calliope_utils.py b/calliope_app/api/calliope_utils.py index 43a7ddc4..7b0482b0 100644 --- a/calliope_app/api/calliope_utils.py +++ b/calliope_app/api/calliope_utils.py @@ -283,7 +283,7 @@ def run_clustered(model_path, idx, logger): def _set_clustering(model_path, on=False, k=14): # Read with open(model_path) as file: - model_yaml = yaml.load(file, Loader=yaml.FullLoader) + model_yaml = yaml.safe_load(file) # Update if on is True: time = {} @@ -303,7 +303,7 @@ def _set_clustering(model_path, on=False, k=14): def _set_subset_time(model_path, start_time=None, end_time=None): # Read with open(model_path) as file: - model_yaml = yaml.load(file, Loader=yaml.FullLoader) + model_yaml = yaml.safe_load(file) # Update if start_time is not None: subset_time = [start_time, end_time] @@ -320,7 +320,7 @@ def _set_capacities(model_path, ignore_techs=[], # ---- UPDATE MODEL REFERENCE # Read with open(model_path) as file: - model_yaml = yaml.load(file, Loader=yaml.FullLoader) + model_yaml = yaml.safe_load(file) # Update Model Settings if capacity is None: model_yaml['import'] = ['techs.yaml', 'locations.yaml'] @@ -335,7 +335,7 @@ def _set_capacities(model_path, ignore_techs=[], # Read locations_path = model_path.replace('model.yaml', 'locations.yaml') with open(locations_path) as file: - locations_yaml = yaml.load(file, Loader=yaml.FullLoader) + locations_yaml = yaml.safe_load(file) # Update Locations Settings for loc, loc_data in locations_yaml['locations'].items(): if 'techs' not in loc_data: @@ -453,8 +453,8 @@ def _yaml_outputs(model_path, outputs_dir): inputs_dir = os.path.join(base_path, 'inputs') model = yaml.safe_load(open(os.path.join(inputs_dir,'model.yaml'),'r')) - model.update(yaml.load(open(os.path.join(inputs_dir,'locations.yaml'),'r'))) - model.update(yaml.load(open(os.path.join(inputs_dir,'techs.yaml'),'r'))) + model.update(yaml.safe_load(open(os.path.join(inputs_dir,'locations.yaml'),'r'))) + model.update(yaml.safe_load(open(os.path.join(inputs_dir,'techs.yaml'),'r'))) has_outputs = False for v in results_var.keys(): From 56b757e8215f6fb2501b767ef9ca8e8ef6933427 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 May 2025 15:46:47 +0000 Subject: [PATCH 20/29] Bump django from 4.2.20 to 4.2.21 in /calliope_app Bumps [django](https://github.com/django/django) from 4.2.20 to 4.2.21. - [Commits](https://github.com/django/django/compare/4.2.20...4.2.21) --- updated-dependencies: - dependency-name: django dependency-version: 4.2.21 dependency-type: direct:production ... 
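Aside from the patches themselves: patch 18 above fixes calls in which the file mode had slipped inside os.path.join (yielding a path ending in .../r or .../w), and patch 19 finishes the move from yaml.load to yaml.safe_load, which only builds standard YAML types and never arbitrary Python objects. A small illustration of both points; the paths are illustrative, not repository code.

import os
import yaml

# os.path.join treats a stray mode string as another path component, so the bug
# surfaces as a missing-file error rather than a syntax error.
print(os.path.join('inputs', 'techs.yaml', 'r'))   # inputs/techs.yaml/r  (wrong)
print(os.path.join('inputs', 'techs.yaml'))        # inputs/techs.yaml    (right)
# The mode belongs on open():  open(os.path.join('inputs', 'techs.yaml'), 'r')

# safe_load parses plain data without instantiating Python objects,
# unlike yaml.load with an unsafe loader.
doc = "techs:\n  pv:\n    constraints: {energy_cap_max: 100}\n"
print(yaml.safe_load(doc))   # {'techs': {'pv': {'constraints': {'energy_cap_max': 100}}}}
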
Signed-off-by: dependabot[bot] --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index 0ce54b3a..c432dd29 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -3,7 +3,7 @@ coolprop<=6.7.0 git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.20 +django==4.2.21 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From 5217bbb9d26e550f51984c36930a7ca42a17fa29 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Fri, 1 Aug 2025 20:40:39 -0600 Subject: [PATCH 21/29] Upgrade django from 4.2.21 to 4.2.23 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index c432dd29..fe6b9a5a 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -3,7 +3,7 @@ coolprop<=6.7.0 git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.21 +django==4.2.23 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From 5739d456bbed2ee114fcc28da7d0f5e1ea03ebe4 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Fri, 1 Aug 2025 21:52:09 -0600 Subject: [PATCH 22/29] Pin GeophiresX version to v3.9.8 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index fe6b9a5a..05e4e84b 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -1,6 +1,6 @@ django_ratelimit==4.1.0 coolprop<=6.7.0 -git+https://github.com/NREL/GEOPHIRES-X.git#egg=geophires-x +git+https://github.com/NREL/GEOPHIRES-X.git@v3.9.8#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 django==4.2.23 From f42284fb97e2a55ce68cc28ef8a7499569ad0191 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Wed, 6 Aug 2025 13:25:33 -0600 Subject: [PATCH 23/29] Install geophires-x tag 3.8.10 --- calliope_app/compose/Dockerfile | 4 ++-- calliope_app/requirements.txt | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/calliope_app/compose/Dockerfile b/calliope_app/compose/Dockerfile index b99b8ac9..fa8bb910 100644 --- a/calliope_app/compose/Dockerfile +++ b/calliope_app/compose/Dockerfile @@ -29,8 +29,8 @@ RUN apt-get update -y --fix-missing \ # install python packages WORKDIR /www COPY requirements.txt requirements-dev.txt /www/ -RUN pip install --upgrade pip -RUN pip install -r requirements.txt && pip install -r requirements-dev.txt +RUN pip install --upgrade pip && pip install -r requirements.txt && pip install -r requirements-dev.txt +RUN pip install "git+https://github.com/NREL/GEOPHIRES-X.git@v3.8.10" # Install calliope without dependencies, as already installed in requirements RUN pip install calliope==0.6.8 --no-deps diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index 05e4e84b..bd85fb70 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -1,6 +1,5 @@ django_ratelimit==4.1.0 coolprop<=6.7.0 -git+https://github.com/NREL/GEOPHIRES-X.git@v3.9.8#egg=geophires-x boto3==1.24.37 celery[redis]==5.3.0 django==4.2.23 @@ -8,7 +7,7 @@ django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 flower>=2.0.1 -nrel-pysam==2.2.0 +nrel-pysam==3.0.2 pint==0.21 psycopg2-binary==2.9.3 pyyaml==6.0 From 9fb0f9efa1eb29a27f42c86246bf7d48acb9eb8f Mon Sep 17 
00:00:00 2001 From: jgu2 Date: Mon, 15 Sep 2025 17:04:18 -0600 Subject: [PATCH 24/29] Upgrade Django to 4.2.24 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index bd85fb70..a54f6ac6 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -2,7 +2,7 @@ django_ratelimit==4.1.0 coolprop<=6.7.0 boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.23 +django==4.2.24 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From bfe6e3d3451a85fac76f336a1716f29b3b8c8549 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Oct 2025 23:36:58 +0000 Subject: [PATCH 25/29] Bump django from 4.2.24 to 4.2.25 in /calliope_app Bumps [django](https://github.com/django/django) from 4.2.24 to 4.2.25. - [Commits](https://github.com/django/django/compare/4.2.24...4.2.25) --- updated-dependencies: - dependency-name: django dependency-version: 4.2.25 dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index a54f6ac6..b9e0bc08 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -2,7 +2,7 @@ django_ratelimit==4.1.0 coolprop<=6.7.0 boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.24 +django==4.2.25 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From a65c3459d3fee9454222c5fa2362f33c2530c9a4 Mon Sep 17 00:00:00 2001 From: jgu2 Date: Fri, 7 Nov 2025 09:29:12 -0700 Subject: [PATCH 26/29] Upgrade django to 4.2.26 --- calliope_app/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/calliope_app/requirements.txt b/calliope_app/requirements.txt index b9e0bc08..d34203a2 100644 --- a/calliope_app/requirements.txt +++ b/calliope_app/requirements.txt @@ -2,7 +2,7 @@ django_ratelimit==4.1.0 coolprop<=6.7.0 boto3==1.24.37 celery[redis]==5.3.0 -django==4.2.25 +django==4.2.26 django-crispy-forms==1.14.0 django-environ>=0.4.5 django-modeltranslation==0.18.12 From 1b7c7796963364c6167339e734d5af974471d693 Mon Sep 17 00:00:00 2001 From: "Morris, James" Date: Tue, 9 Dec 2025 12:44:40 -0700 Subject: [PATCH 27/29] Updating for new lab change Updating references to the old lab name/acronym to use the new name/NLR --- .gitignore | 5 ++++- README.md | 6 +++--- calliope_app/api/models/engage.py | 2 +- calliope_app/api/views/outputs.py | 2 +- calliope_app/calliope_app/settings/base.py | 2 +- calliope_app/calliope_app/urls.py | 2 +- calliope_app/client/templates/base.html | 2 +- calliope_app/client/templates/locations.html | 2 +- .../notifications/notify_user_on_run_failure.txt | 2 +- .../notifications/notify_user_on_run_success.txt | 2 +- .../client/templates/registration/login.html | 2 +- .../templates/registration/pw_reset_email.html | 2 +- .../templates/registration/pw_reset_subject.txt | 2 +- .../templates/registration/user_activation.txt | 2 +- calliope_app/locale/en/LC_MESSAGES/django.po | 2 +- calliope_app/locale/es/LC_MESSAGES/django.po | 4 ++-- calliope_app/locale/fr/LC_MESSAGES/django.po | 4 ++-- calliope_app/setup.py | 2 +- docs/_sources/contributing-to-engage.rst.txt | 4 ++-- docs/_sources/getting-started.rst.txt | 6 +++--- docs/_sources/index.rst.txt | 6 +++--- docs/_sources/nrel-deployment.rst.txt | 8 ++++---- 
docs/_sources/release-notes.rst.txt | 2 +- docs/contributing-to-engage.html | 10 +++++----- docs/genindex.html | 2 +- docs/getting-started.html | 6 +++--- docs/index.html | 12 ++++++------ docs/language-translation.html | 2 +- docs/license.html | 2 +- docs/nrel-deployment.html | 16 ++++++++-------- docs/optimization-solvers.html | 4 ++-- docs/release-notes.html | 8 ++++---- .../national_scale/timeseries_data/README.html | 2 +- .../sample-timeseries-data/README.html | 2 +- docs/search.html | 2 +- docs/searchindex.js | 2 +- docs/sources/conf.py | 2 +- docs/sources/contributing-to-engage.rst | 4 ++-- docs/sources/getting-started.rst | 6 +++--- docs/sources/index.rst | 6 +++--- docs/sources/nrel-deployment.rst | 8 ++++---- docs/sources/optimization-solvers.rst | 2 +- docs/sources/release-notes.rst | 2 +- 43 files changed, 88 insertions(+), 85 deletions(-) diff --git a/.gitignore b/.gitignore index b7fe82f5..977fd1f0 100644 --- a/.gitignore +++ b/.gitignore @@ -168,4 +168,7 @@ data/timeseries/* !data/timeseries/50fa074e-9279-4b3f-bead-7762fe1643f3.csv !data/timeseries/141a6e6c-e47f-4363-85ed-8d40b525ac77.csv !data/timeseries/f97df022-1dc7-4f59-bb15-3e0d94f4ba9d.csv -!data/timeseries/dac8030c-c9b5-48db-8550-cc78312ddabb.csv \ No newline at end of file +!data/timeseries/dac8030c-c9b5-48db-8550-cc78312ddabb.csv +calliope_app/scripts/fixtures/parameters.xlsx +calliope_app/scripts/fixtures/template_loc_tech_params.xlsx +calliope_app/scripts/fixtures/template_tech_params.xlsx diff --git a/README.md b/README.md index ead2d7e2..ef11c1d3 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# NREL Engage: A [Calliope](https://github.com/calliope-project/calliope) Web Environment for Collaborative Energy Systems Modeling and Planning +# NLR Engage: A [Calliope](https://github.com/calliope-project/calliope) Web Environment for Collaborative Energy Systems Modeling and Planning -Engage is a free, open-access energy system planning tool that allows multiple users/stakeholders to develop and collaborate on capacity expansion models. The tool, developed at the National Renewable Energy Laboratory (NREL), provides a collaborative and easy-to-use interface built on Calliope, a multi-scale energy systems modeling framework. +Engage is a free, open-access energy system planning tool that allows multiple users/stakeholders to develop and collaborate on capacity expansion models. The tool, developed at the National Laboratory of the Rockies (NLR), provides a collaborative and easy-to-use interface built on Calliope, a multi-scale energy systems modeling framework. 
-- Visit the NREL hosted webtool at https://engage.nrel.gov/ +- Visit the NLR hosted webtool at https://engage.nrel.gov/ ## Requirements diff --git a/calliope_app/api/models/engage.py b/calliope_app/api/models/engage.py index 822998a0..3c65c62b 100644 --- a/calliope_app/api/models/engage.py +++ b/calliope_app/api/models/engage.py @@ -84,7 +84,7 @@ def register(http_host, email, password, first_name, last_name, organization): message = render_to_string("registration/user_activation.txt", context) try: send_mail( - subject="NREL ENGAGE Registration", + subject="NLR ENGAGE Registration", message=message, from_email=settings.AWS_SES_FROM_EMAIL, recipient_list=[email] diff --git a/calliope_app/api/views/outputs.py b/calliope_app/api/views/outputs.py index c6dbd9df..2c921b91 100644 --- a/calliope_app/api/views/outputs.py +++ b/calliope_app/api/views/outputs.py @@ -443,7 +443,7 @@ def delete_run(request): @csrf_protect def publish_run(request): """ - Publish a scenario run to Cambium (https://cambium.nrel.gov/) + Publish a scenario run to the Scenario Viewer (https://scenarioviewer.nrel.gov/) Parameters: model_uuid (uuid): required diff --git a/calliope_app/calliope_app/settings/base.py b/calliope_app/calliope_app/settings/base.py index b607ec46..43e71888 100644 --- a/calliope_app/calliope_app/settings/base.py +++ b/calliope_app/calliope_app/settings/base.py @@ -148,7 +148,7 @@ os.path.join(ROOT_DIR, 'locale'), ) -## NREL API Key +## NLR API Key NREL_API_EMAIL = env.str("NREL_API_EMAIL", "") NREL_API_KEY = env.str("NREL_API_KEY", "") diff --git a/calliope_app/calliope_app/urls.py b/calliope_app/calliope_app/urls.py index 778e6dac..da00a76d 100644 --- a/calliope_app/calliope_app/urls.py +++ b/calliope_app/calliope_app/urls.py @@ -20,7 +20,7 @@ from django.conf.urls.i18n import i18n_patterns from django.conf.urls.static import static -admin.site.site_header = 'NREL\'s Calliope Admin' +admin.site.site_header = 'NLR\'s Calliope Admin' urlpatterns = [ path('i18n/', include('django.conf.urls.i18n')), diff --git a/calliope_app/client/templates/base.html b/calliope_app/client/templates/base.html index 25b9d740..097a05c1 100644 --- a/calliope_app/client/templates/base.html +++ b/calliope_app/client/templates/base.html @@ -238,7 +238,7 @@ {% trans "Project Assistance" %}:  - Tom Harris + James Morris
diff --git a/calliope_app/client/templates/locations.html b/calliope_app/client/templates/locations.html index dc5dd119..5bcf8ab7 100644 --- a/calliope_app/client/templates/locations.html +++ b/calliope_app/client/templates/locations.html @@ -4,7 +4,7 @@ {% load i18n %} {% block head %} -NREL Engage | {{ model.name }} | Locations +NLR Engage | {{ model.name }} | Locations