Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 33 additions & 8 deletions .github/workflows/cicd.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ on:
release:
types: [ created ]
workflow_dispatch: # manual trigger

jobs:
test:
# Matrix testing to test across OSs and Python versions
Expand All @@ -18,27 +18,52 @@ jobs:
fail-fast: true
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
# Necessary for poetry & Windows
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
version-constraints:
- ""
- "'pandas==1.*' 'numpy==1.*'"
exclude:
# pandas 1.x is not compatible with python 3.13
- python-version: "3.13"
version-constraints: "'pandas==1.*' 'numpy==1.*'"
# uv run segfaults for windows on python 3.13
- os: "windows-latest"
python-version: '3.13'
# numpy build issues on macos-latest
- os: "macos-latest"
python-version: '3.13'


# Necessary for Windows
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Install the latest version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v6
with:
enable-cache: true

- name: Sync dependencies
run: uv sync

- name: Resync with restricted versions
if: ${{ matrix.version-constraints != '' }}
run: |
uv pip install ${{ matrix.version-constraints }} --reinstall --strict

- name: Run tests
run: uv run --frozen pytest tests src -ra --doctest-modules

publish:
if: github.event_name == 'release' && github.event.action == 'created'
needs: test
Expand All @@ -52,10 +77,10 @@ jobs:
steps:
# Checkout repo
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
# Install uv with cache
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
# Sync dependencies
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ authors = [
]
dependencies = [
"mip==1.16rc0",
"pandas>=2.2.2",
"pandas>=1.5",
"xmltodict==0.12.0",
"requests>=2.0.0",
]
Expand Down
2 changes: 1 addition & 1 deletion src/nempy/markets.py
Original file line number Diff line number Diff line change
Expand Up @@ -3167,7 +3167,7 @@ def not_closest_three(df):
df = df.iloc[3:, :]
return df

vars_to_remove = vars.groupby(['interconnector', 'link'], as_index=False).apply(not_closest_three, include_groups=False)
vars_to_remove = vars.groupby(['interconnector', 'link'], as_index=False)[['distance', 'variable_id']].apply(not_closest_three)
si.disable_variables(vars_to_remove.loc[:, ['variable_id']])

def get_constraint_set_names(self):
Expand Down
19 changes: 16 additions & 3 deletions src/nempy/spot_market_backend/dataframe_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,12 +71,25 @@ def _check_data_type(self, series):
if not all(series.apply(lambda x: callable(x))):
raise ColumnDataTypeError('All elements of column {} should have type callable'.format(self.name))
elif self.data_type != series.dtype:
raise ColumnDataTypeError('Column {} should have type {}'.format(self.name, self.data_type))
raise ColumnDataTypeError(
'Column {} should have type {}, but found {}'.format(
self.name,
self.data_type,
series.dtype,
)
)

def _check_allowed_values(self, series):
if self.allowed_values is not None:
if not series.isin(self.allowed_values).all():
raise ColumnValues("The column {} can only contain the values {}.".format(self.name, self.allowed_values))
invalid_values = ~series.isin(self.allowed_values)
if invalid_values.any():
raise ColumnValues(
"The column {} has invalid entries ({}). Must only contain the values {}.".format(
self.name,
sorted(series[invalid_values].unique().tolist()),
self.allowed_values,
)
)

def _check_is_real_number(self, series):
if self.must_be_real_number:
Expand Down
18 changes: 8 additions & 10 deletions src/nempy/spot_market_backend/solver_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,26 +114,24 @@ def add_sos_type_2(self, sos_variables, sos_id_columns, position_column):

"""

# Function that adds sets to mip model.
def add_sos_vars(sos_group):
self.mip_model.add_sos(list(zip(sos_group['vars'], sos_group[position_column])), 2)

# For each variable_id get the variable object from the mip model
sos_variables['vars'] = sos_variables['variable_id'].apply(lambda x: self.variables[x])

# Break up the sets based on their id and add them to the model separately.
sos_variables.groupby(sos_id_columns).apply(add_sos_vars, include_groups=False)
for _, sos_group in sos_variables.groupby(sos_id_columns):
self.mip_model.add_sos(list(zip(sos_group['vars'], sos_group[position_column])), 2)

# This is a hack to make sure mip knows there are binary constraints.
self.mip_model.add_var(var_type=BINARY, obj=0.0)

def add_sos_type_1(self, sos_variables):
    """Add special ordered sets of type 1 to the mip model.

    Parameters
    ----------
    sos_variables : pd.DataFrame
        Must contain the columns 'variable_id' and 'sos_id'; rows sharing a
        'sos_id' form one SOS1 set. A 'vars' column is added as a side effect.
    """
    # For each variable_id get the corresponding variable object from the mip model.
    sos_variables['vars'] = sos_variables['variable_id'].apply(lambda x: self.variables[x])

    # Break up the sets based on their id and add them to the model separately.
    for _, sos_group in sos_variables.groupby('sos_id'):
        # SOS1 members all get a nominal weight of 1.0; size the weight list
        # to the group (previously it was sized to the whole frame and relied
        # on zip truncation to produce the same pairs).
        weights = [1.0] * len(sos_group)
        self.mip_model.add_sos(list(zip(sos_group['vars'], weights)), 1)

    # This is a hack to make sure mip knows there are binary constraints.
    self.mip_model.add_var(var_type=BINARY, obj=0.0)

Expand Down
23 changes: 22 additions & 1 deletion tests/test_markets.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,14 @@ def test_one_region_energy_market():
'dispatch': [40.0, 20.0]
})

expected_region_dispatch_summary = pd.DataFrame({
'region': ['NSW'],
'dispatch': [60.0],
})

assert_frame_equal(market.get_energy_prices(), expected_prices)
assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)


def test_two_region_energy_market():
Expand Down Expand Up @@ -111,8 +117,14 @@ def test_two_region_energy_market():
'dispatch': [60.0, 80.0]
})

expected_region_dispatch_summary = pd.DataFrame({
'region': ['NSW', 'VIC'],
'dispatch': [60.0, 80.0],
})

assert_frame_equal(market.get_energy_prices(), expected_prices)
assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)


def test_one_interconnector():
Expand Down Expand Up @@ -204,9 +216,18 @@ def constant_losses(flow):
'losses': [(90.0/0.975) * 0.05]
})

expected_region_dispatch_summary = pd.DataFrame({
'region': ['NSW', 'VIC'],
'dispatch': [94.615385, 0.0],
'inflow': [-92.307692, 92.307692],
'transmission_losses': [0.0, 0.0],
'interconnector_losses': [2.307692, 2.307692],
})

assert_frame_equal(market.get_energy_prices(), expected_prices)
assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
assert_frame_equal(market.get_interconnector_flows(), expected_interconnector_flow)
assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)


def test_one_region_energy_and_raise_regulation_markets():
Expand Down Expand Up @@ -1049,4 +1070,4 @@ def test_setting_constraint_on_unit_with_no_bid_volume_doesnt_raise_error():
})

assert_frame_equal(market.get_energy_prices(), expected_prices)
assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)