diff --git a/.github/workflows/cicd.yml b/.github/workflows/cicd.yml
index 7b577df..4b2cac3 100644
--- a/.github/workflows/cicd.yml
+++ b/.github/workflows/cicd.yml
@@ -9,7 +9,7 @@ on:
   release:
     types: [ created ]
   workflow_dispatch: # manual trigger
-
+
 jobs:
   test:
     # Matrix testing to test across OSs and Python versions
@@ -18,27 +18,52 @@ jobs:
       fail-fast: true
       matrix:
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
-    # Necessary for poetry & Windows
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+        version-constraints:
+          - ""
+          - "'pandas==1.*' 'numpy==1.*'"
+        exclude:
+          # pandas 1.x is not compatible with python 3.13
+          - python-version: "3.13"
+            version-constraints: "'pandas==1.*' 'numpy==1.*'"
+          # uv run segfaults for windows on python 3.13
+          - os: "windows-latest"
+            python-version: '3.13'
+          # numpy build issues on macos-latest
+          - os: "macos-latest"
+            python-version: '3.13'
+
+
+    # Necessary for Windows
     defaults:
       run:
         shell: bash
     runs-on: ${{ matrix.os }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
 
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@v3
+        uses: astral-sh/setup-uv@v6
         with:
           enable-cache: true
 
       - name: Sync dependencies
         run: uv sync
 
+      - name: Resync with restricted versions
+        if: ${{ matrix.version-constraints != '' }}
+        run: |
+          uv pip install ${{ matrix.version-constraints }} --reinstall --strict
+
       - name: Run tests
         run: uv run --frozen pytest tests src -ra --doctest-modules
-
+
   publish:
     if: github.event_name == 'release' && github.event.action == 'created'
     needs: test
@@ -52,10 +77,10 @@ jobs:
     steps:
       # Checkout repo
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       # Install uv with cache
       - name: Install the latest version of uv
-        uses: astral-sh/setup-uv@v3
+        uses: astral-sh/setup-uv@v6
         with:
           enable-cache: true
       # Sync dependencies
diff --git a/pyproject.toml b/pyproject.toml
index 0a26558..6dc84f3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ authors = [
 ]
 dependencies = [
     "mip==1.16rc0",
-    "pandas>=2.2.2",
+    "pandas>=1.5",
     "xmltodict==0.12.0",
     "requests>=2.0.0",
 ]
diff --git a/src/nempy/markets.py b/src/nempy/markets.py
index be76993..a27eb38 100644
--- a/src/nempy/markets.py
+++ b/src/nempy/markets.py
@@ -3167,7 +3167,7 @@ def not_closest_three(df):
             df = df.iloc[3:, :]
             return df
 
-        vars_to_remove = vars.groupby(['interconnector', 'link'], as_index=False).apply(not_closest_three, include_groups=False)
+        vars_to_remove = vars.groupby(['interconnector', 'link'], as_index=False)[['distance', 'variable_id']].apply(not_closest_three)
         si.disable_variables(vars_to_remove.loc[:, ['variable_id']])
 
     def get_constraint_set_names(self):
diff --git a/src/nempy/spot_market_backend/dataframe_validator.py b/src/nempy/spot_market_backend/dataframe_validator.py
index 29c3e71..4ee7b18 100644
--- a/src/nempy/spot_market_backend/dataframe_validator.py
+++ b/src/nempy/spot_market_backend/dataframe_validator.py
@@ -71,12 +71,25 @@ def _check_data_type(self, series):
             if not all(series.apply(lambda x: callable(x))):
                 raise ColumnDataTypeError('All elements of column {} should have type callable'.format(self.name))
         elif self.data_type != series.dtype:
-            raise ColumnDataTypeError('Column {} should have type {}'.format(self.name, self.data_type))
+            raise ColumnDataTypeError(
+                'Column {} should have type {}, but found {}'.format(
+                    self.name,
+                    self.data_type,
+                    series.dtype,
+                )
+            )
 
     def _check_allowed_values(self, series):
         if self.allowed_values is not None:
-            if not series.isin(self.allowed_values).all():
-                raise ColumnValues("The column {} can only contain the values {}.".format(self.name, self.allowed_values))
+            invalid_values = ~series.isin(self.allowed_values)
+            if invalid_values.any():
+                raise ColumnValues(
+                    "The column {} has invalid entries ({}). Must only contain the values {}.".format(
+                        self.name,
+                        sorted(series[invalid_values].unique().tolist()),
+                        self.allowed_values,
+                    )
+                )
 
     def _check_is_real_number(self, series):
         if self.must_be_real_number:
diff --git a/src/nempy/spot_market_backend/solver_interface.py b/src/nempy/spot_market_backend/solver_interface.py
index 091c2d4..cd1a01a 100644
--- a/src/nempy/spot_market_backend/solver_interface.py
+++ b/src/nempy/spot_market_backend/solver_interface.py
@@ -114,26 +114,24 @@ def add_sos_type_2(self, sos_variables, sos_id_columns, position_column):
         """
-        # Function that adds sets to mip model.
-        def add_sos_vars(sos_group):
-            self.mip_model.add_sos(list(zip(sos_group['vars'], sos_group[position_column])), 2)
-
         # For each variable_id get the variable object from the mip model
         sos_variables['vars'] = sos_variables['variable_id'].apply(lambda x: self.variables[x])
+
         # Break up the sets based on their id and add them to the model separately.
-        sos_variables.groupby(sos_id_columns).apply(add_sos_vars, include_groups=False)
+        for _, sos_group in sos_variables.groupby(sos_id_columns):
+            self.mip_model.add_sos(list(zip(sos_group['vars'], sos_group[position_column])), 2)
+
         # This is a hack to make sure mip knows there are binary constraints.
         self.mip_model.add_var(var_type=BINARY, obj=0.0)
 
     def add_sos_type_1(self, sos_variables):
-        # Function that adds sets to mip model.
-        def add_sos_vars(sos_group):
-            self.mip_model.add_sos(list(zip(sos_group['vars'], [1.0 for i in range(len(sos_variables['vars']))])), 1)
-
         # For each variable_id get the variable object from the mip model
         sos_variables['vars'] = sos_variables['variable_id'].apply(lambda x: self.variables[x])
+
         # Break up the sets based on their id and add them to the model separately.
-        sos_variables.groupby('sos_id').apply(add_sos_vars)
+        for _, sos_group in sos_variables.groupby('sos_id'):
+            self.mip_model.add_sos(list(zip(sos_group['vars'], [1.0 for i in range(len(sos_variables['vars']))])), 1)
+
         # This is a hack to make mip knows there are binary constraints.
         self.mip_model.add_var(var_type=BINARY, obj=0.0)
diff --git a/tests/test_markets.py b/tests/test_markets.py
index 086ce93..f29c358 100644
--- a/tests/test_markets.py
+++ b/tests/test_markets.py
@@ -55,8 +55,14 @@ def test_one_region_energy_market():
         'dispatch': [40.0, 20.0]
     })
 
+    expected_region_dispatch_summary = pd.DataFrame({
+        'region': ['NSW'],
+        'dispatch': [60.0],
+    })
+
     assert_frame_equal(market.get_energy_prices(), expected_prices)
     assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
+    assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)
 
 
 def test_two_region_energy_market():
@@ -111,8 +117,14 @@ def test_two_region_energy_market():
         'dispatch': [60.0, 80.0]
     })
 
+    expected_region_dispatch_summary = pd.DataFrame({
+        'region': ['NSW', 'VIC'],
+        'dispatch': [60.0, 80.0],
+    })
+
     assert_frame_equal(market.get_energy_prices(), expected_prices)
     assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
+    assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)
 
 
 def test_one_interconnector():
@@ -204,9 +216,18 @@ def constant_losses(flow):
         'losses': [(90.0/0.975) * 0.05]
     })
 
+    expected_region_dispatch_summary = pd.DataFrame({
+        'region': ['NSW', 'VIC'],
+        'dispatch': [94.615385, 0.0],
+        'inflow': [-92.307692, 92.307692],
+        'transmission_losses': [0.0, 0.0],
+        'interconnector_losses': [2.307692, 2.307692],
+    })
+
     assert_frame_equal(market.get_energy_prices(), expected_prices)
     assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
     assert_frame_equal(market.get_interconnector_flows(), expected_interconnector_flow)
+    assert_frame_equal(market.get_region_dispatch_summary(), expected_region_dispatch_summary)
 
 
 def test_one_region_energy_and_raise_regulation_markets():
@@ -1049,4 +1070,4 @@ def test_setting_constraint_on_unit_with_no_bid_volume_doesnt_raise_error():
     })
 
     assert_frame_equal(market.get_energy_prices(), expected_prices)
-    assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)
\ No newline at end of file
+    assert_frame_equal(market.get_unit_dispatch(), expected_dispatch)