Merge pull request #118 from moia-oss/master
update matsim 2024-09-11-11-06
nkuehnel authored Sep 11, 2024
2 parents dac8f06 + 7a610be commit a92bd23
Showing 33 changed files with 1,388 additions and 273 deletions.
10 changes: 8 additions & 2 deletions contribs/application/pom.xml
@@ -148,8 +148,14 @@
<artifactId>xercesImpl</artifactId>
<version>2.12.2</version>
</dependency>
-
-</dependencies>
+<dependency>
+	<groupId>org.matsim.contrib</groupId>
+	<artifactId>dvrp</artifactId>
+	<version>2025.0-SNAPSHOT</version>
+	<scope>compile</scope>
+</dependency>
+
+</dependencies>

<build>
<plugins>
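The new compile-scope dvrp dependency means code in the application contrib can now reference dvrp classes directly. A minimal hypothetical sketch of what this allows (class name and usage are illustrative, not part of this commit):

import org.matsim.contrib.dvrp.run.DvrpConfigGroup;
import org.matsim.core.config.Config;
import org.matsim.core.config.ConfigUtils;

// Hypothetical example: with dvrp on the compile classpath, application-contrib
// code can e.g. register the DVRP config group when assembling a Config.
public class DvrpDependencyExample {
	public static void main(String[] args) {
		Config config = ConfigUtils.createConfig(new DvrpConfigGroup());
		System.out.println(config.getModules().keySet());
	}
}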
@@ -18,6 +18,7 @@
import org.apache.logging.log4j.Logger;
import org.matsim.api.core.v01.Coord;
import org.matsim.application.avro.XYTData;
+import org.matsim.application.options.CsvOptions;
import org.matsim.core.config.Config;
import org.matsim.core.utils.io.IOUtils;
import org.matsim.core.utils.misc.Time;
@@ -92,7 +93,7 @@ public void setMaxTime(int maxTime) {
/**
* Merges noise data from multiple files into one file.
*/
-public void run() {
+public void run() throws IOException {
mergeReceiverPointData(outputDirectory + "/immissions/", "immission");
mergeReceiverPointData(outputDirectory + "/damages_receiverPoint/", "damages_receiverPoint");
mergeLinkData(outputDirectory.toString() + "/emissions/", "emission");
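Because run() now declares the checked IOException (propagated from the CSV reading shown below), callers have to handle or rethrow it. A minimal hypothetical call site ("merger" stands in for an instance of this merging class; requires java.io.IOException and java.io.UncheckedIOException):

try {
	merger.run();
} catch (IOException e) {
	throw new UncheckedIOException("Failed to merge noise output", e);
}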
@@ -116,7 +117,7 @@ private void writeAvro(XYTData xytData, File output) {
}
}

-private void mergeLinkData(String pathParameter, String label) {
+private void mergeLinkData(String pathParameter, String label) throws IOException {
log.info("Merging emissions data for label {}", label);
Object2DoubleMap<String> mergedData = new Object2DoubleOpenHashMap<>();
Table csvOutputMerged = Table.create(TextColumn.create("Link Id"), DoubleColumn.create("value"));
@@ -126,9 +127,10 @@ private void mergeLinkData(String pathParameter, String label) {

// Read the file
Table table = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path))
-.columnTypesPartial(Map.of("Link Id", ColumnType.TEXT))
+.columnTypesPartial(Map.of("Link Id", ColumnType.TEXT,
+	"Noise Emission " + Time.writeTime(time, Time.TIMEFORMAT_HHMMSS), ColumnType.DOUBLE))
.sample(false)
-.separator(';').build());
+.separator(CsvOptions.detectDelimiter(path)).build());

for (Row row : table) {
String linkId = row.getString("Link Id");
@@ -157,7 +159,7 @@ private void mergeLinkData(String pathParameter, String label) {
* @param outputDir path to the receiverPoint data
* @param label label for the receiverPoint data (which kind of data)
*/
-private void mergeReceiverPointData(String outputDir, String label) {
+private void mergeReceiverPointData(String outputDir, String label) throws IOException {

// data per time step, maps coord to value
Int2ObjectMap<Object2FloatMap<FloatFloatPair>> data = new Int2ObjectOpenHashMap<>();
@@ -188,7 +190,7 @@ private void mergeReceiverPointData(String outputDir, String label) {
"t", ColumnType.DOUBLE,
valueHeader, ColumnType.DOUBLE))
.sample(false)
-.separator(';').build());
+.separator(CsvOptions.detectDelimiter(timeDataFile)).build());

// Loop over all rows in the data file
for (Row row : dataTable) {
@@ -265,7 +267,7 @@ private void mergeReceiverPointData(String outputDir, String label) {
// Merges the immissions data

@Deprecated
-private void mergeImmissionsCSV(String pathParameter, String label) {
+private void mergeImmissionsCSV(String pathParameter, String label) throws IOException {
log.info("Merging immissions data for label {}", label);
Object2DoubleMap<Coord> mergedData = new Object2DoubleOpenHashMap<>();

@@ -284,7 +286,7 @@ private void mergeImmissionsCSV(String pathParameter, String label) {
"Receiver Point Id", ColumnType.INTEGER,
"t", ColumnType.DOUBLE))
.sample(false)
-.separator(';').build());
+.separator(CsvOptions.detectDelimiter(path)).build());

// Loop over all rows in the file
for (Row row : table) {
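The recurring change in this file replaces the hard-coded ';' separator with CsvOptions.detectDelimiter(...), so the merge also works when the noise CSVs are comma-separated; the checked IOException this introduces is what forces the new throws clauses above. A hypothetical helper condensing the read pattern (assumes detectDelimiter accepts a path string and throws IOException, as the code above suggests):

import java.io.IOException;
import java.util.Map;

import org.matsim.application.options.CsvOptions;
import org.matsim.core.utils.io.IOUtils;

import tech.tablesaw.api.ColumnType;
import tech.tablesaw.api.Table;
import tech.tablesaw.io.csv.CsvReadOptions;

final class DetectedDelimiterRead {
	// Reads a CSV with the delimiter detected from the file instead of assuming ';'.
	static Table read(String path) throws IOException {
		return Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path))
			.columnTypesPartial(Map.of("Link Id", ColumnType.TEXT))
			.sample(false)
			.separator(CsvOptions.detectDelimiter(path))
			.build());
	}
}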
@@ -113,6 +113,7 @@ private static Feature createDefaultFeature(Link link) {

String highwayType = NetworkUtils.getHighwayType(link);
categories.put("highway_type", highwayType);
ft.put("idx", link.getId().index());
ft.put("speed", NetworkUtils.getAllowedSpeed(link));
ft.put("num_lanes", link.getNumberOfLanes());
ft.put("length", link.getLength());
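The new "idx" feature exposes the link's internal index (link.getId().index()) as a numeric attribute; the IndividualParams reference model added below reads it back via features.getDouble("idx") to select a per-link parameter.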
@@ -0,0 +1,43 @@
package org.matsim.application.prepare.network.params.ref;

import it.unimi.dsi.fastutil.objects.Object2DoubleMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectMap;
import org.matsim.application.prepare.Predictor;
import org.matsim.application.prepare.network.params.NetworkModel;

/**
* Reference model that uses one specific speed factor for each link.
*/
public final class IndividualParams implements NetworkModel {

private static final Predictor INSTANCE = new Model();

@Override
public Predictor speedFactor(String junctionType, String highwayType) {
return INSTANCE;
}

private static final class Model implements Predictor {

@Override
public double predict(Object2DoubleMap<String> features, Object2ObjectMap<String, String> categories) {
return predict(features, categories, new double[0]);
}

@Override
public double predict(Object2DoubleMap<String> features, Object2ObjectMap<String, String> categories, double[] params) {
if (params.length == 0)
return 1;

return params[(int) features.getDouble("idx")];
}

@Override
public double[] getData(Object2DoubleMap<String> features, Object2ObjectMap<String, String> categories) {
return new double[]{
features.getDouble("idx")
};
}
}

}
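A hypothetical usage sketch of the new reference model (class name and values are illustrative, not part of this commit): the model returns the same Predictor regardless of junction or highway type and indexes the parameter vector by the link's "idx" feature written above.

import it.unimi.dsi.fastutil.objects.Object2DoubleMap;
import it.unimi.dsi.fastutil.objects.Object2DoubleOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2ObjectMaps;
import org.matsim.application.prepare.Predictor;
import org.matsim.application.prepare.network.params.NetworkModel;
import org.matsim.application.prepare.network.params.ref.IndividualParams;

final class IndividualParamsExample {
	public static void main(String[] args) {
		NetworkModel model = new IndividualParams();
		// The same predictor instance is returned for every junction/highway type.
		Predictor predictor = model.speedFactor("traffic_light", "primary");

		Object2DoubleMap<String> features = new Object2DoubleOpenHashMap<>();
		features.put("idx", 2);  // link index, as written by createDefaultFeature above

		double[] params = {0.9, 1.0, 0.75, 0.85};  // one speed factor per link
		double factor = predictor.predict(features, Object2ObjectMaps.emptyMap(), params);
		System.out.println(factor);  // 0.75; with an empty params array the model falls back to 1
	}
}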