
Merge pull request #119 from moia-oss/master
update matsim CW45
mfrawley-moia authored Nov 5, 2024
2 parents a92bd23 + 263fd04 commit 9a5f465
Showing 191 changed files with 47,266 additions and 3,617 deletions.
2 changes: 1 addition & 1 deletion contribs/application/pom.xml
@@ -87,7 +87,7 @@
<dependency>
<groupId>com.github.matsim-org</groupId>
<artifactId>gtfs2matsim</artifactId>
<version>fc8b13954d</version>
<version>0bd5850fd6</version>
<exclusions>
<!-- Exclude unneeded dependencies and these with known CVE -->
<exclusion>
@@ -440,12 +440,17 @@ public static Path matchInput(String name, Path dir) {
if (path.isPresent())
return path.get();

// Match more general pattern at last
path = matchPattern(".+\\.[a-zA-Z0-9]*_" + name + "\\..+", dir);
// Match more general pattern
path = matchPattern(".+\\.[a-zA-Z0-9\\-]*_" + name + "\\..+", dir);
if (path.isPresent())
return path.get();

throw new IllegalArgumentException("Could not match input file: " + name);
// Even more permissive pattern
path = matchPattern(".+[a-zA-Z0-9_.\\-]*(_|\\.)" + name + ".+", dir);
if (path.isPresent())
return path.get();

throw new IllegalArgumentException("Could not match input file: %s (in %s)".formatted(name, dir));
}

private static Optional<Path> matchSuffix(String suffix, Path dir) {
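
For reference, a minimal standalone sketch (not part of the commit) of how the widened patterns in ApplicationUtils.matchInput behave. It uses plain java.util.regex instead of the matchPattern helper, and both file names are invented for illustration.

import java.util.regex.Pattern;

public class MatchInputPatternsSketch {
	public static void main(String[] args) {
		String name = "plans";
		// Pattern before this commit: only letters and digits allowed between the dot and "_<name>".
		String previous = ".+\\.[a-zA-Z0-9]*_" + name + "\\..+";
		// Widened pattern: hyphens are now allowed in that prefix segment.
		String widened = ".+\\.[a-zA-Z0-9\\-]*_" + name + "\\..+";
		// New last-resort pattern: "." is accepted as a separator as well.
		String permissive = ".+[a-zA-Z0-9_.\\-]*(_|\\.)" + name + ".+";

		String hyphenPrefix = "run.pre-output_plans.csv"; // invented file name
		String dotSeparated = "berlin.plans.xml.gz";      // invented file name

		System.out.println(Pattern.matches(previous, hyphenPrefix));   // false
		System.out.println(Pattern.matches(widened, hyphenPrefix));    // true
		System.out.println(Pattern.matches(widened, dotSeparated));    // false
		System.out.println(Pattern.matches(permissive, dotSeparated)); // true
	}
}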
@@ -7,6 +7,7 @@
import org.locationtech.jts.geom.Envelope;
import org.matsim.api.core.v01.Coord;
import org.matsim.api.core.v01.Scenario;
import org.matsim.api.core.v01.TransportMode;
import org.matsim.application.ApplicationUtils;
import org.matsim.application.CommandSpec;
import org.matsim.application.MATSimAppCommand;
@@ -20,6 +21,7 @@
import org.matsim.core.config.Config;
import org.matsim.core.config.ConfigUtils;
import org.matsim.core.scenario.ScenarioUtils;
import org.matsim.core.utils.collections.CollectionUtils;
import org.matsim.core.utils.geometry.CoordinateTransformation;
import org.matsim.core.utils.io.IOUtils;
import picocli.CommandLine;
@@ -28,10 +30,7 @@
import java.nio.file.Path;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.*;

@CommandLine.Command(
name = "noise-analysis",
@@ -88,10 +87,18 @@ public Integer call() throws Exception {
NoiseConfigGroup noiseParameters = ConfigUtils.addOrGetModule(config, NoiseConfigGroup.class);

if(overrideParameters){
log.warn("no NoiseConfigGroup was configured before. Will set some standards. You should check the next lines in the log file!");
log.warn("no NoiseConfigGroup was configured before. Will set some standards. You should check the next lines in the log file and the output_config.xml!");
noiseParameters.setConsideredActivitiesForReceiverPointGridArray(considerActivities.toArray(String[]::new));
noiseParameters.setConsideredActivitiesForDamageCalculationArray(considerActivities.toArray(String[]::new));

{
Set<String> set = CollectionUtils.stringArrayToSet( new String[]{TransportMode.bike, TransportMode.walk, TransportMode.transit_walk, TransportMode.non_network_walk} );
noiseParameters.setNetworkModesToIgnoreSet( set );
}
{
String[] set = new String[]{"freight"};
noiseParameters.setHgvIdPrefixesArray( set );
}
//use actual speed and not freespeed
noiseParameters.setUseActualSpeedLevel(true);
//use the valid speed range (recommended by IK)
@@ -181,7 +188,7 @@ private Config prepareConfig() {
config.transit().setTransitScheduleFile(null);
config.transit().setVehiclesFile(null);
config.plans().setInputFile(ApplicationUtils.matchInput("plans", input.getRunDirectory()).toAbsolutePath().toString());
config.facilities().setInputFile(null);
config.facilities().setInputFile(ApplicationUtils.matchInput("facilities", input.getRunDirectory()).toAbsolutePath().toString());
config.eventsManager().setNumberOfThreads(null);
config.eventsManager().setEstimatedNumberOfEvents(null);
//ts, aug '24: not sure if and why we need to set 1 thread
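
For context, a minimal sketch (not part of the commit) of applying the same defaults outside the noise-analysis command. It uses only the setters visible in the hunk above, assumes NoiseConfigGroup's usual package org.matsim.contrib.noise, and the activity types are made-up placeholders.

import java.util.Set;

import org.matsim.api.core.v01.TransportMode;
import org.matsim.contrib.noise.NoiseConfigGroup;
import org.matsim.core.config.Config;
import org.matsim.core.config.ConfigUtils;
import org.matsim.core.utils.collections.CollectionUtils;

public class NoiseDefaultsSketch {
	public static void main(String[] args) {
		Config config = ConfigUtils.createConfig();
		NoiseConfigGroup noiseParameters = ConfigUtils.addOrGetModule(config, NoiseConfigGroup.class);

		// Receiver point grid and damage calculation consider these activity types (placeholder values).
		String[] activities = {"home", "work"};
		noiseParameters.setConsideredActivitiesForReceiverPointGridArray(activities);
		noiseParameters.setConsideredActivitiesForDamageCalculationArray(activities);

		// Ignore network modes that should not contribute traffic noise.
		Set<String> ignored = CollectionUtils.stringArrayToSet(new String[]{
				TransportMode.bike, TransportMode.walk, TransportMode.transit_walk, TransportMode.non_network_walk});
		noiseParameters.setNetworkModesToIgnoreSet(ignored);

		// Treat vehicle ids starting with "freight" as heavy goods vehicles.
		noiseParameters.setHgvIdPrefixesArray(new String[]{"freight"});

		// Use simulated speeds rather than freespeed.
		noiseParameters.setUseActualSpeedLevel(true);
	}
}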
@@ -2,6 +2,8 @@

import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntOpenHashSet;
import it.unimi.dsi.fastutil.ints.IntSet;
import it.unimi.dsi.fastutil.objects.Object2IntLinkedOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2LongMap;
@@ -15,6 +17,7 @@
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.geom.GeometryFactory;
import org.locationtech.jts.geom.Point;
import org.matsim.application.ApplicationUtils;
import org.matsim.application.CommandSpec;
import org.matsim.application.MATSimAppCommand;
import org.matsim.application.options.CsvOptions;
@@ -32,6 +35,7 @@
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.stream.IntStream;

@@ -43,48 +47,43 @@
produces = {
"mode_share.csv", "mode_share_per_dist.csv", "mode_users.csv", "trip_stats.csv",
"mode_share_per_%s.csv", "population_trip_stats.csv", "trip_purposes_by_hour.csv",
"mode_share_distance_distribution.csv",
"mode_share_distance_distribution.csv", "mode_shift.csv",
"mode_choices.csv", "mode_choice_evaluation.csv", "mode_choice_evaluation_per_mode.csv",
"mode_confusion_matrix.csv", "mode_prediction_error.csv"
}
)
public class TripAnalysis implements MATSimAppCommand {

private static final Logger log = LogManager.getLogger(TripAnalysis.class);

/**
* Attributes which relates this person to a reference person.
*/
public static String ATTR_REF_ID = "ref_id";
public static final String ATTR_REF_ID = "ref_id";
/**
* Person attribute that contains the reference modes of a person. Multiple modes are delimited by "-".
*/
public static String ATTR_REF_MODES = "ref_modes";
public static final String ATTR_REF_MODES = "ref_modes";
/**
* Person attribute containing its weight for analysis purposes.
*/
public static String ATTR_REF_WEIGHT = "ref_weight";

public static final String ATTR_REF_WEIGHT = "ref_weight";
private static final Logger log = LogManager.getLogger(TripAnalysis.class);
@CommandLine.Option(names = "--person-filter", description = "Define which persons should be included into trip analysis. Map like: Attribute name (key), attribute value (value). " +
"The attribute needs to be contained by output_persons.csv. Persons who do not match all filters are filtered out.", split = ",")
private final Map<String, String> personFilters = new HashMap<>();
@CommandLine.Mixin
private InputOptions input = InputOptions.ofCommand(TripAnalysis.class);
@CommandLine.Mixin
private OutputOptions output = OutputOptions.ofCommand(TripAnalysis.class);

@CommandLine.Option(names = "--input-ref-data", description = "Optional path to reference data", required = false)
private String refData;

@CommandLine.Option(names = "--match-id", description = "Pattern to filter agents by id")
private String matchId;

@CommandLine.Option(names = "--dist-groups", split = ",", description = "List of distances for binning", defaultValue = "0,1000,2000,5000,10000,20000")
private List<Long> distGroups;

@CommandLine.Option(names = "--modes", split = ",", description = "List of considered modes, if not set all will be used")
private List<String> modeOrder;

@CommandLine.Option(names = "--shp-filter", description = "Define how the shp file filtering should work", defaultValue = "home")
private LocationFilter filter;

@CommandLine.Mixin
private ShpOptions shp;

@@ -131,6 +130,20 @@ private static double[] calcHistogram(double[] data, double[] bins) {
return hist;
}

private static Map<String, ColumnType> getColumnTypes() {
Map<String, ColumnType> columnTypes = new HashMap<>(Map.of("person", ColumnType.TEXT,
"trav_time", ColumnType.STRING, "wait_time", ColumnType.STRING, "dep_time", ColumnType.STRING,
"longest_distance_mode", ColumnType.STRING, "main_mode", ColumnType.STRING,
"start_activity_type", ColumnType.TEXT, "end_activity_type", ColumnType.TEXT,
"first_pt_boarding_stop", ColumnType.TEXT, "last_pt_egress_stop", ColumnType.TEXT));

// Map.of only has 10 argument max
columnTypes.put("traveled_distance", ColumnType.LONG);
columnTypes.put("euclidean_distance", ColumnType.LONG);

return columnTypes;
}

@Override
public Integer call() throws Exception {

@@ -146,6 +159,43 @@ public Integer call() throws Exception {
persons = persons.where(persons.textColumn("person").matchesRegex(matchId));
}

// filter persons according to person (attribute) filter
if (!personFilters.isEmpty()) {
IntSet generalFilteredRowIds = null;
for (Map.Entry<String, String> entry : personFilters.entrySet()) {
if (!persons.containsColumn(entry.getKey())) {
log.warn("Persons table does not contain column for filter attribute {}. Filter on {} will not be applied.", entry.getKey(), entry.getValue());
continue;
}
log.info("Using person filter for attribute {} and value {}", entry.getKey(), entry.getValue());

IntSet filteredRowIds = new IntOpenHashSet();

for (int i = 0; i < persons.rowCount(); i++) {
Row row = persons.row(i);
String value = row.getString(entry.getKey());
// only add value once
if (value.equals(entry.getValue())) {
filteredRowIds.add(i);
}
}

if (generalFilteredRowIds == null) {
// If generalFilteredRowIds is empty, add all elements from filteredRowIds to generalFilteredRowIds
generalFilteredRowIds = filteredRowIds;
} else {
// If generalFilteredRowIds is not empty, retain only the elements that are also in filteredRowIds
generalFilteredRowIds.retainAll(filteredRowIds);
}
}

if (generalFilteredRowIds != null) {
persons = persons.where(Selection.with(generalFilteredRowIds.intStream().toArray()));
}
}

log.info("Filtered {} out of {} persons", persons.rowCount(), total);

// Home filter by standard attribute
if (shp.isDefined() && filter == LocationFilter.home) {
Geometry geometry = shp.getGeometry();
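
A hypothetical invocation of the new person filter added above (attribute name and values are invented; they must exist as columns in output_persons.csv):

	--person-filter subpopulation=person,carAvail=always

Entries are comma-separated key=value pairs; a person is kept only if it matches every applied filter, and a filter whose attribute column is missing from the persons table is skipped with a warning.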
Expand All @@ -166,18 +216,8 @@ public Integer call() throws Exception {

log.info("Filtered {} out of {} persons", persons.rowCount(), total);

Map<String, ColumnType> columnTypes = new HashMap<>(Map.of("person", ColumnType.TEXT,
"trav_time", ColumnType.STRING, "wait_time", ColumnType.STRING, "dep_time", ColumnType.STRING,
"longest_distance_mode", ColumnType.STRING, "main_mode", ColumnType.STRING,
"start_activity_type", ColumnType.TEXT, "end_activity_type", ColumnType.TEXT,
"first_pt_boarding_stop", ColumnType.TEXT, "last_pt_egress_stop", ColumnType.TEXT));

// Map.of only has 10 argument max
columnTypes.put("traveled_distance", ColumnType.LONG);
columnTypes.put("euclidean_distance", ColumnType.LONG);

Table trips = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(input.getPath("trips.csv")))
.columnTypesPartial(columnTypes)
.columnTypesPartial(getColumnTypes())
.sample(false)
.separator(CsvOptions.detectDelimiter(input.getPath("trips.csv"))).build());

@@ -271,6 +311,8 @@ public Integer call() throws Exception {

writeTripDistribution(joined);

writeModeShift(joined);

return 0;
}

@@ -540,6 +582,34 @@ private void writeTripDistribution(Table trips) throws IOException {
}
}

private void writeModeShift(Table trips) throws IOException {
Path path;
try {
Path dir = Path.of(input.getPath("trips.csv")).getParent().resolve("ITERS").resolve("it.0");
path = ApplicationUtils.matchInput("trips.csv", dir);
} catch (Exception e) {
log.error("Could not find trips from 0th iteration.", e);
return;
}

Table originalTrips = Table.read().csv(CsvReadOptions.builder(IOUtils.getBufferedReader(path.toString()))
.columnTypesPartial(getColumnTypes())
.sample(false)
.separator(CsvOptions.detectDelimiter(path.toString())).build());

// Use longest_distance_mode where main_mode is not present
originalTrips.stringColumn("main_mode")
.set(originalTrips.stringColumn("main_mode").isMissing(),
originalTrips.stringColumn("longest_distance_mode"));

originalTrips.column("main_mode").setName("original_mode");

Table joined = new DataFrameJoiner(trips, "trip_id").inner(true, originalTrips);
Table aggr = joined.summarize("trip_id", count).by("original_mode", "main_mode");

aggr.write().csv(output.getPath("mode_shift.csv").toFile());
}

/**
* How shape file filtering should be applied.
*/
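
For reference, a minimal self-contained sketch (not part of the commit) of the join-and-count pattern used by writeModeShift above, with invented trip ids and modes; it relies only on the tablesaw calls that already appear in this file.

import static tech.tablesaw.aggregate.AggregateFunctions.count;

import tech.tablesaw.api.StringColumn;
import tech.tablesaw.api.Table;
import tech.tablesaw.joining.DataFrameJoiner;

public class ModeShiftSketch {
	public static void main(String[] args) {
		// Trips of the final iteration (invented data).
		Table trips = Table.create("trips",
				StringColumn.create("trip_id", "1", "2", "3"),
				StringColumn.create("main_mode", "pt", "car", "bike"));

		// The same trips from iteration 0, with the mode column already renamed to original_mode as in writeModeShift.
		Table originalTrips = Table.create("it0",
				StringColumn.create("trip_id", "1", "2", "3"),
				StringColumn.create("original_mode", "car", "car", "bike"));

		// Inner join on trip_id, then count trips per (original_mode, main_mode) pair.
		Table joined = new DataFrameJoiner(trips, "trip_id").inner(true, originalTrips);
		Table aggr = joined.summarize("trip_id", count).by("original_mode", "main_mode");

		// One row per observed mode pair, e.g. car -> pt.
		System.out.println(aggr);
	}
}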
