diff --git a/makefile b/makefile
index 98f0388..f3d10fb 100644
--- a/makefile
+++ b/makefile
@@ -102,6 +102,8 @@ data/gtfs/%.merged.with_flex.gtfs: data/gtfs/%.merged.gtfs.zip
 	$(info unzipping $* GTFS feed)
 	rm -rf $@
 	unzip -d $@ $<
+	$(info patching station stop_ids referenced in stop_times)
+	python3 scripts/patch_nvbw_station_stops.py $@
 	$(info patching GTFS-Flex data into the GTFS feed)
 	# todo: pick flex rules file based on GTFS feed
 	docker run -i --rm -v $(HOST_MOUNT)/data/gtfs/$(@F):/gtfs derhuerst/generate-gtfs-flex:4 stadtnavi-herrenberg-flex-rules.js
diff --git a/scripts/patch_nvbw_station_stops.py b/scripts/patch_nvbw_station_stops.py
new file mode 100644
index 0000000..7a819b0
--- /dev/null
+++ b/scripts/patch_nvbw_station_stops.py
@@ -0,0 +1,68 @@
+# This script patches stations that are referenced as stops by
+# adding a new stop with `:na:na` appended to the id and
+# replacing the original station's ID in stop_times.txt
+# with this new stop ID.
+# Note: This script is a quick and dirty hack, not a
+# general purpose patch script. E.g. GTFS feeds with transfers
+# might need additional changes.
+
+# Background: this fix is necessary because the feed references
+# stations (location_type=1) directly in stop_times.txt, which
+# is not valid GTFS.
+
+import csv
+import os
+from os.path import join
+import sys
+
+gtfs_dir = sys.argv[1] if len(sys.argv) >= 2 else ''
+
+original_stops_file_path = join(gtfs_dir, 'stops.txt')
+new_stops_file_path = join(gtfs_dir, 'new_stops.txt')
+original_stop_times_file_path = join(gtfs_dir, 'stop_times.txt')
+new_stop_times_file_path = join(gtfs_dir, 'new_stop_times.txt')
+
+# collect all stations (location_type == '1'), keyed by stop_id
+with open(original_stops_file_path) as csvfile:
+    reader = csv.DictReader(csvfile)
+    stations = {}
+    for row in reader:
+        if row['location_type'] == '1':
+            stations[row['stop_id']] = row
+
+renamed_stop_ids = {}
+# check whether stations are referenced by stop_times and rewrite those references
+with open(original_stop_times_file_path) as csvfile:
+    with open(new_stop_times_file_path, 'w') as new_stop_times_file:
+        reader = csv.DictReader(csvfile)
+        writer = csv.DictWriter(new_stop_times_file, reader.fieldnames)
+        writer.writeheader()
+        for row in reader:
+            if row['stop_id'] in stations:
+                print(row['stop_id'], row['trip_id'], row['departure_time'])
+                new_stop_id = row['stop_id'] + ':na:na'
+                renamed_stop_ids[row['stop_id']] = new_stop_id
+                row['stop_id'] = new_stop_id
+
+            writer.writerow(row)
+
+# duplicate every renamed station as a regular stop (location_type '0')
+with open(original_stops_file_path) as csvfile:
+    with open(new_stops_file_path, 'w') as new_stops_file:
+        reader = csv.DictReader(csvfile)
+        writer = csv.DictWriter(new_stops_file, reader.fieldnames)
+        writer.writeheader()
+
+        for row in reader:
+            writer.writerow(row)
+            if row['stop_id'] in renamed_stop_ids:
+                row['stop_id'] = renamed_stop_ids[row['stop_id']]
+                row['location_type'] = '0'
+                writer.writerow(row)
+
+# replace the original files with the patched ones
+os.remove(original_stops_file_path)
+os.rename(new_stops_file_path, original_stops_file_path)
+
+os.remove(original_stop_times_file_path)
+os.rename(new_stop_times_file_path, original_stop_times_file_path)
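
Not part of the patch: a minimal verification sketch of how one might confirm, after running the patch script on an unzipped feed, that stop_times.txt no longer references any station. The file name check_station_refs.py and the directory argument are assumptions for illustration only.

# check_station_refs.py -- hypothetical verification sketch, not part of the patch
import csv
import sys
from os.path import join

gtfs_dir = sys.argv[1]  # path to the unzipped, patched GTFS directory

# stations are the rows with location_type == '1' in stops.txt
with open(join(gtfs_dir, 'stops.txt')) as f:
    station_ids = {row['stop_id'] for row in csv.DictReader(f)
                   if row['location_type'] == '1'}

# after patching, no stop_times row should reference a station id
with open(join(gtfs_dir, 'stop_times.txt')) as f:
    offending = [row['stop_id'] for row in csv.DictReader(f)
                 if row['stop_id'] in station_ids]

print(len(offending), 'stop_times rows still reference a station')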