package com.github.skjolber.dc.gtfs.mt;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ThreadPoolExecutor;
import com.github.skjolber.dc.GtfsFeed;
import com.github.skjolber.dc.gtfs.IntermediateProcessor;
import com.github.skjolber.dc.model.Stop;
import com.github.skjolber.dc.model.StopTime;
import com.github.skjolber.dc.model.Trip;
import com.github.skjolber.stcsv.CsvReader;
import com.github.skjolber.stcsv.databinder.CsvMapper2;
import com.github.skjolber.stcsv.databinder.StaticCsvMapper;
import com.github.skjolber.unzip.FileEntryChunkStreamHandler;
import com.github.skjolber.unzip.FileEntryHandler;
import com.github.skjolber.unzip.FileEntryStreamHandler;
import com.github.skjolber.unzip.csv.AbstractSesselTjonnaCsvFileEntryChunkStreamHandler;
import com.github.skjolber.unzip.csv.AbstractSesselTjonnaCsvFileEntryStreamHandler;
import com.github.skjolber.unzip.csv.CsvLineHandler;
import com.github.skjolber.unzip.csv.CsvLineHandlerFactory;
/**
 * Parses the GTFS stop_times.txt file into {@link StopTime} objects, supporting chunked / multi-threaded parsing.
 */
@SuppressWarnings("unchecked")
public class StopTimeAdapter implements CsvLineHandlerFactory {
	protected static CsvMapper2<StopTime, IntermediateProcessor<StopTime>> parser;

	public static CsvMapper2<StopTime, IntermediateProcessor<StopTime>> getParser() {
		return parser;
	}
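	/**
	 * Converts a GTFS HH:MM:SS string to seconds past midnight by reading digits at fixed
	 * offsets, e.g. "14:30:00" -> 14 * 3600 + 30 * 60 + 0 = 52200.
	 */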
private static int getStringAsSeconds(String string) { // 14:30:00
return
(string.charAt(6) - '0') * 10 + (string.charAt(7) - '0') // seconds
+ (string.charAt(3) - '0') * 600 + (string.charAt(4) - '0') * 60 // minutes
+ (string.charAt(0) - '0') * 36000 + (string.charAt(1) - '0') * 3600 // hours
;
}
static {
// trip_id,stop_id,stop_sequence,arrival_time,departure_time,pickup_type,drop_off_type,shape_dist_traveled,stop_headsign
		parser = CsvMapper2.builder(StopTime.class, (Class<IntermediateProcessor<StopTime>>)(Class<?>)IntermediateProcessor.class)
.stringField("trip_id")
.consumer( (t, i, id) -> i.add(0, id, t))
.required()
.stringField("stop_id")
.consumer( (t, i, id) -> i.add(1, id, t) )
.required()
.integerField("stop_sequence")
.setter(StopTime::setStopSequence)
.optional()
.stringField("arrival_time")
.consumer( (t, v) -> t.setArrivalTime(getStringAsSeconds(v)) )
.optional()
.stringField("departure_time")
.consumer( (t, v) -> t.setDepartureTime(getStringAsSeconds(v)) )
.optional()
.integerField("timepoint")
.setter(StopTime::setTimepoint)
.optional()
.integerField("pickup_type")
.setter(StopTime::setPickupType)
.optional()
.integerField("drop_off_type")
.setter(StopTime::setDropOffType)
.optional()
.doubleField("shape_dist_traveled")
.setter(StopTime::setShapeDistTraveled)
.optional()
.stringField("stop_headsign")
.setter(StopTime::setStopHeadsign)
.quotedWithoutLinebreaks()
.optional()
.stringField("fare_period_id")
.setter(StopTime::setFarePeriodId)
.optional()
.build();
}
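	// A rough sketch (not used by this class) of how the static mapper can parse a plain,
	// non-chunked stream; the file path and error handling are illustrative only:
	//
	//   CsvReader<StopTime> csv = parser.create(new java.io.FileReader("stop_times.txt"), new IntermediateProcessor<>(2));
	//   StopTime stopTime;
	//   while ((stopTime = csv.next()) != null) {
	//       // arrival/departure times are already converted to seconds past midnight
	//   }
	//
	// The chunked path below wraps the same mapper in file-entry stream handlers instead.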
	protected Map<Thread, StopTimeHandler> handlers = new ConcurrentHashMap<>(Runtime.getRuntime().availableProcessors());
	protected Map<Thread, IntermediateProcessor<StopTime>> processors = new ConcurrentHashMap<>(Runtime.getRuntime().availableProcessors());
protected final int chunkLength;
protected GtfsFeed feed;
public StopTimeAdapter(int chunkLength, GtfsFeed feed) {
this.chunkLength = chunkLength;
this.feed = feed;
}
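	// Handlers and intermediate processors are cached per worker thread, so each thread collects
	// stop times without synchronization; resolveReferences() merges the per-thread results afterwards.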
@Override
	public <T> CsvLineHandler<T> getHandler(String fileName, ThreadPoolExecutor executor) {
if(!fileName.equals("stop_times.txt")) {
throw new IllegalArgumentException("Unexpected file name " + fileName);
}
StopTimeHandler csvLineHandler = (StopTimeHandler) handlers.get(Thread.currentThread());
if(csvLineHandler == null) {
csvLineHandler = new StopTimeHandler();
handlers.put(Thread.currentThread(), csvLineHandler);
}
		return (CsvLineHandler<T>) csvLineHandler;
}
private class StopTimeCsvFileEntryStreamHandler extends AbstractSesselTjonnaCsvFileEntryStreamHandler {
public StopTimeCsvFileEntryStreamHandler(String name, CsvLineHandlerFactory csvLineHandlerFactory, long size, FileEntryHandler delegate, ThreadPoolExecutor executor) {
super(name, csvLineHandlerFactory, delegate, executor);
}
public StopTimeCsvFileEntryStreamHandler(String name, CsvLineHandlerFactory csvLineHandlerFactory, FileEntryHandler delegate, ThreadPoolExecutor executor) {
super(name, csvLineHandlerFactory, delegate, executor);
}
@Override
		protected CsvReader<StopTime> createCsvReader(Reader reader, ThreadPoolExecutor executorService) throws Exception {
return parser.create(reader, newIntermediateProcessor());
}
}
private class StopTimeCsvFileEntryChunkStreamHandler extends AbstractSesselTjonnaCsvFileEntryChunkStreamHandler {
public StopTimeCsvFileEntryChunkStreamHandler(String name, Charset charset, CsvLineHandlerFactory csvLineHandlerFactory) {
super(name, charset, null, csvLineHandlerFactory);
}
@Override
		protected StaticCsvMapper<StopTime> createStaticCsvMapper(String firstLine) throws Exception {
this.fileChunkSplitter = new StopTimeSequenceChunkerSplitter(firstLine, chunkLength);
			return new StaticCsvMapperAdapter<StopTime, IntermediateProcessor<StopTime>>(parser.buildStaticCsvMapper(firstLine)) {
				@Override
				protected IntermediateProcessor<StopTime> newIntermediateProcessor() {
return StopTimeAdapter.this.newIntermediateProcessor();
}
};
}
}
	protected IntermediateProcessor<StopTime> newIntermediateProcessor() {
		IntermediateProcessor<StopTime> processor = processors.get(Thread.currentThread());
		if(processor == null) {
			// two id groups are collected: index 0 for trip_id, index 1 for stop_id
			processor = new IntermediateProcessor<>(2);
processors.put(Thread.currentThread(), processor);
}
return processor;
}
public FileEntryStreamHandler getFileEntryStreamHandler(FileEntryHandler delegate, ThreadPoolExecutor executor, long size) throws Exception {
/*
if(executor.getCorePoolSize() >= 2) {
// prevent deadlocks by carefully choosing the parallel buffer size
return new StopTimeCsvFileEntryStreamHandler("stop_times.txt", this, Math.min(chunkLength, size), delegate, executor);
}
*/
return new StopTimeCsvFileEntryStreamHandler("stop_times.txt", this, delegate, executor);
}
public FileEntryChunkStreamHandler getFileEntryChunkedStreamHandler() throws Exception {
return new StopTimeCsvFileEntryChunkStreamHandler("stop_times.txt", StandardCharsets.UTF_8, this);
}
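	/**
	 * Resolves references after parsing: links each StopTime to its Stop and Trip, splits the
	 * per-thread lists at trip boundaries, and sorts each stop's stop times by departure time.
	 */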
public void resolveReferences() {
		for (IntermediateProcessor<StopTime> p : processors.values()) {
			// index 1 groups stop times by stop_id; attach them to the corresponding Stop
			for (Entry<String, List<StopTime>> entry : p.getById(1).entrySet()) {
				Stop stop = feed.getStop(entry.getKey());
				List<StopTime> list = entry.getValue();
for(StopTime stopTime : list) {
stopTime.setStop(stop);
stop.add(stopTime);
}
}
			// index 0 groups stop times by trip_id; attach them to the corresponding Trip
			for (Entry<String, List<StopTime>> entry : p.getById(0).entrySet()) {
Trip trip = feed.getTrip(entry.getKey());
if(trip == null) {
					throw new RuntimeException("Unknown trip " + entry.getKey());
}
for(StopTime stopTime : entry.getValue()) {
stopTime.setTrip(trip);
}
}
}
for (StopTimeHandler stopTimeHandler : handlers.values()) {
			for (List<StopTime> list : stopTimeHandler.getStopTimes()) {
if(!list.isEmpty()) {
					// sequence numbering can be inconsistent, so a list may span more than one trip; split it at each trip boundary
for(int i = 0; i < list.size() - 1; i++) {
if(list.get(i).getTrip() != list.get(i + 1).getTrip()) {
StopTime last = list.get(i);
StopTime first = list.get(i + 1);
last.setNext(null);
first.setPrevious(null);
last.getTrip().setStopTimes(list.subList(0, i + 1));
list = list.subList(i + 1, list.size());
i = 0;
// TODO normalize sequence numbers too?
}
}
if(!list.isEmpty()) {
Trip trip = list.get(0).getTrip();
if(trip.getStopTimes() != null) {
							throw new RuntimeException(trip.getId() + " already has stop times");
}
trip.setStopTimes(list);
}
}
}
}
for (Stop stop : feed.getStops()) {
Collections.sort(stop.getStopTimes(), StopTime.departureTimeComparator);
}
}
	public Collection<StopTimeHandler> getHandlers() {
return handlers.values();
}
}
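// Intended wiring, sketched with assumed surrounding code (only the adapter calls below are real):
//
//   GtfsFeed feed = ...;                                  // stops and trips already parsed
//   StopTimeAdapter adapter = new StopTimeAdapter(8 * 1024 * 1024, feed);
//   // register adapter.getFileEntryStreamHandler(..) / adapter.getFileEntryChunkedStreamHandler()
//   // with the zip/CSV processing pipeline, run it, then:
//   adapter.resolveReferences();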