FeedReader and TransitFeed help deal with complete feeds
FeedReader reads transit feed entries from a directory of GTFS files, and can first extract a zipped feed into such a directory via from_zip. TransitFeed is a container for a complete feed's records that can be populated from a FeedReader. Addresses some of georust#5.
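A minimal usage sketch of the two types added below. The function name, file paths, and stop id are placeholders, and it assumes a caller returning Result<(), Error> with the module's imports in scope:

// Hypothetical example; paths and ids are placeholders.
fn demo() -> Result<(), Error> {
    // Extract the zipped feed into a scratch directory and read from there.
    let reader = FeedReader::from_zip("feed.zip", "/tmp/feed")?;

    // Stream records one file at a time; each item is a per-record Result.
    for result in reader.stops()? {
        if let Ok(stop) = result {
            // ... use stop ...
        }
    }

    // Or load the whole feed into memory and look records up by id.
    let feed = TransitFeed::from_reader(&reader)?;
    if let Some(stop) = feed.find_stop("some-stop-id") {
        // ... use stop ...
    }
    Ok(())
}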
use serde;
use std::collections::HashMap;
use std::path::Path;
use std::fs::File;
use zip::ZipArchive;

use transit::{Agency, Stop, Route, Trip, StopTime, Calendar, CalendarDate, FareAttribute,
              FareRule, ShapePoint, Frequency, Transfer, FeedInfo};
use gtfs::{Error, GTFSIterator};

/// Provides per-file iterators over the GTFS files in an extracted feed directory.
pub struct FeedReader {
    pub feed_path: String
}

impl FeedReader {
    /// Extracts a zipped feed into `output` and returns a reader over that directory.
    pub fn from_zip(zipfile: &str, output: &str) -> Result<Self, Error> {
        let output_path = Path::new(output);
        // TODO: propagate I/O and zip errors instead of unwrapping
        let mut zip = ZipArchive::new(File::open(zipfile).unwrap()).unwrap();
        extract_zip(&mut zip, output_path);
        Ok(FeedReader { feed_path: output.to_string() })
    }

    pub fn agencies(&self) -> Result<GTFSIterator<File, Agency>, Error> {
        self.make_iterator("agency.txt")
    }

    pub fn stops(&self) -> Result<GTFSIterator<File, Stop>, Error> {
        self.make_iterator("stops.txt")
    }

    pub fn routes(&self) -> Result<GTFSIterator<File, Route>, Error> {
        self.make_iterator("routes.txt")
    }

    pub fn trips(&self) -> Result<GTFSIterator<File, Trip>, Error> {
        self.make_iterator("trips.txt")
    }

    pub fn stop_times(&self) -> Result<GTFSIterator<File, StopTime>, Error> {
        self.make_iterator("stop_times.txt")
    }

    pub fn calendars(&self) -> Result<GTFSIterator<File, Calendar>, Error> {
        self.make_iterator("calendar.txt")
    }

    pub fn calendar_dates(&self) -> Result<GTFSIterator<File, CalendarDate>, Error> {
        self.make_iterator("calendar_dates.txt")
    }

    pub fn fare_attributes(&self) -> Result<GTFSIterator<File, FareAttribute>, Error> {
        self.make_iterator("fare_attributes.txt")
    }

    pub fn fare_rules(&self) -> Result<GTFSIterator<File, FareRule>, Error> {
        self.make_iterator("fare_rules.txt")
    }

    pub fn shapes(&self) -> Result<GTFSIterator<File, ShapePoint>, Error> {
        self.make_iterator("shapes.txt")
    }

    pub fn frequencies(&self) -> Result<GTFSIterator<File, Frequency>, Error> {
        self.make_iterator("frequencies.txt")
    }

    pub fn transfers(&self) -> Result<GTFSIterator<File, Transfer>, Error> {
        self.make_iterator("transfers.txt")
    }

    pub fn feed_info(&self) -> Result<GTFSIterator<File, FeedInfo>, Error> {
        self.make_iterator("feed_info.txt")
    }

    fn make_iterator<T>(&self, filename: &str) -> Result<GTFSIterator<File, T>, Error>
        where T: serde::de::DeserializeOwned
    {
        Ok(GTFSIterator::from_path(Path::new(&self.feed_path).join(filename).to_str().unwrap())?)
    }
}

/// Container for all transit records
pub struct TransitFeed {
    pub agencies: Vec<Agency>,
    pub stops: Vec<Stop>,
    pub routes: Vec<Route>,
    pub trips: Vec<Trip>,
    pub stoptimes: Vec<StopTime>,
    pub calendars: Vec<Calendar>,
    pub calendar_dates: Option<Vec<CalendarDate>>,
    pub fare_attributes: Option<Vec<FareAttribute>>,
    pub fare_rules: Option<Vec<FareRule>>,
    pub shapes: Option<Vec<ShapePoint>>,
    pub frequencies: Option<Vec<Frequency>>,
    pub transfers: Option<Vec<Transfer>>,
    pub feedinfo: Option<FeedInfo>,

    // Indexes from record id to position in the vectors above.
    stop_map: HashMap<String, usize>,
    route_map: HashMap<String, usize>,
    trip_map: HashMap<String, usize>,
}

impl TransitFeed {
    pub fn from_reader(reader: &FeedReader) -> Result<Self, Error> {
        let agencies = load_feed_file(reader.agencies()?);
        let stops = load_feed_file(reader.stops()?);
        let routes = load_feed_file(reader.routes()?);
        let trips = load_feed_file(reader.trips()?);
        let stoptimes = load_feed_file(reader.stop_times()?);
        let calendars = load_feed_file(reader.calendars()?);

        let stop_map = make_map(&stops, |stop: &Stop| stop.stop_id.clone());
        let route_map = make_map(&routes, |route: &Route| route.route_id.clone());
        let trip_map = make_map(&trips, |trip: &Trip| trip.trip_id.clone());

        Ok(TransitFeed {
            agencies: agencies,
            stops: stops,
            stop_map: stop_map,
            routes: routes,
            route_map: route_map,
            trips: trips,
            trip_map: trip_map,
            stoptimes: stoptimes,
            calendars: calendars,
            calendar_dates: load_optional_feed_file(reader.calendar_dates()),
            fare_attributes: load_optional_feed_file(reader.fare_attributes()),
            fare_rules: load_optional_feed_file(reader.fare_rules()),
            shapes: load_optional_feed_file(reader.shapes()),
            frequencies: load_optional_feed_file(reader.frequencies()),
            transfers: load_optional_feed_file(reader.transfers()),
            feedinfo: match load_optional_feed_file(reader.feed_info()) {
                Some(mut records) => {
                    if records.len() != 1 {
                        println!("Unexpected number of entries in feed_info.txt");
                    }
                    records.pop()
                },
                None => None
            },
        })
    }

    pub fn find_stop(&self, id: &str) -> Option<&Stop> {
        TransitFeed::find_record(id, &self.stop_map, &self.stops)
    }

    pub fn find_route(&self, id: &str) -> Option<&Route> {
        TransitFeed::find_record(id, &self.route_map, &self.routes)
    }

    pub fn find_trip(&self, id: &str) -> Option<&Trip> {
        TransitFeed::find_record(id, &self.trip_map, &self.trips)
    }

    fn find_record<'a, T>(record_id: &str, map: &HashMap<String, usize>, records: &'a Vec<T>) -> Option<&'a T> {
        map.get(record_id).map(|index| &records[*index])
    }
}

// TODO: Need to log stuff here
fn load_feed_file<R, T>(iter: GTFSIterator<R, T>) -> Vec<T>
    where R: std::io::Read,
          for<'de> T: serde::Deserialize<'de>
{
    iter.filter_map(|r| match r {
        Ok(r) => Some(r),
        Err(e) => { println!("SKIPPING - {}", e); None }
    }).collect()
}

fn load_optional_feed_file<R, T>(result: Result<GTFSIterator<R, T>, Error>) -> Option<Vec<T>>
    where R: std::io::Read,
          for<'de> T: serde::Deserialize<'de>
{
    match result {
        Ok(iter) => Some(load_feed_file(iter)),
        Err(e) => { println!("SKIPPING optional file - {}", e); None }
    }
}

fn make_map<T, F: (Fn(&T) -> String)>(records: &Vec<T>, key_fn: F) -> HashMap<String, usize> {
    records.iter().enumerate()
        .map(|(index, record)| (key_fn(record), index))
        .collect()
}

// TODO: move these zip-extraction helpers elsewhere
use std;
use std::io;
use std::fs;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use zip;

fn extract_zip<T: io::Read + io::Seek>(archive: &mut ZipArchive<T>, output: &Path) {
    for i in 0..archive.len() {
        let mut file = archive.by_index(i).unwrap();
        let outpath = output.join(sanitize_filename(file.name()));
        println!("{}", outpath.display());

        {
            let comment = file.comment();
            if comment.len() > 0 { println!(" File comment: {}", comment); }
        }

        // GTFS feeds are flat, so parent directories shouldn't be needed.
        //create_directory(outpath.parent().unwrap_or(std::path::Path::new("")), None);

        let perms = convert_permissions(file.unix_mode());

        // Directory entries are unexpected in GTFS zips, but handle them anyway.
        if file.name().ends_with('/') {
            create_directory(&outpath, perms);
        } else {
            write_file(&mut file, &outpath, perms);
        }
    }
}

#[cfg(unix)]
fn convert_permissions(mode: Option<u32>) -> Option<fs::Permissions> {
    match mode {
        Some(mode) => Some(fs::Permissions::from_mode(mode)),
        None => None,
    }
}

#[cfg(not(unix))]
fn convert_permissions(_mode: Option<u32>) -> Option<fs::Permissions> {
    None
}

fn write_file(file: &mut zip::read::ZipFile, outpath: &std::path::Path, perms: Option<fs::Permissions>) {
    let mut outfile = fs::File::create(&outpath).unwrap();
    io::copy(file, &mut outfile).unwrap();
    if let Some(perms) = perms {
        fs::set_permissions(outpath, perms).unwrap();
    }
}

fn create_directory(outpath: &std::path::Path, perms: Option<fs::Permissions>) {
    fs::create_dir_all(&outpath).unwrap();
    if let Some(perms) = perms {
        fs::set_permissions(outpath, perms).unwrap();
    }
}

fn sanitize_filename(filename: &str) -> std::path::PathBuf {
    // Truncate at the first NUL and keep only normal path components,
    // so archive entries cannot escape the output directory.
    let no_null_filename = match filename.find('\0') {
        Some(index) => &filename[0..index],
        None => filename,
    };

    std::path::Path::new(no_null_filename)
        .components()
        .filter(|component| match *component {
            std::path::Component::Normal(..) => true,
            _ => false
        })
        .fold(std::path::PathBuf::new(), |mut path, ref cur| {
            path.push(cur.as_os_str());
            path
        })
}