Compare commits
48 commits
for-usage-
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
ab59bd57fb | ||
|
65cab1b379 | ||
|
ebe9cf219c | ||
|
b7f7abaaca | ||
|
e833ae1d35 | ||
|
c0f696fc52 | ||
|
661b9dc43b | ||
|
ef0c8946b1 | ||
|
c43b6032d8 | ||
|
574da01e12 | ||
|
646bb13f39 | ||
|
b07393991a | ||
|
f0b74c210e | ||
|
a74b71af50 | ||
|
5a6590533c | ||
|
d93723861e | ||
|
574b946dce | ||
|
f5c932d16a | ||
|
cc3d82a25e | ||
|
c3804b0d98 | ||
|
46996effa6 | ||
|
5505cb60a9 | ||
|
d0ac9a3502 | ||
|
b882d0620f | ||
|
34da27df6d | ||
|
cb7b9fc3cc | ||
|
574ed37209 | ||
|
83dc935bfd | ||
|
e851019d3a | ||
|
07d343eed8 | ||
|
9a2366a1c7 | ||
|
714e771fba | ||
|
367e50799e | ||
|
bc554c36df | ||
|
9a21d029e8 | ||
|
bce5517bb0 | ||
|
f830f4c88a | ||
|
aa49034e50 | ||
|
90a89d7cd9 | ||
|
7f6f960ed7 | ||
|
b0bc895146 | ||
|
8ce210ad53 | ||
|
384884ed6f | ||
|
a5584e1c42 | ||
|
2f26038fff | ||
|
dec4b7ff8b | ||
|
521d2b76ad | ||
|
083d98e431 |
35 changed files with 4038 additions and 2 deletions
31
.clang-format
Normal file
31
.clang-format
Normal file
|
@ -0,0 +1,31 @@
|
|||
# Configuration file for clang-format, based on docs/CPP_STYLE.md.
|
||||
|
||||
BasedOnStyle: Google
|
||||
IndentWidth: 2
|
||||
BreakBeforeBraces: Allman
|
||||
ColumnLimit: 100
|
||||
|
||||
Language: Cpp
|
||||
AccessModifierOffset: -2
|
||||
AllowShortBlocksOnASingleLine: Never
|
||||
AllowShortCaseLabelsOnASingleLine: true
|
||||
AllowShortFunctionsOnASingleLine: All
|
||||
AllowShortIfStatementsOnASingleLine: Never
|
||||
AllowShortLoopsOnASingleLine: false
|
||||
BreakConstructorInitializersBeforeComma: true
|
||||
ConstructorInitializerIndentWidth: 4
|
||||
DerivePointerAlignment: false
|
||||
IndentCaseLabels: false
|
||||
NamespaceIndentation: None
|
||||
PointerAlignment: Middle
|
||||
SortIncludes: true
|
||||
Standard: c++17
|
||||
|
||||
IncludeBlocks: Preserve
|
||||
IncludeCategories:
|
||||
- Regex: '^<.*\.h>'
|
||||
Priority: 1
|
||||
- Regex: '^<.*'
|
||||
Priority: 2
|
||||
- Regex: '.*'
|
||||
Priority: 3
|
31
.github/workflows/ccpp.yml
vendored
Normal file
31
.github/workflows/ccpp.yml
vendored
Normal file
|
@ -0,0 +1,31 @@
|
|||
name: C/C++ CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master, add-* ]
|
||||
pull_request:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: git_actions
|
||||
run: git submodule update --init --recursive
|
||||
- name: cmake
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install mm-common g++-9
|
||||
export CXX=g++-9
|
||||
cmake .
|
||||
- name: make
|
||||
run: |
|
||||
export CXX=g++-9
|
||||
make
|
||||
- name: run_tests
|
||||
run: |
|
||||
pwd
|
||||
ctest --output-on-failure
|
34
.github/workflows/cla.yml
vendored
Normal file
34
.github/workflows/cla.yml
vendored
Normal file
|
@ -0,0 +1,34 @@
|
|||
name: "CLA Assistant"
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
pull_request_target:
|
||||
types: [opened,closed,synchronize]
|
||||
|
||||
jobs:
|
||||
CLAssistant:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: "CLA Assistant"
|
||||
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
|
||||
# Alpha Release
|
||||
uses: cla-assistant/github-action@v2.1.0-alpha
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
||||
with:
|
||||
path-to-signatures: 'signatures/version1/cla.json'
|
||||
path-to-document: 'https://github.com/mapsme/just_gtfs/blob/master/MIT_CLA.md'
|
||||
# branch should not be protected
|
||||
branch: 'master'
|
||||
allowlist: mesozoic-drones,tatiana-yan,bot*
|
||||
|
||||
#below are the optional inputs - If the optional inputs are not given, then default values will be taken
|
||||
#remote-organization-name: enter the remote organization name where the signatures should be stored (Default is storing the signatures in the same repository)
|
||||
#remote-repository-name: enter the remote repository name where the signatures should be stored (Default is storing the signatures in the same repository)
|
||||
#create-file-commit-message: 'For example: Creating file for storing CLA Signatures'
|
||||
#signed-commit-message: 'For example: $contributorName has signed the CLA in #$pullRequestNo'
|
||||
#custom-notsigned-prcomment: 'pull request comment with Introductory message to ask new contributors to sign'
|
||||
#custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
|
||||
#custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
|
||||
|
9
.gitignore
vendored
9
.gitignore
vendored
|
@ -30,3 +30,12 @@
|
|||
*.exe
|
||||
*.out
|
||||
*.app
|
||||
|
||||
# Other
|
||||
.DS_Store
|
||||
.idea/
|
||||
cmake-build-debug/
|
||||
CMakeFiles/
|
||||
Makefile
|
||||
*.cmake
|
||||
CMakeCache.txt
|
||||
|
|
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
[submodule "doctest"]
|
||||
path = doctest
|
||||
url = https://github.com/onqtam/doctest
|
18
CMakeLists.txt
Normal file
18
CMakeLists.txt
Normal file
|
@ -0,0 +1,18 @@
|
|||
cmake_minimum_required(VERSION 3.6)
|
||||
|
||||
project(just_gtfs LANGUAGES CXX VERSION 0.1)
|
||||
|
||||
include_directories(include)
|
||||
include_directories(doctest/doctest)
|
||||
|
||||
set(CMAKE_CXX_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED on)
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Werror")
|
||||
|
||||
enable_testing()
|
||||
|
||||
add_library(just_gtfs INTERFACE)
|
||||
target_include_directories(just_gtfs INTERFACE ${CMAKE_CURRENT_LIST_DIR}/include)
|
||||
|
||||
add_subdirectory(tests)
|
||||
add_subdirectory(benchmarks)
|
59
MIT_CLA.md
Normal file
59
MIT_CLA.md
Normal file
|
@ -0,0 +1,59 @@
|
|||
## Individual Contributor License Agreement (CLA)
|
||||
|
||||
**Thank you for submitting your contributions to this project.**
|
||||
|
||||
By signing this CLA, you agree that the following terms apply to all of your past, present and future contributions
|
||||
to the project.
|
||||
|
||||
### License.
|
||||
|
||||
You hereby represent that all present, past and future contributions are governed by the
|
||||
[MIT License](https://opensource.org/licenses/MIT)
|
||||
copyright statement.
|
||||
|
||||
This entails that to the extent possible under law, you transfer all copyright and related or neighboring rights
|
||||
of the code or documents you contribute to the project itself or its maintainers.
|
||||
Furthermore you also represent that you have the authority to perform the above waiver
|
||||
with respect to the entirety of you contributions.
|
||||
|
||||
### Moral Rights.
|
||||
|
||||
To the fullest extent permitted under applicable law, you hereby waive, and agree not to
|
||||
assert, all of your “moral rights” in or relating to your contributions for the benefit of the project.
|
||||
|
||||
### Third Party Content.
|
||||
|
||||
If your Contribution includes or is based on any source code, object code, bug fixes, configuration changes, tools,
|
||||
specifications, documentation, data, materials, feedback, information or other works of authorship that were not
|
||||
authored by you (“Third Party Content”) or if you are aware of any third party intellectual property or proprietary
|
||||
rights associated with your Contribution (“Third Party Rights”),
|
||||
then you agree to include with the submission of your Contribution full details respecting such Third Party
|
||||
Content and Third Party Rights, including, without limitation, identification of which aspects of your
|
||||
Contribution contain Third Party Content or are associated with Third Party Rights, the owner/author of the
|
||||
Third Party Content and Third Party Rights, where you obtained the Third Party Content, and any applicable
|
||||
third party license terms or restrictions respecting the Third Party Content and Third Party Rights. For greater
|
||||
certainty, the foregoing obligations respecting the identification of Third Party Content and Third Party Rights
|
||||
do not apply to any portion of a Project that is incorporated into your Contribution to that same Project.
|
||||
|
||||
### Representations.
|
||||
|
||||
You represent that, other than the Third Party Content and Third Party Rights identified by
|
||||
you in accordance with this Agreement, you are the sole author of your Contributions and are legally entitled
|
||||
to grant the foregoing licenses and waivers in respect of your Contributions. If your Contributions were
|
||||
created in the course of your employment with your past or present employer(s), you represent that such
|
||||
employer(s) has authorized you to make your Contributions on behalf of such employer(s) or such employer
|
||||
(s) has waived all of their right, title or interest in or to your Contributions.
|
||||
|
||||
### Disclaimer.
|
||||
|
||||
To the fullest extent permitted under applicable law, your Contributions are provided on an "as is"
|
||||
basis, without any warranties or conditions, express or implied, including, without limitation, any implied
|
||||
warranties or conditions of non-infringement, merchantability or fitness for a particular purpose. You are not
|
||||
required to provide support for your Contributions, except to the extent you desire to provide support.
|
||||
|
||||
### No Obligation.
|
||||
|
||||
You acknowledge that the maintainers of this project are under no obligation to use or incorporate your contributions
|
||||
into the project. The decision to use or incorporate your contributions into the project will be made at the
|
||||
sole discretion of the maintainers or their authorized delegates.
|
||||
|
162
README.md
162
README.md
|
@ -1,2 +1,160 @@
|
|||
# just_gtfs
|
||||
C++17 header-only GTFS parsing library
|
||||
# just_gtfs - header-only modern C++ library for reading and writing GTFS feeds
|
||||
|
||||
[](https://github.com/mapsme/just_gtfs)
|
||||
|
||||
[](https://shields.io/)
|
||||
[](https://lbesson.mit-license.org/)
|
||||

|
||||
[](https://github.com/CUTR-at-USF/awesome-transit)
|
||||
[](https://github.com/mapsme/just_gtfs/issues)
|
||||
|
||||
|
||||
## Table of Contents
|
||||
- [Description](#description)
|
||||
- [Reading and writing GTFS feeds](#reading-and-writing-gtfs-feeds)
|
||||
- [How to add library to your project](#how-to-add-library-to-your-project)
|
||||
- [Used third-party tools](#used-third-party-tools)
|
||||
- [Contributing](#contributing)
|
||||
- [Resources](#resources)
|
||||
|
||||
## Description
|
||||
The just_gtfs library implements reading and writing static transit data in GTFS - [General Transit Feed Specification](https://developers.google.com/transit/gtfs/reference).
|
||||
|
||||
Its main features:
|
||||
- Fast reading and writing of GTFS feeds
|
||||
- Support for [extended GTFS route types](https://developers.google.com/transit/gtfs/reference/extended-route-types)
|
||||
- Simple working with GTFS `Date` and `Time` formats
|
||||
- Header-only
|
||||
- Written in C++17
|
||||
- Tested on GCC and Clang
|
||||
|
||||
|
||||
## Reading and writing GTFS feeds
|
||||
Library provides main class for working with GTFS feeds: `gtfs::Feed`. It also provides classes for each of the 17 GTFS entities: `Route`, `Stop`, `Pathway`, `Translation` and others.
|
||||
GTFS csv files are mapped to the corresponding C++ classes. Every GTFS entity can be accessed through `gtfs::Feed` corresponding getters & setters.
|
||||
|
||||
:pushpin: All GTFS entities are managed in the same way. So here is the example for working with `agencies`.
|
||||
|
||||
Method of the `Feed` class for reading `agency.txt`:
|
||||
```c++
|
||||
Result read_agencies()
|
||||
```
|
||||
|
||||
Method for reading reading not only agencies but all GTFS entities. Path to the feed is specified in the `Feed` constructor:
|
||||
```c++
|
||||
Result read_feed()
|
||||
```
|
||||
|
||||
Method for getting reference to the `Agencies` - `std::vector` of all `Agency` objects of the feed:
|
||||
```c++
|
||||
const Agencies & get_agencies()
|
||||
```
|
||||
|
||||
Method for finding agency by its id. Returns `std::optional` so you should check if the result is `std::nullopt`:
|
||||
```c++
|
||||
std::optional<Agency> get_agency(const Id & agency_id)
|
||||
```
|
||||
|
||||
Method for adding agency to the feed:
|
||||
```c++
|
||||
void add_agency(const Agency & agency)
|
||||
```
|
||||
|
||||
Method for writing agencies to the `agency.txt` file to `gtfs_path`.
|
||||
```c++
|
||||
Result write_agencies(const std::string & gtfs_path)
|
||||
```
|
||||
|
||||
Method for writing all GTFS entities (not only agencies, but stops, stop times, calendar etc):
|
||||
```c++
|
||||
Result write_feed(const std::string & gtfs_path)
|
||||
```
|
||||
|
||||
:pushpin: **There are similar methods for all other GTFS entities** for getting the list of entities, finding and adding them.
|
||||
For some of them additional methods are provided.
|
||||
For example, you can find all the stop times for current stop by its id:
|
||||
```c++
|
||||
StopTimes get_stop_times_for_stop(const Id & stop_id)
|
||||
```
|
||||
|
||||
Or you can find stop times for the particular trip:
|
||||
```c++
|
||||
StopTimes get_stop_times_for_trip(const Id & trip_id, bool sort_by_sequence = true)
|
||||
```
|
||||
|
||||
### Example of reading GTFS feed and working with its stops and routes
|
||||
:pushpin: Provide `gtfs::Feed` the feed path, read it and work with GTFS entities such as stops and routes:
|
||||
```c++
|
||||
Feed feed("~/data/SFMTA/");
|
||||
if (feed.read_feed() == ResultCode::OK)
|
||||
{
|
||||
Stops stops = feed.get_stops();
|
||||
std::cout << "Stops count in feed: " << stops.size() << std::endl;
|
||||
|
||||
for (const Stop & stop: stops)
|
||||
{
|
||||
std::cout << stop.stop_id << std::endl;
|
||||
}
|
||||
|
||||
Route route = feed.get_route("route_id_1009");
|
||||
if (route)
|
||||
{
|
||||
std::cout << route->route_long_name << std::endl;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Example of parsing shapes.txt and working with its contents
|
||||
GTFS feed can be wholly read from directory as in the example above or you can read GTFS files separately. E.g., if you need only shapes data, you can avoid parsing all other files and just work with the shapes.
|
||||
|
||||
:pushpin: Read only `shapes.txt` from the feed and work with shapes:
|
||||
```c++
|
||||
Feed feed("~/data/SFMTA/");
|
||||
if (feed.read_shapes() == ResultCode::OK)
|
||||
{
|
||||
Shapes all_shapes = feed.get_shapes();
|
||||
Shape shape = feed.get_shape("9367");
|
||||
|
||||
for (const ShapePoint & point: shape)
|
||||
{
|
||||
std::cout << point.shape_pt_lat << " " << point.shape_pt_lon << std::endl;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Example of writing GTFS:
|
||||
:pushpin: If you already filled the `feed` object with data that suits you, you can write it to the corresponding path:
|
||||
```c++
|
||||
Feed feed;
|
||||
|
||||
// Fill feed with agencies, stops, routes and other required data:
|
||||
|
||||
feed.add_trip(some_trip);
|
||||
feed.add_attribution(attr);
|
||||
|
||||
feed.write_feed("~/data/custom_feed/");
|
||||
```
|
||||
|
||||
## How to add library to your project
|
||||
- For including just_gtfs to your own project **as a submodule:** use branch "for-usage-as-submodule" which consists of a single header.
|
||||
- Another way of including just_gtfs to your project: just_gtfs is completely contained inside a single header and therefore it is sufficient to copy include/just_gtfs/just_gtfs.h to your **include paths.** The library does not have to be explicitly build.
|
||||
- For building library and **running tests:**
|
||||
Clone just_gtfs with `git clone --recursive` or run `git submodule update --init --recursive --remote` after cloning.
|
||||
In the just_gtfs project directory build the project and run unit tests:
|
||||
```
|
||||
cmake .
|
||||
make
|
||||
ctest --output-on-failure --verbose
|
||||
```
|
||||
The library makes use of the C++17 features and therefore you have to use the appropriate compiler version.
|
||||
|
||||
## Used third-party tools
|
||||
- [**doctest**](https://github.com/onqtam/doctest) for unit testing.
|
||||
|
||||
## Contributing
|
||||
Please open a [Github issue](https://github.com/mapsme/just_gtfs/issues/new) with as much of the information as you're able to specify, or create a [pull request](https://github.com/mapsme/just_gtfs/pulls) according to our [guidelines](https://github.com/mapsme/just_gtfs/blob/master/docs/CPP_STYLE.md).
|
||||
|
||||
## Resources
|
||||
[GTFS reference in Google GitHub repository](https://github.com/google/transit/blob/master/gtfs/spec/en/reference.md)
|
||||
|
||||
[GTFS reference on Google Transit API](https://developers.google.com/transit/gtfs/reference?csw=1)
|
||||
|
|
0
benchmarks/CMakeLists.txt
Normal file
0
benchmarks/CMakeLists.txt
Normal file
6
docs/CPP_STYLE.md
Normal file
6
docs/CPP_STYLE.md
Normal file
|
@ -0,0 +1,6 @@
|
|||
## C++ Style Guide
|
||||
|
||||
We use C++ code style similar to the [MAPS.ME project](https://github.com/mapsme/omim/blob/master/docs/CPP_STYLE.md) with some differences:
|
||||
- Use **CamelCase** for class names and **snake_case** for other entities like methods, variables, etc.
|
||||
- Use left-to-right order for variables/params: `const std::string & s` (reference to the const string).
|
||||
- Do not use prefixes like `m_` for member variables.
|
BIN
docs/logo.jpeg
Normal file
BIN
docs/logo.jpeg
Normal file
Binary file not shown.
After Width: | Height: | Size: 46 KiB |
1
doctest
Submodule
1
doctest
Submodule
|
@ -0,0 +1 @@
|
|||
Subproject commit 932a2ca50666138256dae56fbb16db3b1cae133a
|
2880
include/just_gtfs/just_gtfs.h
Normal file
2880
include/just_gtfs/just_gtfs.h
Normal file
File diff suppressed because it is too large
Load diff
20
signatures/version1/cla.json
Normal file
20
signatures/version1/cla.json
Normal file
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"signedContributors": [
|
||||
{
|
||||
"name": "nilsnolde",
|
||||
"id": 25637358,
|
||||
"comment_id": 1441674396,
|
||||
"created_at": "2023-02-23T12:23:33Z",
|
||||
"repoId": 250751634,
|
||||
"pullRequestNo": 18
|
||||
},
|
||||
{
|
||||
"name": "Osyotr",
|
||||
"id": 8740768,
|
||||
"comment_id": 2089322842,
|
||||
"created_at": "2024-05-02T00:13:58Z",
|
||||
"repoId": 250751634,
|
||||
"pullRequestNo": 21
|
||||
}
|
||||
]
|
||||
}
|
10
tests/CMakeLists.txt
Normal file
10
tests/CMakeLists.txt
Normal file
|
@ -0,0 +1,10 @@
|
|||
file(GLOB TESTS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
|
||||
|
||||
message(STATUS "CMAKE_CURRENT_BINARY_DIR=" ${CMAKE_CURRENT_BINARY_DIR})
|
||||
|
||||
foreach(TEST_SOURCE ${TESTS})
|
||||
string(REPLACE ".cpp" "" TEST_TARGET "${TEST_SOURCE}")
|
||||
add_executable(${TEST_TARGET} ${TEST_SOURCE})
|
||||
target_compile_features(${TEST_TARGET} PRIVATE cxx_std_17)
|
||||
add_test("${TEST_TARGET}" "${TEST_TARGET}" WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} --verbose)
|
||||
endforeach()
|
3
tests/data/output_feed/agency.txt
Normal file
3
tests/data/output_feed/agency.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
agency_id,agency_name,agency_url,agency_timezone,agency_lang,agency_phone,agency_fare_url,agency_email
|
||||
0Id_b^3 Company,"Big Big ""Bus Company""",,,,,b3c.no,b3c@gtfs.com
|
||||
kwf,"""killer whale ferries""",,Asia/Tokyo,en,842,f@mail.com,
|
2
tests/data/sample_feed/agency.txt
Normal file
2
tests/data/sample_feed/agency.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
agency_id,agency_name,agency_url,agency_timezone
|
||||
DTA,Demo Transit Authority,http://google.com,America/Los_Angeles
|
2
tests/data/sample_feed/attributions.txt
Normal file
2
tests/data/sample_feed/attributions.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
attribution_id,organization_name,is_producer,is_operator,is_authority,attribution_url
|
||||
0,Test inc,1,0,0,"https://test.pl/gtfs/"
|
3
tests/data/sample_feed/calendar.txt
Normal file
3
tests/data/sample_feed/calendar.txt
Normal file
|
@ -0,0 +1,3 @@
|
|||
service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date
|
||||
FULLW,1,1,1,1,1,1,1,20070101,20101231
|
||||
WE,0,0,0,0,0,1,1,20070101,20101231
|
2
tests/data/sample_feed/calendar_dates.txt
Normal file
2
tests/data/sample_feed/calendar_dates.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
service_id,date,exception_type
|
||||
FULLW,20070604,2
|
4
tests/data/sample_feed/fare_attributes.txt
Normal file
4
tests/data/sample_feed/fare_attributes.txt
Normal file
|
@ -0,0 +1,4 @@
|
|||
fare_id,price,currency_type,payment_method,transfers,transfer_duration
|
||||
p,1.25,USD,0,0,
|
||||
a,5.25,USD,1,1,
|
||||
x,20,USD,0,,60
|
5
tests/data/sample_feed/fare_rules.txt
Normal file
5
tests/data/sample_feed/fare_rules.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
fare_id,route_id,origin_id,destination_id,contains_id
|
||||
p,AB,,,
|
||||
p,STBA,,,
|
||||
p,BFC,,,
|
||||
a,AAMV,,,
|
2
tests/data/sample_feed/feed_info.txt
Normal file
2
tests/data/sample_feed/feed_info.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
feed_publisher_name,feed_publisher_url,feed_lang,feed_version,feed_license
|
||||
"Test Solutions, Inc.",http://test,en,,
|
12
tests/data/sample_feed/frequencies.txt
Normal file
12
tests/data/sample_feed/frequencies.txt
Normal file
|
@ -0,0 +1,12 @@
|
|||
trip_id,start_time,end_time,headway_secs
|
||||
STBA,6:00:00,22:00:00,1800
|
||||
CITY1,6:00:00,7:59:59,1800
|
||||
CITY2,6:00:00,7:59:59,1800
|
||||
CITY1,8:00:00,9:59:59,600
|
||||
CITY2,8:00:00,9:59:59,600
|
||||
CITY1,10:00:00,15:59:59,1800
|
||||
CITY2,10:00:00,15:59:59,1800
|
||||
CITY1,16:00:00,18:59:59,600
|
||||
CITY2,16:00:00,18:59:59,600
|
||||
CITY1,19:00:00,22:00:00,1800
|
||||
CITY2,19:00:00,22:00:00,1800
|
4
tests/data/sample_feed/levels.txt
Normal file
4
tests/data/sample_feed/levels.txt
Normal file
|
@ -0,0 +1,4 @@
|
|||
level_id,level_index,level_name
|
||||
U321L1,-1.5,"Vestibul"
|
||||
U321L2,-2,"Vestibul2"
|
||||
U321L0,0,"Povrch"
|
4
tests/data/sample_feed/pathways.txt
Normal file
4
tests/data/sample_feed/pathways.txt
Normal file
|
@ -0,0 +1,4 @@
|
|||
pathway_id,from_stop_id,to_stop_id,pathway_mode,signposted_as,reversed_signposted_as,is_bidirectional
|
||||
T-A01C01,1073S,1098E,2,"Sign1","Sign2",1
|
||||
T-A01D01,1075S,1118S,1,"Sign4",,0
|
||||
T-A01D01,1075N,1118N,1,,,1
|
6
tests/data/sample_feed/routes.txt
Normal file
6
tests/data/sample_feed/routes.txt
Normal file
|
@ -0,0 +1,6 @@
|
|||
route_id,agency_id,route_short_name,route_long_name,route_desc,route_type,route_url,route_color,route_text_color
|
||||
AB,DTA,10,Airport - Bullfrog,,3,,,
|
||||
BFC,DTA,20,Bullfrog - Furnace Creek Resort,,3,,,
|
||||
STBA,DTA,30,Stagecoach - Airport Shuttle,,3,,,
|
||||
CITY,DTA,40,City,,3,,,
|
||||
AAMV,DTA,50,Airport - Amargosa Valley,,3,,,
|
9
tests/data/sample_feed/shapes.txt
Normal file
9
tests/data/sample_feed/shapes.txt
Normal file
|
@ -0,0 +1,9 @@
|
|||
shape_id,shape_pt_lat,shape_pt_lon,shape_pt_sequence,shape_dist_traveled
|
||||
10237, 43.5176524709, -79.6906570431,50017,12669
|
||||
10237, 43.5176982107, -79.6906412064,50018,12669
|
||||
10237, 43.5177439788, -79.6906278437,50019,12669
|
||||
10237, 43.5177457792, -79.6906278048,50020,12669
|
||||
10243, 43.6448714082, -79.5249161004,10001,0
|
||||
10243, 43.6448078510, -79.5252239093,10002,0
|
||||
10243, 43.6446766156, -79.5251713255,10003,0
|
||||
10243, 43.6445544452, -79.5251234796,10004,0
|
29
tests/data/sample_feed/stop_times.txt
Normal file
29
tests/data/sample_feed/stop_times.txt
Normal file
|
@ -0,0 +1,29 @@
|
|||
trip_id,arrival_time,departure_time,stop_id,stop_sequence,stop_headsign,pickup_type,drop_off_time,shape_dist_traveled
|
||||
STBA,6:00:00,6:00:00,STAGECOACH,1,,,,
|
||||
STBA,6:20:00,6:20:00,BEATTY_AIRPORT,2,,,,
|
||||
CITY1,6:00:00,6:00:00,STAGECOACH,1,,,,
|
||||
CITY1,6:05:00,6:07:00,NANAA,2,,,,
|
||||
CITY1,6:12:00,6:14:00,NADAV,3,,,,
|
||||
CITY1,6:19:00,6:21:00,DADAN,4,,,,
|
||||
CITY1,6:26:00,6:28:00,EMSI,5,,,,
|
||||
CITY2,6:28:00,6:30:00,EMSI,1,,,,
|
||||
CITY2,6:35:00,6:37:00,DADAN,2,,,,
|
||||
CITY2,6:42:00,6:44:00,NADAV,3,,,,
|
||||
CITY2,6:49:00,6:51:00,NANAA,4,,,,
|
||||
CITY2,6:56:00,6:58:00,STAGECOACH,5,,,,
|
||||
AB1,8:00:00,8:00:00,BEATTY_AIRPORT,1,,,,
|
||||
AB1,8:10:00,8:15:00,BULLFROG,2,,,,
|
||||
AB2,12:05:00,12:05:00,BULLFROG,1,,,,
|
||||
AB2,12:15:00,12:15:00,BEATTY_AIRPORT,2
|
||||
BFC1,8:20:00,8:20:00,BULLFROG,1
|
||||
BFC1,9:20:00,9:20:00,FUR_CREEK_RES,2
|
||||
BFC2,11:00:00,11:00:00,FUR_CREEK_RES,1
|
||||
BFC2,12:00:00,12:00:00,BULLFROG,2
|
||||
AAMV1,8:00:00,8:00:00,BEATTY_AIRPORT,1
|
||||
AAMV1,9:00:00,9:00:00,AMV,2
|
||||
AAMV2,10:00:00,10:00:00,AMV,1
|
||||
AAMV2,11:00:00,11:00:00,BEATTY_AIRPORT,2
|
||||
AAMV3,13:00:00,13:00:00,BEATTY_AIRPORT,1
|
||||
AAMV3,14:00:00,14:00:00,AMV,2
|
||||
AAMV4,15:00:00,15:00:00,AMV,1
|
||||
AAMV4,16:00:00,16:00:00,BEATTY_AIRPORT,2
|
10
tests/data/sample_feed/stops.txt
Normal file
10
tests/data/sample_feed/stops.txt
Normal file
|
@ -0,0 +1,10 @@
|
|||
stop_id,stop_name,stop_desc,stop_lat,stop_lon,zone_id,stop_url
|
||||
FUR_CREEK_RES,Furnace Creek Resort (Demo),,36.425288,-117.133162,,
|
||||
BEATTY_AIRPORT,Nye County Airport (Demo),,36.868446,-116.784582,,
|
||||
BULLFROG,Bullfrog (Demo),,36.88108,-116.81797,,
|
||||
STAGECOACH,Stagecoach Hotel & Casino (Demo),,36.915682,-116.751677,,
|
||||
NADAV,North Ave / D Ave N (Demo),,36.914893,-116.76821,,
|
||||
NANAA,North Ave / N A Ave (Demo),,36.914944,-116.761472,,
|
||||
DADAN,Doing Ave / D Ave N (Demo),,36.909489,-116.768242,,
|
||||
EMSI,E Main St / S Irving St (Demo),,36.905697,-116.76218,,
|
||||
AMV,Amargosa Valley (Demo),,36.641496,-116.40094,,
|
5
tests/data/sample_feed/transfers.txt
Normal file
5
tests/data/sample_feed/transfers.txt
Normal file
|
@ -0,0 +1,5 @@
|
|||
from_stop_id,to_stop_id,transfer_type,min_transfer_time
|
||||
130,4,2,70
|
||||
227,4,0,160
|
||||
314,11,1,
|
||||
385,11,2,
|
2
tests/data/sample_feed/translations.txt
Normal file
2
tests/data/sample_feed/translations.txt
Normal file
|
@ -0,0 +1,2 @@
|
|||
table_name,field_name,language,translation,record_id,record_sub_id,field_value
|
||||
stop_times,stop_headsign,en,"Downtown",,,
|
12
tests/data/sample_feed/trips.txt
Normal file
12
tests/data/sample_feed/trips.txt
Normal file
|
@ -0,0 +1,12 @@
|
|||
route_id,service_id,trip_id,trip_headsign,direction_id,block_id,shape_id
|
||||
AB,FULLW,AB1,to Bullfrog,0,1,
|
||||
AB,FULLW,AB2,to Airport,1,2,
|
||||
STBA,FULLW,STBA,Shuttle,,,
|
||||
CITY,FULLW,CITY1,,0,,
|
||||
CITY,FULLW,CITY2,,1,,
|
||||
BFC,FULLW,BFC1,to Furnace Creek Resort,0,1,
|
||||
BFC,FULLW,BFC2,to Bullfrog,1,2,
|
||||
AAMV,WE,AAMV1,to Amargosa Valley,0,,
|
||||
AAMV,WE,AAMV2,to Airport,1,,
|
||||
AAMV,WE,AAMV3,to Amargosa Valley,0,,
|
||||
AAMV,WE,AAMV4,to Airport,1,,
|
660
tests/unit_tests.cpp
Normal file
660
tests/unit_tests.cpp
Normal file
|
@ -0,0 +1,660 @@
|
|||
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
|
||||
#include "doctest.h"
|
||||
|
||||
#include "just_gtfs/just_gtfs.h"
|
||||
|
||||
using namespace gtfs;
|
||||
|
||||
TEST_SUITE_BEGIN("Handling time GTFS fields");
|
||||
TEST_CASE("Time in H:MM:SS format")
|
||||
{
|
||||
Time stop_time("0:19:00");
|
||||
REQUIRE(stop_time.is_provided());
|
||||
CHECK_EQ(stop_time.get_hh_mm_ss(), std::make_tuple(0, 19, 0));
|
||||
CHECK_EQ(stop_time.get_raw_time(), "0:19:00");
|
||||
CHECK_EQ(stop_time.get_total_seconds(), 19 * 60);
|
||||
}
|
||||
|
||||
TEST_CASE("Time in HH:MM:SS format")
|
||||
{
|
||||
Time stop_time("39:45:30");
|
||||
CHECK_EQ(stop_time.get_hh_mm_ss(), std::make_tuple(39, 45, 30));
|
||||
CHECK_EQ(stop_time.get_raw_time(), "39:45:30");
|
||||
CHECK_EQ(stop_time.get_total_seconds(), 39 * 60 * 60 + 45 * 60 + 30);
|
||||
}
|
||||
|
||||
TEST_CASE("Time in HHH:MM:SS format")
|
||||
{
|
||||
Time stop_time("103:05:21");
|
||||
CHECK_EQ(stop_time.get_hh_mm_ss(), std::make_tuple(103, 5, 21));
|
||||
CHECK_EQ(stop_time.get_raw_time(), "103:05:21");
|
||||
CHECK_EQ(stop_time.get_total_seconds(), 103 * 60 * 60 + 5 * 60 + 21);
|
||||
}
|
||||
|
||||
TEST_CASE("Time from integers 1")
|
||||
{
|
||||
Time stop_time(14, 30, 0);
|
||||
CHECK_EQ(stop_time.get_hh_mm_ss(), std::make_tuple(14, 30, 0));
|
||||
CHECK_EQ(stop_time.get_raw_time(), "14:30:00");
|
||||
CHECK_EQ(stop_time.get_total_seconds(), 14 * 60 * 60 + 30 * 60);
|
||||
}
|
||||
|
||||
TEST_CASE("Time from integers 2")
|
||||
{
|
||||
Time stop_time(3, 0, 0);
|
||||
CHECK_EQ(stop_time.get_hh_mm_ss(), std::make_tuple(3, 0, 0));
|
||||
CHECK_EQ(stop_time.get_raw_time(), "03:00:00");
|
||||
CHECK_EQ(stop_time.get_total_seconds(), 3 * 60 * 60);
|
||||
}
|
||||
|
||||
TEST_CASE("Invalid time format")
|
||||
{
|
||||
CHECK_THROWS_AS(Time("12/10/00"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Time("12:100:00"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Time("12:10:100"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Time("12:10/10"), const InvalidFieldFormat &);
|
||||
}
|
||||
|
||||
TEST_CASE("Time not provided")
|
||||
{
|
||||
Time stop_time("");
|
||||
CHECK(!stop_time.is_provided());
|
||||
}
|
||||
|
||||
TEST_CASE("Convert to Time with 24 hours max")
|
||||
{
|
||||
Time stop_time_near_midnight("24:05:00");
|
||||
CHECK(stop_time_near_midnight.limit_hours_to_24max());
|
||||
CHECK_EQ(stop_time_near_midnight.get_raw_time(), "00:05:00");
|
||||
|
||||
Time stop_time_morning("27:05:00");
|
||||
stop_time_morning.limit_hours_to_24max();
|
||||
CHECK_EQ(stop_time_morning.get_raw_time(), "03:05:00");
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
||||
TEST_SUITE_BEGIN("Handling date GTFS fields");
|
||||
TEST_CASE("Date not provided")
|
||||
{
|
||||
Date date("");
|
||||
CHECK(!date.is_provided());
|
||||
}
|
||||
|
||||
TEST_CASE("Invalid date format")
|
||||
{
|
||||
// Violation of the format YYYYMMDD:
|
||||
CHECK_THROWS_AS(Date("1999314"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Date("20081414"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Date("20170432"), const InvalidFieldFormat &);
|
||||
|
||||
// Count of days in february (leap year):
|
||||
CHECK_THROWS_AS(Date("20200230"), const InvalidFieldFormat &);
|
||||
// Count of days in february (not leap year):
|
||||
CHECK_THROWS_AS(Date("20210229"), const InvalidFieldFormat &);
|
||||
|
||||
// Count of days in months with 30 days:
|
||||
CHECK_THROWS_AS(Date("19980431"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Date("19980631"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Date("19980931"), const InvalidFieldFormat &);
|
||||
CHECK_THROWS_AS(Date("19981131"), const InvalidFieldFormat &);
|
||||
}
|
||||
|
||||
TEST_CASE("Date from string 1")
|
||||
{
|
||||
Date date("20230903");
|
||||
CHECK_EQ(date.get_yyyy_mm_dd(), std::make_tuple(2023, 9, 3));
|
||||
CHECK_EQ(date.get_raw_date(), "20230903");
|
||||
CHECK(date.is_provided());
|
||||
}
|
||||
|
||||
TEST_CASE("Date from string 2")
|
||||
{
|
||||
Date date("20161231");
|
||||
CHECK_EQ(date.get_yyyy_mm_dd(), std::make_tuple(2016, 12, 31));
|
||||
CHECK_EQ(date.get_raw_date(), "20161231");
|
||||
CHECK(date.is_provided());
|
||||
}
|
||||
|
||||
TEST_CASE("Date from string 3")
|
||||
{
|
||||
Date date("20200229");
|
||||
CHECK_EQ(date.get_yyyy_mm_dd(), std::make_tuple(2020, 2, 29));
|
||||
CHECK_EQ(date.get_raw_date(), "20200229");
|
||||
CHECK(date.is_provided());
|
||||
}
|
||||
|
||||
TEST_CASE("Date from integers")
|
||||
{
|
||||
Date date(2022, 8, 16);
|
||||
CHECK_EQ(date.get_yyyy_mm_dd(), std::make_tuple(2022, 8, 16));
|
||||
|
||||
CHECK_EQ(date.get_raw_date(), "20220816");
|
||||
CHECK(date.is_provided());
|
||||
}
|
||||
|
||||
TEST_SUITE_END();
|
||||
|
||||
TEST_SUITE_BEGIN("Csv parsing");
|
||||
TEST_CASE("Record with empty values")
{
  // ",, ," yields four fields; the whitespace-only token is expected
  // to come back empty as well.
  const auto fields = CsvParser::split_record(",, ,");
  REQUIRE_EQ(fields.size(), 4);
  for (const auto & field : fields)
    CHECK(field.empty());
}

TEST_CASE("Header with UTF BOM")
{
  // A UTF-8 byte order mark in front of the header row must be stripped.
  const auto header = CsvParser::split_record("\xef\xbb\xbfroute_id, agency_id", true);
  REQUIRE_EQ(header.size(), 2);
  CHECK_EQ(header[0], "route_id");
  CHECK_EQ(header[1], "agency_id");
}

TEST_CASE("Quotation marks")
{
  // A quoted field may contain the delimiter without splitting the record.
  const auto fields = CsvParser::split_record(R"(27681 ,,"Sisters, OR",,"44.29124",1)");
  REQUIRE_EQ(fields.size(), 6);
  CHECK_EQ(fields[2], "Sisters, OR");
  CHECK_EQ(fields[4], "44.29124");
  CHECK_EQ(fields[5], "1");
}

TEST_CASE("Not wrapped quotation marks")
{
  // Quotes inside an unquoted field are kept verbatim; the comma still splits.
  const auto fields = CsvParser::split_record(R"(Contains "quotes", commas and text)");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], R"(Contains "quotes")");
  CHECK_EQ(fields[1], "commas and text");
}

TEST_CASE("Wrapped quotation marks")
{
  // Inside a quoted field, doubled quotes collapse to a single quote
  // and the embedded comma does not split the field.
  const auto fields = CsvParser::split_record(R"("Contains ""quotes"", commas and text")");
  REQUIRE_EQ(fields.size(), 1);
  CHECK_EQ(fields[0], R"(Contains "quotes", commas and text)");
}

TEST_CASE("Double wrapped quotation marks")
{
  // NOTE(review): only the field count is asserted here; the expected field
  // content for doubled outer quotes is not pinned down by this test.
  const auto fields = CsvParser::split_record(R"(""Double quoted text"")");
  REQUIRE_EQ(fields.size(), 1);
}

TEST_CASE("Read quoted empty values")
{
  // An empty unquoted field and an empty quoted field ("") both decode to "".
  const auto fields = CsvParser::split_record(",\"\"");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], "");
  CHECK_EQ(fields[1], "");
}

TEST_CASE("Read quoted quote")
{
  // A quoted field containing only "" decodes to one literal quote.
  const auto fields = CsvParser::split_record(",\"\"\"\"");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], "");
  CHECK_EQ(fields[1], "\"");
}

TEST_CASE("Read quoted double quote")
{
  // Two escaped quote pairs inside a quoted field decode to two literal quotes.
  const auto fields = CsvParser::split_record(",\"\"\"\"\"\"");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], "");
  CHECK_EQ(fields[1], "\"\"");
}

TEST_CASE("Read quoted values with quotes in begin")
{
  // Escaped quotes at the very start of a quoted field are decoded correctly.
  const auto fields = CsvParser::split_record(",\"\"\"Name\"\" and some other\"");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], "");
  CHECK_EQ(fields[1], "\"Name\" and some other");
}

TEST_CASE("Read quoted values with quotes at end")
{
  // Escaped quotes at the very end of a quoted field are decoded correctly.
  const auto fields = CsvParser::split_record(",\"Text and \"\"Name\"\"\"");
  REQUIRE_EQ(fields.size(), 2);
  CHECK_EQ(fields[0], "");
  CHECK_EQ(fields[1], "Text and \"Name\"");
}

TEST_SUITE_END();
|
||||
|
||||
TEST_SUITE_BEGIN("Read & write");
|
||||
// Credits:
|
||||
// https://developers.google.com/transit/gtfs/examples/gtfs-feed
|
||||
TEST_CASE("Empty container before parsing")
{
  // Before any read_* call the feed holds no data and lookups return nothing.
  Feed feed("data/non_existing_dir");
  REQUIRE(feed.get_agencies().empty());
  const auto agency = feed.get_agency("agency_10");
  CHECK(!agency);
}

// Fixed typo in the test name: "existend" -> "existent".
TEST_CASE("Non existent directory")
{
  // Reading from a missing directory reports ERROR_FILE_ABSENT
  // and leaves the container empty.
  Feed feed("data/non_existing_dir");
  REQUIRE_EQ(feed.read_transfers(), ResultCode::ERROR_FILE_ABSENT);
  CHECK_EQ(feed.get_transfers().size(), 0);
}

TEST_CASE("Transfers")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_transfers(), ResultCode::OK);

  const auto & all_transfers = feed.get_transfers();
  CHECK_EQ(all_transfers.size(), 4);

  // First record of transfers.txt:
  CHECK_EQ(all_transfers[0].from_stop_id, "130");
  CHECK_EQ(all_transfers[0].to_stop_id, "4");
  CHECK_EQ(all_transfers[0].transfer_type, TransferType::MinimumTime);
  CHECK_EQ(all_transfers[0].min_transfer_time, 70);

  // Lookup by (from_stop_id, to_stop_id) pair:
  const auto & found = feed.get_transfer("314", "11");
  REQUIRE(found);
  CHECK_EQ(found.value().transfer_type, TransferType::Timed);
  CHECK_EQ(found.value().min_transfer_time, 0);
}

TEST_CASE("Calendar")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_calendar(), ResultCode::OK);
  REQUIRE_EQ(feed.get_calendar().size(), 2);

  // Lookup of the weekend-only service:
  const auto & weekend = feed.get_calendar("WE");
  REQUIRE(weekend);

  CHECK_EQ(weekend->start_date, Date(2007, 1, 1));
  CHECK_EQ(weekend->end_date, Date(2010, 12, 31));

  // "WE" runs on Saturday and Sunday only.
  CHECK_EQ(weekend->monday, CalendarAvailability::NotAvailable);
  CHECK_EQ(weekend->tuesday, CalendarAvailability::NotAvailable);
  CHECK_EQ(weekend->wednesday, CalendarAvailability::NotAvailable);
  CHECK_EQ(weekend->thursday, CalendarAvailability::NotAvailable);
  CHECK_EQ(weekend->friday, CalendarAvailability::NotAvailable);
  CHECK_EQ(weekend->saturday, CalendarAvailability::Available);
  CHECK_EQ(weekend->sunday, CalendarAvailability::Available);
}

TEST_CASE("Calendar dates")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_calendar_dates(), ResultCode::OK);
  REQUIRE_EQ(feed.get_calendar_dates().size(), 1);

  // Service exceptions registered for the "FULLW" service:
  const auto & exceptions = feed.get_calendar_dates("FULLW");
  REQUIRE(!exceptions.empty());

  CHECK_EQ(exceptions[0].date, Date(2007, 6, 4));
  CHECK_EQ(exceptions[0].exception_type, CalendarDateException::Removed);
}

TEST_CASE("Read GTFS feed")
{
  // Smoke test: parse the whole sample feed and verify record counts
  // for every entity type.
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_feed(), ResultCode::OK);

  CHECK_EQ(feed.get_agencies().size(), 1);
  CHECK_EQ(feed.get_routes().size(), 5);
  CHECK_EQ(feed.get_trips().size(), 11);
  CHECK_EQ(feed.get_shapes().size(), 8);
  CHECK_EQ(feed.get_stops().size(), 9);
  CHECK_EQ(feed.get_stop_times().size(), 28);
  CHECK_EQ(feed.get_transfers().size(), 4);
  CHECK_EQ(feed.get_frequencies().size(), 11);
  CHECK_EQ(feed.get_attributions().size(), 1);
  CHECK_EQ(feed.get_calendar().size(), 2);
  CHECK_EQ(feed.get_calendar_dates().size(), 1);
  CHECK_EQ(feed.get_fare_attributes().size(), 3);
  CHECK_EQ(feed.get_fare_rules().size(), 4);
  CHECK(!feed.get_feed_info().feed_publisher_name.empty());
  CHECK_EQ(feed.get_levels().size(), 3);
  CHECK_EQ(feed.get_pathways().size(), 3);
  CHECK_EQ(feed.get_translations().size(), 1);
}

TEST_CASE("Agency")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_agencies(), ResultCode::OK);

  const auto & agencies = feed.get_agencies();
  REQUIRE_EQ(agencies.size(), 1);

  // Fields of the single agency record:
  CHECK_EQ(agencies[0].agency_id, "DTA");
  CHECK_EQ(agencies[0].agency_name, "Demo Transit Authority");
  CHECK_EQ(agencies[0].agency_url, "http://google.com");
  CHECK(agencies[0].agency_lang.empty());
  CHECK_EQ(agencies[0].agency_timezone, "America/Los_Angeles");

  // Lookup by id:
  const auto found = feed.get_agency("DTA");
  CHECK(found);

  // Round-trip: write the agencies out and read them back.
  REQUIRE_EQ(feed.write_agencies("data/output_feed"), ResultCode::OK);
  Feed feed_copy("data/output_feed");
  REQUIRE_EQ(feed_copy.read_agencies(), ResultCode::OK);
  CHECK_EQ(agencies, feed_copy.get_agencies());
}

TEST_CASE("Routes")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_routes(), ResultCode::OK);

  const auto & all_routes = feed.get_routes();
  REQUIRE_EQ(all_routes.size(), 5);

  // First record of routes.txt:
  CHECK_EQ(all_routes[0].route_id, "AB");
  CHECK_EQ(all_routes[0].agency_id, "DTA");
  CHECK_EQ(all_routes[0].route_short_name, "10");
  CHECK_EQ(all_routes[0].route_long_name, "Airport - Bullfrog");
  CHECK_EQ(all_routes[0].route_type, RouteType::Bus);
  CHECK(all_routes[0].route_text_color.empty());
  CHECK(all_routes[0].route_color.empty());
  CHECK(all_routes[0].route_desc.empty());

  // Lookup by id:
  const auto & found = feed.get_route("AB");
  CHECK(found);
}

TEST_CASE("Trips")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_trips(), ResultCode::OK);

  const auto & all_trips = feed.get_trips();
  REQUIRE_EQ(all_trips.size(), 11);

  // First record of trips.txt:
  CHECK_EQ(all_trips[0].block_id, "1");
  CHECK_EQ(all_trips[0].route_id, "AB");
  CHECK_EQ(all_trips[0].direction_id, TripDirectionId::DefaultDirection);
  CHECK_EQ(all_trips[0].trip_headsign, "to Bullfrog");
  CHECK(all_trips[0].shape_id.empty());
  CHECK_EQ(all_trips[0].service_id, "FULLW");
  CHECK_EQ(all_trips[0].trip_id, "AB1");

  // Lookup by id:
  const auto & found = feed.get_trip("AB1");
  REQUIRE(found);
  CHECK(found.value().trip_short_name.empty());
}

TEST_CASE("Stops")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_stops(), ResultCode::OK);

  const auto & all_stops = feed.get_stops();
  REQUIRE_EQ(all_stops.size(), 9);

  // First record of stops.txt:
  CHECK_EQ(all_stops[0].stop_lat, 36.425288);
  CHECK_EQ(all_stops[0].stop_lon, -117.133162);
  CHECK(all_stops[0].stop_code.empty());
  CHECK(all_stops[0].stop_url.empty());
  CHECK_EQ(all_stops[0].stop_id, "FUR_CREEK_RES");
  CHECK(all_stops[0].stop_desc.empty());
  CHECK_EQ(all_stops[0].stop_name, "Furnace Creek Resort (Demo)");
  CHECK_EQ(all_stops[0].location_type, StopLocationType::StopOrPlatform);
  CHECK(all_stops[0].zone_id.empty());

  // Lookup by id:
  const auto & found = feed.get_stop("FUR_CREEK_RES");
  REQUIRE(found);
}

TEST_CASE("StopTimes")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_stop_times(), ResultCode::OK);

  const auto & all_stop_times = feed.get_stop_times();
  REQUIRE_EQ(all_stop_times.size(), 28);

  // First record of stop_times.txt:
  CHECK_EQ(all_stop_times[0].trip_id, "STBA");
  CHECK_EQ(all_stop_times[0].arrival_time, Time(6, 0, 0));
  CHECK_EQ(all_stop_times[0].departure_time, Time(6, 0, 0));
  CHECK_EQ(all_stop_times[0].stop_id, "STAGECOACH");
  CHECK_EQ(all_stop_times[0].stop_sequence, 1);
  CHECK(all_stop_times[0].stop_headsign.empty());
  CHECK_EQ(all_stop_times[0].pickup_type, StopTimeBoarding::RegularlyScheduled);
  CHECK_EQ(all_stop_times[0].drop_off_type, StopTimeBoarding::RegularlyScheduled);

  // Secondary lookups by stop and by trip:
  CHECK_EQ(feed.get_stop_times_for_stop("STAGECOACH").size(), 3);
  CHECK_EQ(feed.get_stop_times_for_trip("STBA").size(), 2);
}

TEST_CASE("Shapes")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_shapes(), ResultCode::OK);

  const auto & all_shapes = feed.get_shapes();
  REQUIRE_EQ(all_shapes.size(), 8);

  // First shape point of shapes.txt:
  CHECK_EQ(all_shapes[0].shape_id, "10237");
  CHECK_EQ(all_shapes[0].shape_pt_lat, 43.5176524709);
  CHECK_EQ(all_shapes[0].shape_pt_lon, -79.6906570431);
  CHECK_EQ(all_shapes[0].shape_pt_sequence, 50017);
  CHECK_EQ(all_shapes[0].shape_dist_traveled, 12669);

  // All points belonging to one shape id:
  const auto & points = feed.get_shape("10237");
  CHECK_EQ(points.size(), 4);
}

// Renamed from "Calendar": this suite already registers a TEST_CASE with
// that exact name above, which makes name-based filtering and reporting
// ambiguous.
TEST_CASE("Calendar fields")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_calendar(), ResultCode::OK);

  const auto & calendar = feed.get_calendar();
  REQUIRE_EQ(calendar.size(), 2);

  // First record of calendar.txt:
  CHECK_EQ(calendar[0].service_id, "FULLW");
  CHECK_EQ(calendar[0].start_date, Date(2007, 1, 1));
  CHECK_EQ(calendar[0].end_date, Date(2010, 12, 31));
  CHECK_EQ(calendar[0].monday, CalendarAvailability::Available);
  CHECK_EQ(calendar[0].sunday, CalendarAvailability::Available);

  // Lookup by service id:
  const auto & calendar_for_service = feed.get_calendar("FULLW");
  CHECK(calendar_for_service);
}

// Renamed from "Calendar dates": this suite already registers a TEST_CASE
// with that exact name above, which makes name-based filtering and
// reporting ambiguous.
TEST_CASE("Calendar dates fields")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_calendar_dates(), ResultCode::OK);

  const auto & calendar_dates = feed.get_calendar_dates();
  REQUIRE_EQ(calendar_dates.size(), 1);

  // Single record of calendar_dates.txt:
  CHECK_EQ(calendar_dates[0].service_id, "FULLW");
  CHECK_EQ(calendar_dates[0].date, Date(2007, 6, 4));
  CHECK_EQ(calendar_dates[0].exception_type, CalendarDateException::Removed);

  // Lookup by service id:
  const auto & calendar_dates_for_service = feed.get_calendar_dates("FULLW");
  CHECK_EQ(calendar_dates_for_service.size(), 1);
}

TEST_CASE("Frequencies")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_frequencies(), ResultCode::OK);

  const auto & all_frequencies = feed.get_frequencies();
  REQUIRE_EQ(all_frequencies.size(), 11);

  // First record of frequencies.txt:
  CHECK_EQ(all_frequencies[0].trip_id, "STBA");
  CHECK_EQ(all_frequencies[0].start_time, Time(6, 0, 0));
  CHECK_EQ(all_frequencies[0].end_time, Time(22, 0, 0));
  CHECK_EQ(all_frequencies[0].headway_secs, 1800);

  // Lookup by trip id:
  const auto & frequencies_for_trip = feed.get_frequencies("CITY1");
  CHECK_EQ(frequencies_for_trip.size(), 5);
}

TEST_CASE("Fare attributes")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_fare_attributes(), ResultCode::OK);

  const auto & attributes = feed.get_fare_attributes();
  REQUIRE_EQ(attributes.size(), 3);

  // Fare "p": paid on board, no transfers.
  CHECK_EQ(attributes[0].fare_id, "p");
  CHECK_EQ(attributes[0].price, 1.25);
  CHECK_EQ(attributes[0].currency_type, "USD");
  CHECK_EQ(attributes[0].payment_method, FarePayment::OnBoard);
  CHECK_EQ(attributes[0].transfers, FareTransfers::No);
  CHECK_EQ(attributes[0].transfer_duration, 0);

  // Fare "a": prepaid, one transfer allowed.
  CHECK_EQ(attributes[1].fare_id, "a");
  CHECK_EQ(attributes[1].price, 5.25);
  CHECK_EQ(attributes[1].currency_type, "USD");
  CHECK_EQ(attributes[1].payment_method, FarePayment::BeforeBoarding);
  CHECK_EQ(attributes[1].transfers, FareTransfers::Once);
  CHECK_EQ(attributes[1].transfer_duration, 0);

  // Fare "x": paid on board, unlimited transfers within 60 seconds.
  CHECK_EQ(attributes[2].fare_id, "x");
  CHECK_EQ(attributes[2].price, 20);
  CHECK_EQ(attributes[2].currency_type, "USD");
  CHECK_EQ(attributes[2].payment_method, FarePayment::OnBoard);
  CHECK_EQ(attributes[2].transfers, FareTransfers::Unlimited);
  CHECK_EQ(attributes[2].transfer_duration, 60);

  // Lookup by fare id:
  const auto & attributes_for_id = feed.get_fare_attributes("a");
  REQUIRE_EQ(attributes_for_id.size(), 1);
  CHECK_EQ(attributes_for_id[0].price, 5.25);

  // Round-trip: write the attributes out and read them back.
  REQUIRE_EQ(feed.write_fare_attributes("data/output_feed"), ResultCode::OK);
  Feed feed_copy("data/output_feed");
  REQUIRE_EQ(feed_copy.read_fare_attributes(), ResultCode::OK);
  CHECK_EQ(attributes, feed_copy.get_fare_attributes());
}

TEST_CASE("Fare rules")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_fare_rules(), ResultCode::OK);

  const auto & all_rules = feed.get_fare_rules();
  REQUIRE_EQ(all_rules.size(), 4);

  // First record of fare_rules.txt:
  CHECK_EQ(all_rules[0].fare_id, "p");
  CHECK_EQ(all_rules[0].route_id, "AB");

  // Lookup by fare id:
  const auto & rules_for_id = feed.get_fare_rules("p");
  REQUIRE_EQ(rules_for_id.size(), 3);
  CHECK_EQ(rules_for_id[1].route_id, "STBA");
}

TEST_CASE("Levels")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_levels(), ResultCode::OK);

  const auto & all_levels = feed.get_levels();
  REQUIRE_EQ(all_levels.size(), 3);

  // First record of levels.txt:
  CHECK_EQ(all_levels[0].level_id, "U321L1");
  CHECK_EQ(all_levels[0].level_index, -1.5);

  // Lookup by level id:
  const auto & found = feed.get_level("U321L2");
  REQUIRE(found);
  CHECK_EQ(found.value().level_index, -2);
  CHECK_EQ(found.value().level_name, "Vestibul2");
}

TEST_CASE("Pathways")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_pathways(), ResultCode::OK);

  const auto & all_pathways = feed.get_pathways();
  REQUIRE_EQ(all_pathways.size(), 3);

  // First record of pathways.txt:
  CHECK_EQ(all_pathways[0].pathway_id, "T-A01C01");
  CHECK_EQ(all_pathways[0].from_stop_id, "1073S");
  CHECK_EQ(all_pathways[0].to_stop_id, "1098E");
  CHECK_EQ(all_pathways[0].pathway_mode, PathwayMode::Stairs);
  CHECK_EQ(all_pathways[0].signposted_as, "Sign1");
  CHECK_EQ(all_pathways[0].reversed_signposted_as, "Sign2");
  CHECK_EQ(all_pathways[0].is_bidirectional, PathwayDirection::Bidirectional);

  // Lookup by pathway id — two records share this id:
  const auto & pathways_by_id = feed.get_pathways("T-A01D01");
  REQUIRE_EQ(pathways_by_id.size(), 2);
  CHECK_EQ(pathways_by_id[0].is_bidirectional, PathwayDirection::Unidirectional);
  CHECK(pathways_by_id[0].reversed_signposted_as.empty());
}

TEST_CASE("Translations")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_translations(), ResultCode::OK);

  const auto & all_translations = feed.get_translations();
  REQUIRE_EQ(all_translations.size(), 1);

  // Single record of translations.txt:
  CHECK_EQ(all_translations[0].table_name, "stop_times");
  CHECK_EQ(all_translations[0].field_name, "stop_headsign");
  CHECK_EQ(all_translations[0].language, "en");
  CHECK_EQ(all_translations[0].translation, "Downtown");
  CHECK(all_translations[0].record_id.empty());
  CHECK(all_translations[0].record_sub_id.empty());
  CHECK(all_translations[0].field_value.empty());

  // Lookup by table name:
  CHECK_EQ(feed.get_translations("stop_times").size(), 1);
}

TEST_CASE("Attributions")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_attributions(), ResultCode::OK);

  const auto & all_attributions = feed.get_attributions();
  REQUIRE_EQ(all_attributions.size(), 1);

  // Single record of attributions.txt:
  CHECK_EQ(all_attributions[0].attribution_id, "0");
  CHECK_EQ(all_attributions[0].organization_name, "Test inc");
  CHECK_EQ(all_attributions[0].is_producer, AttributionRole::Yes);
  CHECK_EQ(all_attributions[0].is_operator, AttributionRole::No);
  CHECK_EQ(all_attributions[0].is_authority, AttributionRole::No);
  CHECK_EQ(all_attributions[0].attribution_url, "https://test.pl/gtfs/");
  CHECK(all_attributions[0].attribution_email.empty());
  CHECK(all_attributions[0].attribution_phone.empty());
}

TEST_CASE("Feed info")
{
  Feed feed("data/sample_feed");
  REQUIRE_EQ(feed.read_feed_info(), ResultCode::OK);

  // feed_info.txt holds a single record exposed as one struct.
  const auto & info = feed.get_feed_info();
  CHECK_EQ(info.feed_publisher_name, "Test Solutions, Inc.");
  CHECK_EQ(info.feed_publisher_url, "http://test");
  CHECK_EQ(info.feed_lang, "en");
}

TEST_SUITE_END();
|
||||
|
||||
TEST_SUITE_BEGIN("Simple pipelines");
|
||||
|
||||
TEST_CASE("Agencies create & save")
|
||||
{
|
||||
Feed feed_for_writing;
|
||||
|
||||
Agency agency1;
|
||||
agency1.agency_id = "0Id_b^3 Company";
|
||||
agency1.agency_name = R"(Big Big "Bus Company")";
|
||||
agency1.agency_email = "b3c@gtfs.com";
|
||||
agency1.agency_fare_url = "b3c.no";
|
||||
|
||||
Agency agency2;
|
||||
agency2.agency_id = "kwf";
|
||||
agency2.agency_name = R"("killer whale ferries")";
|
||||
agency2.agency_lang = "en";
|
||||
agency2.agency_phone = "842";
|
||||
agency2.agency_timezone = "Asia/Tokyo";
|
||||
agency2.agency_fare_url = "f@mail.com";
|
||||
|
||||
feed_for_writing.add_agency(agency1);
|
||||
feed_for_writing.add_agency(agency2);
|
||||
|
||||
REQUIRE_EQ(feed_for_writing.write_agencies("data/output_feed"), ResultCode::OK);
|
||||
Feed feed_for_testing("data/output_feed");
|
||||
|
||||
REQUIRE_EQ(feed_for_testing.read_agencies(), ResultCode::OK);
|
||||
CHECK_EQ(feed_for_writing.get_agencies(), feed_for_testing.get_agencies());
|
||||
}
|
||||
TEST_SUITE_END();
|
Loading…
Add table
Reference in a new issue