Compare commits

..

9 commits

Author | SHA1 | Message | Date
Viktor Govako | 240a45011c | [planet] 230408 with addr:interpolation. | 2023-04-10 21:45:17 -03:00
Viktor Govako | 76460434db | [search] Added addr:interpolation matching. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 21:44:51 -03:00
Viktor Govako | 0e6507a2a8 | [strings] Regenerated. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 21:44:51 -03:00
Viktor Govako | 6077d9824d | [classifier] Regenerated. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 21:44:51 -03:00
Viktor Govako | a3d54b22c8 | [classifier] Added addr:interpolation type. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 21:44:51 -03:00
Viktor Govako | eea26725e6 | Use fully qualified std::move at least in header files to avoid warnings. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 17:26:29 -03:00
Viktor Govako | 9182d33485 | [search] Removed "street" tokens from HN list. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 17:26:29 -03:00
Viktor Govako | a8ef807b8c | [search] Minor fixes and TODOs. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 17:26:29 -03:00
Viktor Govako | 230dbd2474 | [search] Fixed MatchLatLon. Signed-off-by: Viktor Govako <viktor.govako@gmail.com> | 2023-04-10 17:26:29 -03:00
10705 changed files with 841675 additions and 1361047 deletions


@ -4,7 +4,7 @@
BasedOnStyle: Google
IndentWidth: 2
BreakBeforeBraces: Allman
ColumnLimit: 120
ColumnLimit: 100
---
Language: Cpp
@ -21,7 +21,7 @@ IndentCaseLabels: false
NamespaceIndentation: None
PointerAlignment: Middle
SortIncludes: true
Standard: c++20
Standard: c++17
IncludeBlocks: Regroup
IncludeCategories:
# Tests --------------------------------------------------------------------------------------------
@ -163,6 +163,8 @@ IncludeCategories:
- Regex: '^"openlr/openlr_stat/'
Priority: 19400
- Regex: '^"mapshot/'
Priority: 19500
- Regex: '^"search/search_quality/booking_dataset_generator/'
Priority: 19707
@ -258,6 +260,9 @@ IncludeCategories:
- Regex: '^"editor/'
Priority: 48310
- Regex: '^"software_renderer/'
Priority: 48400
- Regex: '^"drape_frontend/'
Priority: 48500


@ -1,4 +1 @@
6aa73face8b5eb8e026cfafa40d1983d4a0502c0
480fa6c2fcf53be296504ac6ba8e6b3d70f92b42
a6ede2b1466f0c9d8a443600ef337ba6b5832e58
1377b81bf1cac72bb6da192da7fed6696d5d5281

.github/CODEOWNERS

@ -1,69 +0,0 @@
# All non-assigned.
* @organicmaps/mergers
# Visual design.
/android/app/src/main/res/drawable*/ @organicmaps/design
/android/app/src/main/res/font/ @organicmaps/design
/android/app/src/main/res/mipmap*/ @organicmaps/design
/data/*.ttf @organicmaps/design
/data/resources-svg/ @organicmaps/design
/data/search-icons/ @organicmaps/design
/iphone/Maps/Images.xcassets/ @organicmaps/design
# Android.
/android/ @organicmaps/android
/android/app/src/main/java/app/organicmaps/car/ @organicmaps/android-auto
/docs/ANDROID_LOCATION_TEST.md @organicmaps/android
/docs/JAVA_STYLE.md @organicmaps/android
# no owner for translation changes
/android/app/src/main/res/values*/strings.xml
# iOS.
/iphone/ @organicmaps/ios
/xcode/ @organicmaps/ios
/docs/OBJC_STYLE.md @organicmaps/ios
# no owner for translation changes
/iphone/plist.txt
/iphone/Maps/LocalizedStrings/
# Qt
/qt/ @organicmaps/qt
# Rendering
/drape/ @organicmaps/rendering
/drape_frontend/ @organicmaps/rendering
# Map Data.
/tools/python/maps_generator/ @organicmaps/data
/generator/ @organicmaps/data
/topography_generator/ @organicmaps/data
/data/borders/ @organicmaps/data
/data/conf/isolines/ @organicmaps/data
/docs/SUBWAY_GENERATION.md @organicmaps/data
/docs/MAPS.md @organicmaps/data
/docs/EXPERIMENTAL_PUBLIC_TRANSPORT_SUPPORT.md @organicmaps/data
# no owner (changed often to add a new POI)
/generator/generator_tests/osm_type_test.cpp
# Map Styles.
/data/styles/ @organicmaps/styles
/data/types.txt @organicmaps/styles
/data/visibility.txt @organicmaps/styles
/data/mapcss-mapping.csv @organicmaps/styles
/data/replaced_tags.txt @organicmaps/styles
/data/classificator.txt @organicmaps/styles
/data/drules_* @organicmaps/styles
/docs/STYLES.md
/tools/kothic/ @organicmaps/styles
# DevOps.
/.github/workflows @organicmaps/devops
/android/*gradle* @organicmaps/devops
/docs/RELEASE_MANAGEMENT.md @organicmaps/devops
/xcode/fastlane/ @organicmaps/devops
# Growth.
README.md @organicmaps/growth
/.github/FUNDING.yml @organicmaps/growth
/android/app/src/fdroid/play/ @organicmaps/growth
/android/app/src/google/play/ @organicmaps/growth
/iphone/metadata/ @organicmaps/growth
# Legal.
LEGAL @organicmaps/legal
LICENSE @organicmaps/legal
NOTICE @organicmaps/legal
CONTRIBUTORS @organicmaps/legal
/docs/CODE_OF_CONDUCT.md @organicmaps/legal
/docs/DCO.md @organicmaps/legal
/docs/GOVERNANCE.md @organicmaps/legal

.github/FUNDING.yml

@ -1,4 +1,3 @@
github: organicmaps
liberapay: OrganicMaps
open_collective: organicmaps
custom: ["https://organicmaps.app/donate/", "https://donate.organicmaps.app/"]
custom: ["https://organicmaps.app/donate"]


@ -1,17 +1,14 @@
---
name: Bug Report
about: Describe your issue in detail to help us improve Organic Maps
about: Describe your issue in details to help us improve Organic Maps
title: ''
labels: ''
assignees: ''
---
⚠ Have you searched for similar, already existing issues?
**Describe the issue**
Please write a clear and concise description of the issue here.
Please write here a clear and concise description of what the bug/issue is about.
**Steps to reproduce**
1. Go to '...'
@ -19,20 +16,16 @@ Please write a clear and concise description of the issue here.
3. Scroll down to '....'
4. See error
**Expected behaviour**
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots or screen recordings to help explain your problem.
**System information:**
- Operating system and its version: [iOS 12, Android 10, Ubuntu 22, MacOS Big Sur, etc.]
- Organic Maps version: [you can find it by tapping the button with the green Organic Maps logo]
- Device Model: [e.g. iPhone 6, Samsung S22]
- Organic Maps version: [you can find it by clicking the "?" button]
- Device Model: [e.g. iPhone6, Samsung S22]
**Additional context**
Please add any other context or comments here that may be useful.
Please add any other context and important details/notes/comments about the problem here.


@ -2,10 +2,10 @@ blank_issues_enabled: true
contact_links:
- name: Discussions
url: https://github.com/organicmaps/organicmaps/discussions
about: Discuss the usage of Organic Maps, ask questions, or talk about ideas that aren't yet actionable.
about: Discuss the usage of OrganicMaps, ask questions, or talk about ideas that are not yet actionable.
- name: Translations
url: https://github.com/organicmaps/organicmaps/blob/master/docs/TRANSLATIONS.md
about: Translate Organic Maps into your language
- name: News
url: https://organicmaps.app/news/
about: Check the latest project news
about: Check the latest project news


@ -1,27 +1,22 @@
---
name: Feature Request
about: Suggest an idea for Organic Maps
name: Feature request
about: Suggest an idea for OrganicMaps
title: ''
labels: []
labels: [Enhancement]
assignees: ''
---
⚠ Have you searched for similar, already existing issues?
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. For example:
I'm always frustrated when [...]
**Describe the ideal solution**
**Describe the solution you would like**
A clear and concise description of what you want to see in Organic Maps.
**Describe alternatives you have considered**
- How do you solve this issue now with Organic Maps or other apps?
- Attach any examples, screenshots, or screen recordings from other apps that help us to better understand the idea.
**Additional context**
Add any other context or screenshots about the feature request here.


@ -19,12 +19,6 @@ on:
- docs/**
- generator/**
- packaging/**
- platform/*apple*
- platform/*_ios*
- platform/*_linux*
- platform/*_mac*
- platform/*qt*
- platform/*_win*
- pyhelpers/**
- qt*/**
- skin_generator/**
@ -32,9 +26,6 @@ on:
- track_generator/**
- xcode/**
env:
JAVA_HOME: /usr/lib/jvm/temurin-17-jdk-amd64 # Java 17 is required for Android Gradle 8 plugin
jobs:
android-google-beta:
name: Android Google Beta
@ -48,7 +39,7 @@ jobs:
sudo apt-get install -y ninja-build
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # enough to get all commits for the current day
@ -56,24 +47,19 @@ jobs:
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Restore beta keys
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: private.git
- name: Configure repo with private keys
shell: bash
run: |
echo "$PRIVATE_H" | base64 -d > private.h
echo "$FIREBASE_APP_DISTRIBUTION_JSON" | base64 -d > android/app/firebase-app-distribution.json
echo "$GOOGLE_SERVICES_JSON" | base64 -d > android/app/google-services.json
echo "$SECURE_PROPERTIES" | base64 -d > android/app/secure.properties
echo "$RELEASE_KEYSTORE" | base64 -d > android/app/release.keystore
env:
PRIVATE_H: ${{ secrets.PRIVATE_H }}
FIREBASE_APP_DISTRIBUTION_JSON: ${{ secrets.FIREBASE_APP_DISTRIBUTION_JSON }}
GOOGLE_SERVICES_JSON: ${{ secrets.GOOGLE_SERVICES_JSON }}
SECURE_PROPERTIES: ${{ secrets.SECURE_PROPERTIES }}
RELEASE_KEYSTORE: ${{ secrets.RELEASE_KEYSTORE }}
- name: Configure repository
shell: bash
run: ./configure.sh
./configure.sh ./private.git
rm -rf ./private.git
- name: Compile
shell: bash
@ -81,7 +67,7 @@ jobs:
run: |
cmake --version
ninja --version
./gradlew -Pfirebase assembleGoogleBeta uploadCrashlyticsSymbolFileGoogleBeta uploadCrashlyticsMappingFileGoogleBeta
gradle -x lint -x lintVitalGoogleBeta assembleGoogleBeta uploadCrashlyticsSymbolFileGoogleBeta uploadCrashlyticsMappingFileGoogleBeta
- name: Upload beta apk to App Distribution
shell: bash


@ -4,8 +4,8 @@ on:
pull_request:
paths:
- .github/workflows/android-check-metadata.yaml # Run check on self change
- android/app/src/fdroid/**
- android/app/src/google/**
- android/src/fdroid/**
- android/src/google/**
- tools/python/check_store_metadata.py
jobs:
@ -14,12 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 1
sparse-checkout: |
android
tools/python/check_store_metadata.py
uses: actions/checkout@v3
- name: Check metadata
run: ./tools/python/check_store_metadata.py android


@ -1,9 +1,6 @@
name: Android Check
on:
workflow_dispatch: # Manual trigger
push:
branches:
- master
pull_request:
paths-ignore:
- .gitignore
@ -14,19 +11,13 @@ on:
- LICENSE
- NOTICE
- README.md
- android/app/src/fdroid/**
- android/app/src/google/**
- android/src/fdroid/**
- android/src/google/**
- iphone/**
- data/strings/**
- docs/**
- generator/**
- packaging/**
- platform/*apple*
- platform/*_ios*
- platform/*_linux*
- platform/*_mac*
- platform/*qt*
- platform/*_win*
- pyhelpers/**
- qt*/**
- skin_generator/**
@ -34,16 +25,13 @@ on:
- track_generator/**
- xcode/**
env:
JAVA_HOME: /usr/lib/jvm/temurin-17-jdk-amd64 # Java 17 is required for Android Gradle 8 plugin
jobs:
lint:
name: Android Lint
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 1
@ -51,14 +39,14 @@ jobs:
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Configure repository
- name: Configure in Open Source mode
shell: bash
run: ./configure.sh
- name: Lint
shell: bash
working-directory: android
run: ./gradlew -Pandroidauto=true lint
run: gradle lint
android-check:
name: Build Android Debug
@ -66,12 +54,7 @@ jobs:
strategy:
fail-fast: false
matrix:
flavor: [WebDebug, FdroidDebug]
include:
- flavor: WebDebug
arch: arm64
- flavor: FdroidDebug
arch: arm32
flavor: [WebDebug, FdroidBeta]
# Cancels previous jobs if the same branch or PR was updated again.
concurrency:
group: ${{ github.workflow }}-${{ matrix.flavor }}-${{ github.event.pull_request.number || github.ref }}
@ -85,7 +68,7 @@ jobs:
sudo apt-get install -y ninja-build
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 200 # enough to get all commits for the current day
@ -93,29 +76,21 @@ jobs:
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Configure repository
- name: Configure in Open Source mode
shell: bash
run: ./configure.sh
- name: Configure ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ github.workflow }}-${{ matrix.flavor }}
- name: Compile ${{ matrix.flavor }}
shell: bash
working-directory: android
env:
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
run: |
cmake --version
ninja --version
./gradlew -P${{ matrix.arch }} assemble${{ matrix.flavor }}
gradle -Parm64 assemble${{ matrix.flavor }}
- name: Upload ${{ matrix.flavor }} apk
uses: actions/upload-artifact@v4
- name: Upload arm64-v8a ${{ matrix.flavor }} apk
uses: actions/upload-artifact@v3
with:
name: android-${{ matrix.flavor }}
path: android/app/build/outputs/apk/**/OrganicMaps-*.apk
name: android-arm64-v8a-${{ matrix.flavor }}
path: android/build/outputs/apk/**/OrganicMaps-*.apk
if-no-files-found: error


@ -2,10 +2,7 @@ name: Android Monkey
on:
workflow_dispatch: # Manual trigger
schedule:
- cron: '0 5 * * 0' # Once per week at 05:00 UTC
env:
JAVA_HOME: /usr/lib/jvm/temurin-17-jdk-amd64 # Java 17 is required for Android Gradle 8 plugin
- cron: '0 5 * * *' # Once per day at 05:00 UTC
jobs:
precondition:
@ -13,7 +10,7 @@ jobs:
name: Check preconditions
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 1000 # fetch month or so
@ -31,8 +28,8 @@ jobs:
outputs:
updated: ${{ steps.check.outputs.updated }}
android-google-beta:
name: Android Google Beta
android-google-debug:
name: Android Google Debug
runs-on: ubuntu-latest
needs: precondition
if: ${{ needs.precondition.outputs.updated != '' }}
@ -48,7 +45,7 @@ jobs:
uses: google-github-actions/setup-gcloud@v0
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # enough to get all commits for the current day
@ -56,26 +53,19 @@ jobs:
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Restore beta keys
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: private.git
- name: Configure repo with private keys
shell: bash
run: |
echo "$PRIVATE_H" | base64 -d > private.h
echo "$FIREBASE_TEST_LAB_JSON" | base64 -d > android/app/firebase-test-lab.json
echo "$FIREBASE_APP_DISTRIBUTION_JSON" | base64 -d > android/app/firebase-app-distribution.json
echo "$GOOGLE_SERVICES_JSON" | base64 -d > android/app/google-services.json
echo "$SECURE_PROPERTIES" | base64 -d > android/app/secure.properties
echo "$RELEASE_KEYSTORE" | base64 -d > android/app/release.keystore
env:
PRIVATE_H: ${{ secrets.PRIVATE_H }}
FIREBASE_TEST_LAB_JSON: ${{ secrets.FIREBASE_TEST_LAB_JSON }}
FIREBASE_APP_DISTRIBUTION_JSON: ${{ secrets.FIREBASE_APP_DISTRIBUTION_JSON }}
GOOGLE_SERVICES_JSON: ${{ secrets.GOOGLE_SERVICES_JSON }}
SECURE_PROPERTIES: ${{ secrets.SECURE_PROPERTIES }}
RELEASE_KEYSTORE: ${{ secrets.RELEASE_KEYSTORE }}
- name: Configure repository
shell: bash
run: ./configure.sh
./configure.sh ./private.git
rm -rf ./private.git
- name: Compile
shell: bash
@ -83,20 +73,26 @@ jobs:
run: |
cmake --version
ninja --version
./gradlew -Pfirebase -Parm64 -Parmeabi-v7a assembleGoogleBeta uploadCrashlyticsSymbolFileGoogleBeta uploadCrashlyticsMappingFileGoogleBeta
gradle -Pfirebase assembleGoogleDebug uploadCrashlyticsSymbolFileGoogleDebug
- name: Run monkey
run: |
gcloud auth activate-service-account --key-file android/app/firebase-test-lab.json
gcloud auth activate-service-account --key-file android/firebase-test-lab.json
gcloud config set project omapsapp
gcloud firebase test android run --app ./android/app/build/outputs/apk/google/beta/OrganicMaps-*-google-beta.apk \
--device model=husky,version=34 \
--device model=tangorpro,version=33,orientation=landscape \
gcloud firebase test android run --app ./android/build/outputs/apk/google/debug/OrganicMaps-*-google-debug.apk \
--device model=panther,version=33 \
--device model=bluejay,version=32 \
--device model=a51,version=31 \
--device model=f2q,version=30,orientation=landscape \
--device model=a10,version=29,orientation=landscape \
--device model=cactus,version=27 \
--device model=sailfish,version=25 \
--device model=harpia,version=23 \
--device model=b2q,version=31 \
--device model=f2q,version=30 \
--device model=a10,version=29 \
--device model=Pixel2.arm,version=30 \
--device model=MediumPhone.arm,version=29 \
--device model=MediumPhone.arm,version=28 \
--device model=MediumPhone.arm,version=27 \
--device model=Pixel2.arm,version=26,orientation=landscape \
--device model=Nexus6,version=25 \
--device model=NexusLowRes,version=24 \
--device model=NexusLowRes,version=23,orientation=landscape \
--device model=Nexus6,version=22 \
--device model=Nexus7,version=21 \
--timeout 15m


@ -2,9 +2,6 @@ name: Android Release Metadata
on:
workflow_dispatch: # Manual trigger
env:
JAVA_HOME: /usr/lib/jvm/temurin-17-jdk-amd64 # Java 17 is required for Android Gradle 8 plugin
jobs:
android-release-metadata:
name: Upload Google Play metadata
@ -12,28 +9,33 @@ jobs:
environment: production
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Parallel submodules checkout
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Checkout screenshots
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: ${{ secrets.SCREENSHOTS_REPO }}
ssh-key: ${{ secrets.SCREENSHOTS_SSH_KEY }}
ref: master
path: screenshots
- name: Restore release keys
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: private.git
- name: Configure repo with private keys
shell: bash
run: |
echo "$PRIVATE_H" | base64 -d > private.h
echo "$GOOGLE_PLAY_JSON" | base64 -d > android/app/google-play.json
env:
PRIVATE_H: ${{ secrets.PRIVATE_H }}
GOOGLE_PLAY_JSON: ${{ secrets.GOOGLE_PLAY_JSON }}
./configure.sh ./private.git
rm -rf ./private.git
- name: Upload
shell: bash


@ -3,9 +3,8 @@ on:
workflow_dispatch: # Manual trigger
env:
RELEASE_NOTES: android/app/src/google/play/release-notes/en-US/default.txt
FDROID_VERSION: android/app/src/fdroid/play/version.yaml
JAVA_HOME: /usr/lib/jvm/temurin-17-jdk-amd64 # Java 17 is required for Android Gradle 8 plugin
RELEASE_NOTES: android/src/google/play/release-notes/en-US/default.txt
FDROID_VERSION: android/src/fdroid/play/version.yaml
jobs:
tag:
@ -14,7 +13,7 @@ jobs:
environment: production
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # Enough to get all commits for the last day.
ssh-key: ${{ secrets.RELEASE_SSH_KEY }}
@ -29,7 +28,7 @@ jobs:
version=$(tools/unix/version.sh ios_version)
# +1 because below a "Bump versions" commit is created.
# TODO: Find a way to refactor FDroid versioning without that additional commit.
build=$(($(tools/unix/version.sh count) + 1))
build=$(($(tools/unix/version.sh ios_build) + 1))
code=$(($(tools/unix/version.sh android_code) + 1))
tag=$version-$build-android
echo "::set-output name=version::$version"
@ -76,7 +75,7 @@ jobs:
sudo apt-get install -y ninja-build
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # enough to get all commits for the current day
ref: 'refs/tags/${{ needs.tag.outputs.tag }}'
@ -93,74 +92,65 @@ jobs:
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Checkout screenshots
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: ${{ secrets.SCREENSHOTS_REPO }}
ssh-key: ${{ secrets.SCREENSHOTS_SSH_KEY }}
ref: master
path: screenshots
- name: Restore release keys
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: private.git
- name: Configure repo with private keys
shell: bash
run: |
echo "$PRIVATE_H" | base64 -d > private.h
echo "$GOOGLE_PLAY_JSON" | base64 -d > android/app/google-play.json
echo "$HUAWEI_APPGALLERY_JSON" | base64 -d > android/app/huawei-appgallery.json
echo "$AGCONNECT_SERVICES_JSON" | base64 -d > android/app/agconnect-services.json
echo "$SECURE_PROPERTIES" | base64 -d > android/app/secure.properties
echo "$RELEASE_KEYSTORE" | base64 -d > android/app/release.keystore
env:
PRIVATE_H: ${{ secrets.PRIVATE_H }}
GOOGLE_PLAY_JSON: ${{ secrets.GOOGLE_PLAY_JSON }}
HUAWEI_APPGALLERY_JSON: ${{ secrets.HUAWEI_APPGALLERY_JSON }}
AGCONNECT_SERVICES_JSON: ${{ secrets.AGCONNECT_SERVICES_JSON }}
SECURE_PROPERTIES: ${{ secrets.SECURE_PROPERTIES }}
RELEASE_KEYSTORE: ${{ secrets.RELEASE_KEYSTORE }}
- name: Configure repository
shell: bash
run: ./configure.sh
./configure.sh ./private.git
rm -rf ./private.git
- name: Set up SDK
shell: bash
run: echo "sdk.dir=$ANDROID_SDK_ROOT" > android/local.properties
run: (cd tools/android; ./set_up_android.py --sdk $ANDROID_SDK_ROOT)
- name: Compile and upload to Google Play
shell: bash
working-directory: android
run: |
./gradlew bundleGoogleRelease publishGoogleReleaseBundle
gradle bundleGoogleRelease publishGoogleReleaseBundle
if: ${{ matrix.flavor == 'google' }}
- name: Compile and upload to Huawei AppGallery
shell: bash
working-directory: android
run: |
./gradlew bundleHuaweiRelease
./gradlew publishHuaweiAppGalleryHuaweiRelease
gradle bundleHuaweiRelease
gradle publishHuaweiAppGalleryHuaweiRelease
if: ${{ matrix.flavor == 'huawei' }}
- name: Compile universal APK
shell: bash
working-directory: android
run: |
./gradlew assembleWebRelease
gradle assembleWebRelease
if: ${{ matrix.flavor == 'web' }}
- name: Prepare release notes
if: ${{ matrix.flavor == 'web' }}
shell: bash
run: |
(cd ./android/app/build/outputs/apk/web/release/ && sha256sum OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk > OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk.sha256sum)
{
cat ${{ env.RELEASE_NOTES }}
echo ""
echo "See [a detailed announce](https://organicmaps.app/news/) on our website when app updates are published in all stores."
echo "You can get automatic app updates from GitHub [using Obtainium](https://github.com/organicmaps/organicmaps/wiki/Installing-Organic-Maps-from-GitHub-using-Obtainium)."
echo "See [more details](https://organicmaps.app/news/) on our website when apps are published."
echo ""
echo "sha256sum:"
echo -e '\n```'
tr -d '\n' < ./android/app/build/outputs/apk/web/release/OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk.sha256sum
(cd ./android/build/outputs/apk/web/release/ && sha256sum OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk) | tr -d '\n'
echo -e '\n```'
} > ${{ runner.temp }}/release-notes.txt
@ -173,8 +163,5 @@ jobs:
name: ${{ needs.tag.outputs.tag }}
tag_name: ${{ needs.tag.outputs.tag }}
discussion_category_name: 'Announcements'
prerelease: true
files: |
./android/app/build/outputs/apk/web/release/OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk
./android/app/build/outputs/apk/web/release/OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk.sha256sum
files: ./android/build/outputs/apk/web/release/OrganicMaps-${{ needs.tag.outputs.code }}-web-release.apk
fail_on_unmatched_files: true


@ -9,32 +9,21 @@ on:
jobs:
validate-appstream:
name: Validate appstream metadata xml
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
steps:
- name: Checkout sources
uses: actions/checkout@v4
with:
fetch-depth: 1
sparse-checkout: |
packaging/app.organicmaps.desktop.metainfo.xml
uses: actions/checkout@v3
- name: Install appstream validator and flatpak Builder
- name: Install appstream validator
shell: bash
run: |
sudo apt update -y
sudo apt install -y \
flatpak
sudo flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
sudo flatpak install -y org.freedesktop.appstream-glib org.flatpak.Builder
# We get it from flathub to ensure we have a recent version
sudo flatpak install -y org.freedesktop.appstream-glib
- name: Validate appstream data
shell: bash
run: flatpak run org.freedesktop.appstream-glib validate --nonet packaging/app.organicmaps.desktop.metainfo.xml
- name: Lint appstream data with flatpak Builder
shell: bash
run: flatpak run --command=flatpak-builder-lint org.flatpak.Builder appstream packaging/app.organicmaps.desktop.metainfo.xml
- name: Run appstreamcli in pedantic mode
shell: bash
run: flatpak run --command=appstreamcli org.flatpak.Builder validate --pedantic packaging/app.organicmaps.desktop.metainfo.xml


@ -1,30 +0,0 @@
# https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#force-deleting-cache-entries
name: Cleanup caches by a branch
on:
pull_request:
types:
- closed
jobs:
cleanup:
runs-on: ubuntu-latest
steps:
- name: Cleanup
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO: ${{ github.repository }}
BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge
run: |
gh extension install actions/gh-actions-cache
echo "Fetching list of cache key"
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
## Setting this to not fail the workflow while deleting cache keys.
set +e
echo "Deleting caches..."
for cacheKey in $cacheKeysForPR
do
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
done
echo "Done"


@ -1,162 +0,0 @@
name: Coverage Report
on:
workflow_dispatch: # Manual trigger
pull_request:
types:
- opened
- synchronize
- labeled
- unlabeled
paths-ignore:
- .gitignore
- CONTRIBUTORS
- LICENSE
- NOTICE
- README.md
- docs/**
- packaging/**
- platform/*apple*
- platform/*_android*
- platform/*_ios*
- platform/*_mac*
- platform/*_win*
- pyhelpers/**
- tools/**
- '!tools/python/test_server/**'
- xcode/**
# Cancels previous jobs if the same branch or PR was updated again.
concurrency:
group: ${{ github.workflow }}-coverage-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
should-run-check:
name: Should run coverage
runs-on: ubuntu-24.04
outputs:
run-from-pr: ${{ steps.run-from-pr.outputs.run-from-pr }}
manually-triggered: ${{ steps.manually-triggered.outputs.manually-triggered }}
steps:
- name: Check if PR has 'Coverage' label
id: run-from-pr
if: github.event_name == 'pull_request'
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
GH_TOKEN: ${{ github.token }}
run: |
LABEL_NAME="Coverage"
LABELS=$(gh pr view https://github.com/$GITHUB_REPOSITORY/pull/$PR_NUMBER --json labels)
if echo "$LABELS" | jq -e '.labels[].name' | grep -q "$LABEL_NAME"; then
echo "run-from-pr=true" >> $GITHUB_OUTPUT
echo "'Coverage' label found in PR."
fi
- name: Check if manually triggered
id: manually-triggered
if: github.event_name == 'workflow_dispatch'
run: echo "manually-triggered=true" >> $GITHUB_OUTPUT
coverage:
needs: should-run-check
name: Generate coverage report
runs-on: ubuntu-24.04
if: ${{ needs.should-run-check.outputs.run-from-pr == 'true' || needs.should-run-check.outputs.manually-triggered == 'true'}}
steps:
- name: Free disk space by removing .NET, Android and Haskell
shell: bash
run: |
sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc
- name: Checkout sources
uses: actions/checkout@v4
with:
fetch-depth: 100 # enough to get all commits for the current day
- name: Parallel submodules checkout
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(nproc) * 20))
- name: Install build tools and dependencies
shell: bash
run: |
sudo apt update -y
sudo apt install -y \
ninja-build \
libgl1-mesa-dev \
libglvnd-dev \
qt6-base-dev \
libfreetype-dev \
libharfbuzz-dev \
libqt6svg6-dev \
qt6-positioning-dev \
libqt6positioning6-plugins \
libqt6positioning6 \
llvm \
gcovr
- name: Configure repository
shell: bash
run: ./configure.sh
- name: Configure ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ github.workflow }}-coverage
- name: CMake
shell: bash
env:
CC: clang-18
CXX: clang++-18
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
# -g1 should slightly reduce build time.
run: |
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug \
-DCMAKE_CXX_FLAGS=-g1 -DCOVERAGE_REPORT=ON
- name: Compile
shell: bash
working-directory: build
run: ninja
- name: Tests
shell: bash
working-directory: build
env:
QT_QPA_PLATFORM: "offscreen"
# generator_integration_tests - https://github.com/organicmaps/organicmaps/issues/225
# opening_hours_integration_tests - https://github.com/organicmaps/organicmaps/issues/219
# opening_hours_supported_features_tests - https://github.com/organicmaps/organicmaps/issues/219
# routing_integration_tests - https://github.com/organicmaps/organicmaps/issues/221
# shaders_tests - https://github.com/organicmaps/organicmaps/issues/223
# world_feed_integration_tests - https://github.com/organicmaps/organicmaps/issues/215
CTEST_EXCLUDE_REGEX: "generator_integration_tests|opening_hours_integration_tests|opening_hours_supported_features_tests|routing_benchmarks|routing_integration_tests|routing_quality_tests|search_quality_tests|storage_integration_tests|shaders_tests|world_feed_integration_tests"
run: |
sudo locale-gen en_US
sudo locale-gen en_US.UTF-8
sudo locale-gen es_ES
sudo locale-gen es_ES.UTF-8
sudo locale-gen fr_FR
sudo locale-gen fr_FR.UTF-8
sudo locale-gen ru_RU
sudo locale-gen ru_RU.UTF-8
sudo update-locale
ctest -L "omim-test" -E "$CTEST_EXCLUDE_REGEX" --output-on-failure
- name: Run coverage report generation
shell: bash
working-directory: build
run: |
cmake --build . --target omim_coverage
cat coverage_report/summary.txt
- name: Archive the coverage report
working-directory: build/coverage_report
run: zip -r coverage_report.zip html/
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: coverage-report
path: build/coverage_report/coverage_report.zip


@ -1,30 +0,0 @@
name: Validate .desktop file
on:
workflow_dispatch: # Manual trigger
pull_request:
paths:
- qt/res/app.organicmaps.desktop.desktop
- .github/workflows/desktop-file-check.yaml # Run check on self change
jobs:
validate-desktop-file:
name: Validate .desktop file
runs-on: ubuntu-24.04
steps:
- name: Checkout sources
uses: actions/checkout@v4
with:
fetch-depth: 1
sparse-checkout: |
qt/res/app.organicmaps.desktop.desktop
- name: Install desktop-file-validate tool
shell: bash
run: |
sudo apt update -y
sudo apt install -y \
desktop-file-utils
- name: Validate desktop file
shell: bash
run: desktop-file-validate qt/res/app.organicmaps.desktop.desktop && echo "Successfully validated .desktop file"


@ -19,11 +19,6 @@ on:
- docs/**
- generator/**
- packaging/**
- platform/*_android*
- platform/*_linux*
- platform/*_mac*
- platform/*qt*
- platform/*_win*
- pyhelpers/**
- qt*/**
- skin_generator/**
@ -33,9 +28,8 @@ on:
jobs:
ios-beta:
name: Apple TestFlight
runs-on: macos-15
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_16.app/Contents/Developer
LANG: en_US.UTF-8 # Fastlane complains that the terminal is using ASCII.
LANGUAGE: en_US.UTF-8
LC_ALL: en_US.UTF-8
@ -45,42 +39,28 @@ jobs:
shell: bash
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # enough to get all commits for the current day
- name: Parallel submodules checkout
run: git submodule update --depth 1 --init --recursive --jobs=$(($(sysctl -n hw.logicalcpu) * 20))
- name: Restore beta keys
shell: bash
run: |
mkdir -p xcode/keys
echo "$PRIVATE_H" | base64 -d > private.h
echo "$APPSTORE_JSON" | base64 -d > xcode/keys/appstore.json
echo "$CERTIFICATES_DEV_P12" | base64 -d > xcode/keys/CertificatesDev.p12
echo "$CERTIFICATES_DISTR_P12" | base64 -d > xcode/keys/CertificatesDistr.p12
env:
PRIVATE_H: ${{ secrets.PRIVATE_H }}
APPSTORE_JSON: ${{ secrets.APPSTORE_JSON }}
CERTIFICATES_DEV_P12: ${{ secrets.CERTIFICATES_DEV_P12 }}
CERTIFICATES_DISTR_P12: ${{ secrets.CERTIFICATES_DISTR_P12 }}
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: private.git
- name: Configure repository
shell: bash
run: ./configure.sh
- name: Configure repo with private keys
run: |
./configure.sh ./private.git
rm -rf ./private.git
- name: Compile and upload to TestFlight
run: |
echo "IOS_VERSION=$(../tools/unix/version.sh ios_version)-$(../tools/unix/version.sh ios_build)" >> "$GITHUB_ENV"
./fastlane.sh upload_testflight
run: ./fastlane.sh upload_testflight
env:
APPSTORE_CERTIFICATE_PASSWORD: '${{ secrets.APPSTORE_CERTIFICATE_PASSWORD }}'
working-directory: xcode
- name: Upload ipa and DSYMs artifacts
uses: actions/upload-artifact@v4
with:
name: ipa and DSYM archive ${{ env.IOS_VERSION }}
path: xcode/build/*
if-no-files-found: error


@ -13,12 +13,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 1
sparse-checkout: |
iphone/metadata
tools/python/check_store_metadata.py
uses: actions/checkout@v3
- name: Check metadata
run: ./tools/python/check_store_metadata.py ios


@ -1,9 +1,6 @@
name: iOS Check
on:
workflow_dispatch: # Manual trigger
push:
branches:
- master
pull_request:
paths-ignore:
- .gitignore
@ -21,11 +18,6 @@ on:
- generator/**
- iphone/metadata/**
- packaging/**
- platform/*_android*
- platform/*_linux*
- platform/*_mac*
- platform/*qt*
- platform/*_win*
- pyhelpers/**
- qt*/**
- skin_generator/**
@ -35,13 +27,11 @@ on:
jobs:
ios-check:
name: Build iOS
runs-on: macos-15
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_16.app/Contents/Developer
LANG: en_US.UTF-8 # Fastlane complains that the terminal is using ASCII.
LANGUAGE: en_US.UTF-8
LC_ALL: en_US.UTF-8
TEST_RESULTS_BUNDLE_NAME: OMaps-Test-Results
strategy:
fail-fast: false
matrix:
@ -53,54 +43,24 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Parallel submodules checkout
shell: bash
run: git submodule update --depth 1 --init --recursive --jobs=$(($(sysctl -n hw.logicalcpu) * 20))
- name: Configure repository
- name: Configure
shell: bash
run: ./configure.sh
- name: Configure XCode cache
uses: irgaly/xcode-cache@v1
with:
key: xcode-cache-deriveddata-${{ github.workflow }}-${{ matrix.buildType }}-${{ github.sha }}
restore-keys: xcode-cache-deriveddata-${{ github.workflow }}-${{ matrix.buildType }}
- name: Build and Run Tests (Debug)
if: matrix.buildType == 'Debug'
- name: Compile
shell: bash
# Check for compilation errors.
run: |
xcodebuild test \
xcodebuild \
-workspace xcode/omim.xcworkspace \
-scheme OMaps \
-configuration Debug \
-sdk iphonesimulator \
-destination 'platform=iOS Simulator,name=iPhone 16 Pro Max,OS=latest' \
-quiet \
-resultBundlePath ${{ env.TEST_RESULTS_BUNDLE_NAME }}.xcresult \
-configuration ${{ matrix.buildType }} build \
'generic/platform=iOS' \
CODE_SIGNING_REQUIRED=NO \
CODE_SIGNING_ALLOWED=NO
- name: Upload Test Results On Failure (Debug)
if: ${{ matrix.buildType == 'Debug' && failure() }}
uses: actions/upload-artifact@v4
with:
name: ${{ env.TEST_RESULTS_BUNDLE_NAME }}-${{ github.run_number }}.xcresult
path: ${{ env.TEST_RESULTS_BUNDLE_NAME }}.xcresult
if-no-files-found: error
- name: Build (Release)
if: matrix.buildType == 'Release'
shell: bash
run: |
xcodebuild build \
-workspace xcode/omim.xcworkspace \
-scheme OMaps \
-configuration Release \
-destination 'generic/platform=iOS' \
-quiet \
CODE_SIGNING_REQUIRED=NO \
CODE_SIGNING_ALLOWED=NO


@ -5,27 +5,33 @@ on:
jobs:
ios-release:
name: iOS Release
runs-on: macos-15
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_16.app/Contents/Developer
LANG: en_US.UTF-8 # Fastlane complains that the terminal is using ASCII.
LANGUAGE: en_US.UTF-8
LC_ALL: en_US.UTF-8
environment: production
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Restore release keys
- name: Checkout private keys
uses: actions/checkout@v3
with:
repository: ${{ secrets.PRIVATE_REPO }}
ssh-key: ${{ secrets.PRIVATE_SSH_KEY }}
ref: master
path: ./private.git
- name: Configure repo with private keys
shell: bash
run: |
mkdir -p xcode/keys
echo "$APPSTORE_JSON" | base64 -d > xcode/keys/appstore.json
env:
APPSTORE_JSON: ${{ secrets.APPSTORE_JSON }}
mkdir -p xcode/keys/
cp -p ./private.git/xcode/keys/appstore.json xcode/keys/
rm -rf ./private.git
- name: Checkout screenshots
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: ${{ secrets.SCREENSHOTS_REPO }}
ssh-key: ${{ secrets.SCREENSHOTS_SSH_KEY }}
@ -33,7 +39,7 @@ jobs:
path: screenshots
- name: Checkout keywords
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: ${{ secrets.KEYWORDS_REPO }}
ssh-key: ${{ secrets.KEYWORDS_SSH_KEY }}


@ -1,9 +1,6 @@
name: Linux Check
on:
workflow_dispatch: # Manual trigger
push:
branches:
- master
pull_request:
paths-ignore:
- .gitignore
@ -18,23 +15,22 @@ on:
- data/strings/**
- docs/**
- packaging/**
- platform/*apple*
- platform/*_android*
- platform/*_ios*
- platform/*_mac*
- platform/*_win*
- pyhelpers/**
- tools/**
- '!tools/python/test_server/**'
- '!tools/python/run_desktop_tests.py'
- '!tools/python/testserver.py'
- '!tools/python/SiblingKiller.py'
- xcode/**
jobs:
linux-no-unity:
name: Linux no unity build
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
fail-fast: false
# Cancels previous jobs if the same branch or PR was updated again.
concurrency:
group: ${{ github.workflow }}-no-unity-${{ github.event.pull_request.number || github.ref }}
group: ${{ github.workflow }}-${{ matrix.compiler.CC }}-${{ matrix.CMAKE_BUILD_TYPE }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
steps:
@ -44,7 +40,7 @@ jobs:
sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 100 # enough to get all commits for the current day
@ -58,39 +54,21 @@ jobs:
sudo apt update -y
sudo apt install -y \
ninja-build \
libgl1-mesa-dev \
libglvnd-dev \
libharfbuzz-dev \
libxrandr-dev \
libxinerama-dev \
libxcursor-dev \
libxi-dev \
qt6-base-dev \
libqt6svg6-dev \
qt6-positioning-dev \
libqt6positioning6-plugins \
libqt6positioning6
qtbase5-dev \
libqt5svg5-dev
- name: Configure repository
- name: Configure
shell: bash
run: ./configure.sh
- name: Configure ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ github.workflow }}-no-unity
- name: CMake
shell: bash
env:
CC: clang-18
CXX: clang++-18
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
CC: clang-14
CXX: clang++-14
# -g1 should slightly reduce build time.
run: |
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug \
-DCMAKE_CXX_FLAGS=-g1 -DUNITY_DISABLE=ON
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug -DCMAKE_CXX_FLAGS=-g1 -DUNITY_DISABLE=ON
- name: Compile
shell: bash
@ -99,16 +77,12 @@ jobs:
linux-matrix:
name: Linux builds and tests
runs-on: ubuntu-24.04
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
compiler: [{ CXX: g++-14, CC: gcc-14 }, { CXX: clang++-18, CC: clang-18 }]
compiler: [{ CXX: g++-12, CC: gcc-12 }, { CXX: clang++-14, CC: clang-14 }]
CMAKE_BUILD_TYPE: [Debug, RelWithDebInfo]
# Cancels previous jobs if the same branch or PR was updated again.
concurrency:
group: ${{ github.workflow }}-unity-${{ matrix.compiler.CC }}-${{ matrix.CMAKE_BUILD_TYPE }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
steps:
- name: Free disk space by removing .NET, Android and Haskell
@ -117,7 +91,7 @@ jobs:
sudo rm -rf /usr/share/dotnet /usr/local/lib/android /opt/ghc
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Parallel submodules checkout
shell: bash
@ -128,67 +102,61 @@ jobs:
run: |
sudo apt update -y
sudo apt install -y \
g++-12 \
gcc-12 \
ninja-build \
libgl1-mesa-dev \
libglvnd-dev \
libharfbuzz-dev \
libxrandr-dev \
libxinerama-dev \
libxcursor-dev \
libxi-dev \
qt6-base-dev \
libqt6svg6-dev \
qt6-positioning-dev \
libqt6positioning6-plugins \
libqt6positioning6
qtbase5-dev \
libqt5svg5-dev
- name: Configure repository
- name: Configure
shell: bash
run: ./configure.sh
- name: Configure ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ github.workflow }}-unity-${{ matrix.compiler.CC }}-${{ matrix.CMAKE_BUILD_TYPE }}
- name: CMake
shell: bash
env:
CC: ${{ matrix.compiler.CC }}
CXX: ${{ matrix.compiler.CXX }}
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
# -g1 should slightly reduce build time.
run: |
echo "Building ${{ matrix.CMAKE_BUILD_TYPE }}"
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=${{ matrix.CMAKE_BUILD_TYPE }} \
-DCMAKE_C_FLAGS=-g1 -DCMAKE_CXX_FLAGS=-g1
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=${{ matrix.CMAKE_BUILD_TYPE }} -DCMAKE_C_FLAGS=-g1 -DCMAKE_CXX_FLAGS=-g1
- name: Compile
shell: bash
working-directory: build
run: ninja
- name: Checkout world_feed_integration_tests_data
uses: actions/checkout@v3
with:
repository: organicmaps/world_feed_integration_tests_data
path: data/world_feed_integration_tests_data
- name: Tests
shell: bash
working-directory: build
env:
QT_QPA_PLATFORM: "offscreen"
# generator_integration_tests - https://github.com/organicmaps/organicmaps/issues/225
# opening_hours_integration_tests - https://github.com/organicmaps/organicmaps/issues/219
# opening_hours_supported_features_tests - https://github.com/organicmaps/organicmaps/issues/219
# routing_integration_tests - https://github.com/organicmaps/organicmaps/issues/221
# shaders_tests - https://github.com/organicmaps/organicmaps/issues/223
# world_feed_integration_tests - https://github.com/organicmaps/organicmaps/issues/215
CTEST_EXCLUDE_REGEX: "generator_integration_tests|opening_hours_integration_tests|opening_hours_supported_features_tests|routing_benchmarks|routing_integration_tests|routing_quality_tests|search_quality_tests|storage_integration_tests|shaders_tests|world_feed_integration_tests"
# generator_integration_tests - https://github.com/organicmaps/organicmaps/issues/225
# routing_integration_tests - https://github.com/organicmaps/organicmaps/issues/221
# routing_quality_tests - https://github.com/organicmaps/organicmaps/issues/215
# drape_tests - requires X Window
# Separate run of OH boost-based test
run: |
sudo locale-gen en_US
sudo locale-gen en_US.UTF-8
sudo locale-gen es_ES
sudo locale-gen es_ES.UTF-8
sudo locale-gen fr_FR
sudo locale-gen fr_FR.UTF-8
sudo locale-gen ru_RU
sudo locale-gen ru_RU.UTF-8
sudo update-locale
ctest -L "omim-test" -E "$CTEST_EXCLUDE_REGEX" --output-on-failure
./build/opening_hours_tests |
./tools/python/run_desktop_tests.py \
-f ./build \
-u ./data \
-d ./data \
-e generator_integration_tests \
-e routing_integration_tests \
-e routing_quality_tests \
-e search_quality_tests \
-e world_feed_integration_tests \
-e drape_tests \
-e shaders_tests \
\
-e opening_hours_tests \
-e opening_hours_integration_tests \
-e routing_consistency_tests \
-e opening_hours_supported_features_tests \
-e storage_integration_tests \


@ -1,9 +1,6 @@
name: macOS Check
on:
workflow_dispatch: # Manual trigger
push:
branches:
- master
pull_request:
paths-ignore:
- .gitignore
@ -18,21 +15,18 @@ on:
- data/strings/**
- docs/**
- packaging/**
- platform/*_android*
- platform/*_ios*
- platform/*_linux*
- platform/*_win*
- pyhelpers/**
- tools/**
- '!tools/python/test_server/**'
- '!tools/python/run_desktop_tests.py'
- '!tools/python/testserver.py'
- '!tools/python/SiblingKiller.py'
- xcode/**
jobs:
macos-matrix:
name: macOS builds and tests
runs-on: macos-15
runs-on: macos-12
env:
DEVELOPER_DIR: /Applications/Xcode_16.app/Contents/Developer
HOMEBREW_NO_ANALYTICS: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
strategy:
@ -46,7 +40,7 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Parallel submodules checkout
shell: bash
@ -55,43 +49,53 @@ jobs:
- name: Install build tools and dependencies
shell: bash
run: |
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1 brew install ninja qt@6
brew install ninja qt@5
- name: Configure repository
- name: Configure
shell: bash
run: ./configure.sh
- name: Configure ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ github.workflow }}-${{ matrix.CMAKE_BUILD_TYPE }}
- name: CMake
shell: bash
env:
CMAKE_C_COMPILER_LAUNCHER: ccache
CMAKE_CXX_COMPILER_LAUNCHER: ccache
run: |
echo "Building ${{ matrix.CMAKE_BUILD_TYPE }}"
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=${{ matrix.CMAKE_BUILD_TYPE }} \
-DCMAKE_C_FLAGS=-g1 -DCMAKE_CXX_FLAGS=-g1
cmake . -B build -G Ninja -DCMAKE_BUILD_TYPE=${{ matrix.CMAKE_BUILD_TYPE }} -DCMAKE_C_FLAGS=-g1 -DCMAKE_CXX_FLAGS=-g1
- name: Compile
shell: bash
working-directory: build
run: ninja
- name: Checkout world_feed_integration_tests_data
uses: actions/checkout@v3
with:
repository: organicmaps/world_feed_integration_tests_data
path: data/world_feed_integration_tests_data
- name: Tests
shell: bash
working-directory: build
env:
# drape_tests - requires X Window
# generator_integration_tests - https://github.com/organicmaps/organicmaps/issues/225
# opening_hours_integration_tests - https://github.com/organicmaps/organicmaps/issues/219
# opening_hours_supported_features_tests - https://github.com/organicmaps/organicmaps/issues/219
# routing_integration_tests - https://github.com/organicmaps/organicmaps/issues/221
# shaders_tests - https://github.com/organicmaps/organicmaps/issues/223
# world_feed_integration_tests - https://github.com/organicmaps/organicmaps/issues/215
CTEST_EXCLUDE_REGEX: "drape_tests|generator_integration_tests|opening_hours_integration_tests|opening_hours_supported_features_tests|routing_benchmarks|routing_integration_tests|routing_quality_tests|search_quality_tests|storage_integration_tests|shaders_tests|world_feed_integration_tests"
# generator_integration_tests - https://github.com/organicmaps/organicmaps/issues/225
# # routing_integration_tests - https://github.com/organicmaps/organicmaps/issues/221
# routing_quality_tests - https://github.com/organicmaps/organicmaps/issues/215
# drape_tests - requires X Window
# Separate run of OH boost-based test
run: |
ctest -L "omim-test" -E "$CTEST_EXCLUDE_REGEX" --output-on-failure
./build/opening_hours_tests |
./tools/python/run_desktop_tests.py \
-f ./build \
-u ./data \
-d ./data \
-e generator_integration_tests \
-e routing_integration_tests \
-e routing_quality_tests \
-e search_quality_tests \
-e world_feed_integration_tests \
-e drape_tests \
-e shaders_tests \
\
-e opening_hours_tests \
-e opening_hours_integration_tests \
-e routing_consistency_tests \
-e opening_hours_supported_features_tests \
-e storage_integration_tests


@ -1,22 +0,0 @@
name: Close stale PRs
on:
schedule:
- cron: "0 0 * * *" # Runs every day at midnight
jobs:
stale:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-pr-stale: 180 # 6 months before warning
days-before-pr-close: 365 # Closed after 12 months
stale-pr-label: "stale"
stale-pr-message: "Hi! This PR has been inactive for 6 months. If it's still relevant, please update it to let us know you'd like to keep it open 😊"
close-pr-message: "This PR has been automatically closed after 12 months of inactivity."
days-before-issue-stale: -1 # Issues are never stale
days-before-issue-close: -1 # Issues are never closed
remove-stale-when-updated: true

.github/workflows/strings-check.yaml

@ -0,0 +1,26 @@
name: Validate translation strings
on:
workflow_dispatch: # Manual trigger
pull_request:
paths:
- .github/workflows/strings-check.yaml # Run check on self change
- data/countries_names.txt
- data/strings/*
- tools/python/strings_utils.py
jobs:
validate-translation-strings:
name: Validate translation strings
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v2
with:
python-version: '3'
- name: Validate strings.txt and types_strings.txt files
shell: bash
run: |
./tools/python/strings_utils.py --validate
./tools/python/strings_utils.py --types-strings --validate

.gitignore

@ -16,11 +16,10 @@ screenlog.0
data/styles/*/*/out/*
data/resources-*_design/*
data/drules_proto_default_design.bin
data/drules_proto_design.bin
data/colors_design.txt
data/patterns_design.txt
data/bookmarks
data/edits.xml
# Compiled Python
*.pyc
@ -85,7 +84,6 @@ data/[0-9][0-9][0-9][0-9][0-9][0-9]
data/gps_track.dat
# temporary files for downloader
data/settings.ini
data/test_data/world_feed_integration_tests_data
# benchmark results
data/benchmarks/*.trace
@ -124,13 +122,11 @@ tizen/*/.*
tizen/*/crash-info/*
.idea/*
.idea
!android/.idea/icon.svg
# Private repository files.
.private_repository_url
.private_repository_branch
private.h
# ignore old android secrets during the transition period to the new project structure
android/release.keystore
android/secure.properties
android/libnotify.properties
@ -140,14 +136,13 @@ android/firebase-app-distribution.json
android/firebase-test-lab.json
android/huawei-appgallery.json
android/res/xml/network_security_config.xml
./server/
server
iphone/Maps/app.omaps/
*.li
*.autosave
# CMake
cmake-build-*
build/
@ -178,9 +173,6 @@ tools/python/routing/etc/*.ini
/node_modules/
/package-lock.json
# Visual Studio
.vs
# VS Code
.vscode

.gitmodules

@ -1,23 +1,29 @@
[submodule "tools/osmctools"]
path = tools/osmctools
url = https://git.omaps.dev/organicmaps/osmctools.git
url = https://github.com/organicmaps/osmctools.git
[submodule "tools/kothic"]
path = tools/kothic
url = https://git.omaps.dev/organicmaps/kothic.git
url = https://github.com/organicmaps/kothic.git
[submodule "tools/macdeployqtfix"]
path = tools/macdeployqtfix
url = https://github.com/aurelien-rainone/macdeployqtfix.git
[submodule "3party/protobuf/protobuf"]
path = 3party/protobuf/protobuf
url = https://git.omaps.dev/organicmaps/protobuf.git
url = https://github.com/organicmaps/protobuf.git
[submodule "tools/twine"]
path = tools/twine
url = https://github.com/organicmaps/twine.git
[submodule "3party/Vulkan-Headers"]
path = 3party/Vulkan-Headers
url = https://github.com/KhronosGroup/Vulkan-Headers.git
[submodule "3party/boost"]
path = 3party/boost
url = https://github.com/boostorg/boost.git
branch = boost-1.85.0
branch = boost-1.76.0
ignore = dirty
[submodule "3party/just_gtfs"]
path = 3party/just_gtfs
url = https://git.omaps.dev/organicmaps/just_gtfs.git
url = https://github.com/organicmaps/just_gtfs.git
branch = for-usage-as-submodule
[submodule "3party/expat"]
path = 3party/expat
@ -30,38 +36,20 @@
path = 3party/icu/icu
url = https://github.com/unicode-org/icu.git
[submodule "3party/freetype/freetype"]
path = 3party/freetype/freetype
url = https://git.omaps.dev/organicmaps/freetype.git
path = 3party/freetype/freetype
url = https://github.com/freetype/freetype.git
[submodule "3party/googletest"]
path = 3party/googletest
url = https://github.com/google/googletest.git
path = 3party/googletest
url = https://github.com/google/googletest.git
[submodule "3party/fast_double_parser"]
path = 3party/fast_double_parser
url = https://github.com/lemire/fast_double_parser.git
path = 3party/fast_double_parser
url = https://github.com/lemire/fast_double_parser.git
[submodule "3party/pugixml/pugixml"]
path = 3party/pugixml/pugixml
url = https://github.com/zeux/pugixml.git
path = 3party/pugixml/pugixml
url = https://github.com/zeux/pugixml.git
[submodule "3party/jansson/jansson"]
path = 3party/jansson/jansson
url = https://github.com/akheron/jansson.git
path = 3party/jansson/jansson
url = https://github.com/akheron/jansson.git
[submodule "3party/gflags"]
path = 3party/gflags
url = https://github.com/gflags/gflags
[submodule "3party/fast_obj"]
path = 3party/fast_obj
url = https://github.com/thisistherk/fast_obj
[submodule "3party/harfbuzz/harfbuzz"]
path = 3party/harfbuzz/harfbuzz
url = https://github.com/harfbuzz/harfbuzz.git
[submodule "3party/utfcpp"]
path = 3party/utfcpp
url = https://github.com/nemtrif/utfcpp.git
[submodule "3party/glfw"]
path = 3party/glfw
url = https://github.com/glfw/glfw.git
[submodule "3party/CMake-MetalShaderSupport"]
path = 3party/CMake-MetalShaderSupport
url = https://github.com/dpogue/CMake-MetalShaderSupport.git
[submodule "3party/imgui/imgui"]
path = 3party/imgui/imgui
url = https://github.com/ocornut/imgui
path = 3party/gflags
url = https://github.com/gflags/gflags

@ -1 +0,0 @@
Subproject commit 989857d2e5e54869c35ad06fb21a67d12a2dbc67


@ -1,84 +0,0 @@
# Fixes CMake deprecation warning:
# Compatibility with CMake < 3.5 will be removed from a future version of CMake.
set(CMAKE_WARN_DEPRECATED OFF CACHE BOOL "" FORCE)
if (NOT WITH_SYSTEM_PROVIDED_3PARTY)
# Suppress "Policy CMP0077 is not set: option() honors normal variables"
# for the freetype, expat and jansson options.
set(CMAKE_POLICY_DEFAULT_CMP0077 NEW)
# Suppress "Policy CMP0063 is not set: Honor visibility properties for all target types."
# for jansson
set(CMAKE_POLICY_DEFAULT_CMP0063 NEW)
# Configure expat library.
set(EXPAT_BUILD_TOOLS OFF)
set(EXPAT_BUILD_EXAMPLES OFF)
set(EXPAT_BUILD_TESTS OFF)
set(EXPAT_BUILD_DOCS OFF)
set(EXPAT_BUILD_PKGCONFIG OFF)
set(EXPAT_ENABLE_INSTALL OFF)
set(EXPAT_SHARED_LIBS OFF)
set(EXPAT_GE OFF)
set(EXPAT_DTD OFF)
set(EXPAT_NS ON)
add_subdirectory(expat/expat)
# Configure Jansson library.
set(JANSSON_BUILD_DOCS OFF)
set(JANSSON_BUILD_MAN OFF)
set(JANSSON_EXAMPLES OFF)
set(JANSSON_INSTALL OFF)
set(JANSSON_WITHOUT_TESTS ON)
add_subdirectory(jansson/jansson/)
target_include_directories(jansson INTERFACE "${PROJECT_BINARY_DIR}/3party/jansson/jansson/include")
add_library(jansson::jansson ALIAS jansson)
# Add gflags library.
add_subdirectory(gflags)
target_compile_options(gflags_nothreads_static PRIVATE $<$<CXX_COMPILER_ID:GNU>:-Wno-subobject-linkage>)
# Add pugixml library.
add_subdirectory(pugixml)
# Add protobuf library.
add_subdirectory(protobuf)
if (NOT PLATFORM_LINUX)
add_subdirectory(freetype)
add_subdirectory(icu)
add_subdirectory(harfbuzz)
endif()
add_library(utf8cpp INTERFACE)
add_library(utf8cpp::utf8cpp ALIAS utf8cpp)
target_include_directories(utf8cpp INTERFACE "${OMIM_ROOT}/3party/utfcpp/source")
endif()
add_subdirectory(agg)
add_subdirectory(bsdiff-courgette)
add_subdirectory(minizip)
add_subdirectory(open-location-code)
add_subdirectory(opening_hours)
add_subdirectory(stb_image)
add_subdirectory(succinct)
add_subdirectory(vulkan_wrapper)
if (PLATFORM_DESKTOP)
add_subdirectory(libtess2)
set(GLFW_BUILD_DOCS OFF CACHE BOOL "")
set(GLFW_BUILD_EXAMPLES OFF CACHE BOOL "")
set(GLFW_BUILD_TESTS OFF CACHE BOOL "")
set(GLFW_INSTALL OFF CACHE BOOL "")
set(GLFW_VULKAN_STATIC OFF CACHE BOOL "")
set(GLFW_BUILD_WAYLAND OFF CACHE BOOL "")
# Disable ARC for glfw and re-enable after it because it's globally set in the root CMakeLists.txt
set(CMAKE_OBJC_FLAGS "")
add_subdirectory(glfw)
set_target_properties(glfw PROPERTIES UNITY_BUILD OFF)
set_target_properties(glfw PROPERTIES XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC NO)
set(CMAKE_OBJC_FLAGS -fobjc-arc)
add_subdirectory(imgui)
endif()

@ -1 +1 @@
Subproject commit 595c8d4794410a4e64b98dc58d27c0310d7ea2fd
Subproject commit 83e1a9ed8ce289cebb1c02c8167d663dc1befb24


@ -422,7 +422,7 @@ namespace agg
inline bool is_close(unsigned c)
{
return (c & ~(path_flags_cw | path_flags_ccw)) ==
(unsigned(path_cmd_end_poly) | path_flags_close);
(path_cmd_end_poly | path_flags_close);
}
//------------------------------------------------------------is_next_poly


@ -68,7 +68,7 @@ namespace agg
*x = m_vertices[m_vertex];
*y = m_vertices[m_vertex + 1];
m_vertex += 2;
return (m_vertex == 2) ? unsigned(path_cmd_move_to) : m_cmd;
return (m_vertex == 2) ? path_cmd_move_to : m_cmd;
}
// Supplemantary functions. num_vertices() actually returns doubled


@ -429,7 +429,7 @@ namespace agg
static value_type luminance(const rgba& c)
{
// Calculate grayscale value as per ITU-R BT.709.
return value_type(uround((0.2126 * c.r + 0.7152 * c.g + 0.0722 * c.b) * double(base_mask)));
return value_type(uround((0.2126 * c.r + 0.7152 * c.g + 0.0722 * c.b) * base_mask));
}
static value_type luminance(const rgba16& c)
@ -530,13 +530,13 @@ namespace agg
//--------------------------------------------------------------------
static AGG_INLINE double to_double(value_type a)
{
return double(a) / double(base_mask);
return double(a) / base_mask;
}
//--------------------------------------------------------------------
static AGG_INLINE value_type from_double(double a)
{
return value_type(uround(a * double(base_mask)));
return value_type(uround(a * base_mask));
}
//--------------------------------------------------------------------
@ -674,7 +674,7 @@ namespace agg
else
{
calc_type v_ = (calc_type(v) * base_mask) / a;
v = value_type((v_ > base_mask) ? calc_type(base_mask) : v_);
v = value_type((v_ > base_mask) ? base_mask : v_);
}
}
return *this;
@ -684,7 +684,7 @@ namespace agg
self_type gradient(self_type c, double k) const
{
self_type ret;
calc_type ik = uround(k * double(base_scale));
calc_type ik = uround(k * base_scale);
ret.v = lerp(v, c.v, ik);
ret.a = lerp(a, c.a, ik);
return ret;
@ -921,7 +921,7 @@ namespace agg
//--------------------------------------------------------------------
static AGG_INLINE value_type mult_cover(value_type a, cover_type b)
{
return value_type(a * value_type(b) / value_type(cover_mask));
return value_type(a * b / cover_mask);
}
//--------------------------------------------------------------------

View file

@ -323,13 +323,13 @@ namespace agg
//--------------------------------------------------------------------
static AGG_INLINE double to_double(value_type a)
{
return double(a) / double(base_mask);
return double(a) / base_mask;
}
//--------------------------------------------------------------------
static AGG_INLINE value_type from_double(double a)
{
return value_type(uround(a * double(base_mask)));
return value_type(uround(a * base_mask));
}
//--------------------------------------------------------------------
@ -701,13 +701,13 @@ namespace agg
//--------------------------------------------------------------------
static AGG_INLINE double to_double(value_type a)
{
return double(a) / double(base_mask);
return double(a) / base_mask;
}
//--------------------------------------------------------------------
static AGG_INLINE value_type from_double(double a)
{
return value_type(uround(a * double(base_mask)));
return value_type(uround(a * base_mask));
}
//--------------------------------------------------------------------
@ -888,7 +888,7 @@ namespace agg
AGG_INLINE self_type gradient(const self_type& c, double k) const
{
self_type ret;
calc_type ik = uround(k * double(base_mask));
calc_type ik = uround(k * base_mask);
ret.r = lerp(r, c.r, ik);
ret.g = lerp(g, c.g, ik);
ret.b = lerp(b, c.b, ik);
@ -1120,7 +1120,7 @@ namespace agg
//--------------------------------------------------------------------
static AGG_INLINE value_type mult_cover(value_type a, cover_type b)
{
return value_type(a * double(b) / double(cover_mask));
return value_type(a * b / cover_mask);
}
//--------------------------------------------------------------------

View file

@ -98,7 +98,6 @@ namespace agg
m_markers.remove_all();
m_last_cmd = m_source->vertex(&m_start_x, &m_start_y);
m_status = accumulate;
[[fallthrough]];
case accumulate:
if(is_stop(m_last_cmd)) return path_cmd_stop;
@ -138,7 +137,6 @@ namespace agg
}
m_generator.rewind(0);
m_status = generate;
[[fallthrough]];
case generate:
cmd = m_generator.vertex(x, y);

View file

@ -398,7 +398,7 @@ namespace agg
if(m_closed && !m_stop)
{
m_stop = true;
return unsigned(path_cmd_end_poly) | path_flags_close;
return path_cmd_end_poly | path_flags_close;
}
return path_cmd_stop;
}
@ -463,7 +463,7 @@ namespace agg
if(m_closed && !m_stop)
{
m_stop = true;
return unsigned(path_cmd_end_poly) | path_flags_close;
return path_cmd_end_poly | path_flags_close;
}
return path_cmd_stop;
}
@ -525,7 +525,7 @@ namespace agg
if(m_closed && !m_stop)
{
m_stop = true;
return unsigned(path_cmd_end_poly) | path_flags_close;
return path_cmd_end_poly | path_flags_close;
}
return path_cmd_stop;
}

View file

@ -54,7 +54,7 @@ namespace agg
if (cover < cover_full)
{
double x = double(cover) / double(cover_full);
double x = double(cover) / cover_full;
c.r *= x;
c.g *= x;
c.b *= x;

View file

@ -659,7 +659,7 @@ namespace agg
while(nb)
{
cell_ptr = *block_ptr++;
i = (nb > unsigned(cell_block_size)) ? unsigned(cell_block_size) : nb;
i = (nb > cell_block_size) ? cell_block_size : nb;
nb -= i;
while(i--)
{
@ -683,7 +683,7 @@ namespace agg
while(nb)
{
cell_ptr = *block_ptr++;
i = (nb > unsigned(cell_block_size)) ? unsigned(cell_block_size) : nb;
i = (nb > cell_block_size) ? cell_block_size : nb;
nb -= i;
while(i--)
{

View file

@ -35,7 +35,7 @@ namespace agg
}
static int xi(int v) { return v; }
static int yi(int v) { return v; }
static int upscale(double v) { return iround(v * double(poly_subpixel_scale)); }
static int upscale(double v) { return iround(v * poly_subpixel_scale); }
static int downscale(int v) { return v; }
};
@ -51,7 +51,7 @@ namespace agg
static int yi(int v) { return v; }
static int upscale(double v)
{
return saturation<poly_max_coord>::iround(v * double(poly_subpixel_scale));
return saturation<poly_max_coord>::iround(v * poly_subpixel_scale);
}
static int downscale(int v) { return v; }
};
@ -66,7 +66,7 @@ namespace agg
}
static int xi(int v) { return v * 3; }
static int yi(int v) { return v; }
static int upscale(double v) { return iround(v * double(poly_subpixel_scale)); }
static int upscale(double v) { return iround(v * poly_subpixel_scale); }
static int downscale(int v) { return v; }
};
@ -78,8 +78,8 @@ namespace agg
{
return a * b / c;
}
static int xi(double v) { return iround(v * double(poly_subpixel_scale)); }
static int yi(double v) { return iround(v * double(poly_subpixel_scale)); }
static int xi(double v) { return iround(v * poly_subpixel_scale); }
static int yi(double v) { return iround(v * poly_subpixel_scale); }
static double upscale(double v) { return v; }
static double downscale(int v) { return v / double(poly_subpixel_scale); }
};
@ -92,8 +92,8 @@ namespace agg
{
return a * b / c;
}
static int xi(double v) { return iround(v * double(poly_subpixel_scale) * 3); }
static int yi(double v) { return iround(v * double(poly_subpixel_scale)); }
static int xi(double v) { return iround(v * poly_subpixel_scale * 3); }
static int yi(double v) { return iround(v * poly_subpixel_scale); }
static double upscale(double v) { return v; }
static double downscale(int v) { return v / double(poly_subpixel_scale); }
};

View file

@ -196,11 +196,11 @@ namespace agg
case end_poly1:
m_status = m_prev_status;
return unsigned(path_cmd_end_poly) | path_flags_close | path_flags_ccw;
return path_cmd_end_poly | path_flags_close | path_flags_ccw;
case end_poly2:
m_status = m_prev_status;
return unsigned(path_cmd_end_poly) | path_flags_close | path_flags_cw;
return path_cmd_end_poly | path_flags_close | path_flags_cw;
case stop:
cmd = path_cmd_stop;

@ -1 +1 @@
Subproject commit ab7968a0bbcf574a7859240d1d8443f58ed6f6cf
Subproject commit 5002c2d6a2b5ed56a82128797828de95dab2ddba

@ -1 +1 @@
Subproject commit a0dc7d5efacbe2b744211289c276e2b9168bd4ae
Subproject commit 654d2de0da85662fcc7644a7acd7c2dd2cfb21f0

@ -1 +1 @@
Subproject commit 252029ddac664370bdda3f0761675785d92a1573
Subproject commit efec03532ef65984786e5e32dbc81f6e6a55a115

@ -1 +0,0 @@
Subproject commit 42629f744269e004907a6fb4f16c6c7f69acc586

View file

@ -1,11 +1,7 @@
# TODO: Check if enabling it provides benefits.
set(FT_DISABLE_HARFBUZZ ON)
add_subdirectory(freetype)
# Fix warning with ONE_PIXEL macro clash.
target_compile_options(freetype PRIVATE -Wno-macro-redefined)
# Use ft2build.h from the current directory instead of the default.
target_include_directories(freetype
BEFORE PUBLIC
@ -14,4 +10,3 @@ target_include_directories(freetype
)
add_library(Freetype::Freetype ALIAS freetype)

@ -1 +1 @@
Subproject commit 97069edd163b66ce11e8152bee3055b2fa627e15
Subproject commit 4eb6cb8818057a022f97176b53738ee3098c8eb6

@ -1 +0,0 @@
Subproject commit 21fea01161e0d6b70c0c5c1f52dc8e7a7df14a50

View file

@ -1,41 +0,0 @@
project(harfbuzz)
set(SOURCES
harfbuzz/src/harfbuzz.cc
)
add_library(${PROJECT_NAME} ${SOURCES})
target_include_directories(${PROJECT_NAME}
PUBLIC
harfbuzz/src
)
# Keep these settings in sync with xcode/harfbuzz project.
target_compile_options(${PROJECT_NAME}
PRIVATE
-fno-rtti
-fno-exceptions
-fno-threadsafe-statics
$<$<OR:$<CXX_COMPILER_ID:Clang>,$<CXX_COMPILER_ID:AppleClang>>:-Wno-format-pedantic>
)
target_compile_definitions(${PROJECT_NAME}
PRIVATE
HAVE_FREETYPE=1
# TODO: Enable later if necessary, and sync with xcode/harfbuzz project.
#HAVE_ICU
#$<$<BOOL:${APPLE}>:HAVE_CORETEXT>
HAVE_ATEXIT
HAVE_GETPAGESIZE
HAVE_MMAP
HAVE_MPROTECT
HAVE_PTHREAD
HAVE_SYSCONF
HAVE_SYS_MMAN_H
HAVE_UNISTD_H
)
target_link_libraries(${PROJECT_NAME} Freetype::Freetype)
add_library(harfbuzz::harfbuzz ALIAS harfbuzz)

@ -1 +0,0 @@
Subproject commit 788b469ad5e5f78611f665b6eb17afd0eb040f21

View file

@ -32,13 +32,10 @@ add_library(icuuc
icu/icu4c/source/common/locbased.h
icu/icu4c/source/common/locid.cpp
icu/icu4c/source/common/loclikely.cpp
icu/icu4c/source/common/loclikelysubtags.cpp
icu/icu4c/source/common/loclikelysubtags.h
icu/icu4c/source/common/locmap.cpp
icu/icu4c/source/common/locutil.cpp
icu/icu4c/source/common/locutil.h
icu/icu4c/source/common/lsr.h
icu/icu4c/source/common/lsr.cpp
icu/icu4c/source/common/messageimpl.h
icu/icu4c/source/common/msvcres.h
icu/icu4c/source/common/mutex.h

@ -1 +1 @@
Subproject commit 7750081bda4b3bc1768ae03849ec70f67ea10625
Subproject commit 904cf62457de2440e8526deb75c95a3f7296f517

View file

@ -1,16 +0,0 @@
project(imgui)
set(SRC
imgui/imgui_draw.cpp
imgui/imgui_tables.cpp
imgui/imgui_widgets.cpp
imgui/imgui.cpp
imgui/backends/imgui_impl_glfw.cpp
)
add_library(${PROJECT_NAME} ${SRC})
target_include_directories(${PROJECT_NAME}
PRIVATE ${OMIM_ROOT}/3party/glfw/include
PUBLIC ${OMIM_ROOT}/3party/imgui/imgui
PUBLIC .
)

@ -1 +0,0 @@
Subproject commit 6982ce43f5b143c5dce5fab0ce07dd4867b705ae

@ -1 +1 @@
Subproject commit 61fc3d0e28e1a35410af42e329cd977095ec32d2
Subproject commit e23f5580072cb64ce3ab27de2b5110d7ac252424

3party/liboauthcpp/.gitignore vendored Normal file
View file

@ -0,0 +1,9 @@
# Build (by)products
build/CMakeCache.txt
build/CMakeFiles
build/Makefile
build/cmake_install.cmake
build/liboauthcpp.a
build/simple_auth
build/simple_request
build/tests

View file

@ -0,0 +1,21 @@
project(oauthcpp)
set(SRC
include/liboauthcpp/liboauthcpp.h
src/base64.cpp
src/HMAC_SHA1.cpp
src/SHA1.cpp
src/urlencode.cpp
src/liboauthcpp.cpp
)
add_library(${PROJECT_NAME} ${SRC})
target_include_directories(${PROJECT_NAME}
PRIVATE src
PUBLIC include
)
target_compile_options(${PROJECT_NAME}
PRIVATE $<$<CXX_COMPILER_ID:AppleClang,Clang>:-Wno-shorten-64-to-32>
)

View file

@ -0,0 +1,21 @@
Copyright (c) 2011 Stanford University (liboauthcpp)
Copyright (C) 2011 by swatkat (swatkat.thinkdigitATgmailDOTcom) (libtwitcurl)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -0,0 +1,172 @@
liboauthcpp
-----------
liboauthcpp is a pure C++ library for performing OAuth requests. It
doesn't contain any networking code -- you provide for performing HTTP
requests yourself, however you like -- instead focusing on performing
OAuth-specific functionality and providing a nice interface for it.
If you already have infrastructure for making HTTP requests and are
looking to add OAuth support, liboauthcpp is for you.
liboauthcpp currently implements OAuth 1.0a (see
http://tools.ietf.org/html/rfc5849).
Buildbot
--------
[![Build Status](https://secure.travis-ci.org/sirikata/liboauthcpp.png)](http://travis-ci.org/sirikata/liboauthcpp)
Requirements
------------
You should only need:
* CMake
* A C++ compiler for your platform (e.g. g++, Microsoft Visual C++)
Compiling
---------
The build process is simple:
cd liboauthcpp
cd build
cmake .
make # or open Visual Studio and build the solution
If your own project uses CMake you can also include
build/CMakeLists.txt directly into your project and reference the
target "oauthcpp", a static library, in your project.
Percent (URL) Encoding
----------------------
To get correct results, you need to pass your URL properly encoded to
liboauthcpp. If you are not at all familiar, you should probably start
by reading the [URI Spec](http://tools.ietf.org/html/rfc3986), especially
Section 2. Alternatively,
[this article](http://blog.lunatech.com/2009/02/03/what-every-web-developer-must-know-about-url-encoding)
gives a more readable overview.
The basic idea is that there are 3 classes of characters: reserved,
unreserved, and other. Reserved characters are special characters that
are used in the URI syntax itself, e.g. ':' (after the scheme), '/'
(the hierarchical path separator), and '?' (prefixing the query
string). Unreserved characters are characters that are always safe to
include unencoded, e.g. the alphanumerics. Other characters must
always be encoded, mainly covering special characters like ' ', '<' or
'>', and '{' or '}'.
The basic rule is that reserved characters must be encoded if they
appear in any part of the URI when not being used as a
separator. Unreserved characters are always safe. The remaining
"other" characters might or might not be safe, so they must always be
encoded.
Unfortunately, the reserved set is a bit more complicated. They are
broken down into 'general delimiters' and 'sub delimiters'. The ones
already mentioned, like ':', can appear in many forms of URIs (say,
http, ftp, about, gopher, mailto, etc.). Those are called general
delimiters. Others (e.g. '(', ')', '!', '$', '+', ',', '=', and more)
are called subdelimiters because their use depends on the URI
scheme. Worse, their use depends on the *part of the URI*. Depending
on the particular URI scheme, these may or may not have to be encoded,
and it might also depend on where they appear. (As an example, an '&'
in an http URI isn't an issue if it appears in the path -- before the
query string -- i.e. before a '?' appears. Worse, '=' can appear unencoded in
the path, or in a query parameter value, but not in a query parameter key since
it would be interpreted as the end of the key.)
*Additionally*, in many cases it is permitted to encode a character
unnecessarily and the result is supposed to be the same. This means
that it's possible to percent encode some URLs in multiple ways
(e.g. encoding the unreserved set unnecessarily). It is possible, but not
guaranteed, that if you pass *exactly* the same URI to liboauthcpp and the
OAuth server, it will handle it regardless of the variant of encoding, so long
as it is a valid encoding.
The short version: percent encoding a URL properly is non-trivial and
you can even encode the same URL multiple ways, but has to be done
correctly so that the OAuth signature can be computed. Sadly,
"correctly" in this case really means "in whatever way the server your
interacting with wants it encoded".
Internally, liboauthcpp needs to do another step of percent encoding,
but the OAuth spec is very precise about how that works (none of these
scheme-dependent issues). liboauthcpp applies this percent encoding, but
assumes that you have encoded your URLs properly. This assumption
makes sense since the actual request is made separately, and the URI
has to be specified in it, so you should already have a form which the
server will accept.
However, in order to aid you, a very simple percent encoding API is exposed. It
should help you encode URLs minimally and in a way that many services accept. In
most cases you should use `HttpEncodePath()`,
`HttpEncodeQueryKey()`, and `HttpEncodeQueryValue()` to encode
those parts of your http URL, then combine them and pass them to liboauthcpp for
signing.
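As a minimal, hypothetical sketch, the three helpers might be combined like
this -- the host, path, and parameter values below are placeholders and not
part of the library:
```cpp
#include <liboauthcpp/liboauthcpp.h>

#include <iostream>
#include <string>

int main()
{
  // Encode each part of a hypothetical http URL separately, then combine them.
  std::string const path = OAuth::HttpEncodePath("/1.1/statuses/update.json");
  std::string const key = OAuth::HttpEncodeQueryKey("status");
  std::string const value = OAuth::HttpEncodeQueryValue("hello world & friends");

  std::string const url = "https://api.example.com" + path + "?" + key + "=" + value;
  std::cout << url << std::endl;  // the space and '&' in the value are percent-encoded
  return 0;
}
```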
Thread Safety
-------------
liboauthcpp doesn't provide any thread safety guarantees. That said, there is
very little shared state, and some classes (e.g. Consumer) are naturally
immutable and therefore thread safe. Similarly, nearly the entire library uses
no static/shared state, so as long as you create separate objects for separate
threads, you should be safe.
The one exception is nonces: the Client class needs to generate a nonce for
authorization. To do so, the random number generator needs to be seeded. We do
this with the current time, but fast, repeated use of the Client class from
different threads could result in the same nonce. To avoid requiring an entire
thread library just for this one case, you can call Client::initialize()
explicitly before using the Client from multiple threads. For single-threaded
use, you are not required to call it.
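A minimal sketch of the explicit initialization described above; the worker
function, key strings, and thread count are purely illustrative:
```cpp
#include <liboauthcpp/liboauthcpp.h>

#include <functional>
#include <thread>
#include <vector>

void DoSignedRequests(OAuth::Consumer const & consumer)
{
  OAuth::Client client(&consumer);
  // ... build signed requests with client.getFormattedHttpHeader(...) ...
}

int main()
{
  // Seed the nonce generator once, before any thread constructs a Client.
  OAuth::Client::initialize();

  OAuth::Consumer consumer("consumer-key", "consumer-secret");
  std::vector<std::thread> workers;
  for (int i = 0; i < 4; ++i)
    workers.emplace_back(DoSignedRequests, std::cref(consumer));
  for (auto & t : workers)
    t.join();
  return 0;
}
```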
Demos
-----
There are two demos included in the demos/ directory, and they are built by
default with the instructions above. In both, you enter key/secret information
and it generates URLs for you to visit (in a browser) and copy data back into
the program.
simple_auth should be executed first. It starts with only a consumer key and
secret and performs 3-legged auth: you enter in consumer keys, it generates URLs
to authenticate the user and generate access tokens. It requires 3 steps:
request_token, authorize, and access_token (which correspond the URLs
accessed). At the end of this process, you'll be provided an access key/secret
pair which you can use to access actual resources.
simple_request actually does something useful now that your application is
authorized. Enter your consumer key/secret and the access key/secret from
simple_auth (or which you've generated elsewhere) and it will generate a URL you
can use to access your home timeline in JSON format. It adds a parameter to ask
for only 5 entries (demonstrating that signing works properly over additional
query parameters). This is a one-step process -- it just gives you the URL and
you get the results in your browser.
In both, the URLs accessed are specified at the top of the demo
files. simple_auth requires URLs for request_token, authorize_url, and
access_token. Some providers require additional parameters (notably an
oauth_callback for Twitter, even if it's out of band, or oob), which you can also
specify in that location. simple_request only needs the URL of the resource
being accessed (i.e. the URL for the home_timeline JSON data used by default in
the demo), with optional parameters stored as a query string.
Both demos only use GET requests with query strings, but all HTTP methods
(e.g. POST, PUT, DELETE) and approaches to sending parameters (e.g. HTTP
headers, url-encoded body) should be supported in the API.
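As a rough sketch (not taken from the demos themselves), the three-legged flow
simple_auth walks through might look like the following; the provider URLs are
placeholders and FetchUrl() stands in for whatever HTTP client the application
provides:
```cpp
#include <liboauthcpp/liboauthcpp.h>

#include <iostream>
#include <string>

// Placeholder: replace with a real HTTP GET using your networking library of choice.
std::string FetchUrl(std::string const & /* url */)
{
  return "oauth_token=dummy-key&oauth_token_secret=dummy-secret";
}

int main()
{
  OAuth::Consumer consumer("consumer-key", "consumer-secret");

  // 1. request_token: sign the request-token URL and fetch it.
  std::string const requestTokenBase = "https://provider.example/oauth/request_token";
  std::string const requestTokenUrl = requestTokenBase + "?oauth_callback=oob";
  OAuth::Client requestClient(&consumer);
  std::string query = requestClient.getURLQueryString(OAuth::Http::Get, requestTokenUrl);
  OAuth::Token token = OAuth::Token::extract(FetchUrl(requestTokenBase + "?" + query));

  // 2. authorize: the user visits the authorize URL and comes back with a PIN/verifier.
  std::cout << "Visit: https://provider.example/oauth/authorize?oauth_token=" << token.key() << "\n";
  std::string pin;
  std::cin >> pin;
  token.setPin(pin);

  // 3. access_token: exchange the authorized request token (with verifier) for an access token.
  std::string const accessTokenUrl = "https://provider.example/oauth/access_token";
  OAuth::Client accessClient(&consumer, &token);
  query = accessClient.getURLQueryString(OAuth::Http::Get, accessTokenUrl, "", true);
  OAuth::Token access = OAuth::Token::extract(FetchUrl(accessTokenUrl + "?" + query));

  std::cout << "Access key: " << access.key() << ", secret: " << access.secret() << "\n";
  return 0;
}
```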
License
-------
liboauthcpp is MIT licensed. See the LICENSE file for more details.
liboauthcpp is mostly taken from libtwitcurl
(http://code.google.com/p/twitcurl/), which is similarly licensed. It
mostly serves to isolate the OAuth code from libtwitcurl's Twitter and
cURL specific code.
libtwitcurl also borrowed code from other projects:
twitcurl uses HMAC_SHA1 from http://www.codeproject.com/KB/recipes/HMACSHA1class.aspx
twitcurl uses base64 from http://www.adp-gmbh.ch/cpp/common/base64.html

View file

@ -0,0 +1,286 @@
#ifndef __LIBOAUTHCPP_LIBOAUTHCPP_H__
#define __LIBOAUTHCPP_LIBOAUTHCPP_H__
#include <string>
#include <list>
#include <map>
#include <stdexcept>
#include <ctime>
namespace OAuth {
namespace Http {
typedef enum _RequestType
{
Invalid = 0,
Head,
Get,
Post,
Delete,
Put
} RequestType;
} // namespace Http
typedef std::list<std::string> KeyValueList;
typedef std::multimap<std::string, std::string> KeyValuePairs;
typedef enum _LogLevel
{
LogLevelNone = 0,
LogLevelDebug = 1
} LogLevel;
/** Set the log level. Log messages are sent to stderr. Currently, and for the
* foreseeable future, logging only consists of debug messages to help track
* down protocol implementation issues.
*/
void SetLogLevel(LogLevel lvl);
/** Deprecated. Complete percent encoding of URLs. Equivalent to
* PercentEncode.
*/
std::string URLEncode(const std::string& decoded);
/** Percent encode a string value. This version is *thorough* about
* encoding: it encodes all reserved characters (even those safe in
* http URLs) and "other" characters not specified by the URI
* spec. If you're looking to encode http:// URLs, see the
* HttpEncode* functions.
*/
std::string PercentEncode(const std::string& decoded);
/** Percent encodes the path portion of an http URL (i.e. the /foo/bar
* in http://foo/bar?a=1&b=2). This encodes minimally, so reserved
* subdelimiters that have no meaning in the path are *not* encoded.
*/
std::string HttpEncodePath(const std::string& decoded);
/** Percent encodes a query string key in an http URL (i.e. 'a', 'b' in
* http://foo/bar?a=1&b=2). This encodes minimally, so reserved subdelimiters
* that have no meaning in the query string are *not* encoded.
*/
std::string HttpEncodeQueryKey(const std::string& decoded);
/** Percent encodes a query string value in an http URL (i.e. '1', '2' in
* http://foo/bar?a=1&b=2). This encodes minimally, so reserved subdelimiters
* that have no meaning in the query string are *not* encoded.
*/
std::string HttpEncodeQueryValue(const std::string& decoded);
/** Parses key value pairs into a map.
* \param encoded the encoded key value pairs, i.e. the url encoded parameters
* \returns a map of string keys to string values
* \throws ParseError if the encoded data cannot be decoded
*/
KeyValuePairs ParseKeyValuePairs(const std::string& encoded);
class ParseError : public std::runtime_error {
public:
ParseError(const std::string msg)
: std::runtime_error(msg)
{}
};
class MissingKeyError : public std::runtime_error {
public:
MissingKeyError(const std::string msg)
: std::runtime_error(msg)
{}
};
/** A consumer of OAuth-protected services. It is the client to an
* OAuth service provider and is usually registered with the service
* provider, resulting in a consumer *key* and *secret* used to
* identify the consumer. The key is included in all requests and the
* secret is used to *sign* all requests. Signed requests allow the
* consumer to securely perform operations, including kicking off
* three-legged authentication to enable performing operations on
* behalf of a user of the service provider.
*/
class Consumer {
public:
Consumer(const std::string& key, const std::string& secret);
const std::string& key() const { return mKey; }
const std::string& secret() const { return mSecret; }
private:
const std::string mKey;
const std::string mSecret;
};
/** An OAuth credential used to request authorization or a protected
* resource.
*
* Tokens in OAuth comprise a *key* and a *secret*. The key is
* included in requests to identify the token being used, but the
* secret is used only in the signature, to prove that the requester
* is who the server gave the token to.
*
* When first negotiating the authorization, the consumer asks for a
* *request token* that the live user authorizes with the service
* provider. The consumer then exchanges the request token for an
* *access token* that can be used to access protected resources.
*/
class Token {
public:
Token(const std::string& key, const std::string& secret);
Token(const std::string& key, const std::string& secret, const std::string& pin);
/** Construct a token, extracting the key and secret from a set of
* key-value pairs (e.g. those parsed from a request or access
* token request).
*/
static Token extract(const KeyValuePairs& response);
/** Construct a token, extracting the key and secret from a raw,
* encoded response.
*/
static Token extract(const std::string& requestTokenResponse);
const std::string& key() const { return mKey; }
const std::string& secret() const { return mSecret; }
const std::string& pin() const { return mPin; }
void setPin(const std::string& pin_) { mPin = pin_; }
private:
const std::string mKey;
const std::string mSecret;
std::string mPin;
};
class Client {
public:
/** Perform static initialization. This will be called automatically, but
* you can call it explicitly to ensure thread safety. If you do not call
* this explicitly before using the Client class, the same nonce may be
* generated twice.
*/
static void initialize();
/** Alternative initialize method which lets you specify the seed and
* control the timestamp used in generating signatures. This only exists
* for testing purposes and should not be used in practice.
*/
static void initialize(int nonce, time_t timestamp);
/** Exposed for testing only.
*/
static void __resetInitialize();
/** Construct an OAuth Client using only a consumer key and
* secret. You can use this to start a three-legged
* authentication (to acquire an access token for a user) or for
* simple two-legged authentication (signing with empty access
* token info).
*
* \param consumer Consumer information. The caller must ensure
* it remains valid during the lifetime of this object
*/
Client(const Consumer* consumer);
/** Construct an OAuth Client with consumer key and secret (yours)
* and access token key and secret (acquired and stored during
* three-legged authentication).
*
* \param consumer Consumer information. The caller must ensure
* it remains valid during the lifetime of this object
* \param token Access token information. The caller must ensure
* it remains valid during the lifetime of this object
*/
Client(const Consumer* consumer, const Token* token);
~Client();
/** Build an OAuth HTTP header for the given request. This version provides
* only the field value.
*
* \param eType the HTTP request type, e.g. GET or POST
* \param rawUrl the raw request URL (should include query parameters)
* \param rawData the raw HTTP request data (can be empty)
* \param includeOAuthVerifierPin if true, adds oauth_verifier parameter
* \returns a string containing the HTTP header
*/
std::string getHttpHeader(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData = "",
const bool includeOAuthVerifierPin = false);
/** Build an OAuth HTTP header for the given request. This version gives a
* fully formatted header, i.e. including the header field name.
*
* \param eType the HTTP request type, e.g. GET or POST
* \param rawUrl the raw request URL (should include query parameters)
* \param rawData the raw HTTP request data (can be empty)
* \param includeOAuthVerifierPin if true, adds oauth_verifier parameter
* \returns a string containing the HTTP header
*/
std::string getFormattedHttpHeader(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData = "",
const bool includeOAuthVerifierPin = false);
/** Build an OAuth HTTP header for the given request.
*
* \param eType the HTTP request type, e.g. GET or POST
* \param rawUrl the raw request URL (should include query parameters)
* \param rawData the raw HTTP request data (can be empty)
* \param includeOAuthVerifierPin if true, adds oauth_verifier parameter
* \returns a string containing the query string, including the query
* parameters in the rawUrl
*/
std::string getURLQueryString(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData = "",
const bool includeOAuthVerifierPin = false);
private:
/** Disable default constructor -- must provide consumer
* information.
*/
Client();
static bool initialized;
static int testingNonce;
static time_t testingTimestamp;
/* OAuth data */
const Consumer* mConsumer;
const Token* mToken;
std::string m_nonce;
std::string m_timeStamp;
/* OAuth related utility methods */
bool buildOAuthTokenKeyValuePairs( const bool includeOAuthVerifierPin, /* in */
const std::string& rawData, /* in */
const std::string& oauthSignature, /* in */
KeyValuePairs& keyValueMap /* out */,
const bool urlEncodeValues /* in */,
const bool generateTimestamp /* in */);
bool getStringFromOAuthKeyValuePairs( const KeyValuePairs& rawParamMap, /* in */
std::string& rawParams, /* out */
const std::string& paramsSeperator /* in */ );
typedef enum _ParameterStringType {
QueryStringString,
AuthorizationHeaderString
} ParameterStringType;
// Utility for building OAuth HTTP header or query string. The string type
// controls the separator and also filters parameters: for query strings,
// all parameters are included. For HTTP headers, only auth parameters are
// included.
std::string buildOAuthParameterString(
ParameterStringType string_type,
const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData,
const bool includeOAuthVerifierPin);
bool getSignature( const Http::RequestType eType, /* in */
const std::string& rawUrl, /* in */
const KeyValuePairs& rawKeyValuePairs, /* in */
std::string& oAuthSignature /* out */ );
void generateNonceTimeStamp();
};
} // namespace OAuth
#endif // __LIBOAUTHCPP_LIBOAUTHCPP_H__

View file

@ -0,0 +1,59 @@
//******************************************************************************
//* HMAC_SHA1.cpp : Implementation of HMAC SHA1 algorithm
//* Conforms to RFC 2104
//*
//******************************************************************************
#include "HMAC_SHA1.h"
#include <iostream>
#include <memory>
void CHMAC_SHA1::HMAC_SHA1(BYTE *text, int text_len, BYTE *key, int key_len, BYTE *digest)
{
memset(SHA1_Key, 0, SHA1_BLOCK_SIZE);
/* repeated 64 times for values in ipad and opad */
memset(m_ipad, 0x36, sizeof(m_ipad));
memset(m_opad, 0x5c, sizeof(m_opad));
/* STEP 1 */
if (key_len > SHA1_BLOCK_SIZE)
{
CSHA1::Reset();
CSHA1::Update((UINT_8 *)key, key_len);
CSHA1::Final();
CSHA1::GetHash((UINT_8 *)SHA1_Key);
}
else
memcpy(SHA1_Key, key, key_len);
/* STEP 2 */
for (int i=0; i<(int)sizeof(m_ipad); i++)
{
m_ipad[i] ^= SHA1_Key[i];
}
/* STEP 4 */
CSHA1::Reset();
CSHA1::Update((UINT_8 *)m_ipad, sizeof(m_ipad));
CSHA1::Update((UINT_8 *)text, text_len);
CSHA1::Final();
char szReport[SHA1_DIGEST_LENGTH];
CSHA1::GetHash((UINT_8 *)szReport);
/* STEP 5 */
for (int j=0; j<(int)sizeof(m_opad); j++)
{
m_opad[j] ^= SHA1_Key[j];
}
/*STEP 7 */
CSHA1::Reset();
CSHA1::Update((UINT_8 *)m_opad, sizeof(m_opad));
CSHA1::Update((UINT_8 *)szReport, SHA1_DIGEST_LENGTH);
CSHA1::Final();
CSHA1::GetHash((UINT_8 *)digest);
}

View file

@ -0,0 +1,37 @@
/*
100% free public domain implementation of the HMAC-SHA1 algorithm
by Chien-Chung, Chung (Jim Chung) <jimchung1221@gmail.com>
*/
#ifndef __HMAC_SHA1_H__
#define __HMAC_SHA1_H__
#include "SHA1.h"
typedef unsigned char BYTE ;
class CHMAC_SHA1 : public CSHA1
{
public:
enum {
SHA1_DIGEST_LENGTH = 20,
SHA1_BLOCK_SIZE = 64
} ;
private:
BYTE m_ipad[SHA1_BLOCK_SIZE];
BYTE m_opad[SHA1_BLOCK_SIZE];
// This holds one SHA1 block's worth of data, zero padded if necessary.
char SHA1_Key[SHA1_BLOCK_SIZE];
public:
CHMAC_SHA1() {}
void HMAC_SHA1(BYTE *text, int text_len, BYTE *key, int key_len, BYTE *digest);
};
#endif /* __HMAC_SHA1_H__ */

View file

@ -0,0 +1,277 @@
/*
100% free public domain implementation of the SHA-1 algorithm
by Dominik Reichl <dominik.reichl@t-online.de>
Web: http://www.dominik-reichl.de/
Version 1.6 - 2005-02-07 (thanks to Howard Kapustein for patches)
- You can set the endianness in your files, no need to modify the
header file of the CSHA1 class any more
- Aligned data support
- Made support/compilation of the utility functions (ReportHash
and HashFile) optional (useful, if bytes count, for example in
embedded environments)
Version 1.5 - 2005-01-01
- 64-bit compiler compatibility added
- Made variable wiping optional (define SHA1_WIPE_VARIABLES)
- Removed unnecessary variable initializations
- ROL32 improvement for the Microsoft compiler (using _rotl)
======== Test Vectors (from FIPS PUB 180-1) ========
SHA1("abc") =
A9993E36 4706816A BA3E2571 7850C26C 9CD0D89D
SHA1("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq") =
84983E44 1C3BD26E BAAE4AA1 F95129E5 E54670F1
SHA1(A million repetitions of "a") =
34AA973C D4C4DAA4 F61EEB2B DBAD2731 6534016F
*/
#include "SHA1.h"
#include <cassert>
#ifdef SHA1_UTILITY_FUNCTIONS
#define SHA1_MAX_FILE_BUFFER 8000
#endif
// Rotate x bits to the left
#ifndef ROL32
#ifdef _MSC_VER
#define ROL32(_val32, _nBits) _rotl(_val32, _nBits)
#else
#define ROL32(_val32, _nBits) (((_val32)<<(_nBits))|((_val32)>>(32-(_nBits))))
#endif
#endif
#ifdef SHA1_LITTLE_ENDIAN
#define SHABLK0(i) (m_block->l[i] = \
(ROL32(m_block->l[i],24) & 0xFF00FF00) | (ROL32(m_block->l[i],8) & 0x00FF00FF))
#else
#define SHABLK0(i) (m_block->l[i])
#endif
#define SHABLK(i) (m_block->l[i&15] = ROL32(m_block->l[(i+13)&15] ^ m_block->l[(i+8)&15] \
^ m_block->l[(i+2)&15] ^ m_block->l[i&15],1))
// SHA-1 rounds
#define _R0(v,w,x,y,z,i) { z+=((w&(x^y))^y)+SHABLK0(i)+0x5A827999+ROL32(v,5); w=ROL32(w,30); }
#define _R1(v,w,x,y,z,i) { z+=((w&(x^y))^y)+SHABLK(i)+0x5A827999+ROL32(v,5); w=ROL32(w,30); }
#define _R2(v,w,x,y,z,i) { z+=(w^x^y)+SHABLK(i)+0x6ED9EBA1+ROL32(v,5); w=ROL32(w,30); }
#define _R3(v,w,x,y,z,i) { z+=(((w|x)&y)|(w&x))+SHABLK(i)+0x8F1BBCDC+ROL32(v,5); w=ROL32(w,30); }
#define _R4(v,w,x,y,z,i) { z+=(w^x^y)+SHABLK(i)+0xCA62C1D6+ROL32(v,5); w=ROL32(w,30); }
CSHA1::CSHA1()
{
m_block = (SHA1_WORKSPACE_BLOCK *)m_workspace;
Reset();
}
CSHA1::~CSHA1()
{
Reset();
}
void CSHA1::Reset()
{
// SHA1 initialization constants
m_state[0] = 0x67452301;
m_state[1] = 0xEFCDAB89;
m_state[2] = 0x98BADCFE;
m_state[3] = 0x10325476;
m_state[4] = 0xC3D2E1F0;
m_count[0] = 0;
m_count[1] = 0;
}
void CSHA1::Transform(UINT_32 *state, UINT_8 *buffer)
{
// Copy state[] to working vars
UINT_32 a = state[0], b = state[1], c = state[2], d = state[3], e = state[4];
memcpy(m_block, buffer, 64);
// 4 rounds of 20 operations each. Loop unrolled.
_R0(a,b,c,d,e, 0); _R0(e,a,b,c,d, 1); _R0(d,e,a,b,c, 2); _R0(c,d,e,a,b, 3);
_R0(b,c,d,e,a, 4); _R0(a,b,c,d,e, 5); _R0(e,a,b,c,d, 6); _R0(d,e,a,b,c, 7);
_R0(c,d,e,a,b, 8); _R0(b,c,d,e,a, 9); _R0(a,b,c,d,e,10); _R0(e,a,b,c,d,11);
_R0(d,e,a,b,c,12); _R0(c,d,e,a,b,13); _R0(b,c,d,e,a,14); _R0(a,b,c,d,e,15);
_R1(e,a,b,c,d,16); _R1(d,e,a,b,c,17); _R1(c,d,e,a,b,18); _R1(b,c,d,e,a,19);
_R2(a,b,c,d,e,20); _R2(e,a,b,c,d,21); _R2(d,e,a,b,c,22); _R2(c,d,e,a,b,23);
_R2(b,c,d,e,a,24); _R2(a,b,c,d,e,25); _R2(e,a,b,c,d,26); _R2(d,e,a,b,c,27);
_R2(c,d,e,a,b,28); _R2(b,c,d,e,a,29); _R2(a,b,c,d,e,30); _R2(e,a,b,c,d,31);
_R2(d,e,a,b,c,32); _R2(c,d,e,a,b,33); _R2(b,c,d,e,a,34); _R2(a,b,c,d,e,35);
_R2(e,a,b,c,d,36); _R2(d,e,a,b,c,37); _R2(c,d,e,a,b,38); _R2(b,c,d,e,a,39);
_R3(a,b,c,d,e,40); _R3(e,a,b,c,d,41); _R3(d,e,a,b,c,42); _R3(c,d,e,a,b,43);
_R3(b,c,d,e,a,44); _R3(a,b,c,d,e,45); _R3(e,a,b,c,d,46); _R3(d,e,a,b,c,47);
_R3(c,d,e,a,b,48); _R3(b,c,d,e,a,49); _R3(a,b,c,d,e,50); _R3(e,a,b,c,d,51);
_R3(d,e,a,b,c,52); _R3(c,d,e,a,b,53); _R3(b,c,d,e,a,54); _R3(a,b,c,d,e,55);
_R3(e,a,b,c,d,56); _R3(d,e,a,b,c,57); _R3(c,d,e,a,b,58); _R3(b,c,d,e,a,59);
_R4(a,b,c,d,e,60); _R4(e,a,b,c,d,61); _R4(d,e,a,b,c,62); _R4(c,d,e,a,b,63);
_R4(b,c,d,e,a,64); _R4(a,b,c,d,e,65); _R4(e,a,b,c,d,66); _R4(d,e,a,b,c,67);
_R4(c,d,e,a,b,68); _R4(b,c,d,e,a,69); _R4(a,b,c,d,e,70); _R4(e,a,b,c,d,71);
_R4(d,e,a,b,c,72); _R4(c,d,e,a,b,73); _R4(b,c,d,e,a,74); _R4(a,b,c,d,e,75);
_R4(e,a,b,c,d,76); _R4(d,e,a,b,c,77); _R4(c,d,e,a,b,78); _R4(b,c,d,e,a,79);
// Add the working vars back into state
state[0] += a;
state[1] += b;
state[2] += c;
state[3] += d;
state[4] += e;
// Wipe variables
#ifdef SHA1_WIPE_VARIABLES
a = b = c = d = e = 0;
#endif
}
// Use this function to hash in binary data and strings
void CSHA1::Update(UINT_8 *data, UINT_32 len)
{
UINT_32 i, j;
j = (m_count[0] >> 3) & 63;
if((m_count[0] += len << 3) < (len << 3)) m_count[1]++;
m_count[1] += (len >> 29);
if((j + len) > 63)
{
i = 64 - j;
memcpy(&m_buffer[j], data, i);
Transform(m_state, m_buffer);
for(; i + 63 < len; i += 64) Transform(m_state, &data[i]);
j = 0;
}
else i = 0;
memcpy(&m_buffer[j], &data[i], len - i);
}
#ifdef SHA1_UTILITY_FUNCTIONS
// Hash in file contents
bool CSHA1::HashFile(char *szFileName)
{
unsigned long ulFileSize, ulRest, ulBlocks;
unsigned long i;
UINT_8 uData[SHA1_MAX_FILE_BUFFER];
FILE *fIn;
if(szFileName == NULL) return false;
fIn = fopen(szFileName, "rb");
if(fIn == NULL) return false;
fseek(fIn, 0, SEEK_END);
ulFileSize = (unsigned long)ftell(fIn);
fseek(fIn, 0, SEEK_SET);
if(ulFileSize != 0)
{
ulBlocks = ulFileSize / SHA1_MAX_FILE_BUFFER;
ulRest = ulFileSize % SHA1_MAX_FILE_BUFFER;
}
else
{
ulBlocks = 0;
ulRest = 0;
}
for(i = 0; i < ulBlocks; i++)
{
size_t nread = fread(uData, 1, SHA1_MAX_FILE_BUFFER, fIn);
assert(nread == SHA1_MAX_FILE_BUFFER);
Update((UINT_8 *)uData, SHA1_MAX_FILE_BUFFER);
}
if(ulRest != 0)
{
size_t nread = fread(uData, 1, ulRest, fIn);
assert(nread == ulRest);
Update((UINT_8 *)uData, ulRest);
}
fclose(fIn); fIn = NULL;
return true;
}
#endif
void CSHA1::Final()
{
UINT_32 i;
UINT_8 finalcount[8];
for(i = 0; i < 8; i++)
finalcount[i] = (UINT_8)((m_count[((i >= 4) ? 0 : 1)]
>> ((3 - (i & 3)) * 8) ) & 255); // Endian independent
Update((UINT_8 *)"\200", 1);
while ((m_count[0] & 504) != 448)
Update((UINT_8 *)"\0", 1);
Update(finalcount, 8); // Cause a SHA1Transform()
for(i = 0; i < 20; i++)
{
m_digest[i] = (UINT_8)((m_state[i >> 2] >> ((3 - (i & 3)) * 8) ) & 255);
}
// Wipe variables for security reasons
#ifdef SHA1_WIPE_VARIABLES
i = 0;
memset(m_buffer, 0, 64);
memset(m_state, 0, 20);
memset(m_count, 0, 8);
memset(finalcount, 0, 8);
Transform(m_state, m_buffer);
#endif
}
#ifdef SHA1_UTILITY_FUNCTIONS
// Get the final hash as a pre-formatted string
void CSHA1::ReportHash(char *szReport, unsigned char uReportType)
{
unsigned char i;
char szTemp[16];
if(szReport == NULL) return;
if(uReportType == REPORT_HEX)
{
snprintf(szTemp, sizeof(szTemp), "%02X", m_digest[0]);
strcat(szReport, szTemp);
for(i = 1; i < 20; i++)
{
snprintf(szTemp, sizeof(szTemp), " %02X", m_digest[i]);
strcat(szReport, szTemp);
}
}
else if(uReportType == REPORT_DIGIT)
{
snprintf(szTemp, sizeof(szTemp), "%u", m_digest[0]);
strcat(szReport, szTemp);
for(i = 1; i < 20; i++)
{
snprintf(szTemp, sizeof(szTemp), " %u", m_digest[i]);
strcat(szReport, szTemp);
}
}
else strcpy(szReport, "Error: Unknown report type!");
}
#endif
// Get the raw message digest
void CSHA1::GetHash(UINT_8 *puDest)
{
memcpy(puDest, m_digest, 20);
}

View file

@ -0,0 +1,148 @@
/*
100% free public domain implementation of the SHA-1 algorithm
by Dominik Reichl <dominik.reichl@t-online.de>
Web: http://www.dominik-reichl.de/
Version 1.6 - 2005-02-07 (thanks to Howard Kapustein for patches)
- You can set the endianness in your files, no need to modify the
header file of the CSHA1 class any more
- Aligned data support
- Made support/compilation of the utility functions (ReportHash
and HashFile) optional (useful, if bytes count, for example in
embedded environments)
Version 1.5 - 2005-01-01
- 64-bit compiler compatibility added
- Made variable wiping optional (define SHA1_WIPE_VARIABLES)
- Removed unnecessary variable initializations
- ROL32 improvement for the Microsoft compiler (using _rotl)
======== Test Vectors (from FIPS PUB 180-1) ========
SHA1("abc") =
A9993E36 4706816A BA3E2571 7850C26C 9CD0D89D
SHA1("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq") =
84983E44 1C3BD26E BAAE4AA1 F95129E5 E54670F1
SHA1(A million repetitions of "a") =
34AA973C D4C4DAA4 F61EEB2B DBAD2731 6534016F
*/
#ifndef ___SHA1_HDR___
#define ___SHA1_HDR___
#if !defined(SHA1_UTILITY_FUNCTIONS) && !defined(SHA1_NO_UTILITY_FUNCTIONS)
#define SHA1_UTILITY_FUNCTIONS
#endif
#include <memory.h> // Needed for memset and memcpy
#ifdef SHA1_UTILITY_FUNCTIONS
#include <stdio.h> // Needed for file access and sprintf
#include <string.h> // Needed for strcat and strcpy
#endif
#ifdef _MSC_VER
#include <stdlib.h>
#endif
// You can define the endian mode in your files, without modifying the SHA1
// source files. Just #define SHA1_LITTLE_ENDIAN or #define SHA1_BIG_ENDIAN
// in your files, before including the SHA1.h header file. If you don't
// define anything, the class defaults to little endian.
#if !defined(SHA1_LITTLE_ENDIAN) && !defined(SHA1_BIG_ENDIAN)
#define SHA1_LITTLE_ENDIAN
#endif
// Same here. If you want variable wiping, #define SHA1_WIPE_VARIABLES, if
// not, #define SHA1_NO_WIPE_VARIABLES. If you don't define anything, it
// defaults to wiping.
#if !defined(SHA1_WIPE_VARIABLES) && !defined(SHA1_NO_WIPE_VARIABLES)
#define SHA1_WIPE_VARIABLES
#endif
/////////////////////////////////////////////////////////////////////////////
// Define 8- and 32-bit variables
#ifndef UINT_32
#ifdef _MSC_VER
#define UINT_8 unsigned __int8
#define UINT_32 unsigned __int32
#else
#define UINT_8 unsigned char
#if (ULONG_MAX == 0xFFFFFFFF && UINT_MAX < ULONG_MAX)
#define UINT_32 unsigned long
#else
#define UINT_32 unsigned int
#endif
#endif
#endif
/////////////////////////////////////////////////////////////////////////////
// Declare SHA1 workspace
typedef union
{
UINT_8 c[64];
UINT_32 l[16];
} SHA1_WORKSPACE_BLOCK;
class CSHA1
{
public:
#ifdef SHA1_UTILITY_FUNCTIONS
// Two different formats for ReportHash(...)
enum
{
REPORT_HEX = 0,
REPORT_DIGIT = 1
};
#endif
// Constructor and Destructor
CSHA1();
~CSHA1();
UINT_32 m_state[5];
UINT_32 m_count[2];
UINT_32 __reserved1[1];
UINT_8 m_buffer[64];
UINT_8 m_digest[20];
UINT_32 __reserved2[3];
void Reset();
// Update the hash value
void Update(UINT_8 *data, UINT_32 len);
#ifdef SHA1_UTILITY_FUNCTIONS
bool HashFile(char *szFileName);
#endif
// Finalize hash and report
void Final();
// Report functions: as pre-formatted and raw data
#ifdef SHA1_UTILITY_FUNCTIONS
void ReportHash(char *szReport, unsigned char uReportType = REPORT_HEX);
#endif
void GetHash(UINT_8 *puDest);
private:
// Private SHA-1 transformation
void Transform(UINT_32 *state, UINT_8 *buffer);
// Member variables
UINT_8 m_workspace[64];
SHA1_WORKSPACE_BLOCK *m_block; // SHA1 pointer to the byte array above
};
#endif

View file

@ -0,0 +1,123 @@
/*
base64.cpp and base64.h
Copyright (C) 2004-2008 René Nyffenegger
This source code is provided 'as-is', without any express or implied
warranty. In no event will the author be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this source code must not be misrepresented; you must not
claim that you wrote the original source code. If you use this source code
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original source code.
3. This notice may not be removed or altered from any source distribution.
René Nyffenegger rene.nyffenegger@adp-gmbh.ch
*/
#include "base64.h"
#include <iostream>
static const std::string base64_chars =
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/";
static inline bool is_base64(unsigned char c) {
return (isalnum(c) || (c == '+') || (c == '/'));
}
std::string base64_encode(unsigned char const* bytes_to_encode, unsigned int in_len) {
std::string ret;
int i = 0;
int j = 0;
unsigned char char_array_3[3];
unsigned char char_array_4[4];
while (in_len--) {
char_array_3[i++] = *(bytes_to_encode++);
if (i == 3) {
char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4);
char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6);
char_array_4[3] = char_array_3[2] & 0x3f;
for(i = 0; (i <4) ; i++)
ret += base64_chars[char_array_4[i]];
i = 0;
}
}
if (i)
{
for(j = i; j < 3; j++)
char_array_3[j] = '\0';
char_array_4[0] = (char_array_3[0] & 0xfc) >> 2;
char_array_4[1] = ((char_array_3[0] & 0x03) << 4) + ((char_array_3[1] & 0xf0) >> 4);
char_array_4[2] = ((char_array_3[1] & 0x0f) << 2) + ((char_array_3[2] & 0xc0) >> 6);
char_array_4[3] = char_array_3[2] & 0x3f;
for (j = 0; (j < i + 1); j++)
ret += base64_chars[char_array_4[j]];
while((i++ < 3))
ret += '=';
}
return ret;
}
std::string base64_decode(std::string const& encoded_string) {
int in_len = encoded_string.size();
int i = 0;
int j = 0;
int in_ = 0;
unsigned char char_array_4[4], char_array_3[3];
std::string ret;
while (in_len-- && ( encoded_string[in_] != '=') && is_base64(encoded_string[in_])) {
char_array_4[i++] = encoded_string[in_]; in_++;
if (i ==4) {
for (i = 0; i <4; i++)
char_array_4[i] = base64_chars.find(char_array_4[i]);
char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
for (i = 0; (i < 3); i++)
ret += char_array_3[i];
i = 0;
}
}
if (i) {
for (j = i; j <4; j++)
char_array_4[j] = 0;
for (j = 0; j <4; j++)
char_array_4[j] = base64_chars.find(char_array_4[j]);
char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
for (j = 0; (j < i - 1); j++) ret += char_array_3[j];
}
return ret;
}

View file

@ -0,0 +1,4 @@
#include <string>
std::string base64_encode(unsigned char const* , unsigned int len);
std::string base64_decode(std::string const& s);

View file

@ -0,0 +1,621 @@
#include <liboauthcpp/liboauthcpp.h>
#include "HMAC_SHA1.h"
#include "base64.h"
#include "urlencode.h"
#include <cstdlib>
#include <vector>
#include <cassert>
namespace OAuth {
namespace Defaults
{
/* Constants */
const int BUFFSIZE = 1024;
const int BUFFSIZE_LARGE = 1024;
const std::string CONSUMERKEY_KEY = "oauth_consumer_key";
const std::string CALLBACK_KEY = "oauth_callback";
const std::string VERSION_KEY = "oauth_version";
const std::string SIGNATUREMETHOD_KEY = "oauth_signature_method";
const std::string SIGNATURE_KEY = "oauth_signature";
const std::string TIMESTAMP_KEY = "oauth_timestamp";
const std::string NONCE_KEY = "oauth_nonce";
const std::string TOKEN_KEY = "oauth_token";
const std::string TOKENSECRET_KEY = "oauth_token_secret";
const std::string VERIFIER_KEY = "oauth_verifier";
const std::string AUTHHEADER_FIELD = "Authorization: ";
const std::string AUTHHEADER_PREFIX = "OAuth ";
};
/** std::string -> std::string conversion function */
typedef std::string(*StringConvertFunction)(const std::string&);
LogLevel gLogLevel = LogLevelNone;
void SetLogLevel(LogLevel lvl) {
gLogLevel = lvl;
}
#define LOG(lvl, msg) \
do { \
if (lvl <= gLogLevel) std::cerr << "OAUTH: " << msg << std::endl; \
} while(0)
std::string PercentEncode(const std::string& decoded) {
return urlencode(decoded, URLEncode_Everything);
}
std::string URLEncode(const std::string& decoded) {
return PercentEncode(decoded);
}
std::string HttpEncodePath(const std::string& decoded) {
return urlencode(decoded, URLEncode_Path);
}
std::string HttpEncodeQueryKey(const std::string& decoded) {
return urlencode(decoded, URLEncode_QueryKey);
}
std::string HttpEncodeQueryValue(const std::string& decoded) {
return urlencode(decoded, URLEncode_QueryValue);
}
namespace {
std::string PassThrough(const std::string& decoded) {
return decoded;
}
std::string RequestTypeString(const Http::RequestType rt) {
switch(rt) {
case Http::Invalid: return "Invalid Request Type"; break;
case Http::Head: return "HEAD"; break;
case Http::Get: return "GET"; break;
case Http::Post: return "POST"; break;
case Http::Delete: return "DELETE"; break;
case Http::Put: return "PUT"; break;
default: return "Unknown Request Type"; break;
}
return "";
}
}
// Parse a single key-value pair
static std::pair<std::string, std::string> ParseKeyValuePair(const std::string& encoded) {
std::size_t eq_pos = encoded.find("=");
if (eq_pos == std::string::npos)
throw ParseError("Failed to find '=' in key-value pair.");
return std::pair<std::string, std::string>(
encoded.substr(0, eq_pos),
encoded.substr(eq_pos+1)
);
}
KeyValuePairs ParseKeyValuePairs(const std::string& encoded) {
KeyValuePairs result;
if (encoded.length() == 0) return result;
// Split by &
std::size_t last_amp = 0;
// We can bail when the last one "found" was the end of the string
while(true) {
std::size_t next_amp = encoded.find('&', last_amp+1);
std::string keyval =
(next_amp == std::string::npos) ?
encoded.substr(last_amp) :
encoded.substr(last_amp, next_amp-last_amp);
result.insert(ParseKeyValuePair(keyval));
// Track spot after the & so the first iteration works without dealing
// with -1 index
last_amp = next_amp+1;
// Exit condition
if (next_amp == std::string::npos) break;
}
return result;
}
// Helper for parameters in key-value pair lists that should only appear
// once. Either replaces an existing entry or adds a new entry.
static void ReplaceOrInsertKeyValuePair(KeyValuePairs& kvp, const std::string& key, const std::string& value) {
assert(kvp.count(key) <= 1);
KeyValuePairs::iterator it = kvp.find(key);
if (it != kvp.end())
it->second = value;
else
kvp.insert(KeyValuePairs::value_type(key, value));
}
Consumer::Consumer(const std::string& key, const std::string& secret)
: mKey(key), mSecret(secret)
{
}
Token::Token(const std::string& key, const std::string& secret)
: mKey(key), mSecret(secret)
{
}
Token::Token(const std::string& key, const std::string& secret, const std::string& pin)
: mKey(key), mSecret(secret), mPin(pin)
{
}
Token Token::extract(const std::string& response) {
return Token::extract(ParseKeyValuePairs(response));
}
Token Token::extract(const KeyValuePairs& response) {
std::string token_key, token_secret;
KeyValuePairs::const_iterator it = response.find(Defaults::TOKEN_KEY);
if (it == response.end())
throw MissingKeyError("Couldn't find oauth_token in response");
token_key = it->second;
it = response.find(Defaults::TOKENSECRET_KEY);
if (it == response.end())
throw MissingKeyError("Couldn't find oauth_token_secret in response");
token_secret = it->second;
return Token(token_key, token_secret);
}
bool Client::initialized = false;
int Client::testingNonce = 0;
time_t Client::testingTimestamp = 0;
void Client::initialize() {
if(!initialized) {
srand( time( NULL ) );
initialized = true;
}
}
void Client::initialize(int nonce, time_t timestamp) {
if(!initialized) {
testingNonce = nonce;
testingTimestamp = timestamp;
initialized = true;
}
}
void Client::__resetInitialize() {
testingNonce = 0;
testingTimestamp = 0;
initialized = false;
}
Client::Client(const Consumer* consumer)
: mConsumer(consumer),
mToken(NULL)
{
}
Client::Client(const Consumer* consumer, const Token* token)
: mConsumer(consumer),
mToken(token)
{
}
Client::~Client()
{
}
/*++
* @method: Client::generateNonceTimeStamp
*
* @description: this method generates nonce and timestamp for OAuth header
*
* @input: none
*
* @output: none
*
* @remarks: internal method
*
*--*/
void Client::generateNonceTimeStamp()
{
// Make sure the random seed has been initialized
Client::initialize();
char szTime[Defaults::BUFFSIZE];
char szRand[Defaults::BUFFSIZE];
memset( szTime, 0, Defaults::BUFFSIZE );
memset( szRand, 0, Defaults::BUFFSIZE );
// Any non-zero timestamp triggers testing mode with fixed values. Fixing
// both values makes life easier because generating a signature is
// idempotent -- otherwise using macros can cause double evaluation and
// incorrect results because of repeated calls to rand().
snprintf( szRand, sizeof(szRand), "%x", ((testingTimestamp != 0) ? testingNonce : rand()) );
snprintf( szTime, sizeof(szTime), "%ld", ((testingTimestamp != 0) ? testingTimestamp : time( NULL )) );
m_nonce.assign( szTime );
m_nonce.append( szRand );
m_timeStamp.assign( szTime );
}
/*++
* @method: Client::buildOAuthTokenKeyValuePairs
*
* @description: this method prepares key-value pairs required for OAuth header
* and signature generation.
*
* @input: includeOAuthVerifierPin - flag to indicate whether oauth_verifier key-value
* pair needs to be included. oauth_verifier is only
* used during exchanging request token with access token.
* rawData - url encoded data. this is used during signature generation.
* oauthSignature - base64 and url encoded OAuth signature.
* generateTimestamp - If true, then generate new timestamp for nonce.
*
* @input: urlEncodeValues - if true, URLEncode the values inserted into the
* output keyValueMap
* @output: keyValueMap - map in which key-value pairs are populated
*
* @remarks: internal method
*
*--*/
bool Client::buildOAuthTokenKeyValuePairs( const bool includeOAuthVerifierPin,
const std::string& rawData,
const std::string& oauthSignature,
KeyValuePairs& keyValueMap,
const bool urlEncodeValues,
const bool generateTimestamp )
{
// Encodes value part of key-value pairs depending on type of output (query
// string vs. HTTP headers.
StringConvertFunction value_encoder = (urlEncodeValues ? HttpEncodeQueryValue : PassThrough);
/* Generate nonce and timestamp if required */
if( generateTimestamp )
{
generateNonceTimeStamp();
}
/* Consumer key and its value */
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::CONSUMERKEY_KEY, value_encoder(mConsumer->key()));
/* Nonce key and its value */
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::NONCE_KEY, value_encoder(m_nonce));
/* Signature if supplied */
if( oauthSignature.length() )
{
// Signature is exempt from encoding. The procedure for
// computing it already percent-encodes it as required by the
// spec for both query string and Auth header
// methods. Therefore, it's pass-through in both cases.
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::SIGNATURE_KEY, oauthSignature);
}
/* Signature method, only HMAC-SHA1 as of now */
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::SIGNATUREMETHOD_KEY, std::string( "HMAC-SHA1" ));
/* Timestamp */
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::TIMESTAMP_KEY, value_encoder(m_timeStamp));
/* Token */
if( mToken && mToken->key().length() )
{
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::TOKEN_KEY, value_encoder(mToken->key()));
}
/* Verifier */
if( includeOAuthVerifierPin && mToken && mToken->pin().length() )
{
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::VERIFIER_KEY, value_encoder(mToken->pin()));
}
/* Version */
ReplaceOrInsertKeyValuePair(keyValueMap, Defaults::VERSION_KEY, std::string( "1.0" ));
/* Data if it's present */
if( rawData.length() )
{
/* Data should already be urlencoded once */
std::string dummyStrKey;
std::string dummyStrValue;
size_t nPos = rawData.find_first_of( "=" );
if( std::string::npos != nPos )
{
dummyStrKey = rawData.substr( 0, nPos );
dummyStrValue = rawData.substr( nPos + 1 );
ReplaceOrInsertKeyValuePair(keyValueMap, dummyStrKey, dummyStrValue);
}
}
return ( keyValueMap.size() ) ? true : false;
}
/*++
* @method: Client::getSignature
*
* @description: this method calculates HMAC-SHA1 signature of OAuth header
*
* @input: eType - HTTP request type
* rawUrl - raw url of the HTTP request
* rawKeyValuePairs - key-value pairs containing OAuth headers and HTTP data
*
* @output: oAuthSignature - base64 and url encoded signature
*
* @remarks: internal method
*
*--*/
bool Client::getSignature( const Http::RequestType eType,
const std::string& rawUrl,
const KeyValuePairs& rawKeyValuePairs,
std::string& oAuthSignature )
{
std::string rawParams;
std::string paramsSeperator;
std::string sigBase;
/* Initially empty signature */
oAuthSignature.assign( "" );
/* Build a string using key-value pairs */
paramsSeperator = "&";
getStringFromOAuthKeyValuePairs( rawKeyValuePairs, rawParams, paramsSeperator );
LOG(LogLevelDebug, "Normalized parameters: " << rawParams);
/* Start constructing base signature string. Refer http://dev.twitter.com/auth#intro */
switch( eType )
{
case Http::Head:
{
sigBase.assign( "HEAD&" );
}
break;
case Http::Get:
{
sigBase.assign( "GET&" );
}
break;
case Http::Post:
{
sigBase.assign( "POST&" );
}
break;
case Http::Delete:
{
sigBase.assign( "DELETE&" );
}
break;
case Http::Put:
{
sigBase.assign( "PUT&" );
}
break;
default:
{
return false;
}
break;
}
sigBase.append( PercentEncode( rawUrl ) );
sigBase.append( "&" );
sigBase.append( PercentEncode( rawParams ) );
LOG(LogLevelDebug, "Signature base string: " << sigBase);
/* Now, hash the signature base string using HMAC_SHA1 class */
CHMAC_SHA1 objHMACSHA1;
std::string secretSigningKey;
unsigned char strDigest[Defaults::BUFFSIZE_LARGE];
memset( strDigest, 0, Defaults::BUFFSIZE_LARGE );
/* Signing key is composed of consumer_secret&token_secret */
secretSigningKey.assign( PercentEncode(mConsumer->secret()) );
secretSigningKey.append( "&" );
if( mToken && mToken->secret().length() )
{
secretSigningKey.append( PercentEncode(mToken->secret()) );
}
objHMACSHA1.HMAC_SHA1( (unsigned char*)sigBase.c_str(),
sigBase.length(),
(unsigned char*)secretSigningKey.c_str(),
secretSigningKey.length(),
strDigest );
/* Do a base64 encode of signature */
std::string base64Str = base64_encode( strDigest, 20 /* SHA 1 digest is 160 bits */ );
LOG(LogLevelDebug, "Signature: " << base64Str);
/* Do an url encode */
oAuthSignature = PercentEncode( base64Str );
LOG(LogLevelDebug, "Percent-encoded Signature: " << oAuthSignature);
return ( oAuthSignature.length() ) ? true : false;
}
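For orientation, the base string assembled above is simply METHOD&PercentEncode(url)&PercentEncode(sorted params), and only that string is fed into HMAC-SHA1. Below is a minimal, self-contained sketch of the assembly step; the URL and parameter values are made up, and Encode is a simplified stand-in for the library's PercentEncode, so treat it as an illustration rather than the library's code.
#include <cctype>
#include <iostream>
#include <map>
#include <string>
// Simplified RFC 3986 percent-encoding: keep only unreserved characters.
static std::string Encode(std::string const & s)
{
  static char const * kHex = "0123456789ABCDEF";
  std::string out;
  for (unsigned char c : s)
  {
    if (std::isalnum(c) || c == '-' || c == '_' || c == '.' || c == '~')
      out += static_cast<char>(c);
    else
    {
      out += '%';
      out += kHex[c >> 4];
      out += kHex[c & 0x0F];
    }
  }
  return out;
}
int main()
{
  // std::map keeps keys sorted, matching the lexicographic order of the
  // normalized parameter string. Values here are illustrative only.
  std::map<std::string, std::string> params = {{"oauth_consumer_key", "key"},
                                               {"oauth_nonce", "abc123"},
                                               {"oauth_signature_method", "HMAC-SHA1"},
                                               {"oauth_timestamp", "1680000000"},
                                               {"oauth_version", "1.0"}};
  std::string normalized;
  for (auto const & kv : params)
  {
    if (!normalized.empty())
      normalized += '&';
    normalized += kv.first + "=" + kv.second;
  }
  // Hypothetical endpoint; only the shape of the string matters here.
  std::string const sigBase =
      "GET&" + Encode("https://api.example.com/request_token") + "&" + Encode(normalized);
  // This is the string that gets signed with HMAC-SHA1, using
  // "consumer_secret&token_secret" as the key.
  std::cout << sigBase << std::endl;
}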
std::string Client::getHttpHeader(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData,
const bool includeOAuthVerifierPin)
{
return Defaults::AUTHHEADER_PREFIX + buildOAuthParameterString(AuthorizationHeaderString, eType, rawUrl, rawData, includeOAuthVerifierPin);
}
std::string Client::getFormattedHttpHeader(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData,
const bool includeOAuthVerifierPin)
{
return Defaults::AUTHHEADER_FIELD + Defaults::AUTHHEADER_PREFIX + buildOAuthParameterString(AuthorizationHeaderString, eType, rawUrl, rawData, includeOAuthVerifierPin);
}
std::string Client::getURLQueryString(const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData,
const bool includeOAuthVerifierPin)
{
return buildOAuthParameterString(QueryStringString, eType, rawUrl, rawData, includeOAuthVerifierPin);
}
std::string Client::buildOAuthParameterString(
ParameterStringType string_type,
const Http::RequestType eType,
const std::string& rawUrl,
const std::string& rawData,
const bool includeOAuthVerifierPin)
{
KeyValuePairs rawKeyValuePairs;
std::string rawParams;
std::string oauthSignature;
std::string paramsSeperator;
std::string pureUrl( rawUrl );
LOG(LogLevelDebug, "Signing request " << RequestTypeString(eType) << " " << rawUrl << " " << rawData);
std::string separator;
bool do_urlencode;
if (string_type == AuthorizationHeaderString) {
separator = ",";
do_urlencode = false;
}
else { // QueryStringString
separator = "&";
do_urlencode = true;
}
/* Clear header string initially */
rawKeyValuePairs.clear();
/* If URL itself contains ?key=value, then extract and put them in map */
size_t nPos = rawUrl.find_first_of( "?" );
if( std::string::npos != nPos )
{
/* Get only URL */
pureUrl = rawUrl.substr( 0, nPos );
/* Get only key=value data part */
std::string dataPart = rawUrl.substr( nPos + 1 );
rawKeyValuePairs = ParseKeyValuePairs(dataPart);
}
// NOTE: We always request URL encoding on the first pass so that the
// signature generation works properly. This *relies* on
// buildOAuthTokenKeyValuePairs overwriting values when we do the second
// pass to get the values in the form we actually want. The signature and
// rawdata are the only things that change, but the signature is only used
// in the second pass and the rawdata is already encoded, regardless of
// request type.
/* Build key-value pairs needed for OAuth request token, without signature */
buildOAuthTokenKeyValuePairs( includeOAuthVerifierPin, rawData, std::string( "" ), rawKeyValuePairs, true, true );
/* Get url encoded base64 signature using request type, url and parameters */
getSignature( eType, pureUrl, rawKeyValuePairs, oauthSignature );
/* Now, again build key-value pairs with signature this time */
buildOAuthTokenKeyValuePairs( includeOAuthVerifierPin, std::string( "" ), oauthSignature, rawKeyValuePairs, do_urlencode, false );
/* Get OAuth header in string format. If we're getting the Authorization
* header, we need to filter out other parameters.
*/
if (string_type == AuthorizationHeaderString) {
KeyValuePairs oauthKeyValuePairs;
std::vector<std::string> oauth_keys;
oauth_keys.push_back(Defaults::CONSUMERKEY_KEY);
oauth_keys.push_back(Defaults::NONCE_KEY);
oauth_keys.push_back(Defaults::SIGNATURE_KEY);
oauth_keys.push_back(Defaults::SIGNATUREMETHOD_KEY);
oauth_keys.push_back(Defaults::TIMESTAMP_KEY);
oauth_keys.push_back(Defaults::TOKEN_KEY);
oauth_keys.push_back(Defaults::VERIFIER_KEY);
oauth_keys.push_back(Defaults::VERSION_KEY);
for(size_t i = 0; i < oauth_keys.size(); i++) {
assert(rawKeyValuePairs.count(oauth_keys[i]) <= 1);
KeyValuePairs::iterator oauth_key_it = rawKeyValuePairs.find(oauth_keys[i]);
if (oauth_key_it != rawKeyValuePairs.end())
ReplaceOrInsertKeyValuePair(oauthKeyValuePairs, oauth_keys[i], oauth_key_it->second);
}
getStringFromOAuthKeyValuePairs( oauthKeyValuePairs, rawParams, separator );
}
else if (string_type == QueryStringString) {
getStringFromOAuthKeyValuePairs( rawKeyValuePairs, rawParams, separator );
}
/* Build authorization header */
return rawParams;
}
/*++
* @method: Client::getStringFromOAuthKeyValuePairs
*
* @description: this method builds a sorted string from key-value pairs
*
* @input: rawParamMap - key-value pairs map
* paramsSeperator - separator, either & or ,
*
* @output: rawParams - sorted string of OAuth parameters
*
* @remarks: internal method
*
*--*/
bool Client::getStringFromOAuthKeyValuePairs( const KeyValuePairs& rawParamMap,
std::string& rawParams,
const std::string& paramsSeperator )
{
rawParams.assign( "" );
if( rawParamMap.size() )
{
KeyValueList keyValueList;
std::string dummyStr;
/* Push key-value pairs to a list of strings */
keyValueList.clear();
KeyValuePairs::const_iterator itMap = rawParamMap.begin();
for( ; itMap != rawParamMap.end(); itMap++ )
{
dummyStr.assign( itMap->first );
dummyStr.append( "=" );
if( paramsSeperator == "," )
{
dummyStr.append( "\"" );
}
dummyStr.append( itMap->second );
if( paramsSeperator == "," )
{
dummyStr.append( "\"" );
}
keyValueList.push_back( dummyStr );
}
/* Sort key-value pairs based on key name */
keyValueList.sort();
/* Now, form a string */
dummyStr.assign( "" );
KeyValueList::iterator itKeyValue = keyValueList.begin();
for( ; itKeyValue != keyValueList.end(); itKeyValue++ )
{
if( dummyStr.length() )
{
dummyStr.append( paramsSeperator );
}
dummyStr.append( itKeyValue->c_str() );
}
rawParams.assign( dummyStr );
}
return ( rawParams.length() ) ? true : false;
}
} // namespace OAuth
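The separator passed to getStringFromOAuthKeyValuePairs is what distinguishes the two output styles: with "," each value is wrapped in double quotes (Authorization-header form), while "&" leaves values bare (query-string form). Below is a rough standalone sketch of that sorting and joining with an illustrative parameter set; the "OAuth " prefix in the final comment is only assumed to be what AUTHHEADER_PREFIX supplies.
#include <iostream>
#include <list>
#include <map>
#include <string>
int main()
{
  // Illustrative, already percent-encoded OAuth parameters.
  std::map<std::string, std::string> pairs = {{"oauth_consumer_key", "key"},
                                              {"oauth_signature", "abc%3D"},
                                              {"oauth_version", "1.0"}};
  std::string const separator = ",";  // "," -> header style with quotes; "&" -> query style
  std::list<std::string> items;
  for (auto const & kv : pairs)
  {
    std::string item = kv.first + "=";
    if (separator == ",")
      item += "\"" + kv.second + "\"";
    else
      item += kv.second;
    items.push_back(item);
  }
  items.sort();  // parameters are emitted in sorted order, as above
  std::string joined;
  for (auto const & item : items)
  {
    if (!joined.empty())
      joined += separator;
    joined += item;
  }
  // With the (assumed) "OAuth " prefix this yields something like:
  // OAuth oauth_consumer_key="key",oauth_signature="abc%3D",oauth_version="1.0"
  std::cout << joined << std::endl;
}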

View file

@ -0,0 +1,102 @@
#include "urlencode.h"
#include <cassert>
std::string char2hex( char dec )
{
char dig1 = (dec&0xF0)>>4;
char dig2 = (dec&0x0F);
if ( 0<= dig1 && dig1<= 9) dig1+=48; //0,48 in ascii
if (10<= dig1 && dig1<=15) dig1+=65-10; //A,65 in ascii
if ( 0<= dig2 && dig2<= 9) dig2+=48;
if (10<= dig2 && dig2<=15) dig2+=65-10;
std::string r;
r.append( &dig1, 1);
r.append( &dig2, 1);
return r;
}
std::string urlencode( const std::string &c, URLEncodeType enctype)
{
std::string escaped;
int max = c.length();
for(int i=0; i<max; i++)
{
// Unreserved chars
if ( (48 <= c[i] && c[i] <= 57) ||//0-9
(65 <= c[i] && c[i] <= 90) ||//ABC...XYZ
(97 <= c[i] && c[i] <= 122) || //abc...xyz
(c[i]=='~' || c[i]=='-' || c[i]=='_' || c[i]=='.')
)
{
escaped.append( &c[i], 1);
}
else if (c[i] != ':' && c[i] != '/' && c[i] != '?' && c[i] != '#' &&
c[i] != '[' && c[i] != ']' && c[i] != '@' && c[i] != '%' &&
c[i] != '!' && c[i] != '$' && c[i] != '&' && c[i] != '\'' &&
c[i] != '(' && c[i] != ')' && c[i] != '*' && c[i] != '+' &&
c[i] != ',' && c[i] != ';' && c[i] != '=')
{
// Characters not in unreserved (first if block) and not in
// the reserved set are always encoded.
escaped.append("%");
escaped.append( char2hex(c[i]) );//converts char 255 to string "FF"
}
else
{
// Finally, the reserved set. Encoding here depends on the
// context (where in the URI we are, what type of URI, and
// which character).
bool enc = false;
// Always encode reserved gen-delims + '%' (which always
// needs encoding)
if (c[i] == ':' || c[i] == '/' || c[i] == '?' || c[i] == '#' ||
c[i] == '[' || c[i] == ']' || c[i] == '@' || c[i] == '%')
{
enc = true;
}
else {
switch (enctype) {
case URLEncode_Everything:
enc = true;
break;
case URLEncode_Path:
// Only reserved sub-delim that needs encoding is %,
// taken care of above. Otherwise, leave unencoded
enc = false;
break;
case URLEncode_QueryKey:
if (c[i] == '&' ||
c[i] == '+' ||
c[i] == '=')
enc = true;
else
enc = false;
break;
case URLEncode_QueryValue:
if (c[i] == '&' ||
c[i] == '+')
enc = true;
else
enc = false;
break;
default:
assert(false && "Unknown urlencode type");
break;
}
}
if (enc) {
escaped.append("%");
escaped.append( char2hex(c[i]) );//converts char 255 to string "FF"
} else {
escaped.append( &c[i], 1);
}
}
}
return escaped;
}

View file

@ -0,0 +1,16 @@
#ifndef __URLENCODE_H__
#define __URLENCODE_H__
#include <iostream>
#include <string>
std::string char2hex( char dec );
enum URLEncodeType {
URLEncode_Everything,
URLEncode_Path,
URLEncode_QueryKey,
URLEncode_QueryValue,
};
std::string urlencode( const std::string &c, URLEncodeType enctype );
#endif // __URLENCODE_H__
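A small usage sketch of the encoder above, assuming the header is reachable as "urlencode.h"; the comments spell out which reserved characters each mode escapes for this particular input.
#include <iostream>
#include <string>
#include "urlencode.h"
int main()
{
  std::string const value = "rock&roll=fun/stuff";
  // '/' is a gen-delim and is escaped in every mode; '&' and '=' are
  // sub-delims and are escaped only where they would be ambiguous.
  std::cout << urlencode(value, URLEncode_Everything) << "\n";  // rock%26roll%3Dfun%2Fstuff
  std::cout << urlencode(value, URLEncode_QueryValue) << "\n";  // rock%26roll=fun%2Fstuff
  std::cout << urlencode(value, URLEncode_Path) << "\n";        // rock&roll=fun%2Fstuff
}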

View file

@ -22,5 +22,3 @@ target_compile_definitions(${PROJECT_NAME}
)
target_compile_options(${PROJECT_NAME} PRIVATE $<$<C_COMPILER_ID:Clang,AppleClang,GNU>:-Wno-unused-value>)
target_link_libraries(${PROJECT_NAME} PUBLIC ZLIB::ZLIB)

View file

@ -221,7 +221,8 @@ static int ZCALLBACK ferror_file_func (voidpf opaque, voidpf stream)
return ret;
}
void fill_fopen_filefunc (zlib_filefunc_def* pzlib_filefunc_def)
void fill_fopen_filefunc (pzlib_filefunc_def)
zlib_filefunc_def* pzlib_filefunc_def;
{
pzlib_filefunc_def->zopen_file = fopen_file_func;
pzlib_filefunc_def->zread_file = fread_file_func;

View file

@ -4,7 +4,6 @@
#include <algorithm>
#include <cmath>
#include <cstdint>
#include "codearea.h"

View file

@ -21,6 +21,8 @@ omim_add_library(${PROJECT_NAME} ${SRC})
target_include_directories(${PROJECT_NAME} INTERFACE .)
target_compile_options(${PROJECT_NAME} PRIVATE -Wno-deprecated-copy)
omim_add_test_subdirectory(opening_hours_tests)
omim_add_test_subdirectory(opening_hours_integration_tests)
omim_add_test_subdirectory(opening_hours_supported_features_tests)

View file

@ -32,7 +32,6 @@
#include <iomanip>
#include <ios>
#include <ostream>
#include <sstream>
#include <tuple>
#include <type_traits>
#include <vector>
@ -97,24 +96,26 @@ class StreamFlagsKeeper
std::ios_base::fmtflags m_flags;
};
template <typename TNumber>
constexpr bool IsChar(TNumber) noexcept
{
return std::is_same<signed char, TNumber>::value ||
std::is_same<unsigned char, TNumber>::value ||
std::is_same<char, TNumber>::value;
};
template <typename TNumber, typename std::enable_if<!IsChar(TNumber{}), void*>::type = nullptr>
void PrintPaddedNumber(std::ostream & ost, TNumber const number, uint32_t const padding = 1)
{
static constexpr bool isChar = std::is_same_v<signed char, TNumber> ||
std::is_same_v<unsigned char, TNumber> ||
std::is_same_v<char, TNumber>;
static_assert(std::is_integral<TNumber>::value, "number should be of integral type.");
StreamFlagsKeeper keeper(ost);
ost << std::setw(padding) << std::setfill('0') << number;
}
if constexpr (isChar)
{
PrintPaddedNumber(ost, static_cast<int32_t>(number), padding);
}
else
{
static_assert(std::is_integral<TNumber>::value, "number should be of integral type.");
StreamFlagsKeeper keeper(ost);
ost << std::setw(padding) << std::setfill('0') << number;
}
template <typename TNumber, typename std::enable_if<IsChar(TNumber{}), void*>::type = nullptr>
void PrintPaddedNumber(std::ostream & ost, TNumber const number, uint32_t const padding = 1)
{
PrintPaddedNumber(ost, static_cast<int32_t>(number), padding);
}
void PrintHoursMinutes(std::ostream & ost,
@ -386,7 +387,7 @@ bool operator==(Timespan const & lhs, Timespan const & rhs)
return lhs.GetStart() == rhs.GetStart() &&
lhs.GetEnd() == rhs.GetEnd() &&
lhs.GetPeriod() == rhs.GetPeriod();
lhs.GetPeriod() == lhs.GetPeriod();
}
// NthWeekdayOfTheMonthEntry -----------------------------------------------------------------------
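The PrintPaddedNumber hunk above swaps the IsChar/enable_if overload pair for a single template that dispatches with if constexpr, widening character types to int32_t before padding. A standalone sketch of that pattern follows, with shortened names, independent of the opening_hours sources, and with the flag-restoring StreamFlagsKeeper helper omitted.
#include <cstdint>
#include <iomanip>
#include <iostream>
#include <type_traits>
template <typename TNumber>
void PrintPadded(std::ostream & ost, TNumber const number, uint32_t const padding = 1)
{
  static constexpr bool isChar = std::is_same_v<signed char, TNumber> ||
                                 std::is_same_v<unsigned char, TNumber> ||
                                 std::is_same_v<char, TNumber>;
  if constexpr (isChar)
  {
    // Character types are printed as numbers, not as glyphs.
    PrintPadded(ost, static_cast<int32_t>(number), padding);
  }
  else
  {
    static_assert(std::is_integral_v<TNumber>, "number should be of integral type.");
    ost << std::setw(padding) << std::setfill('0') << number;
  }
}
int main()
{
  PrintPadded(std::cout, char{7}, 2);  // prints "07"
  std::cout << '\n';
  PrintPadded(std::cout, 42, 4);       // prints "0042"
  std::cout << '\n';
}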

View file

@ -2,11 +2,22 @@ project(opening_hours_integration_tests)
set(SRC opening_hours_integration_tests.cpp)
omim_add_test(${PROJECT_NAME} ${SRC} BOOST_TEST)
omim_add_executable(${PROJECT_NAME} ${SRC})
target_link_libraries(${PROJECT_NAME} opening_hours)
# Silence boost::test warnings.
target_compile_options(${PROJECT_NAME} PRIVATE
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-deprecated-declarations>
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-unused-but-set-variable>
)
if(${CMAKE_SYSTEM_NAME} MATCHES "Linux")
set(COPY_CMD cp -u)
else()
set(COPY_CMD rsync -a)
endif()
add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy_if_different "${CMAKE_CURRENT_SOURCE_DIR}/opening-count.lst" "${CMAKE_BINARY_DIR}/"
COMMAND ${COPY_CMD} "${CMAKE_CURRENT_SOURCE_DIR}/opening-count.lst" "${CMAKE_BINARY_DIR}/"
COMMENT "Copying opening-count.lst file for testing"
)

View file

@ -2,6 +2,11 @@ project(opening_hours_supported_features_tests)
set(SRC opening_hours_supported_features_tests.cpp)
omim_add_test(${PROJECT_NAME} ${SRC} BOOST_TEST)
omim_add_executable(${PROJECT_NAME} ${SRC})
target_link_libraries(${PROJECT_NAME} opening_hours)
# Silence boost::test warnings.
target_compile_options(${PROJECT_NAME} PRIVATE
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-deprecated-declarations>
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-unused-but-set-variable>
)

View file

@ -2,6 +2,11 @@ project(opening_hours_tests)
set(SRC opening_hours_tests.cpp)
omim_add_test(${PROJECT_NAME} ${SRC} BOOST_TEST)
omim_add_executable(${PROJECT_NAME} ${SRC})
target_link_libraries(${PROJECT_NAME} opening_hours)
# Silence boost::test warnings.
target_compile_options(${PROJECT_NAME} PRIVATE
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-deprecated-declarations>
$<$<CXX_COMPILER_ID:AppleClang>:-Wno-unused-but-set-variable>
)

View file

@ -31,17 +31,17 @@ namespace osmoh
date_offset = ((lit('+')[_a = true] | lit('-')[_a = false])
>> charset::no_case[wdays] >> day_offset)
[(bind(&DateOffset::SetWDayOffset, _val, _1),
bind(&DateOffset::SetOffset, _val, _2),
bind(&DateOffset::SetWDayOffsetPositive, _val, _a))]
[bind(&DateOffset::SetWDayOffset, _val, _1),
bind(&DateOffset::SetOffset, _val, _2),
bind(&DateOffset::SetWDayOffsetPositive, _val, _a)]
| ((lit('+')[_a = true] | lit('-') [_a = false]) >> charset::no_case[wdays])
[(bind(&DateOffset::SetWDayOffset, _val, _1),
bind(&DateOffset::SetWDayOffsetPositive, _val, _a))]
[bind(&DateOffset::SetWDayOffset, _val, _1),
bind(&DateOffset::SetWDayOffsetPositive, _val, _a)]
| day_offset [bind(&DateOffset::SetOffset, _val, _1)]
;
date_left = (year >> charset::no_case[month]) [(bind(&MonthDay::SetYear, _val, _1),
bind(&MonthDay::SetMonth, _val, _2))]
date_left = (year >> charset::no_case[month]) [bind(&MonthDay::SetYear, _val, _1),
bind(&MonthDay::SetMonth, _val, _2)]
| charset::no_case[month] [bind(&MonthDay::SetMonth, _val, _1)]
;
@ -50,10 +50,10 @@ namespace osmoh
;
date_from = (date_left >> (daynum >> !(lit(':') >> qi::digit)))
[(_val = _1, bind(&MonthDay::SetDayNum, _val, _2))]
| (year >> charset::no_case[lit("easter")]) [(bind(&MonthDay::SetYear, _val, _1),
bind(&MonthDay::SetVariableDate, _val,
MonthDay::VariableDate::Easter))]
[_val = _1, bind(&MonthDay::SetDayNum, _val, _2)]
| (year >> charset::no_case[lit("easter")]) [bind(&MonthDay::SetYear, _val, _1),
bind(&MonthDay::SetVariableDate, _val,
MonthDay::VariableDate::Easter)]
| charset::no_case[lit("easter")] [bind(&MonthDay::SetVariableDate, _val,
MonthDay::VariableDate::Easter)]
;
@ -63,26 +63,26 @@ namespace osmoh
;
date_from_with_offset = (date_from >> date_offset)
[(_val = _1, bind(&MonthDay::SetOffset, _val, _2))]
[_val = _1, bind(&MonthDay::SetOffset, _val, _2)]
| date_from [_val = _1]
;
date_to_with_offset = (date_to >> date_offset)
[(_val = _1, bind(&MonthDay::SetOffset, _val, _2))]
[_val = _1, bind(&MonthDay::SetOffset, _val, _2)]
| date_to [_val = _1]
;
monthday_range = (date_from_with_offset >> dash >> date_to_with_offset)
[(bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2))]
| (date_from_with_offset >> '+') [(bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetPlus, _val, true))]
[bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2)]
| (date_from_with_offset >> '+') [bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetPlus, _val, true)]
| (date_left >> dash >> date_right >> '/' >> uint_)
[(bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2),
bind(&MonthdayRange::SetPeriod, _val, _3))]
| (date_left >> lit("-") >> date_right) [(bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2))]
[bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2),
bind(&MonthdayRange::SetPeriod, _val, _3)]
| (date_left >> lit("-") >> date_right) [bind(&MonthdayRange::SetStart, _val, _1),
bind(&MonthdayRange::SetEnd, _val, _2)]
| date_from [bind(&MonthdayRange::SetStart, _val, _1)]
| date_left [bind(&MonthdayRange::SetStart, _val, _1)]
;

View file

@ -90,8 +90,8 @@ namespace parsing
[bind(&RuleSequence::SetModifier, _r1, Modifier::Unknown)] >>
-(comment [bind(&RuleSequence::SetModifierComment, _r1, _1)]))
| comment [(bind(&RuleSequence::SetModifier, _r1, Modifier::Comment),
bind(&RuleSequence::SetModifierComment, _r1, _1))]
| comment [bind(&RuleSequence::SetModifier, _r1, Modifier::Comment),
bind(&RuleSequence::SetModifierComment, _r1, _1)]
;
rule_sequence =

View file

@ -24,13 +24,13 @@ namespace osmoh
using osmoh::Timespan;
hour_minutes =
(hours >> lit(':') >> minutes) [(bind(&HourMinutes::AddDuration, _val, _1),
bind(&HourMinutes::AddDuration, _val, _2))]
(hours >> lit(':') >> minutes) [bind(&HourMinutes::AddDuration, _val, _1),
bind(&HourMinutes::AddDuration, _val, _2)]
;
extended_hour_minutes =
(exthours >> lit(':') >> minutes)[(bind(&HourMinutes::AddDuration, _val, _1),
bind(&HourMinutes::AddDuration, _val, _2))]
(exthours >> lit(':') >> minutes)[bind(&HourMinutes::AddDuration, _val, _1),
bind(&HourMinutes::AddDuration, _val, _2)]
;
variable_time =
@ -53,27 +53,27 @@ namespace osmoh
timespan =
(time >> dash >> extended_time >> '/' >> hour_minutes)
[(bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPeriod, _val, _3))]
[bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPeriod, _val, _3)]
| (time >> dash >> extended_time >> '/' >> minutes)
[(bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPeriod, _val, _3))]
[bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPeriod, _val, _3)]
| (time >> dash >> extended_time >> '+')
[(bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPlus, _val, true))]
[bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2),
bind(&Timespan::SetPlus, _val, true)]
| (time >> dash >> extended_time)
[(bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2))]
[bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetEnd, _val, _2)]
| (time >> '+')
[(bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetPlus, _val, true))]
[bind(&Timespan::SetStart, _val, _1),
bind(&Timespan::SetPlus, _val, true)]
// This rule is only used for the collection_times tag, which is not in our interest.
// | time[bind(&Timespan::SetStart, _val, _1)]

View file

@ -27,8 +27,8 @@ namespace osmoh
| ushort_(4) [_val = NthWeekdayOfTheMonthEntry::NthDayOfTheMonth::Fourth]
| ushort_(5) [_val = NthWeekdayOfTheMonthEntry::NthDayOfTheMonth::Fifth];
nth_entry = (nth >> dash >> nth) [(bind(&NthWeekdayOfTheMonthEntry::SetStart, _val, _1),
bind(&NthWeekdayOfTheMonthEntry::SetEnd, _val, _2))]
nth_entry = (nth >> dash >> nth) [bind(&NthWeekdayOfTheMonthEntry::SetStart, _val, _1),
bind(&NthWeekdayOfTheMonthEntry::SetEnd, _val, _2)]
| (lit('-') >> nth) [bind(&NthWeekdayOfTheMonthEntry::SetEnd, _val, _1)]
| nth [bind(&NthWeekdayOfTheMonthEntry::SetStart, _val, _1)]
;
@ -50,8 +50,8 @@ namespace osmoh
( charset::no_case[wdays] [bind(&WeekdayRange::SetStart, _val, _1)] >>
'[' >> (nth_entry [bind(&WeekdayRange::AddNth, _val, _1)]) % ',') >> ']' >>
-(day_offset [bind(&WeekdayRange::SetOffset, _val, _1)])
| charset::no_case[(wdays >> dash >> wdays)] [(bind(&WeekdayRange::SetStart, _val, _1),
bind(&WeekdayRange::SetEnd, _val, _2))]
| charset::no_case[(wdays >> dash >> wdays)] [bind(&WeekdayRange::SetStart, _val, _1),
bind(&WeekdayRange::SetEnd, _val, _2)]
| charset::no_case[wdays] [bind(&WeekdayRange::SetStart, _val, _1)]
;
@ -59,8 +59,8 @@ namespace osmoh
;
main = (holiday_sequence >> -lit(',') >> weekday_sequence)
[(bind(&Weekdays::SetHolidays, _val, _1),
bind(&Weekdays::SetWeekdayRanges, _val, _2))]
[bind(&Weekdays::SetHolidays, _val, _1),
bind(&Weekdays::SetWeekdayRanges, _val, _2)]
| holiday_sequence [bind(&Weekdays::SetHolidays, _val, _1)]
| weekday_sequence [bind(&Weekdays::SetWeekdayRanges, _val, _1)]
;

View file

@ -18,11 +18,11 @@ namespace osmoh
using qi::_val;
using osmoh::WeekRange;
week = (weeknum >> dash >> weeknum >> '/' >> uint_) [(bind(&WeekRange::SetStart, _val, _1),
bind(&WeekRange::SetEnd, _val, _2),
bind(&WeekRange::SetPeriod, _val, _3))]
| (weeknum >> dash >> weeknum) [(bind(&WeekRange::SetStart, _val, _1),
bind(&WeekRange::SetEnd, _val, _2))]
week = (weeknum >> dash >> weeknum >> '/' >> uint_) [bind(&WeekRange::SetStart, _val, _1),
bind(&WeekRange::SetEnd, _val, _2),
bind(&WeekRange::SetPeriod, _val, _3)]
| (weeknum >> dash >> weeknum) [bind(&WeekRange::SetStart, _val, _1),
bind(&WeekRange::SetEnd, _val, _2)]
| weeknum [bind(&WeekRange::SetStart, _val, _1)]
;

View file

@ -20,13 +20,13 @@ namespace osmoh
static const qi::int_parser<unsigned, 10, 4, 4> year = {};
year_range = (year >> dash >> year >> '/' >> uint_) [(bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetEnd, _val, _2),
bind(&YearRange::SetPeriod, _val, _3))]
| (year >> dash >> year) [(bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetEnd, _val, _2))]
| (year >> lit('+')) [(bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetPlus, _val, true))]
year_range = (year >> dash >> year >> '/' >> uint_) [bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetEnd, _val, _2),
bind(&YearRange::SetPeriod, _val, _3)]
| (year >> dash >> year) [bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetEnd, _val, _2)]
| (year >> lit('+')) [bind(&YearRange::SetStart, _val, _1),
bind(&YearRange::SetPlus, _val, true)]
;
main %= (year_range % ',');

@ -1 +1 @@
Subproject commit caade5a28aad86b92a4b5337a9dc70c4ba73c5eb
Subproject commit a0e064336317c9347a91224112af9933598714e9

View file

@ -0,0 +1,12 @@
project(sdf_image)
set(SRC
sdf_image.cpp
sdf_image.h
)
add_library(${PROJECT_NAME} ${SRC})
target_compile_options(${PROJECT_NAME}
PRIVATE $<$<CXX_COMPILER_ID:AppleClang,Clang>:-Wno-shorten-64-to-32>
)

View file

@ -0,0 +1,510 @@
/*
Copyright (C) 2009 by Stefan Gustavson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#include "3party/sdf_image/sdf_image.h"
#include "base/math.hpp"
#include "base/scope_guard.hpp"
#include <algorithm>
#include <limits>
using namespace std::placeholders;
namespace sdf_image
{
namespace
{
float const SQRT2 = 1.4142136f;
float ComputeXGradient(float ul, float /*u*/, float ur, float l, float r, float dl, float /*d*/, float dr)
{
return (ur + SQRT2 * r + dr) - (ul + SQRT2 * l + dl);
}
float ComputeYGradient(float ul, float u, float ur, float /*l*/, float /*r*/, float dl, float d, float dr)
{
return (ur + SQRT2 * d + dr) - (ul + SQRT2 * u + dl);
}
}
#define BIND_GRADIENT(f) std::bind(&f, _1, _2, _3, _4, _5, _6, _7, _8)
#define TRANSFORM(offset, dx, dy) \
if (Transform(i, offset, dx, dy, xDist, yDist, oldDist)) \
{ \
dist.m_data[i] = oldDist; \
changed = true; \
}
SdfImage::SdfImage(uint32_t h, uint32_t w)
: m_height(h)
, m_width(w)
{
m_data.resize(m_width * m_height, 0);
}
SdfImage::SdfImage(uint32_t h, uint32_t w, uint8_t * imageData, uint8_t border)
{
int8_t doubleBorder = 2 * border;
m_width = w + doubleBorder;
m_height = h + doubleBorder;
uint32_t floatCount = m_width * m_height;
m_data.resize(floatCount, 0.0f);
for (size_t row = border; row < h + border; ++row)
{
size_t dstBaseIndex = row * m_width;
size_t srcBaseIndex = (row - border) * w;
for (size_t column = border; column < w + border; ++column)
m_data[dstBaseIndex + column] = (float)imageData[srcBaseIndex + column - border] / 255.0f;
}
}
SdfImage::SdfImage(SdfImage const & copy)
{
m_height = copy.m_height;
m_width = copy.m_width;
m_data = copy.m_data;
}
uint32_t SdfImage::GetWidth() const
{
return m_width;
}
uint32_t SdfImage::GetHeight() const
{
return m_height;
}
void SdfImage::GetData(std::vector<uint8_t> & dst)
{
ASSERT(m_data.size() <= dst.size(), ());
std::transform(m_data.begin(), m_data.end(), dst.begin(), [](float const & node)
{
return static_cast<uint8_t>(node * 255.0f);
});
}
void SdfImage::Scale()
{
float maxi = std::numeric_limits<float>::min();
float mini = std::numeric_limits<float>::max();
std::for_each(m_data.begin(), m_data.end(), [&maxi, &mini](float const & node)
{
maxi = std::max(maxi, node);
mini = std::min(mini, node);
});
maxi -= mini;
std::for_each(m_data.begin(), m_data.end(), [&maxi, &mini](float & node)
{
node = (node - mini) / maxi;
});
}
void SdfImage::Invert()
{
std::for_each(m_data.begin(), m_data.end(), [](float & node)
{
node = 1.0f - node;
});
}
void SdfImage::Minus(SdfImage & im)
{
ASSERT(m_data.size() == im.m_data.size(), ());
std::transform(m_data.begin(), m_data.end(), im.m_data.begin(), m_data.begin(), [](float const & n1, float const & n2)
{
return n1 - n2;
});
}
void SdfImage::Distquant()
{
std::for_each(m_data.begin(), m_data.end(), [](float & node)
{
node = base::Clamp(0.5f + node * 0.0325f, 0.0f, 1.0f);
});
}
void SdfImage::GenerateSDF(float sc)
{
Scale();
SdfImage outside(m_height, m_width);
SdfImage inside(m_height, m_width);
size_t shortCount = m_width * m_height;
std::vector<short> xDist;
std::vector<short> yDist;
xDist.resize(shortCount, 0);
yDist.resize(shortCount, 0);
MexFunction(*this, xDist, yDist, outside);
fill(xDist.begin(), xDist.end(), 0);
fill(yDist.begin(), yDist.end(), 0);
Invert();
MexFunction(*this, xDist, yDist, inside);
outside.Minus(inside);
outside.Distquant();
outside.Invert();
*this = outside.Bilinear(sc);
}
SdfImage SdfImage::Bilinear(float scale)
{
uint32_t srcWidth = GetWidth();
uint32_t srcHeight = GetHeight();
uint32_t dstWidth = std::round(srcWidth * scale);
uint32_t dstHeight = std::round(srcHeight * scale);
SdfImage result(dstHeight, dstWidth);
float xRatio = static_cast<float>(srcWidth) / dstWidth;
float yRatio = static_cast<float>(srcHeight) / dstHeight;
for (uint32_t i = 0; i < dstHeight; i++)
{
uint32_t baseIndex = i * dstWidth;
for (uint32_t j = 0; j < dstWidth; j++)
{
float fx = xRatio * j;
float fy = yRatio * i;
uint32_t x = static_cast<uint32_t>(fx);
uint32_t y = static_cast<uint32_t>(fy);
uint32_t index = y * srcWidth + x;
ASSERT_LESS(index, m_data.size(), ());
// Sample the four neighbouring texels (values here are normalized floats in [0, 1]).
float A = m_data[index];
float B = m_data[index + 1];
float C = m_data[index + srcWidth];
float D = m_data[index + srcWidth + 1];
float xDiff = fx - x;
float yDiff = fy - y;
float xInvertDiff = 1.0f - xDiff;
float yInvertDiff = 1.0f - yDiff;
float gray = A * xInvertDiff * yInvertDiff + B * xDiff * yInvertDiff +
C * xInvertDiff * yDiff + D * xDiff * yDiff;
result.m_data[baseIndex + j] = gray;
}
}
return result;
}
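The gray value above is the standard bilinear blend of the four neighbouring texels weighted by the fractional offsets. A quick numeric check of the formula with made-up sample values:
#include <cstdio>
int main()
{
  // Four neighbouring texels and a sample point 30% to the right, 70% down.
  float const A = 0.0f, B = 1.0f, C = 0.5f, D = 1.0f;
  float const xDiff = 0.3f, yDiff = 0.7f;
  float const gray = A * (1.0f - xDiff) * (1.0f - yDiff) + B * xDiff * (1.0f - yDiff) +
                     C * (1.0f - xDiff) * yDiff + D * xDiff * yDiff;
  // 0.0 * 0.21 + 1.0 * 0.09 + 0.5 * 0.49 + 1.0 * 0.21 = 0.545
  std::printf("%.3f\n", gray);
}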
float SdfImage::ComputeGradient(uint32_t x, uint32_t y, SdfImage::TComputeFn const & fn) const
{
if (x < 1 || x > m_width - 1 ||
y < 1 || y > m_height - 1)
{
return 0.0;
}
size_t k = y * m_width + x;
uint32_t l = k - 1;
uint32_t r = k + 1;
uint32_t u = k - m_width;
uint32_t d = k + m_width;
uint32_t ul = u - 1;
uint32_t dl = d - 1;
uint32_t ur = u + 1;
uint32_t dr = d + 1;
if (m_data[k] > 0.0 && m_data[k] < 1.0)
{
return fn(m_data[ul], m_data[u], m_data[ur],
m_data[l], m_data[r],
m_data[dl], m_data[d], m_data[dr]);
}
else
return 0.0;
}
void SdfImage::MexFunction(SdfImage const & img, std::vector<short> & xDist, std::vector<short> & yDist, SdfImage & out)
{
ASSERT_EQUAL(img.GetWidth(), out.GetWidth(), ());
ASSERT_EQUAL(img.GetHeight(), out.GetHeight(), ());
img.EdtaA3(xDist, yDist, out);
// Pixels with grayscale>0.5 will have a negative distance.
// This is correct, but we don't want values <0 returned here.
std::for_each(out.m_data.begin(), out.m_data.end(), [](float & n)
{
n = std::max(0.0f, n);
});
}
float SdfImage::DistaA3(int c, int xc, int yc, int xi, int yi) const
{
int closest = c - xc - yc * m_width; // Index to the edge pixel pointed to from c
//if (closest < 0 || closest > m_data.size())
// return 1000000.0;
ASSERT_GREATER_OR_EQUAL(closest, 0, ());
ASSERT_LESS(closest, m_data.size(), ());
float a = base::Clamp(m_data[closest], 0.0f, 1.0f); // Grayscale value at the edge pixel
if(a == 0.0)
return 1000000.0; // Not an object pixel, return "very far" ("don't know yet")
double dx = static_cast<double>(xi);
double dy = static_cast<double>(yi);
double di = sqrt(dx * dx + dy * dy); // Length of integer vector, like a traditional EDT
double df = 0.0;
if(di == 0.0)
{
int y = closest / m_width;
int x = closest % m_width;
// Use local gradient only at edges
// Estimate based on local gradient only
df = EdgeDf(ComputeGradient(x, y, BIND_GRADIENT(ComputeXGradient)),
ComputeGradient(x, y, BIND_GRADIENT(ComputeYGradient)), a);
}
else
{
// Estimate gradient based on direction to edge (accurate for large di)
df = EdgeDf(dx, dy, a);
}
return static_cast<float>(di + df); // Same metric as edtaa2, except at edges (where di=0)
}
double SdfImage::EdgeDf(double gx, double gy, double a) const
{
double df = 0.0;
if ((gx == 0) || (gy == 0))
{
// Either A) gu or gv are zero
// B) both
df = 0.5 - a; // Linear approximation is A) correct or B) a fair guess
}
else
{
double glength = sqrt(gx * gx + gy * gy);
if(glength > 0)
{
gx = gx / glength;
gy = gy / glength;
}
// Everything is symmetric wrt sign and transposition,
// so move to first octant (gx>=0, gy>=0, gx>=gy) to
// avoid handling all possible edge directions.
gx = fabs(gx);
gy = fabs(gy);
if (gx < gy)
std::swap(gx, gy);
double a1 = 0.5 * gy / gx;
if (a < a1)
df = 0.5 * (gx + gy) - sqrt(2.0 * gx * gy * a);
else if (a < (1.0 - a1))
df = (0.5 - a) * gx;
else
df = -0.5 * (gx + gy) + sqrt(2.0 * gx * gy * (1.0 - a));
}
return df;
}
void SdfImage::EdtaA3(std::vector<short> & xDist, std::vector<short> & yDist, SdfImage & dist) const
{
ASSERT_EQUAL(dist.GetHeight(), GetHeight(), ());
ASSERT_EQUAL(dist.GetWidth(), GetWidth(), ());
ASSERT_EQUAL(dist.m_data.size(), m_data.size(), ());
int w = GetWidth();
int h = GetHeight();
/* Initialize the distance SdfImages */
for (size_t y = 0; y < h; ++y)
{
size_t baseIndex = y * w;
for (size_t x = 0; x < w; ++x)
{
size_t index = baseIndex + x;
if (m_data[index] <= 0.0)
dist.m_data[index]= 1000000.0; // Big value, means "not set yet"
else if (m_data[index] < 1.0)
{
dist.m_data[index] = EdgeDf(ComputeGradient(x, y, BIND_GRADIENT(ComputeXGradient)),
ComputeGradient(x, y, BIND_GRADIENT(ComputeYGradient)),
m_data[index]);
}
}
}
/* Initialize index offsets for the current SdfImage width */
int offsetU = -w;
int offsetD = w;
int offsetR = 1;
int offsetL = -1;
int offsetRu = -w + 1;
int offsetRd = w + 1;
int offsetLd = w - 1;
int offsetLu = -w - 1;
/* Perform the transformation */
bool changed;
do
{
changed = false;
for(int y = 1; y < h; ++y)
{
int i = y * w;
/* scan right, propagate distances from above & left */
/* Leftmost pixel is special, has no left neighbors */
float oldDist = dist.m_data[i];
if(oldDist > 0) // If non-zero distance or not set yet
{
TRANSFORM(offsetU, 0, 1);
TRANSFORM(offsetRu, -1, 1);
}
++i;
/* Middle pixels have all neighbors */
for(int x = 1; x < w - 1; ++x, ++i)
{
oldDist = dist.m_data[i];
if(oldDist > 0.0)
{
TRANSFORM(offsetL, 1, 0);
TRANSFORM(offsetLu, 1, 1);
TRANSFORM(offsetU, 0, 1);
TRANSFORM(offsetRu, -1, 1);
}
}
/* Rightmost pixel of row is special, has no right neighbors */
oldDist = dist.m_data[i];
if(oldDist > 0)
{
TRANSFORM(offsetL, 1, 0);
TRANSFORM(offsetLu, 1, 1);
TRANSFORM(offsetU, 0, 1);
}
/* Move index to second rightmost pixel of current row. */
/* Rightmost pixel is skipped, it has no right neighbor. */
i = y * w + w - 2;
/* scan left, propagate distance from right */
for(int x = w - 2; x >= 0; --x, --i)
{
oldDist = dist.m_data[i];
if(oldDist > 0.0)
TRANSFORM(offsetR, -1, 0);
}
}
/* Scan rows in reverse order, except last row */
for(int y = h - 2; y >= 0; --y)
{
/* move index to rightmost pixel of current row */
int i = y * w + w - 1;
/* Scan left, propagate distances from below & right */
/* Rightmost pixel is special, has no right neighbors */
float oldDist = dist.m_data[i];
if(oldDist > 0) // If not already zero distance
{
TRANSFORM(offsetD, 0, -1);
TRANSFORM(offsetLd, 1, -1);
}
--i;
/* Middle pixels have all neighbors */
for(int x = w - 2; x > 0; --x, --i)
{
oldDist = dist.m_data[i];
if(oldDist > 0.0)
{
TRANSFORM(offsetR, -1, 0);
TRANSFORM(offsetRd, -1, -1);
TRANSFORM(offsetD, 0, -1);
TRANSFORM(offsetLd, 1, -1);
}
}
/* Leftmost pixel is special, has no left neighbors */
oldDist = dist.m_data[i];
if(oldDist > 0)
{
TRANSFORM(offsetR, -1, 0);
TRANSFORM(offsetRd, -1, -1);
TRANSFORM(offsetD, 0, -1);
}
/* Move index to second leftmost pixel of current row. */
/* Leftmost pixel is skipped, it has no left neighbor. */
i = y * w + 1;
for(int x = 1; x < w; ++x, ++i)
{
/* scan right, propagate distance from left */
oldDist = dist.m_data[i];
if(oldDist > 0.0)
TRANSFORM(offsetL, 1, 0);
}
}
}
while(changed);
}
bool SdfImage::Transform(int baseIndex, int offset, int dx, int dy, std::vector<short> & xDist, std::vector<short> & yDist, float & oldDist) const
{
double const epsilon = 1e-3;
ASSERT_EQUAL(xDist.size(), yDist.size(), ());
ASSERT_GREATER_OR_EQUAL(baseIndex, 0, ());
ASSERT_LESS(baseIndex, xDist.size(), ());
int candidate = baseIndex + offset;
ASSERT_GREATER_OR_EQUAL(candidate, 0, ());
ASSERT_LESS(candidate, xDist.size(), ());
int cDistX = xDist[candidate];
int cDistY = yDist[candidate];
int newDistX = cDistX + dx;
int newDistY = cDistY + dy;
float newDist = DistaA3(candidate, cDistX, cDistY, newDistX, newDistY);
if(newDist < oldDist - epsilon)
{
xDist[baseIndex] = newDistX;
yDist[baseIndex] = newDistY;
oldDist = newDist;
return true;
}
return false;
}
} // namespace sdf_image

View file

@ -0,0 +1,81 @@
#pragma once
// +----------------------------------------+
// | |
// | http://contourtextures.wikidot.com |
// | |
// +----------------------------------------+
/*
Copyright (C) 2009 by Stefan Gustavson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#include "base/buffer_vector.hpp"
#include <cstdint>
#include <functional>
#include <vector>
namespace sdf_image
{
class SdfImage
{
public:
SdfImage() = default;
SdfImage(uint32_t h, uint32_t w);
SdfImage(uint32_t h, uint32_t w, uint8_t * imageData, uint8_t border);
SdfImage(SdfImage const & copy);
uint32_t GetWidth() const;
uint32_t GetHeight() const;
void GetData(std::vector<uint8_t> & dst);
void GenerateSDF(float sc);
private:
void Scale();
void Invert();
void Minus(SdfImage &im);
void Distquant();
SdfImage Bilinear(float Scale);
private:
/// ul = up left
/// u = up
/// ...
/// d = down
/// dr = down right
/// ul u ur l r dl d dr
using TComputeFn = std::function<float (float, float, float, float, float, float, float, float)>;
float ComputeGradient(uint32_t x, uint32_t y, TComputeFn const & fn) const;
void MexFunction(SdfImage const & img, std::vector<short> & xDist, std::vector<short> & yDist,
SdfImage & out);
float DistaA3(int c, int xc, int yc, int xi, int yi) const;
double EdgeDf(double gx, double gy, double a) const;
void EdtaA3(std::vector<short> & xDist, std::vector<short> & yDist, SdfImage & dist) const;
bool Transform(int baseIndex, int offset, int dx, int dy, std::vector<short> & xDist,
std::vector<short> & yDist, float & oldDist) const;
private:
uint32_t m_height = 0;
uint32_t m_width = 0;
buffer_vector<float, 512> m_data;
};
} // namespace sdf_image
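A minimal usage sketch of the class above, assuming the bundled include path used by sdf_image.cpp; GenerateSDF rescales the image, so the output buffer is sized from the post-generation dimensions.
#include "3party/sdf_image/sdf_image.h"
#include <cstdint>
#include <vector>
int main()
{
  // A tiny 8x8 grayscale glyph bitmap: 0 = background, 255 = ink.
  uint32_t const w = 8, h = 8;
  std::vector<uint8_t> bitmap(w * h, 0);
  for (uint32_t y = 2; y < 6; ++y)
    for (uint32_t x = 2; x < 6; ++x)
      bitmap[y * w + x] = 255;
  uint8_t const border = 4;  // transparent padding added around the glyph
  sdf_image::SdfImage img(h, w, bitmap.data(), border);
  img.GenerateSDF(0.5f);  // build the distance field and downscale by 2
  // GetData expects a destination at least as large as the (rescaled) image.
  std::vector<uint8_t> sdf(img.GetWidth() * img.GetHeight());
  img.GetData(sdf);  // 8-bit distance field, e.g. for uploading as a glyph texture
  return 0;
}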

View file

@ -511,7 +511,7 @@ static int stbi_write_bmp_core(stbi__write_context *s, int x, int y, int comp, c
STBIWDEF int stbi_write_bmp_to_func(stbi_write_func *func, void *context, int x, int y, int comp, const void *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
stbi__start_write_callbacks(&s, func, context);
return stbi_write_bmp_core(&s, x, y, comp, data);
}
@ -519,7 +519,7 @@ STBIWDEF int stbi_write_bmp_to_func(stbi_write_func *func, void *context, int x,
#ifndef STBI_WRITE_NO_STDIO
STBIWDEF int stbi_write_bmp(char const *filename, int x, int y, int comp, const void *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
if (stbi__start_write_file(&s,filename)) {
int r = stbi_write_bmp_core(&s, x, y, comp, data);
stbi__end_write_file(&s);
@ -610,7 +610,7 @@ static int stbi_write_tga_core(stbi__write_context *s, int x, int y, int comp, v
STBIWDEF int stbi_write_tga_to_func(stbi_write_func *func, void *context, int x, int y, int comp, const void *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
stbi__start_write_callbacks(&s, func, context);
return stbi_write_tga_core(&s, x, y, comp, (void *) data);
}
@ -618,7 +618,7 @@ STBIWDEF int stbi_write_tga_to_func(stbi_write_func *func, void *context, int x,
#ifndef STBI_WRITE_NO_STDIO
STBIWDEF int stbi_write_tga(char const *filename, int x, int y, int comp, const void *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
if (stbi__start_write_file(&s,filename)) {
int r = stbi_write_tga_core(&s, x, y, comp, (void *) data);
stbi__end_write_file(&s);
@ -786,14 +786,14 @@ static int stbi_write_hdr_core(stbi__write_context *s, int x, int y, int comp, f
STBIWDEF int stbi_write_hdr_to_func(stbi_write_func *func, void *context, int x, int y, int comp, const float *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
stbi__start_write_callbacks(&s, func, context);
return stbi_write_hdr_core(&s, x, y, comp, (float *) data);
}
STBIWDEF int stbi_write_hdr(char const *filename, int x, int y, int comp, const float *data)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
if (stbi__start_write_file(&s,filename)) {
int r = stbi_write_hdr_core(&s, x, y, comp, (float *) data);
stbi__end_write_file(&s);
@ -1606,7 +1606,7 @@ static int stbi_write_jpg_core(stbi__write_context *s, int width, int height, in
STBIWDEF int stbi_write_jpg_to_func(stbi_write_func *func, void *context, int x, int y, int comp, const void *data, int quality)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
stbi__start_write_callbacks(&s, func, context);
return stbi_write_jpg_core(&s, x, y, comp, (void *) data, quality);
}
@ -1615,7 +1615,7 @@ STBIWDEF int stbi_write_jpg_to_func(stbi_write_func *func, void *context, int x,
#ifndef STBI_WRITE_NO_STDIO
STBIWDEF int stbi_write_jpg(char const *filename, int x, int y, int comp, const void *data, int quality)
{
stbi__write_context s = {};
stbi__write_context s = { 0 };
if (stbi__start_write_file(&s,filename)) {
int r = stbi_write_jpg_core(&s, x, y, comp, data, quality);
stbi__end_write_file(&s);

View file

@ -38,14 +38,14 @@ namespace succinct {
typedef std::vector<uint64_t> bits_type;
bit_vector_builder(uint64_t size = 0, bool initBit = false)
bit_vector_builder(uint64_t size = 0, bool init = 0)
: m_size(size)
{
m_bits.resize(detail::words_for(size), initBit ? uint64_t(-1) : 0);
m_bits.resize(detail::words_for(size), uint64_t(-init));
if (size) {
m_cur_word = &m_bits.back();
// clear padding bits
if (initBit && size % 64) {
if (init && size % 64) {
*m_cur_word >>= 64 - (size % 64);
}
}
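The padding-bit handling shown in this hunk matters whenever size is not a multiple of 64: the unused high bits of the last word are zeroed so they never read as set bits later. A standalone illustration of that shift:
#include <cstdint>
#include <cstdio>
#include <vector>
int main()
{
  uint64_t const size = 70;  // not a multiple of 64, so the last word has padding bits
  std::vector<uint64_t> bits((size + 63) / 64, uint64_t(-1));  // initialize all bits to 1
  // Clear the padding bits of the last word, exactly as the builder above does.
  if (size % 64)
    bits.back() >>= 64 - (size % 64);
  std::printf("%016llx %016llx\n", static_cast<unsigned long long>(bits[0]),
              static_cast<unsigned long long>(bits[1]));
  // Prints "ffffffffffffffff 000000000000003f": only bits 64..69 stay set.
}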

@ -1 +0,0 @@
Subproject commit 6be08bbea14ffa0a5c594257fb6285a054395cd7

View file

@ -0,0 +1,12 @@
utf8 cpp library
Release 2.3.4
A minor bug fix release. Thanks to all who reported bugs.
Note: Version 2.3.3 contained a regression, and therefore was removed.
Changes from version 2.3.2
- Bug fix [39]: checked.h Line 273 and unchecked.h Line 182 have an extra ';'
- Bug fix [36]: replace_invalid() only works with back_inserter
Files included in the release: utf8.h, core.h, checked.h, unchecked.h, utf8cpp.html, ReleaseNotes

File diff suppressed because it is too large

View file

@ -0,0 +1,34 @@
// Copyright 2006 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_2675DCD0_9480_4c0c_B92A_CC14C027B731
#define UTF8_FOR_CPP_2675DCD0_9480_4c0c_B92A_CC14C027B731
#include "utf8/checked.h"
#include "utf8/unchecked.h"
#endif // header guard

View file

@ -0,0 +1,335 @@
// Copyright 2006-2016 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_CHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#define UTF8_FOR_CPP_CHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#include "core.h"
#include <stdexcept>
namespace utf8
{
// Base for the exceptions that may be thrown from the library
class exception : public ::std::exception {
};
// Exceptions that may be thrown from the library functions.
class invalid_code_point : public exception {
uint32_t cp;
public:
invalid_code_point(uint32_t codepoint) : cp(codepoint) {}
virtual const char* what() const UTF_CPP_NOEXCEPT UTF_CPP_OVERRIDE { return "Invalid code point"; }
uint32_t code_point() const {return cp;}
};
class invalid_utf8 : public exception {
uint8_t u8;
public:
invalid_utf8 (uint8_t u) : u8(u) {}
virtual const char* what() const UTF_CPP_NOEXCEPT UTF_CPP_OVERRIDE { return "Invalid UTF-8"; }
uint8_t utf8_octet() const {return u8;}
};
class invalid_utf16 : public exception {
uint16_t u16;
public:
invalid_utf16 (uint16_t u) : u16(u) {}
virtual const char* what() const UTF_CPP_NOEXCEPT UTF_CPP_OVERRIDE { return "Invalid UTF-16"; }
uint16_t utf16_word() const {return u16;}
};
class not_enough_room : public exception {
public:
virtual const char* what() const UTF_CPP_NOEXCEPT UTF_CPP_OVERRIDE { return "Not enough space"; }
};
/// The library API - functions intended to be called by the users
template <typename octet_iterator>
octet_iterator append(uint32_t cp, octet_iterator result)
{
if (!utf8::internal::is_code_point_valid(cp))
throw invalid_code_point(cp);
if (cp < 0x80) // one octet
*(result++) = static_cast<uint8_t>(cp);
else if (cp < 0x800) { // two octets
*(result++) = static_cast<uint8_t>((cp >> 6) | 0xc0);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
else if (cp < 0x10000) { // three octets
*(result++) = static_cast<uint8_t>((cp >> 12) | 0xe0);
*(result++) = static_cast<uint8_t>(((cp >> 6) & 0x3f) | 0x80);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
else { // four octets
*(result++) = static_cast<uint8_t>((cp >> 18) | 0xf0);
*(result++) = static_cast<uint8_t>(((cp >> 12) & 0x3f) | 0x80);
*(result++) = static_cast<uint8_t>(((cp >> 6) & 0x3f) | 0x80);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
return result;
}
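The branching in append maps directly onto the UTF-8 byte layout: one octet below U+0080, two below U+0800, three below U+10000, four otherwise. A standalone check mirroring that branching for U+20AC (the euro sign):
#include <cstdint>
#include <cstdio>
#include <string>
// Mirrors the branching in utf8::append above, writing into a std::string.
static std::string EncodeCodePoint(uint32_t cp)
{
  std::string out;
  if (cp < 0x80)
    out += static_cast<char>(cp);
  else if (cp < 0x800)
  {
    out += static_cast<char>((cp >> 6) | 0xc0);
    out += static_cast<char>((cp & 0x3f) | 0x80);
  }
  else if (cp < 0x10000)
  {
    out += static_cast<char>((cp >> 12) | 0xe0);
    out += static_cast<char>(((cp >> 6) & 0x3f) | 0x80);
    out += static_cast<char>((cp & 0x3f) | 0x80);
  }
  else
  {
    out += static_cast<char>((cp >> 18) | 0xf0);
    out += static_cast<char>(((cp >> 12) & 0x3f) | 0x80);
    out += static_cast<char>(((cp >> 6) & 0x3f) | 0x80);
    out += static_cast<char>((cp & 0x3f) | 0x80);
  }
  return out;
}
int main()
{
  std::string const euro = EncodeCodePoint(0x20AC);
  for (unsigned char c : euro)
    std::printf("%02X ", c);  // prints "E2 82 AC"
  std::printf("\n");
}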
template <typename octet_iterator, typename output_iterator>
output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out, uint32_t replacement)
{
while (start != end) {
octet_iterator sequence_start = start;
internal::utf_error err_code = utf8::internal::validate_next(start, end);
switch (err_code) {
case internal::UTF8_OK :
for (octet_iterator it = sequence_start; it != start; ++it)
*out++ = *it;
break;
case internal::NOT_ENOUGH_ROOM:
out = utf8::append (replacement, out);
start = end;
break;
case internal::INVALID_LEAD:
out = utf8::append (replacement, out);
++start;
break;
case internal::INCOMPLETE_SEQUENCE:
case internal::OVERLONG_SEQUENCE:
case internal::INVALID_CODE_POINT:
out = utf8::append (replacement, out);
++start;
// just one replacement mark for the sequence
while (start != end && utf8::internal::is_trail(*start))
++start;
break;
}
}
return out;
}
template <typename octet_iterator, typename output_iterator>
inline output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out)
{
static const uint32_t replacement_marker = utf8::internal::mask16(0xfffd);
return utf8::replace_invalid(start, end, out, replacement_marker);
}
template <typename octet_iterator>
uint32_t next(octet_iterator& it, octet_iterator end)
{
uint32_t cp = 0;
internal::utf_error err_code = utf8::internal::validate_next(it, end, cp);
switch (err_code) {
case internal::UTF8_OK :
break;
case internal::NOT_ENOUGH_ROOM :
throw not_enough_room();
case internal::INVALID_LEAD :
case internal::INCOMPLETE_SEQUENCE :
case internal::OVERLONG_SEQUENCE :
throw invalid_utf8(*it);
case internal::INVALID_CODE_POINT :
throw invalid_code_point(cp);
}
return cp;
}
template <typename octet_iterator>
uint32_t peek_next(octet_iterator it, octet_iterator end)
{
return utf8::next(it, end);
}
template <typename octet_iterator>
uint32_t prior(octet_iterator& it, octet_iterator start)
{
// can't do much if it == start
if (it == start)
throw not_enough_room();
octet_iterator end = it;
// Go back until we hit either a lead octet or start
while (utf8::internal::is_trail(*(--it)))
if (it == start)
throw invalid_utf8(*it); // error - no lead byte in the sequence
return utf8::peek_next(it, end);
}
template <typename octet_iterator, typename distance_type>
void advance (octet_iterator& it, distance_type n, octet_iterator end)
{
const distance_type zero(0);
if (n < zero) {
// backward
for (distance_type i = n; i < zero; ++i)
utf8::prior(it, end);
} else {
// forward
for (distance_type i = zero; i < n; ++i)
utf8::next(it, end);
}
}
template <typename octet_iterator>
typename std::iterator_traits<octet_iterator>::difference_type
distance (octet_iterator first, octet_iterator last)
{
typename std::iterator_traits<octet_iterator>::difference_type dist;
for (dist = 0; first < last; ++dist)
utf8::next(first, last);
return dist;
}
template <typename u16bit_iterator, typename octet_iterator>
octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result)
{
while (start != end) {
uint32_t cp = utf8::internal::mask16(*start++);
// Take care of surrogate pairs first
if (utf8::internal::is_lead_surrogate(cp)) {
if (start != end) {
uint32_t trail_surrogate = utf8::internal::mask16(*start++);
if (utf8::internal::is_trail_surrogate(trail_surrogate))
cp = (cp << 10) + trail_surrogate + internal::SURROGATE_OFFSET;
else
throw invalid_utf16(static_cast<uint16_t>(trail_surrogate));
}
else
throw invalid_utf16(static_cast<uint16_t>(cp));
}
// Lone trail surrogate
else if (utf8::internal::is_trail_surrogate(cp))
throw invalid_utf16(static_cast<uint16_t>(cp));
result = utf8::append(cp, result);
}
return result;
}
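The surrogate-pair arithmetic in utf16to8 relies on SURROGATE_OFFSET (defined in core.h further below as 0xfca02400u) folding the 0x10000 offset and the lead/trail biases into a single unsigned addition. A quick arithmetic check for U+1F600:
#include <cstdint>
#include <cstdio>
int main()
{
  // 0x10000 - (0xD800 << 10) - 0xDC00, reduced modulo 2^32.
  uint32_t const SURROGATE_OFFSET = 0xfca02400u;
  uint32_t const lead = 0xD83D, trail = 0xDE00;  // UTF-16 encoding of U+1F600
  uint32_t const cp = (lead << 10) + trail + SURROGATE_OFFSET;
  std::printf("U+%X\n", static_cast<unsigned>(cp));  // prints "U+1F600"
  return 0;
}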
template <typename u16bit_iterator, typename octet_iterator>
u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result)
{
while (start < end) {
uint32_t cp = utf8::next(start, end);
if (cp > 0xffff) { //make a surrogate pair
*result++ = static_cast<uint16_t>((cp >> 10) + internal::LEAD_OFFSET);
*result++ = static_cast<uint16_t>((cp & 0x3ff) + internal::TRAIL_SURROGATE_MIN);
}
else
*result++ = static_cast<uint16_t>(cp);
}
return result;
}
template <typename octet_iterator, typename u32bit_iterator>
octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result)
{
while (start != end)
result = utf8::append(*(start++), result);
return result;
}
template <typename octet_iterator, typename u32bit_iterator>
u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result)
{
while (start < end)
(*result++) = utf8::next(start, end);
return result;
}
// The iterator class
template <typename octet_iterator>
class iterator {
octet_iterator it;
octet_iterator range_start;
octet_iterator range_end;
public:
typedef uint32_t value_type;
typedef uint32_t* pointer;
typedef uint32_t& reference;
typedef std::ptrdiff_t difference_type;
typedef std::bidirectional_iterator_tag iterator_category;
iterator () {}
explicit iterator (const octet_iterator& octet_it,
const octet_iterator& rangestart,
const octet_iterator& rangeend) :
it(octet_it), range_start(rangestart), range_end(rangeend)
{
if (it < range_start || it > range_end)
throw std::out_of_range("Invalid utf-8 iterator position");
}
// the default "big three" are OK
octet_iterator base () const { return it; }
uint32_t operator * () const
{
octet_iterator temp = it;
return utf8::next(temp, range_end);
}
bool operator == (const iterator& rhs) const
{
if (range_start != rhs.range_start || range_end != rhs.range_end)
throw std::logic_error("Comparing utf-8 iterators defined with different ranges");
return (it == rhs.it);
}
bool operator != (const iterator& rhs) const
{
return !(operator == (rhs));
}
iterator& operator ++ ()
{
utf8::next(it, range_end);
return *this;
}
iterator operator ++ (int)
{
iterator temp = *this;
utf8::next(it, range_end);
return temp;
}
iterator& operator -- ()
{
utf8::prior(it, range_start);
return *this;
}
iterator operator -- (int)
{
iterator temp = *this;
utf8::prior(it, range_start);
return temp;
}
}; // class iterator
} // namespace utf8
#if UTF_CPP_CPLUSPLUS >= 201703L // C++ 17 or later
#include "cpp17.h"
#elif UTF_CPP_CPLUSPLUS >= 201103L // C++ 11 or later
#include "cpp11.h"
#endif // C++ 11 or later
#endif //header guard

View file

@ -0,0 +1,338 @@
// Copyright 2006 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_CORE_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#define UTF8_FOR_CPP_CORE_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#include <iterator>
// Determine the C++ standard version.
// If the user defines UTF_CPP_CPLUSPLUS, use that.
// Otherwise, trust the unreliable predefined macro __cplusplus
#if !defined UTF_CPP_CPLUSPLUS
#define UTF_CPP_CPLUSPLUS __cplusplus
#endif
#if UTF_CPP_CPLUSPLUS >= 201103L // C++ 11 or later
#define UTF_CPP_OVERRIDE override
#define UTF_CPP_NOEXCEPT noexcept
#else // C++ 98/03
#define UTF_CPP_OVERRIDE
#define UTF_CPP_NOEXCEPT throw()
#endif // C++ 11 or later
namespace utf8
{
// The typedefs for 8-bit, 16-bit and 32-bit unsigned integers
// You may need to change them to match your system.
// These typedefs have the same names as ones from cstdint, or boost/cstdint
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
// Helper code - not intended to be directly called by the library users. May be changed at any time
namespace internal
{
// Unicode constants
// Leading (high) surrogates: 0xd800 - 0xdbff
// Trailing (low) surrogates: 0xdc00 - 0xdfff
const uint16_t LEAD_SURROGATE_MIN = 0xd800u;
const uint16_t LEAD_SURROGATE_MAX = 0xdbffu;
const uint16_t TRAIL_SURROGATE_MIN = 0xdc00u;
const uint16_t TRAIL_SURROGATE_MAX = 0xdfffu;
const uint16_t LEAD_OFFSET = 0xd7c0u; // LEAD_SURROGATE_MIN - (0x10000 >> 10)
const uint32_t SURROGATE_OFFSET = 0xfca02400u; // 0x10000u - (LEAD_SURROGATE_MIN << 10) - TRAIL_SURROGATE_MIN
// Maximum valid value for a Unicode code point
const uint32_t CODE_POINT_MAX = 0x0010ffffu;
template<typename octet_type>
inline uint8_t mask8(octet_type oc)
{
return static_cast<uint8_t>(0xff & oc);
}
template<typename u16_type>
inline uint16_t mask16(u16_type oc)
{
return static_cast<uint16_t>(0xffff & oc);
}
template<typename octet_type>
inline bool is_trail(octet_type oc)
{
return ((utf8::internal::mask8(oc) >> 6) == 0x2);
}
template <typename u16>
inline bool is_lead_surrogate(u16 cp)
{
return (cp >= LEAD_SURROGATE_MIN && cp <= LEAD_SURROGATE_MAX);
}
template <typename u16>
inline bool is_trail_surrogate(u16 cp)
{
return (cp >= TRAIL_SURROGATE_MIN && cp <= TRAIL_SURROGATE_MAX);
}
template <typename u16>
inline bool is_surrogate(u16 cp)
{
return (cp >= LEAD_SURROGATE_MIN && cp <= TRAIL_SURROGATE_MAX);
}
template <typename u32>
inline bool is_code_point_valid(u32 cp)
{
return (cp <= CODE_POINT_MAX && !utf8::internal::is_surrogate(cp));
}
template <typename octet_iterator>
inline typename std::iterator_traits<octet_iterator>::difference_type
sequence_length(octet_iterator lead_it)
{
uint8_t lead = utf8::internal::mask8(*lead_it);
if (lead < 0x80)
return 1;
else if ((lead >> 5) == 0x6)
return 2;
else if ((lead >> 4) == 0xe)
return 3;
else if ((lead >> 3) == 0x1e)
return 4;
else
return 0;
}
template <typename octet_difference_type>
inline bool is_overlong_sequence(uint32_t cp, octet_difference_type length)
{
if (cp < 0x80) {
if (length != 1)
return true;
}
else if (cp < 0x800) {
if (length != 2)
return true;
}
else if (cp < 0x10000) {
if (length != 3)
return true;
}
return false;
}
enum utf_error {UTF8_OK, NOT_ENOUGH_ROOM, INVALID_LEAD, INCOMPLETE_SEQUENCE, OVERLONG_SEQUENCE, INVALID_CODE_POINT};
/// Helper for get_sequence_x
template <typename octet_iterator>
utf_error increase_safely(octet_iterator& it, octet_iterator end)
{
if (++it == end)
return NOT_ENOUGH_ROOM;
if (!utf8::internal::is_trail(*it))
return INCOMPLETE_SEQUENCE;
return UTF8_OK;
}
#define UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(IT, END) {utf_error ret = increase_safely(IT, END); if (ret != UTF8_OK) return ret;}
/// get_sequence_x functions decode utf-8 sequences of the length x
template <typename octet_iterator>
utf_error get_sequence_1(octet_iterator& it, octet_iterator end, uint32_t& code_point)
{
if (it == end)
return NOT_ENOUGH_ROOM;
code_point = utf8::internal::mask8(*it);
return UTF8_OK;
}
template <typename octet_iterator>
utf_error get_sequence_2(octet_iterator& it, octet_iterator end, uint32_t& code_point)
{
if (it == end)
return NOT_ENOUGH_ROOM;
code_point = utf8::internal::mask8(*it);
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point = ((code_point << 6) & 0x7ff) + ((*it) & 0x3f);
return UTF8_OK;
}
template <typename octet_iterator>
utf_error get_sequence_3(octet_iterator& it, octet_iterator end, uint32_t& code_point)
{
if (it == end)
return NOT_ENOUGH_ROOM;
code_point = utf8::internal::mask8(*it);
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point = ((code_point << 12) & 0xffff) + ((utf8::internal::mask8(*it) << 6) & 0xfff);
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point += (*it) & 0x3f;
return UTF8_OK;
}
template <typename octet_iterator>
utf_error get_sequence_4(octet_iterator& it, octet_iterator end, uint32_t& code_point)
{
if (it == end)
return NOT_ENOUGH_ROOM;
code_point = utf8::internal::mask8(*it);
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point = ((code_point << 18) & 0x1fffff) + ((utf8::internal::mask8(*it) << 12) & 0x3ffff);
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point += (utf8::internal::mask8(*it) << 6) & 0xfff;
UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR(it, end)
code_point += (*it) & 0x3f;
return UTF8_OK;
}
#undef UTF8_CPP_INCREASE_AND_RETURN_ON_ERROR
template <typename octet_iterator>
utf_error validate_next(octet_iterator& it, octet_iterator end, uint32_t& code_point)
{
if (it == end)
return NOT_ENOUGH_ROOM;
// Save the original value of it so we can go back in case of failure
// Of course, it does not make much sense with i.e. stream iterators
octet_iterator original_it = it;
uint32_t cp = 0;
// Determine the sequence length based on the lead octet
typedef typename std::iterator_traits<octet_iterator>::difference_type octet_difference_type;
const octet_difference_type length = utf8::internal::sequence_length(it);
// Get trail octets and calculate the code point
utf_error err = UTF8_OK;
switch (length) {
case 0:
return INVALID_LEAD;
case 1:
err = utf8::internal::get_sequence_1(it, end, cp);
break;
case 2:
err = utf8::internal::get_sequence_2(it, end, cp);
break;
case 3:
err = utf8::internal::get_sequence_3(it, end, cp);
break;
case 4:
err = utf8::internal::get_sequence_4(it, end, cp);
break;
}
if (err == UTF8_OK) {
// Decoding succeeded. Now, security checks...
if (utf8::internal::is_code_point_valid(cp)) {
if (!utf8::internal::is_overlong_sequence(cp, length)){
// Passed! Return here.
code_point = cp;
++it;
return UTF8_OK;
}
else
err = OVERLONG_SEQUENCE;
}
else
err = INVALID_CODE_POINT;
}
// Failure branch - restore the original value of the iterator
it = original_it;
return err;
}
template <typename octet_iterator>
inline utf_error validate_next(octet_iterator& it, octet_iterator end) {
uint32_t ignored;
return utf8::internal::validate_next(it, end, ignored);
}
} // namespace internal
/// The library API - functions intended to be called by the users
// Byte order mark
const uint8_t bom[] = {0xef, 0xbb, 0xbf};
template <typename octet_iterator>
octet_iterator find_invalid(octet_iterator start, octet_iterator end)
{
octet_iterator result = start;
while (result != end) {
utf8::internal::utf_error err_code = utf8::internal::validate_next(result, end);
if (err_code != internal::UTF8_OK)
return result;
}
return result;
}
template <typename octet_iterator>
inline bool is_valid(octet_iterator start, octet_iterator end)
{
return (utf8::find_invalid(start, end) == end);
}
template <typename octet_iterator>
inline bool starts_with_bom (octet_iterator it, octet_iterator end)
{
return (
((it != end) && (utf8::internal::mask8(*it++)) == bom[0]) &&
((it != end) && (utf8::internal::mask8(*it++)) == bom[1]) &&
((it != end) && (utf8::internal::mask8(*it)) == bom[2])
);
}
} // namespace utf8
#endif // header guard
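
A minimal usage sketch for the validation API declared above (find_invalid, is_valid, starts_with_bom); the include path below is an assumption based on the header guard, not something shown in this diff:

// Sketch only; "core.h" as the file name is an assumption.
#include "core.h"

#include <iostream>
#include <string>

int main()
{
  std::string const good = "za\xc5\xbc\xc3\xb3\xc5\x82\xc4\x87";  // "zażółć", valid UTF-8
  std::string const bad = "abc\xfe\xff";                          // 0xfe is never a valid lead octet

  std::cout << std::boolalpha
            << utf8::is_valid(good.begin(), good.end()) << '\n'   // true
            << utf8::is_valid(bad.begin(), bad.end()) << '\n';    // false

  // find_invalid returns an iterator to the first offending octet, or end().
  auto const it = utf8::find_invalid(bad.begin(), bad.end());
  std::cout << "first invalid octet at offset " << (it - bad.begin()) << '\n';  // 3
}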

View file

@ -0,0 +1,103 @@
// Copyright 2018 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_a184c22c_d012_11e8_a8d5_f2801f1b9fd1
#define UTF8_FOR_CPP_a184c22c_d012_11e8_a8d5_f2801f1b9fd1
#include "checked.h"
#include <string>
namespace utf8
{
inline void append(char32_t cp, std::string& s)
{
append(uint32_t(cp), std::back_inserter(s));
}
inline std::string utf16to8(const std::u16string& s)
{
std::string result;
utf16to8(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::u16string utf8to16(const std::string& s)
{
std::u16string result;
utf8to16(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::string utf32to8(const std::u32string& s)
{
std::string result;
utf32to8(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::u32string utf8to32(const std::string& s)
{
std::u32string result;
utf8to32(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::size_t find_invalid(const std::string& s)
{
std::string::const_iterator invalid = find_invalid(s.begin(), s.end());
return (invalid == s.end()) ? std::string::npos : (invalid - s.begin());
}
inline bool is_valid(const std::string& s)
{
return is_valid(s.begin(), s.end());
}
inline std::string replace_invalid(const std::string& s, char32_t replacement)
{
std::string result;
replace_invalid(s.begin(), s.end(), std::back_inserter(result), replacement);
return result;
}
inline std::string replace_invalid(const std::string& s)
{
std::string result;
replace_invalid(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline bool starts_with_bom(const std::string& s)
{
return starts_with_bom(s.begin(), s.end());
}
} // namespace utf8
#endif // header guard
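
The std::string based wrappers above just compose the checked primitives from checked.h; a hedged usage sketch follows (the file name cpp11.h is an assumption, only functions visible in the header above are relied on):

// Sketch only; "cpp11.h" as the file name is an assumption.
#include "cpp11.h"

#include <cassert>
#include <string>

int main()
{
  std::string const line = "\xe2\x82\xac 42";        // "€ 42" encoded as UTF-8
  std::u16string const u16 = utf8::utf8to16(line);   // round-trip through UTF-16
  assert(utf8::utf16to8(u16) == line);

  // Broken input: replace_invalid substitutes the replacement character by default.
  std::string const broken = "ab\xc0 cd";            // truncated two-octet sequence
  assert(!utf8::is_valid(broken));
  std::string const fixed = utf8::replace_invalid(broken);
  assert(utf8::is_valid(fixed));
  assert(utf8::find_invalid(fixed) == std::string::npos);
}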

View file

@ -0,0 +1,103 @@
// Copyright 2018 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_7e906c01_03a3_4daf_b420_ea7ea952b3c9
#define UTF8_FOR_CPP_7e906c01_03a3_4daf_b420_ea7ea952b3c9
#include "checked.h"
#include <string>
namespace utf8
{
inline void append(char32_t cp, std::string& s)
{
append(uint32_t(cp), std::back_inserter(s));
}
inline std::string utf16to8(std::u16string_view s)
{
std::string result;
utf16to8(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::u16string utf8to16(std::string_view s)
{
std::u16string result;
utf8to16(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::string utf32to8(std::u32string_view s)
{
std::string result;
utf32to8(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::u32string utf8to32(std::string_view s)
{
std::u32string result;
utf8to32(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline std::size_t find_invalid(std::string_view s)
{
std::string_view::const_iterator invalid = find_invalid(s.begin(), s.end());
return (invalid == s.end()) ? std::string_view::npos : (invalid - s.begin());
}
inline bool is_valid(std::string_view s)
{
return is_valid(s.begin(), s.end());
}
inline std::string replace_invalid(std::string_view s, char32_t replacement)
{
std::string result;
replace_invalid(s.begin(), s.end(), std::back_inserter(result), replacement);
return result;
}
inline std::string replace_invalid(std::string_view s)
{
std::string result;
replace_invalid(s.begin(), s.end(), std::back_inserter(result));
return result;
}
inline bool starts_with_bom(std::string_view s)
{
return starts_with_bom(s.begin(), s.end());
}
} // namespace utf8
#endif // header guard
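
The std::string_view overloads behave like the previous header but take views, so slices of a buffer can be checked without a copy; a short sketch, assuming the file is included as cpp17.h and compiled with C++17:

// Sketch only; "cpp17.h" as the file name is an assumption.
#include "cpp17.h"

#include <cassert>
#include <string_view>

int main()
{
  constexpr std::string_view text = "caf\xc3\xa9";   // "café" as UTF-8
  assert(utf8::is_valid(text));
  assert(utf8::find_invalid(text) == std::string_view::npos);

  // A slice that cuts a multi-octet sequence in half is reported as invalid.
  std::string_view const cut = text.substr(0, text.size() - 1);
  assert(!utf8::is_valid(cut));
  assert(utf8::find_invalid(cut) == 3);              // offset of the orphaned lead octet
}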

View file

@ -0,0 +1,274 @@
// Copyright 2006 Nemanja Trifunovic
/*
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
#ifndef UTF8_FOR_CPP_UNCHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#define UTF8_FOR_CPP_UNCHECKED_H_2675DCD0_9480_4c0c_B92A_CC14C027B731
#include "core.h"
namespace utf8
{
namespace unchecked
{
template <typename octet_iterator>
octet_iterator append(uint32_t cp, octet_iterator result)
{
if (cp < 0x80) // one octet
*(result++) = static_cast<uint8_t>(cp);
else if (cp < 0x800) { // two octets
*(result++) = static_cast<uint8_t>((cp >> 6) | 0xc0);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
else if (cp < 0x10000) { // three octets
*(result++) = static_cast<uint8_t>((cp >> 12) | 0xe0);
*(result++) = static_cast<uint8_t>(((cp >> 6) & 0x3f) | 0x80);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
else { // four octets
*(result++) = static_cast<uint8_t>((cp >> 18) | 0xf0);
*(result++) = static_cast<uint8_t>(((cp >> 12) & 0x3f)| 0x80);
*(result++) = static_cast<uint8_t>(((cp >> 6) & 0x3f) | 0x80);
*(result++) = static_cast<uint8_t>((cp & 0x3f) | 0x80);
}
return result;
}
template <typename octet_iterator, typename output_iterator>
output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out, uint32_t replacement)
{
while (start != end) {
octet_iterator sequence_start = start;
internal::utf_error err_code = utf8::internal::validate_next(start, end);
switch (err_code) {
case internal::UTF8_OK :
for (octet_iterator it = sequence_start; it != start; ++it)
*out++ = *it;
break;
case internal::NOT_ENOUGH_ROOM:
out = utf8::unchecked::append (replacement, out);
start = end;
break;
case internal::INVALID_LEAD:
out = utf8::unchecked::append (replacement, out);
++start;
break;
case internal::INCOMPLETE_SEQUENCE:
case internal::OVERLONG_SEQUENCE:
case internal::INVALID_CODE_POINT:
out = utf8::unchecked::append (replacement, out);
++start;
// just one replacement mark for the sequence
while (start != end && utf8::internal::is_trail(*start))
++start;
break;
}
}
return out;
}
template <typename octet_iterator, typename output_iterator>
inline output_iterator replace_invalid(octet_iterator start, octet_iterator end, output_iterator out)
{
static const uint32_t replacement_marker = utf8::internal::mask16(0xfffd);
return utf8::unchecked::replace_invalid(start, end, out, replacement_marker);
}
template <typename octet_iterator>
uint32_t next(octet_iterator& it)
{
uint32_t cp = utf8::internal::mask8(*it);
typename std::iterator_traits<octet_iterator>::difference_type length = utf8::internal::sequence_length(it);
switch (length) {
case 1:
break;
case 2:
it++;
cp = ((cp << 6) & 0x7ff) + ((*it) & 0x3f);
break;
case 3:
++it;
cp = ((cp << 12) & 0xffff) + ((utf8::internal::mask8(*it) << 6) & 0xfff);
++it;
cp += (*it) & 0x3f;
break;
case 4:
++it;
cp = ((cp << 18) & 0x1fffff) + ((utf8::internal::mask8(*it) << 12) & 0x3ffff);
++it;
cp += (utf8::internal::mask8(*it) << 6) & 0xfff;
++it;
cp += (*it) & 0x3f;
break;
}
++it;
return cp;
}
template <typename octet_iterator>
uint32_t peek_next(octet_iterator it)
{
return utf8::unchecked::next(it);
}
template <typename octet_iterator>
uint32_t prior(octet_iterator& it)
{
while (utf8::internal::is_trail(*(--it))) ;
octet_iterator temp = it;
return utf8::unchecked::next(temp);
}
template <typename octet_iterator, typename distance_type>
void advance (octet_iterator& it, distance_type n)
{
const distance_type zero(0);
if (n < zero) {
// backward
for (distance_type i = n; i < zero; ++i)
utf8::unchecked::prior(it);
} else {
// forward
for (distance_type i = zero; i < n; ++i)
utf8::unchecked::next(it);
}
}
template <typename octet_iterator>
typename std::iterator_traits<octet_iterator>::difference_type
distance (octet_iterator first, octet_iterator last)
{
typename std::iterator_traits<octet_iterator>::difference_type dist;
for (dist = 0; first < last; ++dist)
utf8::unchecked::next(first);
return dist;
}
template <typename u16bit_iterator, typename octet_iterator>
octet_iterator utf16to8 (u16bit_iterator start, u16bit_iterator end, octet_iterator result)
{
while (start != end) {
uint32_t cp = utf8::internal::mask16(*start++);
// Take care of surrogate pairs first
if (utf8::internal::is_lead_surrogate(cp)) {
uint32_t trail_surrogate = utf8::internal::mask16(*start++);
cp = (cp << 10) + trail_surrogate + internal::SURROGATE_OFFSET;
}
result = utf8::unchecked::append(cp, result);
}
return result;
}
template <typename u16bit_iterator, typename octet_iterator>
u16bit_iterator utf8to16 (octet_iterator start, octet_iterator end, u16bit_iterator result)
{
while (start < end) {
uint32_t cp = utf8::unchecked::next(start);
if (cp > 0xffff) { //make a surrogate pair
*result++ = static_cast<uint16_t>((cp >> 10) + internal::LEAD_OFFSET);
*result++ = static_cast<uint16_t>((cp & 0x3ff) + internal::TRAIL_SURROGATE_MIN);
}
else
*result++ = static_cast<uint16_t>(cp);
}
return result;
}
template <typename octet_iterator, typename u32bit_iterator>
octet_iterator utf32to8 (u32bit_iterator start, u32bit_iterator end, octet_iterator result)
{
while (start != end)
result = utf8::unchecked::append(*(start++), result);
return result;
}
template <typename octet_iterator, typename u32bit_iterator>
u32bit_iterator utf8to32 (octet_iterator start, octet_iterator end, u32bit_iterator result)
{
while (start < end)
(*result++) = utf8::unchecked::next(start);
return result;
}
// The iterator class
template <typename octet_iterator>
class iterator {
octet_iterator it;
public:
typedef uint32_t value_type;
typedef uint32_t* pointer;
typedef uint32_t& reference;
typedef std::ptrdiff_t difference_type;
typedef std::bidirectional_iterator_tag iterator_category;
iterator () {}
explicit iterator (const octet_iterator& octet_it): it(octet_it) {}
// the default "big three" are OK
octet_iterator base () const { return it; }
uint32_t operator * () const
{
octet_iterator temp = it;
return utf8::unchecked::next(temp);
}
bool operator == (const iterator& rhs) const
{
return (it == rhs.it);
}
bool operator != (const iterator& rhs) const
{
return !(operator == (rhs));
}
iterator& operator ++ ()
{
::std::advance(it, utf8::internal::sequence_length(it));
return *this;
}
iterator operator ++ (int)
{
iterator temp = *this;
::std::advance(it, utf8::internal::sequence_length(it));
return temp;
}
iterator& operator -- ()
{
utf8::unchecked::prior(it);
return *this;
}
iterator operator -- (int)
{
iterator temp = *this;
utf8::unchecked::prior(it);
return temp;
}
}; // class iterator
} // namespace utf8::unchecked
} // namespace utf8
#endif // header guard
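
None of the utf8::unchecked routines above validate their input, so they are intended for buffers that have already been checked; a minimal sketch of walking code points with the unchecked iterator (the include path is an assumption):

// Sketch only; "unchecked.h" as the file name is an assumption.
#include "unchecked.h"

#include <iostream>
#include <string>

int main()
{
  // "źdźbło" encoded as UTF-8; assumed valid, since unchecked:: never verifies it.
  std::string const word = "\xc5\xba" "d" "\xc5\xba" "b" "\xc5\x82" "o";

  utf8::unchecked::iterator<std::string::const_iterator> it(word.begin());
  utf8::unchecked::iterator<std::string::const_iterator> end(word.end());

  std::cout << utf8::unchecked::distance(word.begin(), word.end()) << " code points:";
  for (; it != end; ++it)
    std::cout << ' ' << std::hex << *it;   // prints each decoded code point value
  std::cout << '\n';
}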

View file

@ -7,19 +7,7 @@ set(SRC
add_library(${PROJECT_NAME} ${SRC})
if (WITH_SYSTEM_PROVIDED_3PARTY)
find_package(VulkanHeaders REQUIRED)
target_link_libraries(${PROJECT_NAME} Vulkan::Headers)
else()
set(VulkanHeaders_INCLUDE_DIR ../Vulkan-Headers/include)
target_include_directories(${PROJECT_NAME} PUBLIC ../Vulkan-Headers/include)
endif()
target_include_directories(${PROJECT_NAME} PUBLIC .)
if (PLATFORM_LINUX)
target_compile_definitions(${PROJECT_NAME} PRIVATE VK_USE_PLATFORM_XLIB_KHR)
endif()
target_include_directories(${PROJECT_NAME} PUBLIC . ../Vulkan-Headers/include)
# dlopen
target_link_libraries(${PROJECT_NAME} $<$<BOOL:CMAKE_DL_LIBS>:${CMAKE_DL_LIBS}>)

View file

@ -22,20 +22,7 @@ extern "C" {
#include <dlfcn.h>
int InitVulkan(void) {
#if defined(__APPLE__)
void* libvulkan = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL);
if (!libvulkan) {
libvulkan = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL);
}
if (!libvulkan) {
libvulkan = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL);
}
#else
void* libvulkan = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL);
if (!libvulkan) {
libvulkan = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
}
#endif
void* libvulkan = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
if (!libvulkan) return 0;
// Vulkan supported, set function addresses
@ -558,10 +545,6 @@ int InitVulkan(void) {
vkDestroyDebugReportCallbackEXT = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(dlsym(libvulkan, "vkDestroyDebugReportCallbackEXT"));
vkDebugReportMessageEXT = reinterpret_cast<PFN_vkDebugReportMessageEXT>(dlsym(libvulkan, "vkDebugReportMessageEXT"));
#if defined(__APPLE__)
vkCreateMacOSSurfaceMVK = reinterpret_cast<PFN_vkCreateMacOSSurfaceMVK>(dlsym(libvulkan, "vkCreateMacOSSurfaceMVK"));
#endif
return 1;
}
@ -978,10 +961,8 @@ PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT;
PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT;
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV;
#ifdef VK_USE_PLATFORM_WIN32_KHR
PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV;
PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV;
#endif
PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV;
PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV;
PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI;
PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI;
@ -1154,10 +1135,6 @@ PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR;
PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR;
#endif
#if defined(__APPLE__)
PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK;
#endif
#ifdef __cplusplus
}
#endif

View file

@ -25,10 +25,6 @@ extern "C" {
#define VK_USE_PLATFORM_ANDROID_KHR 1
#include <vulkan/vulkan.h>
#if defined(__APPLE__)
#include <vulkan/vulkan_macos.h>
#endif
/* Initialize the Vulkan function pointer variables declared in this header.
* Returns 0 if vulkan is not available, non-zero if it is available.
*/
@ -504,10 +500,6 @@ extern PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT;
extern PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT;
extern PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT;
#if defined(__APPLE__)
extern PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK;
#endif
#ifdef __cplusplus
}
#endif
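
The comment above documents the contract of InitVulkan(): it loads the Vulkan library via dlopen and resolves the function pointers, returning 0 when Vulkan is unavailable. A hedged C++ sketch of a caller honouring that contract (only InitVulkan and standard Vulkan symbols come from the header; the include path and everything else are illustrative):

// Sketch only; the include path is an assumption.
#include "vulkan_wrapper.h"

#include <cstdio>

bool CreateVulkanInstance(VkInstance & instance)
{
  // Load the Vulkan loader and resolve entry points before any Vulkan call.
  if (InitVulkan() == 0)
  {
    std::printf("Vulkan loader is not available on this device\n");
    return false;
  }

  VkApplicationInfo appInfo = {};
  appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
  appInfo.apiVersion = VK_API_VERSION_1_0;

  VkInstanceCreateInfo createInfo = {};
  createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  createInfo.pApplicationInfo = &appInfo;

  // vkCreateInstance here is the function pointer resolved by InitVulkan().
  return vkCreateInstance(&createInfo, nullptr, &instance) == VK_SUCCESS;
}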

Some files were not shown because too many files have changed in this diff.