Compare commits
1 Commits
Author | SHA1 | Date |
---|---|---|
Admar Schoonen | eb46867f5b |
|
@ -1,45 +0,0 @@
|
|||
name: "Check, lint and test using Cargo"
|
||||
|
||||
on:
|
||||
- pull_request
|
||||
- push
|
||||
- workflow_dispatch
|
||||
|
||||
jobs:
|
||||
check_lint:
|
||||
name: Check, lint and test
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust stable toolchain
|
||||
uses: https://github.com/actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Run cargo check
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
with:
|
||||
command: check
|
||||
|
||||
- name: Run cargo clippy
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
with:
|
||||
command: clippy
|
||||
args: -- -D warnings
|
||||
|
||||
- name: Run cargo fmt
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
with:
|
||||
command: fmt
|
||||
args: --all -- --check
|
||||
|
||||
- name: Run cargo test
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --all-features
|
|
@ -1,112 +0,0 @@
|
|||
name: "Release"
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: "Release"
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Determine the version of the release
|
||||
run: |
|
||||
VERSION=${GITHUB_REF_NAME#v}
|
||||
echo "Releasing version: $VERSION"
|
||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Get the release notes from the changelog
|
||||
run: |
|
||||
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
|
||||
RELEASE_NOTES=$(sed -n -e "/^## \[$VERSION\]/,/^## \[/{//"'!'"p;}" CHANGELOG.md | sed -e '1d;$d')
|
||||
echo "Release notes:"
|
||||
echo
|
||||
echo "$RELEASE_NOTES"
|
||||
echo "RELEASE_NOTES<<$EOF" >> "$GITHUB_ENV"
|
||||
echo "$RELEASE_NOTES" >> "$GITHUB_ENV"
|
||||
echo "$EOF" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Install Go
|
||||
uses: actions/setup-go@v4
|
||||
with:
|
||||
go-version: '>=1.20.1'
|
||||
|
||||
- name: Release to Gitea
|
||||
uses: actions/release-action@main
|
||||
with:
|
||||
# This is available by default.
|
||||
api_key: '${{ secrets.RELEASE_TOKEN }}'
|
||||
files: FIXME
|
||||
title: 'Release ${{ env.VERSION }}'
|
||||
body: '${{ env.RELEASE_NOTES }}'
|
||||
|
||||
release-crate:
|
||||
name: "Release Rust crate"
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Rust stable toolchain
|
||||
uses: https://github.com/actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
- name: Use sparse Cargo index for crates.io
|
||||
run: echo -e '[registries.crates-io]\nprotocol = "sparse"' >> /root/.cargo/config.toml
|
||||
|
||||
- name: Register the Gitea crate registry with Cargo
|
||||
run: echo -e '[registries.luon]\nindex = "https://git.luon.net/paul/_cargo-index.git"' >> /root/.cargo/config.toml
|
||||
|
||||
- name: Run cargo publish
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
env:
|
||||
# This needs to be provided for the repository; no login necessary as a result.
|
||||
CARGO_REGISTRIES_LUON_TOKEN: '${{ secrets.CARGO_TOKEN }}'
|
||||
with:
|
||||
command: publish
|
||||
args: --registry luon
|
||||
|
||||
release-deb:
|
||||
name: "Release Debian package"
|
||||
runs-on: debian-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Rust stable toolchain
|
||||
uses: https://github.com/actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
|
||||
- name: Install cargo-deb
|
||||
uses: https://github.com/brndnmtthws/rust-action-cargo-binstall@v1
|
||||
with:
|
||||
packages: cargo-deb
|
||||
|
||||
- name: Run cargo-deb
|
||||
uses: https://github.com/actions-rs/cargo@v1
|
||||
with:
|
||||
command: deb
|
||||
|
||||
- name: Publish Debian package
|
||||
env:
|
||||
DEB_REPO_TOKEN: '${{ secrets.DEB_REPO_TOKEN }}'
|
||||
run: |
|
||||
curl --config <(printf "user=%s:%s" paul "${DEB_REPO_TOKEN}") \
|
||||
--upload-file target/debian/sinoptik*.deb \
|
||||
https://git.luon.net/api/packages/paul/debian/pool/bookworm/main/upload
|
|
@ -1,2 +1 @@
|
|||
/target
|
||||
Rocket.toml
|
||||
|
|
206
CHANGELOG.md
206
CHANGELOG.md
|
@ -1,206 +0,0 @@
|
|||
# Changelog
|
||||
|
||||
All notable changes to Sinoptik will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.2.12] - 2024-05-09
|
||||
|
||||
### Security
|
||||
|
||||
* Updated dependencies, fixes security advisiories:
|
||||
* [RUSTSEC-2024-0019](https://rustsec.org/advisories/RUSTSEC-2024-0019)
|
||||
* [RUSTSEC-2024-0332](https://rustsec.org/advisories/RUSTSEC-2024-0332)
|
||||
|
||||
### Changed
|
||||
|
||||
* Update dependency on `cached`, `chrono-tz`, `image` and `reqwest`
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix tests; reduce required accuracy for geocoded coordinates again and
|
||||
don't run background map updates during tests
|
||||
|
||||
## [0.2.11] - 2024-02-27
|
||||
|
||||
### Security
|
||||
|
||||
* Updated dependencies, fixes security advisories:
|
||||
* [RUSTSEC-2024-0003](https://rustsec.org/advisories/RUSTSEC-2024-0003)
|
||||
* [RUSTSEC-2023-0072](https://rustsec.org/advisories/RUSTSEC-2024-0072)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix clippy issue
|
||||
* Tweak/fix tests; reduce required accuracy for geocoded coordinates
|
||||
|
||||
## [0.2.10] - 2023-11-03
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies
|
||||
([RUSTSEC-2020-0071](https://rustsec.org/advisories/RUSTSEC-2020-0071.html),
|
||||
[RUSTSEC-2023-0044](https://rustsec.org/advisories/RUSTSEC-2023-0044.html))
|
||||
|
||||
### Changed
|
||||
|
||||
* Switch to Rocket 0.5 RC4
|
||||
* Update dependency on `cached`
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix clippy issues
|
||||
|
||||
## [0.2.9] - 2023-08-25
|
||||
|
||||
### Changed
|
||||
|
||||
* Update release Gitea Actions workflow; add seperate job to release Debian
|
||||
package to the new repository
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies ([RUSTSEC-2023-0044](https://rustsec.org/advisories/RUSTSEC-2023-0044))
|
||||
|
||||
## [0.2.8] - 2023-06-05
|
||||
|
||||
### Added
|
||||
|
||||
* Print the version on lift off (#30)
|
||||
* Add a `/version` endpoint to the API (#30)
|
||||
|
||||
### Changed
|
||||
|
||||
* Update dependency on `cached`
|
||||
|
||||
### Fixed
|
||||
|
||||
* Properly attribute the PAQI metric in its description(s)
|
||||
|
||||
### Removed
|
||||
|
||||
* No longer provide a map for the PAQI metric; the map used is only for pollen
|
||||
|
||||
## [0.2.7] - 2023-05-26
|
||||
|
||||
### Fixed
|
||||
|
||||
* Switch back to the original Buienradar color scheme/maps key (#27)
|
||||
* Fix the token used to publish the crate to the Cargo package index
|
||||
|
||||
## [0.2.6] - 2023-05-24
|
||||
|
||||
### Added
|
||||
|
||||
* Add full release Gitea Actions workflow
|
||||
|
||||
### Changed
|
||||
|
||||
* Simplify Gitea Actions check, lint and test workflow
|
||||
* Improve no known map colors found error description
|
||||
|
||||
### Fixed
|
||||
|
||||
* Update coordinates of Eindhoven in tests (Nomatim changed its geocoding)
|
||||
* Increase sampling area to 31×31 pixels (#26)
|
||||
* Switch to new Buienradar color scheme/maps key (#27)
|
||||
|
||||
## [0.2.5] - 2023-03-24
|
||||
|
||||
### Added
|
||||
|
||||
* Add Gitea Actions workflow for cargo
|
||||
|
||||
### Changed
|
||||
|
||||
* Updated dependencies on `cached`, `chrono-tz` and `geocoding`
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix float comparison in tests
|
||||
* Fix clippy issues
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies ([RUSTSEC-2023-0018](https://rustsec.org/advisories/RUSTSEC-2023-0018.html))
|
||||
|
||||
## [0.2.4] - 2022-07-05
|
||||
|
||||
### Added
|
||||
|
||||
* Add proper error handling and show them via the API (#25)
|
||||
|
||||
### Changed
|
||||
|
||||
* Run map refresher as an ad hoc liftoff fairing in Rocket
|
||||
* Changed emojis in log output
|
||||
|
||||
### Removed
|
||||
|
||||
* Removed `AQI_max` and `pollen_max` from the forecast JSON introduced in
|
||||
version 0.2.0
|
||||
|
||||
### Fixed
|
||||
|
||||
* Verify sample coordinate bounds (#24)
|
||||
* Default to current time if `Last-Modified` HTTP header is missing for
|
||||
retrieved maps
|
||||
|
||||
## [0.2.3] - 2022-05-21
|
||||
|
||||
### Fixed
|
||||
|
||||
* Update the examples in `README.md`
|
||||
* Fix tests by adding missing type
|
||||
* Fix map key color code for level 8 used by map sampling
|
||||
|
||||
## [0.2.2] - 2022-05-10
|
||||
|
||||
### Changed
|
||||
|
||||
* Switch to Rocket 0.5 RC2
|
||||
|
||||
### Fixed
|
||||
|
||||
* Fix timestamps for map samples not being correct (AQI, PAQI, UVI metrics) (#22)
|
||||
* Valid samples/items will no longer be discarded too early
|
||||
|
||||
## [0.2.1] - 2022-05-08
|
||||
|
||||
### Added
|
||||
|
||||
* Add tests for the merge functionality of the combined provider (PAQI)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Filter out old item/samples in combined provider (PAQI)
|
||||
|
||||
## [0.2.0] - 2022-05-07
|
||||
|
||||
### Added
|
||||
|
||||
* Add `AQI_max` and `pollen_max` to the forecast JSON (only when the PAQI
|
||||
metric is selected) (#20)
|
||||
|
||||
## [0.1.0] - 2022-03-07
|
||||
|
||||
Initial release.
|
||||
|
||||
[Unreleased]: https://git.luon.net/paul/sinoptik/compare/v0.2.12...HEAD
|
||||
[0.2.12]: https://git.luon.net/paul/sinoptik/compare/v0.2.11...v0.2.12
|
||||
[0.2.11]: https://git.luon.net/paul/sinoptik/compare/v0.2.10...v0.2.11
|
||||
[0.2.10]: https://git.luon.net/paul/sinoptik/compare/v0.2.9...v0.2.10
|
||||
[0.2.9]: https://git.luon.net/paul/sinoptik/compare/v0.2.8...v0.2.9
|
||||
[0.2.8]: https://git.luon.net/paul/sinoptik/compare/v0.2.7...v0.2.8
|
||||
[0.2.7]: https://git.luon.net/paul/sinoptik/compare/v0.2.6...v0.2.7
|
||||
[0.2.6]: https://git.luon.net/paul/sinoptik/compare/v0.2.5...v0.2.6
|
||||
[0.2.5]: https://git.luon.net/paul/sinoptik/compare/v0.2.4...v0.2.5
|
||||
[0.2.4]: https://git.luon.net/paul/sinoptik/compare/v0.2.3...v0.2.4
|
||||
[0.2.3]: https://git.luon.net/paul/sinoptik/compare/v0.2.2...v0.2.3
|
||||
[0.2.2]: https://git.luon.net/paul/sinoptik/compare/v0.2.1...v0.2.2
|
||||
[0.2.1]: https://git.luon.net/paul/sinoptik/compare/v0.2.0...v0.2.1
|
||||
[0.2.0]: https://git.luon.net/paul/sinoptik/compare/v0.1.0...v0.2.0
|
||||
[0.1.0]: https://git.luon.net/paul/sinoptik/commits/tag/v0.1.0
|
File diff suppressed because it is too large
Load Diff
64
Cargo.toml
64
Cargo.toml
|
@ -1,67 +1,9 @@
|
|||
[package]
|
||||
name = "sinoptik"
|
||||
version = "0.2.12"
|
||||
authors = [
|
||||
"Admar Schoonen <admar@luon.net",
|
||||
"Paul van Tilburg <paul@luon.net>"
|
||||
]
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
description = "Web service that provides an API for today's weather forecast"
|
||||
readme = "README.md"
|
||||
repository = "https://git.luon.net/paul/sinoptik"
|
||||
license = "MIT"
|
||||
|
||||
[dependencies]
|
||||
cached = { version = "0.51.3", features = ["async"] }
|
||||
chrono = "0.4.19"
|
||||
chrono-tz = "0.9.0"
|
||||
csv = "1.1.6"
|
||||
geocoding = "0.4.0"
|
||||
image = { version = "0.25.1", default-features = false, features = ["png"]}
|
||||
reqwest = { version = "0.12.4", features = ["json"] }
|
||||
rocket = { version = "0.5.0-rc.3", features = ["json"] }
|
||||
thiserror = "1.0.31"
|
||||
|
||||
[build-dependencies]
|
||||
vergen = { version = "8.2.1", default-features = false, features = ["build", "git", "gitcl"] }
|
||||
|
||||
[dev-dependencies]
|
||||
assert_float_eq = "1.1.3"
|
||||
assert_matches = "1.5.0"
|
||||
|
||||
[package.metadata.deb]
|
||||
maintainer = "Paul van Tilburg <paul@luon.net>"
|
||||
copyright = "2022, Paul van Tilburg"
|
||||
depends = "$auto, systemd"
|
||||
extended-description = """\
|
||||
Sinoptik is a (REST) API service that provides an API for today's weather
|
||||
forecast. It can provide you with a specific set or all available metrics that
|
||||
it supports.
|
||||
|
||||
Currently supported metrics are:
|
||||
|
||||
* Air quality index (per hour, from Luchtmeetnet)
|
||||
* NO₂ concentration (per hour, from Luchtmeetnet)
|
||||
* O₃ concentration (per hour, from Luchtmeetnet)
|
||||
* Particulate matter (PM10) concentration (per hour, from Luchtmeetnet)
|
||||
* Pollen (per hour, from Buienradar)
|
||||
* Pollen/air quality index (per hour, combined from Buienradar and
|
||||
Luchtmeetnet)
|
||||
* Precipitation (per 5 minutes, from Buienradar)
|
||||
* UV index (per day, from Buienradar)
|
||||
|
||||
Because of the currently supported data providers, only data for The
|
||||
Netherlands can be queried.
|
||||
"""
|
||||
section = "net"
|
||||
priority = "optional"
|
||||
assets = [
|
||||
["README.md", "usr/share/doc/sinoptik/", "664"],
|
||||
["Rocket.toml.example", "/etc/sinoptik.toml", "644"],
|
||||
["target/release/sinoptik", "usr/sbin/sinoptik", "755"]
|
||||
]
|
||||
conf-files = [
|
||||
"/etc/sinoptik.toml"
|
||||
]
|
||||
maintainer-scripts = "debian/"
|
||||
systemd-units = { unit-name = "sinoptik" }
|
||||
geocoding = "0.3.1"
|
||||
rocket = { version = "0.5.0-rc.1", features = ["json"] }
|
||||
|
|
19
LICENSE
19
LICENSE
|
@ -1,19 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
Copyright (c) 2018 Paul van Tilburg
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
248
README.md
248
README.md
|
@ -1,248 +0,0 @@
|
|||
# Sinoptik
|
||||
|
||||
Sinoptik is a (REST) API service that provides an API for today's weather
|
||||
forecast. It can provide you with a specific set or all available metrics
|
||||
that it supports.
|
||||
|
||||
Currently supported metrics are:
|
||||
|
||||
* Air quality index (per hour, from [Luchtmeetnet])
|
||||
* NO₂ concentration (per hour, from [Luchtmeetnet])
|
||||
* O₃ concentration (per hour, from [Luchtmeetnet])
|
||||
* Particulate matter (PM10) concentration (per hour, from [Luchtmeetnet])
|
||||
* Pollen (per hour, from [Buienradar])
|
||||
* Pollen/air quality index (per hour, combined from [Buienradar] and
|
||||
[Luchtmeetnet])
|
||||
* Precipitation (per 5 minutes, from [Buienradar])
|
||||
* UV index (per day, from [Buienradar])
|
||||
|
||||
[Buienradar]: https://buienradar.nl
|
||||
[Luchtmeetnet]: https://luchtmeetnet.nl
|
||||
|
||||
Because of the currently supported data providers, only data for
|
||||
The Netherlands can be queried.
|
||||
|
||||
## Building & running
|
||||
|
||||
Using Cargo, it is easy to build and run Sinoptik, just run:
|
||||
|
||||
```shell
|
||||
$ cargo run --release
|
||||
...
|
||||
Compiling sinoptik v0.1.0 (/path/to/sinoptik)
|
||||
Finished release [optimized] target(s) in 9m 26s
|
||||
Running `/path/to/sinoptik/target/release/sinoptik`
|
||||
```
|
||||
|
||||
(Note that Rocket listens on `127.0.0.1:3000` by default for debug builds, i.e.
|
||||
builds when you don't add `--release`.)
|
||||
|
||||
You can provide Rocket with configuration to use a different address and/or port.
|
||||
Just create a `Rocket.toml` file that contains (or copy `Rocket.toml.example`):
|
||||
|
||||
```toml
|
||||
[default]
|
||||
address = "0.0.0.0"
|
||||
port = 2356
|
||||
```
|
||||
|
||||
This will work independent of the type of build. For more about Rocket's
|
||||
configuration, see: <https://rocket.rs/v0.5-rc/guide/configuration/>.
|
||||
|
||||
## Forecast API endpoint
|
||||
|
||||
The `/forecast` API endpoint provides forecasts per requested metric a list of
|
||||
forecast item which are each comprised of a value and its (UNIX) timestamp. It
|
||||
does so for a requested location.
|
||||
|
||||
### Locations
|
||||
|
||||
To select a location, you can either provide an address, or a geocoded position
|
||||
by providing a latitude and longitude.
|
||||
For example, to get forecasts for all metrics for the Stationsplein in Utrecht,
|
||||
use:
|
||||
|
||||
```http
|
||||
GET /forecast?address=Stationsplein,Utrecht&metrics[]=all
|
||||
```
|
||||
|
||||
or directly by using its geocoded position:
|
||||
|
||||
```http
|
||||
GET /forecast?lat=52.0902&lon=5.1114&metrics[]=all
|
||||
```
|
||||
|
||||
### Metrics
|
||||
|
||||
When querying, the metrics need to be selected. It can be one of: `AQI`, `NO2`,
|
||||
`O3`, `PAQI`, `PM10`, `pollen`, `precipitation` or `UVI`. If you use metric
|
||||
`all`, or `all` is part of the selected metrics, all metrics will be retrieved.
|
||||
Note that the parameter "array" notation as well as the repeated parameter
|
||||
notation are supported. For example:
|
||||
|
||||
```http
|
||||
GET /forecast?address=Stationsplein,Utrecht&metrics[]=AQI&metrics[]=pollen
|
||||
GET /forecast?address=Stationsplein,Utrecht&metrics=AQI&metrics=pollen
|
||||
GET /forecast?address=Stationsplein,Utrecht&metrics=all
|
||||
```
|
||||
|
||||
### Forecast responses
|
||||
|
||||
The response of the API is a JSON object that contains three fixed fields:
|
||||
|
||||
* `lat`: the latitude of the geocoded position the forecast is for (number)
|
||||
* `lon`: the longitude of the geocoded position the forecast is for (number)
|
||||
* `time`: the (UNIX) timestamp of the forecast, basically "now" (number)
|
||||
|
||||
Then, it contains a field per requested metric with a list of forecast items
|
||||
with two fixed fields as value:
|
||||
|
||||
* `time`: the (UNIX) timestamp for that forecasted value (number)
|
||||
* `value`: the forecasted value for the metric (number)
|
||||
|
||||
An example when requesting just UVI (because it's short) for some random
|
||||
position:
|
||||
|
||||
```json
|
||||
{
|
||||
"lat": 52.0905169,
|
||||
"lon": 5.1109709,
|
||||
"time": 1652188682,
|
||||
"UVI": [
|
||||
{
|
||||
"time": 1652140800,
|
||||
"value": 4
|
||||
},
|
||||
{
|
||||
"time": 1652227200,
|
||||
"value": 4
|
||||
},
|
||||
{
|
||||
"time": 1652313600,
|
||||
"value": 4
|
||||
},
|
||||
{
|
||||
"time": 1652400000,
|
||||
"value": 4
|
||||
},
|
||||
{
|
||||
"time": 1652486400,
|
||||
"value": 5
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Combined metric PAQI
|
||||
|
||||
The PAQI (pollen/air quality index) metric is a special combined metric.
|
||||
If selected, it merges items from the AQI and pollen metric into `PAQI` by
|
||||
selecting the maximum value for each hour:
|
||||
|
||||
```json
|
||||
{
|
||||
"lat": 52.0905169,
|
||||
"lon": 5.1109709,
|
||||
"time": 1652189065,
|
||||
"PAQI": [
|
||||
{
|
||||
"time": 1652187600,
|
||||
"value": 6.09
|
||||
},
|
||||
{
|
||||
"time": 1652191200,
|
||||
"value": 6.09
|
||||
},
|
||||
...
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
#### Errors
|
||||
|
||||
If geocoding of an address is requested but fails, a not found error is
|
||||
returned (HTTP 404). with the following body (this will change in the future):
|
||||
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": 404,
|
||||
"reason": "Not Found",
|
||||
"description": "The requested resource could not be found."
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If for any specific metric an error occurs, the list with forecast items will
|
||||
be absent. However, the `errors` field will contain the error message for each
|
||||
failed metric. For example, say Buienradar is down and precipitation forecast
|
||||
items can not be retrieved:
|
||||
|
||||
```json
|
||||
{
|
||||
"lat": 52.0905169,
|
||||
"lon": 5.1109709,
|
||||
"time": 1654524574,
|
||||
...
|
||||
"errors": {
|
||||
"precipitation": "HTTP request error: error sending request for url (https://gpsgadget.buienradar.nl/data/raintext?lat=52.09&lon=5.11): error trying to connect: tcp connect error: Connection refused (os error 111)"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Map API endpoint
|
||||
|
||||
The `/map` API endpoint basically only exists for debugging purposes. Given an
|
||||
address or geocoded position, it shows the current map for the provided metric
|
||||
and draws a crosshair on the position.
|
||||
Currently, only the `PAQI`, `pollen` and `UVI` metrics are backed by a map.
|
||||
|
||||
For example, to get the current pollen map with a crosshair on Stationsplein in
|
||||
Utrecht, use:
|
||||
|
||||
```http
|
||||
GET /map?address=Stationsplein,Utrecht&metric=pollen
|
||||
```
|
||||
|
||||
or directly by using its geocoded position:
|
||||
|
||||
```http
|
||||
GET /map?lat=52.0902&lon=5.1114&metric=pollen
|
||||
```
|
||||
|
||||
### Map responses
|
||||
|
||||
The response is a PNG image with a crosshair drawn on the map. If geocoding of
|
||||
an address fails or if the position is out of bounds of the map, nothing is
|
||||
returned (HTTP 404). If the maps cannot/have not been downloaded or cached yet,
|
||||
a service unavailable error is returned (HTTP 503).
|
||||
|
||||
## Version API endpoint
|
||||
|
||||
The `/version` API endpoint provides information of the current version and
|
||||
build of the service. This can be used to check if it needs to be updated.
|
||||
Again, there is no path and no query parameters, just:
|
||||
|
||||
```http
|
||||
GET /version
|
||||
```
|
||||
|
||||
### Version responses
|
||||
|
||||
The response uses the JSON format and typically looks like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.2.7",
|
||||
"timestamp": "2023-05-29T13:34:34.701323159Z",
|
||||
"git_sha": "bb5962d",
|
||||
"git_timestamp": "2023-05-29T15:32:17.000000000+02:00"
|
||||
}
|
||||
```
|
||||
|
||||
(Build and git information in example output may be out of date.)
|
||||
|
||||
## License
|
||||
|
||||
Sinoptik is licensed under the MIT license (see the `LICENSE` file or
|
||||
<http://opensource.org/licenses/MIT>).
|
|
@ -1,3 +0,0 @@
|
|||
[default]
|
||||
address = "0.0.0.0"
|
||||
port = 2356
|
9
build.rs
9
build.rs
|
@ -1,9 +0,0 @@
|
|||
use std::error::Error;
|
||||
use vergen::EmitBuilder;
|
||||
|
||||
fn main() -> Result<(), Box<dyn Error>> {
|
||||
// Generate the `cargo:` instructions to fill the appropriate environment variables.
|
||||
EmitBuilder::builder().all_build().all_git().emit()?;
|
||||
|
||||
Ok(())
|
||||
}
|
|
@ -1,45 +0,0 @@
|
|||
[Unit]
|
||||
Description=Sinoptik API web server
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
|
||||
AmbientCapabilities=
|
||||
CapabilityBoundingSet=
|
||||
DynamicUser=yes
|
||||
LockPersonality=yes
|
||||
MemoryDenyWriteExecute=yes
|
||||
NoNewPrivileges=yes
|
||||
ProtectClock=yes
|
||||
ProtectControlGroups=yes
|
||||
ProtectHome=yes
|
||||
ProtectHostname=yes
|
||||
ProtectKernelLogs=yes
|
||||
ProtectKernelModules=yes
|
||||
ProtectKernelTunables=yes
|
||||
ProtectSystem=strict
|
||||
PrivateDevices=yes
|
||||
PrivateMounts=yes
|
||||
PrivateTmp=yes
|
||||
PrivateUsers=yes
|
||||
RemoveIPC=yes
|
||||
RestrictAddressFamilies=AF_INET AF_INET6
|
||||
RestrictNamespaces=yes
|
||||
RestrictRealtime=yes
|
||||
RestrictSUIDSGID=yes
|
||||
SystemCallArchitectures=native
|
||||
SystemCallFilter=@system-service
|
||||
SystemCallErrorNumber=EPERM
|
||||
UMask=0077
|
||||
|
||||
ExecStart=/usr/sbin/sinoptik
|
||||
Restart=on-failure
|
||||
RestartSec=10
|
||||
StartLimitInterval=1m
|
||||
StartLimitBurst=5
|
||||
|
||||
Environment="ROCKET_CONFIG=/etc/sinoptik.toml"
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
218
src/forecast.rs
218
src/forecast.rs
|
@ -1,218 +0,0 @@
|
|||
//! Forecast retrieval and construction.
|
||||
//!
|
||||
//! This module is used to construct a [`Forecast`] for the given position by retrieving data for
|
||||
//! the requested metrics from their providers.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt;
|
||||
|
||||
use rocket::serde::Serialize;
|
||||
|
||||
use crate::maps::MapsHandle;
|
||||
use crate::position::Position;
|
||||
use crate::providers::buienradar::{Item as BuienradarItem, Sample as BuienradarSample};
|
||||
use crate::providers::combined::Item as CombinedItem;
|
||||
use crate::providers::luchtmeetnet::Item as LuchtmeetnetItem;
|
||||
use crate::{providers, Error};
|
||||
|
||||
/// The current forecast for a specific location.
|
||||
///
|
||||
/// Only the metrics asked for are included as well as the position and current time.
|
||||
#[derive(Debug, Default, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct Forecast {
|
||||
/// The latitude of the position.
|
||||
lat: f64,
|
||||
|
||||
/// The longitude of the position.
|
||||
lon: f64,
|
||||
|
||||
/// The current time (in seconds since the UNIX epoch).
|
||||
time: i64,
|
||||
|
||||
/// The air quality index (when asked for).
|
||||
#[serde(rename = "AQI", skip_serializing_if = "Option::is_none")]
|
||||
aqi: Option<Vec<LuchtmeetnetItem>>,
|
||||
|
||||
/// The NO₂ concentration (when asked for).
|
||||
#[serde(rename = "NO2", skip_serializing_if = "Option::is_none")]
|
||||
no2: Option<Vec<LuchtmeetnetItem>>,
|
||||
|
||||
/// The O₃ concentration (when asked for).
|
||||
#[serde(rename = "O3", skip_serializing_if = "Option::is_none")]
|
||||
o3: Option<Vec<LuchtmeetnetItem>>,
|
||||
|
||||
/// The combination of pollen + air quality index (when asked for).
|
||||
#[serde(rename = "PAQI", skip_serializing_if = "Option::is_none")]
|
||||
paqi: Option<Vec<CombinedItem>>,
|
||||
|
||||
/// The particulate matter in the air (when asked for).
|
||||
#[serde(rename = "PM10", skip_serializing_if = "Option::is_none")]
|
||||
pm10: Option<Vec<LuchtmeetnetItem>>,
|
||||
|
||||
/// The pollen in the air (when asked for).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pollen: Option<Vec<BuienradarSample>>,
|
||||
|
||||
/// The precipitation (when asked for).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
precipitation: Option<Vec<BuienradarItem>>,
|
||||
|
||||
/// The UV index (when asked for).
|
||||
#[serde(rename = "UVI", skip_serializing_if = "Option::is_none")]
|
||||
uvi: Option<Vec<BuienradarSample>>,
|
||||
|
||||
/// Any errors that occurred.
|
||||
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
|
||||
errors: BTreeMap<Metric, String>,
|
||||
}
|
||||
|
||||
impl Forecast {
|
||||
fn new(position: Position) -> Self {
|
||||
Self {
|
||||
lat: position.lat,
|
||||
lon: position.lon,
|
||||
time: chrono::Utc::now().timestamp(),
|
||||
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
fn log_error(&mut self, metric: Metric, error: Error) {
|
||||
eprintln!("💥 Encountered error during forecast: {}", error);
|
||||
self.errors.insert(metric, error.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported forecast metrics.
|
||||
///
|
||||
/// This is used for selecting which metrics should be calculated & returned.
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
#[derive(
|
||||
Copy, Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq, Serialize, rocket::FromFormField,
|
||||
)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) enum Metric {
|
||||
/// All metrics.
|
||||
#[field(value = "all")]
|
||||
All,
|
||||
/// The air quality index.
|
||||
AQI,
|
||||
/// The NO₂ concentration.
|
||||
NO2,
|
||||
/// The O₃ concentration.
|
||||
O3,
|
||||
/// The combination of pollen + air quality index.
|
||||
PAQI,
|
||||
/// The particulate matter in the air.
|
||||
PM10,
|
||||
/// The pollen in the air.
|
||||
#[serde(rename(serialize = "pollen"))]
|
||||
Pollen,
|
||||
#[serde(rename(serialize = "precipitation"))]
|
||||
/// The precipitation.
|
||||
Precipitation,
|
||||
/// The UV index.
|
||||
UVI,
|
||||
}
|
||||
|
||||
impl Metric {
|
||||
/// Returns all supported metrics.
|
||||
fn all() -> Vec<Metric> {
|
||||
use Metric::*;
|
||||
|
||||
Vec::from([AQI, NO2, O3, PAQI, PM10, Pollen, Precipitation, UVI])
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Metric {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Metric::All => write!(f, "All"),
|
||||
Metric::AQI => write!(f, "AQI"),
|
||||
Metric::NO2 => write!(f, "NO2"),
|
||||
Metric::O3 => write!(f, "O3"),
|
||||
Metric::PAQI => write!(f, "PAQI"),
|
||||
Metric::PM10 => write!(f, "PM10"),
|
||||
Metric::Pollen => write!(f, "pollen"),
|
||||
Metric::Precipitation => write!(f, "precipitation"),
|
||||
Metric::UVI => write!(f, "UVI"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculates and returns the forecast.
|
||||
///
|
||||
/// The provided list `metrics` determines what will be included in the forecast.
|
||||
pub(crate) async fn forecast(
|
||||
position: Position,
|
||||
metrics: Vec<Metric>,
|
||||
maps_handle: &MapsHandle,
|
||||
) -> Forecast {
|
||||
let mut forecast = Forecast::new(position);
|
||||
|
||||
// Expand the `All` metric if present, deduplicate otherwise.
|
||||
let mut metrics = metrics;
|
||||
if metrics.contains(&Metric::All) {
|
||||
metrics = Metric::all();
|
||||
} else {
|
||||
metrics.dedup()
|
||||
}
|
||||
|
||||
for metric in metrics {
|
||||
match metric {
|
||||
// This should have been expanded to all the metrics matched below.
|
||||
Metric::All => unreachable!("The all metric should have been expanded"),
|
||||
Metric::AQI => {
|
||||
forecast.aqi = providers::luchtmeetnet::get(position, metric)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::NO2 => {
|
||||
forecast.no2 = providers::luchtmeetnet::get(position, metric)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::O3 => {
|
||||
forecast.o3 = providers::luchtmeetnet::get(position, metric)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::PAQI => {
|
||||
forecast.paqi = providers::combined::get(position, metric, maps_handle)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::PM10 => {
|
||||
forecast.pm10 = providers::luchtmeetnet::get(position, metric)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::Pollen => {
|
||||
forecast.pollen = providers::buienradar::get_samples(position, metric, maps_handle)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::Precipitation => {
|
||||
forecast.precipitation = providers::buienradar::get_items(position, metric)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
Metric::UVI => {
|
||||
forecast.uvi = providers::buienradar::get_samples(position, metric, maps_handle)
|
||||
.await
|
||||
.map_err(|err| forecast.log_error(metric, err))
|
||||
.ok()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
forecast
|
||||
}
|
410
src/lib.rs
410
src/lib.rs
|
@ -1,410 +0,0 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
#![warn(
|
||||
clippy::all,
|
||||
missing_copy_implementations,
|
||||
missing_debug_implementations,
|
||||
rust_2018_idioms,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
renamed_and_removed_lints,
|
||||
unsafe_code,
|
||||
unstable_features,
|
||||
unused_import_braces,
|
||||
unused_qualifications
|
||||
)]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use rocket::fairing::AdHoc;
|
||||
use rocket::http::Status;
|
||||
use rocket::response::Responder;
|
||||
use rocket::serde::json::Json;
|
||||
use rocket::serde::Serialize;
|
||||
use rocket::{get, routes, Build, Request, Rocket, State};
|
||||
|
||||
use self::forecast::{forecast, Forecast, Metric};
|
||||
use self::maps::{mark_map, Error as MapsError, Maps, MapsHandle};
|
||||
use self::position::{resolve_address, Position};
|
||||
|
||||
pub(crate) mod forecast;
|
||||
pub(crate) mod maps;
|
||||
pub(crate) mod position;
|
||||
pub(crate) mod providers;
|
||||
|
||||
/// The possible provider errors that can occur.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub(crate) enum Error {
|
||||
/// A CSV parse error occurred.
|
||||
#[error("CSV parse error: {0}")]
|
||||
CsvParse(#[from] csv::Error),
|
||||
|
||||
/// A geocoding error occurred.
|
||||
#[error("Geocoding error: {0}")]
|
||||
Geocoding(#[from] geocoding::GeocodingError),
|
||||
|
||||
/// An HTTP request error occurred.
|
||||
#[error("HTTP request error: {0}")]
|
||||
HttpRequest(#[from] reqwest::Error),
|
||||
|
||||
/// Failed to join a task.
|
||||
#[error("Failed to join a task: {0}")]
|
||||
Join(#[from] rocket::tokio::task::JoinError),
|
||||
|
||||
/// Failed to merge AQI & pollen items.
|
||||
#[error("Failed to merge AQI & pollen items: {0}")]
|
||||
Merge(#[from] providers::combined::MergeError),
|
||||
|
||||
/// Failed to retrieve or sample the maps.
|
||||
#[error("Failed to retrieve or sample the maps: {0}")]
|
||||
Maps(#[from] maps::Error),
|
||||
|
||||
/// No geocoded position could be found.
|
||||
#[error("No geocoded position could be found")]
|
||||
NoPositionFound,
|
||||
|
||||
/// Encountered an unsupported metric.
|
||||
#[error("Encountered an unsupported metric: {0}")]
|
||||
UnsupportedMetric(Metric),
|
||||
}
|
||||
|
||||
impl<'r, 'o: 'r> Responder<'r, 'o> for Error {
|
||||
fn respond_to(self, _request: &'r Request<'_>) -> rocket::response::Result<'o> {
|
||||
eprintln!("💥 Encountered error during request: {}", self);
|
||||
|
||||
let status = match self {
|
||||
Error::NoPositionFound => Status::NotFound,
|
||||
Error::Maps(MapsError::NoMapsYet) => Status::ServiceUnavailable,
|
||||
Error::Maps(MapsError::OutOfBoundCoords(_, _)) => Status::NotFound,
|
||||
Error::Maps(MapsError::OutOfBoundOffset(_)) => Status::NotFound,
|
||||
_ => Status::InternalServerError,
|
||||
};
|
||||
|
||||
Err(status)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Responder)]
|
||||
#[response(content_type = "image/png")]
|
||||
struct PngImageData(Vec<u8>);
|
||||
|
||||
/// Result type that defaults to [`Error`] as the default error type.
|
||||
pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
|
||||
/// The version information as JSON response.
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
struct VersionInfo {
|
||||
/// The version of the build.
|
||||
version: String,
|
||||
|
||||
/// The timestamp of the build.
|
||||
timestamp: String,
|
||||
|
||||
/// The (most recent) git SHA used for the build.
|
||||
git_sha: String,
|
||||
|
||||
/// The timestamp of the last git commit used for the build.
|
||||
git_timestamp: String,
|
||||
}
|
||||
impl VersionInfo {
|
||||
/// Retrieves the version information from the environment variables.
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
version: String::from(env!("CARGO_PKG_VERSION")),
|
||||
timestamp: String::from(env!("VERGEN_BUILD_TIMESTAMP")),
|
||||
git_sha: String::from(&env!("VERGEN_GIT_SHA")[0..7]),
|
||||
git_timestamp: String::from(env!("VERGEN_GIT_COMMIT_TIMESTAMP")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Handler for retrieving the forecast for an address.
|
||||
#[get("/forecast?<address>&<metrics>")]
|
||||
async fn forecast_address(
|
||||
address: String,
|
||||
metrics: Vec<Metric>,
|
||||
maps_handle: &State<MapsHandle>,
|
||||
) -> Result<Json<Forecast>> {
|
||||
let position = resolve_address(address).await?;
|
||||
let forecast = forecast(position, metrics, maps_handle).await;
|
||||
|
||||
Ok(Json(forecast))
|
||||
}
|
||||
|
||||
/// Handler for retrieving the forecast for a geocoded position.
|
||||
#[get("/forecast?<lat>&<lon>&<metrics>", rank = 2)]
|
||||
async fn forecast_geo(
|
||||
lat: f64,
|
||||
lon: f64,
|
||||
metrics: Vec<Metric>,
|
||||
maps_handle: &State<MapsHandle>,
|
||||
) -> Json<Forecast> {
|
||||
let position = Position::new(lat, lon);
|
||||
let forecast = forecast(position, metrics, maps_handle).await;
|
||||
|
||||
Json(forecast)
|
||||
}
|
||||
|
||||
/// Handler for showing the current map with the geocoded position of an address for a specific
|
||||
/// metric.
|
||||
///
|
||||
/// Note: This handler is mosly used for debugging purposes!
|
||||
#[get("/map?<address>&<metric>")]
|
||||
async fn map_address(
|
||||
address: String,
|
||||
metric: Metric,
|
||||
maps_handle: &State<MapsHandle>,
|
||||
) -> Result<PngImageData> {
|
||||
let position = resolve_address(address).await?;
|
||||
let image_data = mark_map(position, metric, maps_handle).await;
|
||||
|
||||
image_data.map(PngImageData)
|
||||
}
|
||||
|
||||
/// Handler for showing the current map with the geocoded position for a specific metric.
|
||||
///
|
||||
/// Note: This handler is mosly used for debugging purposes!
|
||||
#[get("/map?<lat>&<lon>&<metric>", rank = 2)]
|
||||
async fn map_geo(
|
||||
lat: f64,
|
||||
lon: f64,
|
||||
metric: Metric,
|
||||
maps_handle: &State<MapsHandle>,
|
||||
) -> Result<PngImageData> {
|
||||
let position = Position::new(lat, lon);
|
||||
let image_data = mark_map(position, metric, maps_handle).await;
|
||||
|
||||
image_data.map(PngImageData)
|
||||
}
|
||||
|
||||
/// Returns the version information.
|
||||
#[get("/version", format = "application/json")]
|
||||
async fn version() -> Result<Json<VersionInfo>> {
|
||||
Ok(Json(VersionInfo::new()))
|
||||
}
|
||||
|
||||
/// Sets up Rocket without fairings.
|
||||
fn rocket_core(maps_handle: MapsHandle) -> Rocket<Build> {
|
||||
rocket::build()
|
||||
.mount(
|
||||
"/",
|
||||
routes![
|
||||
forecast_address,
|
||||
forecast_geo,
|
||||
map_address,
|
||||
map_geo,
|
||||
version
|
||||
],
|
||||
)
|
||||
.manage(maps_handle)
|
||||
}
|
||||
|
||||
/// Sets up Rocket.
|
||||
fn rocket(maps_handle: MapsHandle) -> Rocket<Build> {
|
||||
let rocket = rocket_core(Arc::clone(&maps_handle));
|
||||
let maps_refresher = maps::run(maps_handle);
|
||||
|
||||
rocket
|
||||
.attach(AdHoc::on_liftoff("Maps refresher", |_| {
|
||||
Box::pin(async move {
|
||||
// We don't care about the join handle nor error results?
|
||||
let _refresher = rocket::tokio::spawn(maps_refresher);
|
||||
})
|
||||
}))
|
||||
.attach(AdHoc::on_liftoff("Version", |_| {
|
||||
Box::pin(async move {
|
||||
let name = env!("CARGO_PKG_NAME");
|
||||
let version = env!("CARGO_PKG_VERSION");
|
||||
let git_sha = &env!("VERGEN_GIT_SHA")[0..7];
|
||||
|
||||
println!("🌁 Started {name} v{version} (git @{git_sha})");
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
/// Sets up Rocket and the maps cache refresher task.
|
||||
pub fn setup() -> Rocket<Build> {
|
||||
let maps = Maps::new();
|
||||
let maps_handle = Arc::new(Mutex::new(maps));
|
||||
|
||||
rocket(maps_handle)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use assert_float_eq::*;
|
||||
use assert_matches::assert_matches;
|
||||
use image::{DynamicImage, Rgba, RgbaImage};
|
||||
use rocket::http::{ContentType, Status};
|
||||
use rocket::local::blocking::Client;
|
||||
use rocket::serde::json::Value as JsonValue;
|
||||
|
||||
use super::maps::RetrievedMaps;
|
||||
use super::*;
|
||||
|
||||
fn maps_stub(map_count: u32) -> RetrievedMaps {
|
||||
let map_color = Rgba::from([73, 218, 33, 255]); // First color from map key.
|
||||
let image =
|
||||
DynamicImage::ImageRgba8(RgbaImage::from_pixel(820 * map_count, 988, map_color));
|
||||
|
||||
RetrievedMaps::new(image)
|
||||
}
|
||||
|
||||
fn maps_handle_stub() -> MapsHandle {
|
||||
let mut maps = Maps::new();
|
||||
maps.pollen = Some(maps_stub(24));
|
||||
maps.uvi = Some(maps_stub(5));
|
||||
|
||||
Arc::new(Mutex::new(maps))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn forecast_address() {
|
||||
let maps_handle = maps_handle_stub();
|
||||
let client = Client::tracked(rocket(maps_handle)).expect("Not a valid Rocket instance");
|
||||
|
||||
// Get an empty forecast for the provided address.
|
||||
let response = client.get("/forecast?address=eindhoven").dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
let json = response.into_json::<JsonValue>().expect("Not valid JSON");
|
||||
assert_float_absolute_eq!(json["lat"].as_f64().unwrap(), 51.448557, 1e-1);
|
||||
assert_float_absolute_eq!(json["lon"].as_f64().unwrap(), 5.450123, 1e-1);
|
||||
assert_matches!(json["time"], JsonValue::Number(_));
|
||||
assert_matches!(json.get("AQI"), None);
|
||||
assert_matches!(json.get("NO2"), None);
|
||||
assert_matches!(json.get("O3"), None);
|
||||
assert_matches!(json.get("PAQI"), None);
|
||||
assert_matches!(json.get("PM10"), None);
|
||||
assert_matches!(json.get("pollen"), None);
|
||||
assert_matches!(json.get("precipitation"), None);
|
||||
assert_matches!(json.get("UVI"), None);
|
||||
|
||||
// Get a forecast with all metrics for the provided address.
|
||||
let response = client
|
||||
.get("/forecast?address=eindhoven&metrics=all")
|
||||
.dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
let json = response.into_json::<JsonValue>().expect("Not valid JSON");
|
||||
assert_float_absolute_eq!(json["lat"].as_f64().unwrap(), 51.448557, 1e-1);
|
||||
assert_float_absolute_eq!(json["lon"].as_f64().unwrap(), 5.450123, 1e-1);
|
||||
assert_matches!(json["time"], JsonValue::Number(_));
|
||||
assert_matches!(json.get("AQI"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("NO2"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("O3"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("PM10"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("pollen"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("UVI"), Some(JsonValue::Array(_)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn forecast_geo() {
|
||||
let maps_handle = maps_handle_stub();
|
||||
let client = Client::tracked(rocket(maps_handle)).expect("valid Rocket instance");
|
||||
|
||||
// Get an empty forecast for the geocoded location.
|
||||
let response = client.get("/forecast?lat=51.4&lon=5.5").dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
let json = response.into_json::<JsonValue>().expect("Not valid JSON");
|
||||
assert_f64_near!(json["lat"].as_f64().unwrap(), 51.4);
|
||||
assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5);
|
||||
assert_matches!(json["time"], JsonValue::Number(_));
|
||||
assert_matches!(json.get("AQI"), None);
|
||||
assert_matches!(json.get("NO2"), None);
|
||||
assert_matches!(json.get("O3"), None);
|
||||
assert_matches!(json.get("PAQI"), None);
|
||||
assert_matches!(json.get("PM10"), None);
|
||||
assert_matches!(json.get("pollen"), None);
|
||||
assert_matches!(json.get("precipitation"), None);
|
||||
assert_matches!(json.get("UVI"), None);
|
||||
|
||||
// Get a forecast with all metrics for the geocoded location.
|
||||
let response = client
|
||||
.get("/forecast?lat=51.4&lon=5.5&metrics=all")
|
||||
.dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
let json = response.into_json::<JsonValue>().expect("Not valid JSON");
|
||||
assert_f64_near!(json["lat"].as_f64().unwrap(), 51.4);
|
||||
assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5);
|
||||
assert_matches!(json["time"], JsonValue::Number(_));
|
||||
assert_matches!(json.get("AQI"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("NO2"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("O3"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("PM10"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("pollen"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_)));
|
||||
assert_matches!(json.get("UVI"), Some(JsonValue::Array(_)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn map_address() {
|
||||
let maps_handle = Arc::new(Mutex::new(Maps::new()));
|
||||
let maps_handle_clone = Arc::clone(&maps_handle);
|
||||
let client =
|
||||
Client::tracked(rocket_core(maps_handle)).expect("Not a valid Rocket instance");
|
||||
|
||||
// No maps available yet.
|
||||
let response = client
|
||||
.get("/map?address=eindhoven&metric=pollen")
|
||||
.dispatch();
|
||||
assert_eq!(response.status(), Status::ServiceUnavailable);
|
||||
|
||||
// Load some dummy map.
|
||||
let mut maps = maps_handle_clone
|
||||
.lock()
|
||||
.expect("Maps handle mutex was poisoned");
|
||||
maps.pollen = Some(maps_stub(24));
|
||||
drop(maps);
|
||||
|
||||
// There should be a map now.
|
||||
let response = client
|
||||
.get("/map?address=eindhoven&metric=pollen")
|
||||
.dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
assert_eq!(response.content_type(), Some(ContentType::PNG));
|
||||
|
||||
// ... but not if it is out of bounds.
|
||||
let response = client.get("/map?address=berlin&metric=pollen").dispatch();
|
||||
assert_eq!(response.status(), Status::NotFound);
|
||||
|
||||
// No metric selected, don't know which map to show?
|
||||
let response = client.get("/map?address=eindhoven").dispatch();
|
||||
assert_eq!(response.status(), Status::UnprocessableEntity);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn map_geo() {
|
||||
let maps_handle = Arc::new(Mutex::new(Maps::new()));
|
||||
let maps_handle_clone = Arc::clone(&maps_handle);
|
||||
let client =
|
||||
Client::tracked(rocket_core(maps_handle)).expect("Not a valid Rocket instance");
|
||||
|
||||
// No maps available yet.
|
||||
let response = client.get("/map?lat=51.4&lon=5.5&metric=pollen").dispatch();
|
||||
assert_eq!(response.status(), Status::ServiceUnavailable);
|
||||
|
||||
// Load some dummy map.
|
||||
let mut maps = maps_handle_clone
|
||||
.lock()
|
||||
.expect("Maps handle mutex was poisoned");
|
||||
maps.pollen = Some(maps_stub(24));
|
||||
drop(maps);
|
||||
|
||||
// There should be a map now.
|
||||
let response = client.get("/map?lat=51.4&lon=5.5&metric=pollen").dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
assert_eq!(response.content_type(), Some(ContentType::PNG));
|
||||
|
||||
// ... but not if it is out of bounds.
|
||||
let response = client.get("/map?lat=0.0&lon=0.0&metric=pollen").dispatch();
|
||||
assert_eq!(response.status(), Status::NotFound);
|
||||
|
||||
// No metric passed, don't know which map to show?
|
||||
let response = client.get("/map?lat=51.4&lon=5.5").dispatch();
|
||||
assert_eq!(response.status(), Status::UnprocessableEntity);
|
||||
}
|
||||
}
|
186
src/main.rs
186
src/main.rs
|
@ -1,22 +1,180 @@
|
|||
#![doc = include_str!("../README.md")]
|
||||
//! Service that provides today's weather forecast for air quality, rain and UV metrics.
|
||||
//!
|
||||
//! This is useful if you want to prepare for going outside and need to know what happens in the
|
||||
//! near future or later today.
|
||||
|
||||
#![warn(
|
||||
clippy::all,
|
||||
missing_copy_implementations,
|
||||
missing_debug_implementations,
|
||||
rust_2018_idioms,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
renamed_and_removed_lints,
|
||||
unsafe_code,
|
||||
unstable_features,
|
||||
unused_import_braces,
|
||||
unused_qualifications
|
||||
rustdoc::broken_intra_doc_links
|
||||
)]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
/// Starts the main maps refresh task and sets up and launches Rocket.
|
||||
#[rocket::launch]
|
||||
async fn rocket() -> _ {
|
||||
sinoptik::setup()
|
||||
use geocoding::{Forward, Openstreetmap, Point};
|
||||
use rocket::serde::json::Json;
|
||||
use rocket::serde::Serialize;
|
||||
use rocket::{get, launch, routes, FromFormField};
|
||||
|
||||
/// The current for a specific location.
|
||||
///
|
||||
/// Only the metrics asked for are included as well as the position and current time.
|
||||
///
|
||||
/// TODO: Fill the metrics with actual data!
|
||||
#[derive(Debug, Default, PartialEq, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
struct Forecast {
|
||||
/// The latitude of the position.
|
||||
lat: f64,
|
||||
|
||||
/// The longitude of the position.
|
||||
lon: f64,
|
||||
|
||||
/// The current time (in seconds since the UNIX epoch).
|
||||
time: i64,
|
||||
|
||||
/// The air quality index (when asked for).
|
||||
#[serde(rename = "AQI", skip_serializing_if = "Option::is_none")]
|
||||
aqi: Option<u8>,
|
||||
|
||||
/// The NO₂ concentration (when asked for).
|
||||
#[serde(rename = "NO2", skip_serializing_if = "Option::is_none")]
|
||||
no2: Option<u8>,
|
||||
|
||||
/// The O₃ concentration (when asked for).
|
||||
#[serde(rename = "O3", skip_serializing_if = "Option::is_none")]
|
||||
o3: Option<u8>,
|
||||
|
||||
/// The combination of pollen + air quality index (when asked for).
|
||||
#[serde(rename = "PAQI", skip_serializing_if = "Option::is_none")]
|
||||
paqi: Option<u8>,
|
||||
|
||||
/// The particulate matter in the air (when asked for).
|
||||
#[serde(rename = "PM10", skip_serializing_if = "Option::is_none")]
|
||||
pm10: Option<u8>,
|
||||
|
||||
/// The pollen in the air (when asked for).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pollen: Option<u8>,
|
||||
|
||||
/// The precipitation (when asked for).
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
precipitation: Option<u8>,
|
||||
|
||||
/// The UV index (when asked for).
|
||||
#[serde(rename = "UVI", skip_serializing_if = "Option::is_none")]
|
||||
uvi: Option<u8>,
|
||||
}
|
||||
|
||||
impl Forecast {
|
||||
fn new(lat: f64, lon: f64) -> Self {
|
||||
let time = chrono::Utc::now().timestamp();
|
||||
|
||||
Self {
|
||||
lat,
|
||||
lon,
|
||||
time,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported metrics.
|
||||
///
|
||||
/// This is used for selecting which metrics should be calculated & returned.
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, FromFormField)]
|
||||
enum Metric {
|
||||
/// All metrics.
|
||||
#[field(value = "all")]
|
||||
All,
|
||||
/// The air quality index.
|
||||
AQI,
|
||||
/// The NO₂ concentration.
|
||||
NO2,
|
||||
/// The O₃ concentration.
|
||||
O3,
|
||||
/// The combination of pollen + air quality index.
|
||||
PAQI,
|
||||
/// The particulate matter in the air.
|
||||
PM10,
|
||||
/// The pollen in the air.
|
||||
Pollen,
|
||||
/// The precipitation.
|
||||
Precipitation,
|
||||
/// The UV index.
|
||||
UVI,
|
||||
}
|
||||
|
||||
impl Metric {
|
||||
/// Returns all supported metrics.
|
||||
fn all() -> Vec<Metric> {
|
||||
use Metric::*;
|
||||
|
||||
Vec::from([AQI, NO2, O3, PAQI, PM10, Pollen, Precipitation, UVI])
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculates and returns the forecast.
|
||||
///
|
||||
/// The provided list `metrics` determines what will be included in the forecast.
|
||||
async fn forecast(lat: f64, lon: f64, metrics: Vec<Metric>) -> Forecast {
|
||||
let mut forecast = Forecast::new(lat, lon);
|
||||
|
||||
// Expand the `All` metric if present, deduplicate otherwise.
|
||||
let mut metrics = metrics;
|
||||
if metrics.contains(&Metric::All) {
|
||||
metrics = Metric::all();
|
||||
} else {
|
||||
metrics.dedup()
|
||||
}
|
||||
|
||||
for metric in metrics {
|
||||
match metric {
|
||||
// This should have been expanded to all the metrics matched below.
|
||||
Metric::All => unreachable!("should have been expanded"),
|
||||
Metric::AQI => forecast.aqi = Some(1),
|
||||
Metric::NO2 => forecast.no2 = Some(2),
|
||||
Metric::O3 => forecast.o3 = Some(3),
|
||||
Metric::PAQI => forecast.paqi = Some(4),
|
||||
Metric::PM10 => forecast.pm10 = Some(5),
|
||||
Metric::Pollen => forecast.pollen = Some(6),
|
||||
Metric::Precipitation => forecast.precipitation = Some(7),
|
||||
Metric::UVI => forecast.uvi = Some(8),
|
||||
}
|
||||
}
|
||||
|
||||
forecast
|
||||
}
|
||||
|
||||
/// Retrieves the geocoded position for the given address.
|
||||
async fn address_position(address: &str) -> Option<(f64, f64)> {
|
||||
let osm = Openstreetmap::new();
|
||||
// FIXME: Handle or log the error.
|
||||
let points: Vec<Point<f64>> = osm.forward(address).ok()?;
|
||||
|
||||
points.get(0).map(|point| (point.x(), point.y()))
|
||||
}
|
||||
|
||||
/// Handler for retrieving the forecast for an address.
|
||||
#[get("/forecast?<address>&<metrics>")]
|
||||
async fn forecast_address(address: String, metrics: Vec<Metric>) -> Option<Json<Forecast>> {
|
||||
let (lat, lon) = address_position(&address).await?;
|
||||
let forecast = forecast(lat, lon, metrics).await;
|
||||
|
||||
Some(Json(forecast))
|
||||
}
|
||||
|
||||
/// Handler for retrieving the forecast for a geocoded position.
|
||||
#[get("/forecast?<lat>&<lon>&<metrics>", rank = 2)]
|
||||
async fn forecast_geo(lat: f64, lon: f64, metrics: Vec<Metric>) -> Json<Forecast> {
|
||||
let forecast = forecast(lat, lon, metrics).await;
|
||||
|
||||
Json(forecast)
|
||||
}
|
||||
|
||||
/// Launches rocket.
|
||||
#[launch]
|
||||
async fn rocket() -> _ {
|
||||
rocket::build().mount("/", routes![forecast_address, forecast_geo])
|
||||
}
|
||||
|
|
613
src/maps.rs
613
src/maps.rs
|
@ -1,613 +0,0 @@
|
|||
//! Maps retrieval and caching.
|
||||
//!
|
||||
//! This module provides a task that keeps maps up-to-date using a maps-specific refresh interval.
|
||||
//! It stores all the maps as [`DynamicImage`]s in memory.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::f64::consts::PI;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use chrono::serde::ts_seconds;
|
||||
use chrono::{DateTime, Duration, NaiveDateTime, TimeZone, Utc};
|
||||
use image::{
|
||||
DynamicImage, GenericImage, GenericImageView, ImageError, ImageFormat, Pixel, Rgb, Rgba,
|
||||
};
|
||||
use reqwest::Url;
|
||||
use rocket::serde::Serialize;
|
||||
use rocket::tokio;
|
||||
use rocket::tokio::time::sleep;
|
||||
|
||||
use crate::forecast::Metric;
|
||||
use crate::position::Position;
|
||||
|
||||
/// The possible maps errors that can occur.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub(crate) enum Error {
|
||||
/// A timestamp parse error occurred.
|
||||
#[error("Timestamp parse error: {0}")]
|
||||
ChronoParse(#[from] chrono::ParseError),
|
||||
|
||||
/// A HTTP request error occurred.
|
||||
#[error("HTTP request error: {0}")]
|
||||
HttpRequest(#[from] reqwest::Error),
|
||||
|
||||
/// Failed to represent HTTP header as a string.
|
||||
#[error("Failed to represent HTTP header as a string")]
|
||||
HttpHeaderToStr(#[from] reqwest::header::ToStrError),
|
||||
|
||||
/// An image error occurred.
|
||||
#[error("Image error: {0}")]
|
||||
Image(#[from] ImageError),
|
||||
|
||||
/// Encountered an invalid image file path.
|
||||
#[error("Invalid image file path: {0}")]
|
||||
InvalidImagePath(String),
|
||||
|
||||
/// Failed to join a task.
|
||||
#[error("Failed to join a task: {0}")]
|
||||
Join(#[from] tokio::task::JoinError),
|
||||
|
||||
/// Did not find any known (map key) colors in samples.
|
||||
#[error("Did not find any known colors in samples")]
|
||||
NoKnownColorsInSamples,
|
||||
|
||||
/// No maps found (yet).
|
||||
#[error("No maps found (yet)")]
|
||||
NoMapsYet,
|
||||
|
||||
/// Got out of bound coordinates for a map.
|
||||
#[error("Got out of bound coordinates for a map: ({0}, {1})")]
|
||||
OutOfBoundCoords(u32, u32),
|
||||
|
||||
/// Got out of bound offset for a map.
|
||||
#[error("Got out of bound offset for a map: {0}")]
|
||||
OutOfBoundOffset(u32),
|
||||
}
|
||||
|
||||
/// Result type that defaults to [`Error`] as the default error type.
|
||||
pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
|
||||
|
||||
/// A handle to access the in-memory cached maps.
|
||||
pub(crate) type MapsHandle = Arc<Mutex<Maps>>;
|
||||
|
||||
/// A histogram mapping map key colors to occurences/counts.
|
||||
type MapKeyHistogram = HashMap<Rgb<u8>, u32>;
|
||||
|
||||
/// The Buienradar map key used for determining the score of a coordinate by mapping its color.
|
||||
///
|
||||
/// Note that the actual score starts from 1, not 0 as per this array.
|
||||
#[rustfmt::skip]
|
||||
const MAP_KEY: [[u8; 3]; 10] = [
|
||||
[0x49, 0xDA, 0x21], // #49DA21
|
||||
[0x30, 0xD2, 0x00], // #30D200
|
||||
[0xFF, 0xF8, 0x8B], // #FFF88B
|
||||
[0xFF, 0xF6, 0x42], // #FFF642
|
||||
[0xFD, 0xBB, 0x31], // #FDBB31
|
||||
[0xFD, 0x8E, 0x24], // #FD8E24
|
||||
[0xFC, 0x10, 0x3E], // #FC103E
|
||||
[0x97, 0x0A, 0x33], // #970A33
|
||||
[0xA6, 0x6D, 0xBC], // #A66DBC
|
||||
[0xB3, 0x30, 0xA1], // #B330A1
|
||||
];
|
||||
|
||||
/// The Buienradar map sample size.
|
||||
///
|
||||
/// Determines the number of pixels in width/height that is sampled around the sampling coordinate.
|
||||
const MAP_SAMPLE_SIZE: [u32; 2] = [31, 31];
|
||||
|
||||
/// The interval between map refreshes (in seconds).
|
||||
const REFRESH_INTERVAL: tokio::time::Duration = tokio::time::Duration::from_secs(60);
|
||||
|
||||
/// The base URL for retrieving the pollen maps from Buienradar.
|
||||
const POLLEN_BASE_URL: &str =
|
||||
"https://image.buienradar.nl/2.0/image/sprite/WeatherMapPollenRadarHourlyNL\
|
||||
?width=820&height=988&extension=png&renderBackground=False&renderBranding=False\
|
||||
&renderText=False&history=0&forecast=24&skip=0";
|
||||
|
||||
/// The interval for retrieving pollen maps.
|
||||
///
|
||||
/// The endpoint provides a map for every hour, 24 in total.
|
||||
const POLLEN_INTERVAL: i64 = 3_600;
|
||||
|
||||
/// The number of pollen maps retained.
|
||||
const POLLEN_MAP_COUNT: u32 = 24;
|
||||
|
||||
/// The number of seconds each pollen map is for.
|
||||
const POLLEN_MAP_INTERVAL: i64 = 3_600;
|
||||
|
||||
/// The position reference points for the pollen map.
|
||||
///
|
||||
/// Maps the gecoded positions of two reference points as follows:
|
||||
/// * Latitude and longitude of Vlissingen to its y- and x-position
|
||||
/// * Latitude of Lauwersoog to its y-position and longitude of Enschede to its x-position
|
||||
const POLLEN_MAP_REF_POINTS: [(Position, (u32, u32)); 2] = [
|
||||
(Position::new(51.44, 3.57), (745, 84)), // Vlissingen
|
||||
(Position::new(53.40, 6.90), (111, 694)), // Lauwersoog (lat/y) and Enschede (lon/x)
|
||||
];
|
||||
|
||||
/// The base URL for retrieving the UV index maps from Buienradar.
|
||||
const UVI_BASE_URL: &str = "https://image.buienradar.nl/2.0/image/sprite/WeatherMapUVIndexNL\
|
||||
?width=820&height=988&extension=png&&renderBackground=False&renderBranding=False\
|
||||
&renderText=False&history=0&forecast=5&skip=0";
|
||||
|
||||
/// The interval for retrieving UV index maps.
|
||||
///
|
||||
/// The endpoint provides a map for every day, 5 in total.
|
||||
const UVI_INTERVAL: i64 = 24 * 3_600;
|
||||
|
||||
/// The number of UV index maps retained.
|
||||
const UVI_MAP_COUNT: u32 = 5;
|
||||
|
||||
/// The number of seconds each UV index map is for.
|
||||
const UVI_MAP_INTERVAL: i64 = 24 * 3_600;
|
||||
|
||||
/// The position reference points for the UV index map.
|
||||
const UVI_MAP_REF_POINTS: [(Position, (u32, u32)); 2] = POLLEN_MAP_REF_POINTS;
|
||||
|
||||
/// The `MapsRefresh` trait is used to reduce the time a lock needs to be held when updating maps.
|
||||
///
|
||||
/// When refreshing maps, the lock only needs to be held when checking whether a refresh is
|
||||
/// necessary and when the new maps have been retrieved and can be updated.
|
||||
trait MapsRefresh {
|
||||
/// Determines whether the pollen maps need to be refreshed.
|
||||
fn needs_pollen_refresh(&self) -> bool;
|
||||
|
||||
/// Determines whether the UV index maps need to be refreshed.
|
||||
fn needs_uvi_refresh(&self) -> bool;
|
||||
|
||||
/// Determines whether the pollen maps are stale.
|
||||
fn is_pollen_stale(&self) -> bool;
|
||||
|
||||
/// Determines whether the UV index maps are stale.
|
||||
fn is_uvi_stale(&self) -> bool;
|
||||
|
||||
/// Updates the pollen maps.
|
||||
fn set_pollen(&self, result: Result<RetrievedMaps>);
|
||||
|
||||
/// Updates the UV index maps.
|
||||
fn set_uvi(&self, result: Result<RetrievedMaps>);
|
||||
}
|
||||
|
||||
/// Container type for all in-memory cached maps.
|
||||
#[derive(Debug, Default)]
|
||||
pub(crate) struct Maps {
|
||||
/// The pollen maps (from Buienradar).
|
||||
pub(crate) pollen: Option<RetrievedMaps>,
|
||||
|
||||
/// The UV index maps (from Buienradar).
|
||||
pub(crate) uvi: Option<RetrievedMaps>,
|
||||
}
|
||||
|
||||
impl Maps {
|
||||
/// Creates a new maps cache.
|
||||
///
|
||||
/// It contains an [`DynamicImage`] per maps type, if downloaded, and the timestamp of the last
|
||||
/// update.
|
||||
pub(crate) fn new() -> Self {
|
||||
Self {
|
||||
pollen: None,
|
||||
uvi: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a current pollen map that marks the provided position.
|
||||
pub(crate) fn pollen_mark(&self, position: Position) -> Result<DynamicImage> {
|
||||
let maps = self.pollen.as_ref().ok_or(Error::NoMapsYet)?;
|
||||
let image = &maps.image;
|
||||
let stamp = maps.timestamp_base;
|
||||
let marked_image = map_at(
|
||||
image,
|
||||
stamp,
|
||||
POLLEN_MAP_INTERVAL,
|
||||
POLLEN_MAP_COUNT,
|
||||
Utc::now(),
|
||||
)?;
|
||||
let coords = project(&marked_image, POLLEN_MAP_REF_POINTS, position)?;
|
||||
|
||||
Ok(mark(marked_image, coords))
|
||||
}
|
||||
|
||||
/// Samples the pollen maps for the given position.
|
||||
pub(crate) fn pollen_samples(&self, position: Position) -> Result<Vec<Sample>> {
|
||||
let maps = self.pollen.as_ref().ok_or(Error::NoMapsYet)?;
|
||||
let image = &maps.image;
|
||||
let map = image.view(0, 0, image.width() / UVI_MAP_COUNT, image.height());
|
||||
let coords = project(&*map, POLLEN_MAP_REF_POINTS, position)?;
|
||||
let stamp = maps.timestamp_base;
|
||||
|
||||
sample(image, stamp, POLLEN_MAP_INTERVAL, POLLEN_MAP_COUNT, coords)
|
||||
}
|
||||
|
||||
/// Returns a current UV index map that marks the provided position.
|
||||
pub(crate) fn uvi_mark(&self, position: Position) -> Result<DynamicImage> {
|
||||
let maps = self.uvi.as_ref().ok_or(Error::NoMapsYet)?;
|
||||
let image = &maps.image;
|
||||
let stamp = maps.timestamp_base;
|
||||
let marked_image = map_at(image, stamp, UVI_MAP_INTERVAL, UVI_MAP_COUNT, Utc::now())?;
|
||||
let coords = project(&marked_image, POLLEN_MAP_REF_POINTS, position)?;
|
||||
|
||||
Ok(mark(marked_image, coords))
|
||||
}
|
||||
|
||||
/// Samples the UV index maps for the given position.
|
||||
pub(crate) fn uvi_samples(&self, position: Position) -> Result<Vec<Sample>> {
|
||||
let maps = self.uvi.as_ref().ok_or(Error::NoMapsYet)?;
|
||||
let image = &maps.image;
|
||||
let map = image.view(0, 0, image.width() / UVI_MAP_COUNT, image.height());
|
||||
let coords = project(&*map, UVI_MAP_REF_POINTS, position)?;
|
||||
let stamp = maps.timestamp_base;
|
||||
|
||||
sample(image, stamp, UVI_MAP_INTERVAL, UVI_MAP_COUNT, coords)
|
||||
}
|
||||
}
|
||||
|
||||
impl MapsRefresh for MapsHandle {
|
||||
fn is_pollen_stale(&self) -> bool {
|
||||
let maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
|
||||
match &maps.pollen {
|
||||
Some(pollen_maps) => {
|
||||
Utc::now().signed_duration_since(pollen_maps.mtime)
|
||||
> Duration::seconds(POLLEN_MAP_COUNT as i64 * POLLEN_MAP_INTERVAL)
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_uvi_stale(&self) -> bool {
|
||||
let maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
|
||||
match &maps.uvi {
|
||||
Some(uvi_maps) => {
|
||||
Utc::now().signed_duration_since(uvi_maps.mtime)
|
||||
> Duration::seconds(UVI_MAP_COUNT as i64 * UVI_MAP_INTERVAL)
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn needs_pollen_refresh(&self) -> bool {
|
||||
let maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
|
||||
match &maps.pollen {
|
||||
Some(pollen_maps) => {
|
||||
Utc::now()
|
||||
.signed_duration_since(pollen_maps.mtime)
|
||||
.num_seconds()
|
||||
> POLLEN_INTERVAL
|
||||
}
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
fn needs_uvi_refresh(&self) -> bool {
|
||||
let maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
|
||||
match &maps.uvi {
|
||||
Some(uvi_maps) => {
|
||||
Utc::now()
|
||||
.signed_duration_since(uvi_maps.mtime)
|
||||
.num_seconds()
|
||||
> UVI_INTERVAL
|
||||
}
|
||||
None => true,
|
||||
}
|
||||
}
|
||||
|
||||
fn set_pollen(&self, retrieved_maps: Result<RetrievedMaps>) {
|
||||
if retrieved_maps.is_ok() || self.is_pollen_stale() {
|
||||
let mut maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
maps.pollen = retrieved_maps.ok();
|
||||
}
|
||||
}
|
||||
|
||||
fn set_uvi(&self, retrieved_maps: Result<RetrievedMaps>) {
|
||||
if retrieved_maps.is_ok() || self.is_uvi_stale() {
|
||||
let mut maps = self.lock().expect("Maps handle mutex was poisoned");
|
||||
maps.uvi = retrieved_maps.ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A Buienradar map sample.
///
/// This represents a value at a given time.
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Sample {
    /// The time(stamp) of the forecast.
    #[serde(serialize_with = "ts_seconds::serialize")]
    pub(crate) time: DateTime<Utc>,

    /// The forecasted score.
    ///
    /// A value in the range `1..=10`.
    // Serialized under the key "value" in the JSON output.
    #[serde(rename(serialize = "value"))]
    pub(crate) score: u8,
}
|
||||
|
||||
impl Sample {
    /// Creates a new sample (test-only convenience constructor).
    #[cfg(test)]
    pub(crate) fn new(time: DateTime<Utc>, score: u8) -> Self {
        Self { time, score }
    }
}
|
||||
|
||||
/// Builds a scoring histogram for the map key.
|
||||
fn map_key_histogram() -> MapKeyHistogram {
|
||||
MAP_KEY
|
||||
.into_iter()
|
||||
.fold(HashMap::new(), |mut hm, channels| {
|
||||
hm.insert(Rgb::from(channels), 0);
|
||||
hm
|
||||
})
|
||||
}
|
||||
|
||||
/// Samples the provided maps at the given (map-relative) coordinates and starting timestamp.
/// It assumes the provided coordinates are within bounds of at least one map.
/// The interval is the number of seconds the timestamp is bumped for each map.
///
/// The image is a horizontal strip of `count` maps; one sample is produced per map by
/// taking the dominant map-key color in a small window around the coordinates.
fn sample<I: GenericImageView<Pixel = Rgba<u8>>>(
    image: &I,
    stamp: DateTime<Utc>,
    interval: i64,
    count: u32,
    coords: (u32, u32),
) -> Result<Vec<Sample>> {
    let (x, y) = coords;
    // Width of a single map within the strip.
    let width = image.width() / count;
    let height = image.height();
    // NOTE(review): this bound allows `x == width` / `y == height`; presumably an
    // exclusive bound was intended — confirm against `project`'s `in_bounds` check.
    if x > width || y > height {
        return Err(Error::OutOfBoundCoords(x, y));
    }
    // Clamp the sample window so it never extends past the right/bottom edge of a map.
    let max_sample_width = (width - x).min(MAP_SAMPLE_SIZE[0]);
    let max_sample_height = (height - y).min(MAP_SAMPLE_SIZE[1]);
    let mut samples = Vec::with_capacity(count as usize);
    let mut time = stamp;
    let mut offset = 0;

    // Walk the strip one map at a time; `offset` is the left edge of the current map.
    while offset < image.width() {
        // Window roughly centered on the coordinates (saturating near the edges).
        let map = image.view(
            x.saturating_sub(MAP_SAMPLE_SIZE[0] / 2) + offset,
            y.saturating_sub(MAP_SAMPLE_SIZE[1] / 2),
            max_sample_width,
            max_sample_height,
        );
        // Count how often each map-key color occurs in the window; pixels with
        // colors not in the key are ignored (`and_modify` without `or_insert`).
        let histogram = map
            .pixels()
            .fold(map_key_histogram(), |mut h, (_px, _py, color)| {
                h.entry(color.to_rgb()).and_modify(|count| *count += 1);
                h
            });
        let (max_color, &count) = histogram
            .iter()
            .max_by_key(|(_color, count)| *count)
            .expect("Map key is never empty");
        if count == 0 {
            return Err(Error::NoKnownColorsInSamples);
        }

        // The 1-based index of the dominant color within the map key is the score.
        let score = MAP_KEY
            .iter()
            .position(|&color| &Rgb::from(color) == max_color)
            .map(|score| score + 1) // Scores go from 1..=10, not 0..=9!
            .expect("Maximum color is always a map key color") as u8;

        samples.push(Sample { time, score });
        time += Duration::seconds(interval);
        offset += width;
    }

    Ok(samples)
}
|
||||
|
||||
/// A retrieved image with some metadata.
#[derive(Debug)]
pub(crate) struct RetrievedMaps {
    /// The image data (a horizontal strip of maps).
    pub(crate) image: DynamicImage,

    /// The date/time the image was last modified.
    pub(crate) mtime: DateTime<Utc>,

    /// The starting date/time the image corresponds with.
    pub(crate) timestamp_base: DateTime<Utc>,
}
|
||||
|
||||
impl RetrievedMaps {
    /// Creates a retrieved-maps container for an image (test-only).
    ///
    /// Both `mtime` and `timestamp_base` are set to the current time.
    #[cfg(test)]
    pub(crate) fn new(image: DynamicImage) -> Self {
        let mtime = Utc::now();
        let timestamp_base = Utc::now();

        Self {
            image,
            mtime,
            timestamp_base,
        }
    }
}
|
||||
|
||||
/// Retrieves an image from the provided URL.
|
||||
async fn retrieve_image(url: Url) -> Result<RetrievedMaps> {
|
||||
let response = reqwest::get(url).await?;
|
||||
let mtime = match response.headers().get(reqwest::header::LAST_MODIFIED) {
|
||||
Some(mtime_header) => {
|
||||
let mtime_headr_str = mtime_header.to_str()?;
|
||||
|
||||
DateTime::from(DateTime::parse_from_rfc2822(mtime_headr_str)?)
|
||||
}
|
||||
None => Utc::now(),
|
||||
};
|
||||
|
||||
let timestamp_base = {
|
||||
let path = response.url().path();
|
||||
let (_, filename) = path
|
||||
.rsplit_once('/')
|
||||
.ok_or_else(|| Error::InvalidImagePath(path.to_owned()))?;
|
||||
let (timestamp_str, _) = filename
|
||||
.split_once("__")
|
||||
.ok_or_else(|| Error::InvalidImagePath(path.to_owned()))?;
|
||||
let timestamp = NaiveDateTime::parse_from_str(timestamp_str, "%Y%m%d%H%M")?;
|
||||
|
||||
Utc.from_utc_datetime(×tamp)
|
||||
};
|
||||
let bytes = response.bytes().await?;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
image::load_from_memory_with_format(&bytes, ImageFormat::Png)
|
||||
.map(|image| RetrievedMaps {
|
||||
image,
|
||||
mtime,
|
||||
timestamp_base,
|
||||
})
|
||||
.map_err(Error::from)
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
/// Retrieves the pollen maps from Buienradar.
|
||||
///
|
||||
/// See [`POLLEN_BASE_URL`] for the base URL and [`retrieve_image`] for the retrieval function.
|
||||
async fn retrieve_pollen_maps() -> Result<RetrievedMaps> {
|
||||
let timestamp = format!("{}", chrono::Local::now().format("%y%m%d%H%M"));
|
||||
let mut url = Url::parse(POLLEN_BASE_URL).unwrap();
|
||||
url.query_pairs_mut().append_pair("timestamp", ×tamp);
|
||||
|
||||
println!("🗺️ Refreshing pollen maps from: {}", url);
|
||||
retrieve_image(url).await
|
||||
}
|
||||
|
||||
/// Retrieves the UV index maps from Buienradar.
|
||||
///
|
||||
/// See [`UVI_BASE_URL`] for the base URL and [`retrieve_image`] for the retrieval function.
|
||||
async fn retrieve_uvi_maps() -> Result<RetrievedMaps> {
|
||||
let timestamp = format!("{}", chrono::Local::now().format("%y%m%d%H%M"));
|
||||
let mut url = Url::parse(UVI_BASE_URL).unwrap();
|
||||
url.query_pairs_mut().append_pair("timestamp", ×tamp);
|
||||
|
||||
println!("🗺️ Refreshing UV index maps from: {}", url);
|
||||
retrieve_image(url).await
|
||||
}
|
||||
|
||||
/// Returns the map for the given instant.
|
||||
fn map_at(
|
||||
image: &DynamicImage,
|
||||
stamp: DateTime<Utc>,
|
||||
interval: i64,
|
||||
count: u32,
|
||||
instant: DateTime<Utc>,
|
||||
) -> Result<DynamicImage> {
|
||||
let duration = instant.signed_duration_since(stamp);
|
||||
let offset = (duration.num_seconds() / interval) as u32;
|
||||
// Check if out of bounds.
|
||||
if offset >= count {
|
||||
return Err(Error::OutOfBoundOffset(offset));
|
||||
}
|
||||
let width = image.width() / count;
|
||||
|
||||
Ok(image.crop_imm(offset * width, 0, width, image.height()))
|
||||
}
|
||||
|
||||
/// Marks the provided coordinates on the map using a horizontal and vertical line.
|
||||
fn mark(mut image: DynamicImage, coords: (u32, u32)) -> DynamicImage {
|
||||
let (x, y) = coords;
|
||||
|
||||
for py in 0..image.height() {
|
||||
image.put_pixel(x, py, Rgba::from([0x00, 0x00, 0x00, 0x70]));
|
||||
}
|
||||
for px in 0..image.width() {
|
||||
image.put_pixel(px, y, Rgba::from([0x00, 0x00, 0x00, 0x70]));
|
||||
}
|
||||
|
||||
image
|
||||
}
|
||||
|
||||
/// Projects the provided geocoded position to a coordinate on a map.
///
/// This uses two reference points and a Mercator projection on the y-coordinates of those points
/// to calculate how the map scales with respect to the provided position.
///
/// # Errors
///
/// Returns [`Error::OutOfBoundCoords`] if the projected coordinate falls outside the image.
fn project<I: GenericImageView>(
    image: &I,
    ref_points: [(Position, (u32, u32)); 2],
    pos: Position,
) -> Result<(u32, u32)> {
    // Get the data from the reference points.
    // NOTE(review): the pixel tuples are destructured as (y, x) here — confirm that the
    // reference point constants indeed store their pixel coordinates in (y, x) order.
    let (ref1, (ref1_y, ref1_x)) = ref_points[0];
    let (ref2, (ref2_y, ref2_x)) = ref_points[1];

    // For the x-coordinate, use a linear scale (pixels per radian of longitude).
    let scale_x = ((ref2_x - ref1_x) as f64) / (ref2.lon_as_rad() - ref1.lon_as_rad());
    let x = ((pos.lon_as_rad() - ref1.lon_as_rad()) * scale_x + ref1_x as f64).round() as u32;

    // For the y-coordinate, use a Mercator-projected scale:
    // y_merc(lat) = ln(tan(lat/2 + π/4)).
    let mercator_y = |lat: f64| (lat / 2.0 + PI / 4.0).tan().ln();
    let ref1_merc_y = mercator_y(ref1.lat_as_rad());
    let ref2_merc_y = mercator_y(ref2.lat_as_rad());
    let scale_y = ((ref1_y - ref2_y) as f64) / (ref2_merc_y - ref1_merc_y);
    let y = ((ref2_merc_y - mercator_y(pos.lat_as_rad())) * scale_y + ref2_y as f64).round() as u32;

    if image.in_bounds(x, y) {
        Ok((x, y))
    } else {
        Err(Error::OutOfBoundCoords(x, y))
    }
}
|
||||
|
||||
/// Returns the data of a map with a crosshair drawn on it for the given position.
|
||||
///
|
||||
/// The map that is used is determined by the provided metric.
|
||||
pub(crate) async fn mark_map(
|
||||
position: Position,
|
||||
metric: Metric,
|
||||
maps_handle: &MapsHandle,
|
||||
) -> crate::Result<Vec<u8>> {
|
||||
use std::io::Cursor;
|
||||
|
||||
let maps_handle = Arc::clone(maps_handle);
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let maps = maps_handle.lock().expect("Maps handle lock was poisoned");
|
||||
let image = match metric {
|
||||
Metric::Pollen => maps.pollen_mark(position),
|
||||
Metric::UVI => maps.uvi_mark(position),
|
||||
_ => return Err(crate::Error::UnsupportedMetric(metric)),
|
||||
}?;
|
||||
drop(maps);
|
||||
|
||||
// Encode the image as PNG image data.
|
||||
let mut image_data = Cursor::new(Vec::new());
|
||||
match image.write_to(&mut image_data, ImageFormat::Png) {
|
||||
Ok(()) => Ok(image_data.into_inner()),
|
||||
Err(err) => Err(crate::Error::from(Error::from(err))),
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(Error::from)?
|
||||
}
|
||||
|
||||
/// Runs a loop that keeps refreshing the maps when necessary.
|
||||
///
|
||||
/// Use [`MapsRefresh`] trait methods on `maps_handle` to check whether each maps type needs to be
|
||||
/// refreshed and uses its retrieval function to update it if necessary.
|
||||
pub(crate) async fn run(maps_handle: MapsHandle) {
|
||||
loop {
|
||||
println!("🕔 Refreshing the maps (if necessary)...");
|
||||
|
||||
if maps_handle.needs_pollen_refresh() {
|
||||
let retrieved_maps = retrieve_pollen_maps().await;
|
||||
if let Err(e) = retrieved_maps.as_ref() {
|
||||
eprintln!("💥 Encountered error during pollen maps refresh: {}", e);
|
||||
}
|
||||
maps_handle.set_pollen(retrieved_maps);
|
||||
}
|
||||
|
||||
if maps_handle.needs_uvi_refresh() {
|
||||
let retrieved_maps = retrieve_uvi_maps().await;
|
||||
if let Err(e) = retrieved_maps.as_ref() {
|
||||
eprintln!("💥 Encountered error during UVI maps refresh: {}", e);
|
||||
}
|
||||
maps_handle.set_uvi(retrieved_maps);
|
||||
}
|
||||
|
||||
sleep(REFRESH_INTERVAL).await;
|
||||
}
|
||||
}
|
117
src/position.rs
117
src/position.rs
|
@ -1,117 +0,0 @@
|
|||
//! Positions in the geographic coordinate system.
|
||||
//!
|
||||
//! This module contains everything related to geographic coordinate system functionality.
|
||||
|
||||
use std::f64::consts::PI;
|
||||
use std::hash::Hash;
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
use geocoding::{Forward, Openstreetmap, Point};
|
||||
use rocket::tokio;
|
||||
|
||||
use crate::{Error, Result};
|
||||
|
||||
/// A (geocoded) position.
///
/// This is used for measuring and communication positions directly on the Earth as latitude and
/// longitude.
///
/// # Position equivalence and hashing
///
/// For caching purposes we need to check equivalence between two positions. If the positions match
/// up to the 5th decimal, we consider them the same (see [`Position::lat_as_i32`] and
/// [`Position::lon_as_i32`]).
#[derive(Clone, Copy, Debug, Default)]
pub(crate) struct Position {
    /// The latitude of the position (degrees).
    pub(crate) lat: f64,

    /// The longitude of the position (degrees).
    pub(crate) lon: f64,
}
|
||||
|
||||
impl Position {
|
||||
/// Creates a new (geocoded) position.
|
||||
pub(crate) const fn new(lat: f64, lon: f64) -> Self {
|
||||
Self { lat, lon }
|
||||
}
|
||||
|
||||
/// Returns the latitude as an integer.
|
||||
///
|
||||
/// This is achieved by multiplying it by `10_000` and rounding it. Thus, this gives a
|
||||
/// precision of 5 decimals.
|
||||
fn lat_as_i32(&self) -> i32 {
|
||||
(self.lat * 10_000.0).round() as i32
|
||||
}
|
||||
|
||||
/// Returns the longitude as an integer.
|
||||
///
|
||||
/// This is achieved by multiplying it by `10_000` and rounding it. Thus, this gives a
|
||||
/// precision of 5 decimals.
|
||||
fn lon_as_i32(&self) -> i32 {
|
||||
(self.lon * 10_000.0).round() as i32
|
||||
}
|
||||
|
||||
/// Returns the latitude in radians.
|
||||
pub(crate) fn lat_as_rad(&self) -> f64 {
|
||||
self.lat * PI / 180.0
|
||||
}
|
||||
|
||||
/// Returns the longitude in radians.
|
||||
pub(crate) fn lon_as_rad(&self) -> f64 {
|
||||
self.lon * PI / 180.0
|
||||
}
|
||||
|
||||
/// Returns the latitude as a string with the given precision.
|
||||
pub(crate) fn lat_as_str(&self, precision: usize) -> String {
|
||||
format!("{:.*}", precision, self.lat)
|
||||
}
|
||||
|
||||
/// Returns the longitude as a string with the given precision.
|
||||
pub(crate) fn lon_as_str(&self, precision: usize) -> String {
|
||||
format!("{:.*}", precision, self.lon)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Point<f64>> for Position {
    /// Converts a geocoding point to a position.
    fn from(point: &Point<f64>) -> Self {
        // The `geocoding` API always returns (longitude, latitude) as (x, y).
        Position::new(point.y(), point.x())
    }
}
|
||||
|
||||
impl Hash for Position {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        // Floats cannot be hashed. Use the 5-decimal precision integer representation of the
        // coordinates instead (consistent with the `PartialEq` implementation).
        self.lat_as_i32().hash(state);
        self.lon_as_i32().hash(state);
    }
}
|
||||
|
||||
impl PartialEq for Position {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.lat_as_i32() == other.lat_as_i32() && self.lon_as_i32() == other.lon_as_i32()
|
||||
}
|
||||
}
|
||||
|
||||
// Equality on the rounded integer representation is a full equivalence relation.
impl Eq for Position {}
|
||||
|
||||
/// Resolves the geocoded position for a given address.
///
/// If the result is [`Ok`], it will be cached.
/// Note that only the 100 least recently used addresses will be cached.
///
/// # Errors
///
/// Returns an error when geocoding fails or when no position is found for the address.
#[cached(size = 100, result = true)]
pub(crate) async fn resolve_address(address: String) -> Result<Position> {
    println!("🌍 Geocoding the position of the address: {}", address);
    // The `geocoding` forward lookup is synchronous, so run it on a blocking task.
    tokio::task::spawn_blocking(move || {
        let osm = Openstreetmap::new();
        let points: Vec<Point<f64>> = osm.forward(&address)?;

        // Use the first (best) match, if any.
        points
            .first()
            .ok_or(Error::NoPositionFound)
            .map(Position::from)
    })
    .await?
}
|
|
@ -1,7 +0,0 @@
|
|||
//! All supported metric data providers.
|
||||
//!
|
||||
//! Data is either provided via a direct (JSON) API or via looking up values on maps.
|
||||
|
||||
pub(crate) mod buienradar;
|
||||
pub(crate) mod combined;
|
||||
pub(crate) mod luchtmeetnet;
|
|
@ -1,225 +0,0 @@
|
|||
//! The Buienradar data provider.
|
||||
//!
|
||||
//! For more information about Buienradar, see: <https://www.buienradar.nl/overbuienradar/contact>
|
||||
//! and <https://www.buienradar.nl/overbuienradar/gratis-weerdata>.
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::serde::ts_seconds;
|
||||
use chrono::{DateTime, Datelike, Duration, NaiveTime, ParseError, TimeZone, Utc};
|
||||
use chrono_tz::Europe;
|
||||
use csv::ReaderBuilder;
|
||||
use reqwest::Url;
|
||||
use rocket::serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::maps::MapsHandle;
|
||||
use crate::position::Position;
|
||||
use crate::{Error, Metric, Result};
|
||||
|
||||
/// The base URL for the Buienradar API.
|
||||
const BUIENRADAR_BASE_URL: &str = "https://gpsgadget.buienradar.nl/data/raintext";
|
||||
|
||||
/// The Buienradar pollen/UV index map sample.
|
||||
pub(crate) type Sample = crate::maps::Sample;
|
||||
|
||||
/// A row in the precipitation text output.
///
/// This is an intermediate type used to represent rows of the output.
#[derive(Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
struct Row {
    /// The precipitation value in the range `0..=255`.
    value: u16,

    /// The time in the `HH:MM` format.
    time: String,
}
|
||||
|
||||
/// The Buienradar API precipitation data item.
// Deserialized via `Row` (see the `try_from` attribute) so the raw value/time text can be
// converted into a timestamp and an intensity.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(crate = "rocket::serde", try_from = "Row")]
pub(crate) struct Item {
    /// The time(stamp) of the forecast.
    #[serde(serialize_with = "ts_seconds::serialize")]
    pub(crate) time: DateTime<Utc>,

    /// The forecasted value.
    ///
    /// Its unit is mm/h.
    pub(crate) value: f32,
}
|
||||
|
||||
impl TryFrom<Row> for Item {
|
||||
type Error = ParseError;
|
||||
|
||||
fn try_from(row: Row) -> Result<Self, Self::Error> {
|
||||
let time = parse_time(&row.time)?;
|
||||
let value = convert_value(row.value);
|
||||
|
||||
Ok(Item { time, value })
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a time string to date/time in the UTC time zone.
///
/// The provided time has the format `HH:MM` and is considered to be in the Europe/Amsterdam
/// time zone.
///
/// # Errors
///
/// Returns a [`ParseError`] if the time does not match the `%H:%M` format.
fn parse_time(t: &str) -> Result<DateTime<Utc>, ParseError> {
    // First, get the current date in the Europe/Amsterdam time zone.
    let today = Utc::now().with_timezone(&Europe::Amsterdam).date_naive();
    // Then, parse the time and interpret it relative to "today".
    let ntime = NaiveTime::parse_from_str(t, "%H:%M")?;
    let ndtime = today.and_time(ntime);
    // Finally, interpret the naive date/time in the Europe/Amsterdam time zone and convert it to
    // the UTC time zone.
    // NOTE(review): `unwrap` panics for local times that are ambiguous or non-existent
    // (DST transitions) — confirm this is acceptable here.
    let ldtime = Europe::Amsterdam.from_local_datetime(&ndtime).unwrap();
    let dtime = ldtime.with_timezone(&Utc);

    Ok(dtime)
}
|
||||
|
||||
/// Converts a precipitation value into an precipitation intensity value in mm/h.
///
/// For the conversion formula, see: <https://www.buienradar.nl/overbuienradar/gratis-weerdata>.
/// Intensity is `10^((value - 109) / 32)`, rounded to one decimal.
fn convert_value(v: u16) -> f32 {
    let intensity = 10.0_f32.powf((f32::from(v) - 109.0) / 32.0);

    (intensity * 10.0).round() / 10.0
}
|
||||
|
||||
/// Fix the timestamps of the items that end up on the wrong side of the day boundary.
///
/// `parse_time` interprets every `HH:MM` time relative to "today", so when a series crosses
/// midnight some items end up a day off. This uses noon (12:00 Europe/Amsterdam) of the
/// current day as a pivot: if it is currently before noon, items stamped after noon are moved
/// a day back; if it is after noon, items stamped before noon are moved a day forward.
/// (The original doc comment referred to a 0:00 boundary; the code pivots on noon.)
// TODO: If something in Sinoptik needs unit tests, it is this!
fn fix_items_day_boundary(items: Vec<Item>) -> Vec<Item> {
    let now = Utc::now().with_timezone(&Europe::Amsterdam);
    // Use noon on the same day as "now" as a comparison moment.
    let noon = Europe::Amsterdam
        .with_ymd_and_hms(now.year(), now.month(), now.day(), 12, 0, 0)
        .single()
        .expect("Invalid date: input date is invalid or not unambiguous");

    if now < noon {
        // It is still before noon, so bump timestamps after noon a day back.
        items
            .into_iter()
            .map(|mut item| {
                if item.time > noon {
                    item.time -= Duration::days(1)
                }
                item
            })
            .collect()
    } else {
        // It is already after noon, so bump the timestamps before noon a day forward.
        items
            .into_iter()
            .map(|mut item| {
                if item.time < noon {
                    item.time += Duration::days(1)
                }
                item
            })
            .collect()
    }
}
|
||||
|
||||
/// Retrieves the Buienradar forecasted precipitation items for the provided position.
///
/// If the result is [`Ok`] it will be cached for 5 minutes for the given position.
///
/// # Errors
///
/// Fails on network/HTTP errors or when the response cannot be parsed as the expected CSV.
#[cached(time = 300, result = true)]
async fn get_precipitation(position: Position) -> Result<Vec<Item>> {
    let mut url = Url::parse(BUIENRADAR_BASE_URL).unwrap();
    url.query_pairs_mut()
        .append_pair("lat", &position.lat_as_str(2))
        .append_pair("lon", &position.lon_as_str(2));

    println!("▶️ Retrieving Buienradar data from: {url}");
    let response = reqwest::get(url).await?;
    let output = response.error_for_status()?.text().await?;

    // The output is headerless, pipe-delimited `value|HH:MM` rows (see `Row`).
    let mut rdr = ReaderBuilder::new()
        .has_headers(false)
        .delimiter(b'|')
        .from_reader(output.as_bytes());
    let items: Vec<Item> = rdr.deserialize().collect::<Result<_, _>>()?;

    // Check if the first item stamp is (timewise) later than the last item stamp.
    // In this case `parse_time` interpreted e.g. 23:00 and later 0:30 in the same day and some
    // time stamps need to be fixed.
    if items
        .first()
        .zip(items.last())
        .map(|(it1, it2)| it1.time > it2.time)
        == Some(true)
    {
        Ok(fix_items_day_boundary(items))
    } else {
        Ok(items)
    }
}
|
||||
|
||||
/// Retrieves the Buienradar forecasted pollen samples for the provided position.
///
/// If the result is [`Ok`] it will be cached for 1 hour for the given position.
// Only `position` is part of the cache key; the maps handle is just the data source.
#[cached(
    time = 3_600,
    key = "Position",
    convert = r#"{ position }"#,
    result = true
)]
async fn get_pollen(position: Position, maps_handle: &MapsHandle) -> Result<Vec<Sample>> {
    maps_handle
        .lock()
        .expect("Maps handle mutex was poisoned")
        .pollen_samples(position)
        .map_err(Into::into)
}
|
||||
|
||||
/// Retrieves the Buienradar forecasted UV index samples for the provided position.
///
/// If the result is [`Ok`] it will be cached for 1 day for the given position.
// Only `position` is part of the cache key; the maps handle is just the data source.
#[cached(
    time = 86_400,
    key = "Position",
    convert = r#"{ position }"#,
    result = true
)]
async fn get_uvi(position: Position, maps_handle: &MapsHandle) -> Result<Vec<Sample>> {
    maps_handle
        .lock()
        .expect("Maps handle mutex was poisoned")
        .uvi_samples(position)
        .map_err(Into::into)
}
|
||||
|
||||
/// Retrieves the Buienradar forecasted map samples for the provided position.
///
/// It only supports the following metrics:
/// * [`Metric::Pollen`]
/// * [`Metric::UVI`]
///
/// # Errors
///
/// Returns [`Error::UnsupportedMetric`] for any other metric.
pub(crate) async fn get_samples(
    position: Position,
    metric: Metric,
    maps_handle: &MapsHandle,
) -> Result<Vec<Sample>> {
    match metric {
        Metric::Pollen => get_pollen(position, maps_handle).await,
        Metric::UVI => get_uvi(position, maps_handle).await,
        _ => Err(Error::UnsupportedMetric(metric)),
    }
}
|
||||
|
||||
/// Retrieves the Buienradar forecasted items for the provided position.
///
/// It only supports the following metric:
/// * [`Metric::Precipitation`]
///
/// # Errors
///
/// Returns [`Error::UnsupportedMetric`] for any other metric.
pub(crate) async fn get_items(position: Position, metric: Metric) -> Result<Vec<Item>> {
    match metric {
        Metric::Precipitation => get_precipitation(position).await,
        _ => Err(Error::UnsupportedMetric(metric)),
    }
}
|
|
@ -1,269 +0,0 @@
|
|||
//! The combined data provider.
|
||||
//!
|
||||
//! This combines and collates data using the other providers.
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::serde::ts_seconds;
|
||||
use chrono::{DateTime, Utc};
|
||||
use rocket::serde::Serialize;
|
||||
|
||||
pub(crate) use super::buienradar::{self, Sample as BuienradarSample};
|
||||
pub(crate) use super::luchtmeetnet::{self, Item as LuchtmeetnetItem};
|
||||
use crate::maps::MapsHandle;
|
||||
use crate::position::Position;
|
||||
use crate::{Error, Metric};
|
||||
|
||||
/// The possible merge errors that can occur.
// All variant names share the `...ItemFound` suffix; silence Clippy's
// `enum_variant_names` lint for that.
#[allow(clippy::enum_variant_names)]
#[derive(Debug, thiserror::Error, PartialEq)]
pub(crate) enum MergeError {
    /// No AQI item found.
    #[error("No AQI item found")]
    NoAqiItemFound,

    /// No pollen item found.
    #[error("No pollen item found")]
    NoPollenItemFound,

    /// No AQI item found within 30 minutes of first pollen item.
    #[error("No AQI item found within 30 minutes of first pollen item")]
    NoCloseAqiItemFound,

    /// No pollen item found within 30 minutes of first AQI item.
    #[error("No pollen item found within 30 minutes of first AQI item")]
    NoClosePollenItemFound,
}
|
||||
|
||||
/// The combined data item.
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Item {
    /// The time(stamp) of the forecast.
    #[serde(serialize_with = "ts_seconds::serialize")]
    time: DateTime<Utc>,

    /// The forecasted value (maximum of the pollen score and the AQI value; see `merge`).
    value: f32,
}
|
||||
|
||||
impl Item {
    /// Creates a new combined item (test-only convenience constructor).
    #[cfg(test)]
    pub(crate) fn new(time: DateTime<Utc>, value: f32) -> Self {
        Self { time, value }
    }
}
|
||||
|
||||
/// Merges pollen samples and AQI items into combined items.
///
/// The merging drops items from either the pollen samples or from the AQI items if they are not
/// stamped within half an hour of the first item of the latest starting series, thus lining them
/// up before they are combined.
///
/// # Errors
///
/// Returns a [`MergeError`] variant when either series is empty (after the recency filter) or
/// when the two series cannot be lined up within half an hour.
fn merge(
    pollen_samples: Vec<BuienradarSample>,
    aqi_items: Vec<LuchtmeetnetItem>,
) -> Result<Vec<Item>, MergeError> {
    let mut pollen_samples = pollen_samples;
    let mut aqi_items = aqi_items;

    // Only retain samples/items whose timestamps are no more than an hour in the past.
    let now = Utc::now();
    pollen_samples.retain(|smp| smp.time.signed_duration_since(now).num_seconds() > -3600);
    aqi_items.retain(|item| item.time.signed_duration_since(now).num_seconds() > -3600);

    // Align the iterators based on the (hourly) timestamps!
    let pollen_first_time = pollen_samples
        .first()
        .ok_or(MergeError::NoPollenItemFound)?
        .time;
    let aqi_first_time = aqi_items.first().ok_or(MergeError::NoAqiItemFound)?.time;
    if pollen_first_time < aqi_first_time {
        // Drain one or more pollen samples to line up.
        let idx = pollen_samples
            .iter()
            .position(|smp| {
                smp.time
                    .signed_duration_since(aqi_first_time)
                    .num_seconds()
                    .abs()
                    < 1800
            })
            .ok_or(MergeError::NoCloseAqiItemFound)?;
        pollen_samples.drain(..idx);
    } else {
        // Drain one or more AQI items to line up.
        let idx = aqi_items
            .iter()
            .position(|item| {
                item.time
                    .signed_duration_since(pollen_first_time)
                    .num_seconds()
                    .abs()
                    < 1800
            })
            .ok_or(MergeError::NoClosePollenItemFound)?;
        aqi_items.drain(..idx);
    }

    // Combine the samples with items by taking the maximum of pollen sample score and AQI item
    // value. `zip` truncates to the shorter of the two aligned series.
    let items = pollen_samples
        .into_iter()
        .zip(aqi_items)
        .map(|(pollen_sample, aqi_item)| {
            let time = pollen_sample.time;
            let value = (pollen_sample.score as f32).max(aqi_item.value);

            Item { time, value }
        })
        .collect();

    Ok(items)
}
|
||||
|
||||
/// Retrieves the combined forecasted items for the provided position and metric.
|
||||
///
|
||||
/// It supports the following metric:
|
||||
/// * [`Metric::PAQI`]
|
||||
#[cached(
|
||||
time = 1800,
|
||||
key = "(Position, Metric)",
|
||||
convert = r#"{ (position, metric) }"#,
|
||||
result = true
|
||||
)]
|
||||
pub(crate) async fn get(
|
||||
position: Position,
|
||||
metric: Metric,
|
||||
maps_handle: &MapsHandle,
|
||||
) -> Result<Vec<Item>, Error> {
|
||||
if metric != Metric::PAQI {
|
||||
return Err(Error::UnsupportedMetric(metric));
|
||||
};
|
||||
let pollen_items = buienradar::get_samples(position, Metric::Pollen, maps_handle).await?;
|
||||
let aqi_items = luchtmeetnet::get(position, Metric::AQI).await?;
|
||||
let items = merge(pollen_items, aqi_items)?;
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use chrono::{Duration, Timelike};

    use super::*;

    #[test]
    fn merge() {
        // Truncate to whole minutes so equality checks are not thrown off by
        // sub-minute noise from `Utc::now()`.
        let t_now = Utc::now()
            .with_second(0)
            .unwrap()
            .with_nanosecond(0)
            .unwrap();
        // Two timestamps in the past (stale data) and three in the near future.
        let t_m2 = t_now.checked_sub_signed(Duration::days(1)).unwrap();
        let t_m1 = t_now.checked_sub_signed(Duration::hours(2)).unwrap();
        let t_0 = t_now.checked_add_signed(Duration::minutes(12)).unwrap();
        let t_1 = t_now.checked_add_signed(Duration::minutes(72)).unwrap();
        let t_2 = t_now.checked_add_signed(Duration::minutes(132)).unwrap();

        // Both series cover the same five timestamps, pairwise combinable.
        let pollen_samples = Vec::from([
            BuienradarSample::new(t_m2, 4),
            BuienradarSample::new(t_m1, 5),
            BuienradarSample::new(t_0, 1),
            BuienradarSample::new(t_1, 3),
            BuienradarSample::new(t_2, 2),
        ]);
        let aqi_items = Vec::from([
            LuchtmeetnetItem::new(t_m2, 4.0),
            LuchtmeetnetItem::new(t_m1, 5.0),
            LuchtmeetnetItem::new(t_0, 1.1),
            LuchtmeetnetItem::new(t_1, 2.9),
            LuchtmeetnetItem::new(t_2, 2.4),
        ]);

        // Perform a normal merge.
        let merged = super::merge(pollen_samples.clone(), aqi_items.clone());
        assert!(merged.is_ok());
        let paqi = merged.unwrap();
        assert_eq!(
            paqi,
            Vec::from([
                Item::new(t_0, 1.1),
                Item::new(t_1, 3.0),
                Item::new(t_2, 2.4),
            ])
        );

        // The pollen samples are shifted, i.e. one hour in the future.
        let shifted_pollen_samples = pollen_samples[2..]
            .iter()
            .cloned()
            .map(|mut item| {
                item.time = item.time.checked_add_signed(Duration::hours(1)).unwrap();
                item
            })
            .collect::<Vec<_>>();
        let merged = super::merge(shifted_pollen_samples, aqi_items.clone());
        assert!(merged.is_ok());
        let paqi = merged.unwrap();
        assert_eq!(paqi, Vec::from([Item::new(t_1, 2.9), Item::new(t_2, 3.0)]));

        // The AQI items are shifted, i.e. one hour in the future.
        let shifted_aqi_items = aqi_items[2..]
            .iter()
            .cloned()
            .map(|mut item| {
                item.time = item.time.checked_add_signed(Duration::hours(1)).unwrap();
                item
            })
            .collect::<Vec<_>>();
        let merged = super::merge(pollen_samples.clone(), shifted_aqi_items);
        assert!(merged.is_ok());
        let paqi = merged.unwrap();
        assert_eq!(paqi, Vec::from([Item::new(t_1, 3.0), Item::new(t_2, 2.9)]));

        // The maximum sample/item should not be later than the interval the PAQI items cover.
        let merged = super::merge(pollen_samples[..3].to_vec(), aqi_items.clone());
        assert!(merged.is_ok());
        let paqi = merged.unwrap();
        assert_eq!(paqi, Vec::from([Item::new(t_0, 1.1)]));

        let merged = super::merge(pollen_samples.clone(), aqi_items[..3].to_vec());
        assert!(merged.is_ok());
        let paqi = merged.unwrap();
        assert_eq!(paqi, Vec::from([Item::new(t_0, 1.1)]));

        // Merging fails because the samples/items are too far (6 hours) apart.
        let shifted_aqi_items = aqi_items
            .iter()
            .cloned()
            .map(|mut item| {
                item.time = item.time.checked_add_signed(Duration::hours(6)).unwrap();
                item
            })
            .collect::<Vec<_>>();
        let merged = super::merge(pollen_samples.clone(), shifted_aqi_items);
        assert_eq!(merged, Err(MergeError::NoCloseAqiItemFound));

        let shifted_pollen_samples = pollen_samples
            .iter()
            .cloned()
            .map(|mut item| {
                item.time = item.time.checked_add_signed(Duration::hours(6)).unwrap();
                item
            })
            .collect::<Vec<_>>();
        let merged = super::merge(shifted_pollen_samples, aqi_items.clone());
        assert_eq!(merged, Err(MergeError::NoClosePollenItemFound));

        // The pollen samples list is empty, or everything is too old.
        let merged = super::merge(Vec::new(), aqi_items.clone());
        assert_eq!(merged, Err(MergeError::NoPollenItemFound));
        let merged = super::merge(pollen_samples[0..2].to_vec(), aqi_items.clone());
        assert_eq!(merged, Err(MergeError::NoPollenItemFound));

        // The AQI items list is empty, or everything is too old.
        let merged = super::merge(pollen_samples.clone(), Vec::new());
        assert_eq!(merged, Err(MergeError::NoAqiItemFound));
        let merged = super::merge(pollen_samples, aqi_items[0..2].to_vec());
        assert_eq!(merged, Err(MergeError::NoAqiItemFound));
    }
}
|
|
@ -1,85 +0,0 @@
|
|||
//! The Luchtmeetnet open data provider.
|
||||
//!
|
||||
//! For more information about Luchtmeetnet, see: <https://www.luchtmeetnet.nl/contact>.
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::serde::ts_seconds;
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use reqwest::Url;
|
||||
use rocket::serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::position::Position;
|
||||
use crate::{Error, Metric, Result};
|
||||
|
||||
/// The base URL for the Luchtmeetnet API.
///
/// Query parameters (`formula`, `latitude`, `longitude`) are appended per request.
const LUCHTMEETNET_BASE_URL: &str = "https://api.luchtmeetnet.nl/open_api/concentrations";
|
||||
|
||||
/// The Luchtmeetnet API data container.
///
/// This is only used temporarily during deserialization.
#[derive(Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
struct Container {
    /// The forecast items under the response's top-level `data` key.
    data: Vec<Item>,
}
|
||||
|
||||
/// The Luchtmeetnet API data item.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Item {
    /// The time(stamp) of the forecast.
    ///
    /// Deserialized from the API's `timestamp_measured` field; serialized
    /// as a Unix timestamp in seconds (`ts_seconds`).
    #[serde(
        rename(deserialize = "timestamp_measured"),
        serialize_with = "ts_seconds::serialize"
    )]
    pub(crate) time: DateTime<Utc>,

    /// The forecasted value.
    ///
    /// The unit depends on the selected [metric](Metric).
    pub(crate) value: f32,
}
|
||||
|
||||
impl Item {
    /// Constructs an item directly from a time and a value.
    ///
    /// Test-only convenience constructor; production items come from
    /// deserializing the API response.
    #[cfg(test)]
    pub(crate) fn new(time: DateTime<Utc>, value: f32) -> Self {
        Self { time, value }
    }
}
|
||||
|
||||
/// Retrieves the Luchtmeetnet forecasted items for the provided position and metric.
|
||||
///
|
||||
/// It supports the following metrics:
|
||||
/// * [`Metric::AQI`]
|
||||
/// * [`Metric::NO2`]
|
||||
/// * [`Metric::O3`]
|
||||
/// * [`Metric::PM10`]
|
||||
#[cached(time = 1800, result = true)]
|
||||
pub(crate) async fn get(position: Position, metric: Metric) -> Result<Vec<Item>> {
|
||||
let formula = match metric {
|
||||
Metric::AQI => "lki",
|
||||
Metric::NO2 => "no2",
|
||||
Metric::O3 => "o3",
|
||||
Metric::PM10 => "pm10",
|
||||
_ => return Err(Error::UnsupportedMetric(metric)),
|
||||
};
|
||||
let mut url = Url::parse(LUCHTMEETNET_BASE_URL).unwrap();
|
||||
url.query_pairs_mut()
|
||||
.append_pair("formula", formula)
|
||||
.append_pair("latitude", &position.lat_as_str(5))
|
||||
.append_pair("longitude", &position.lon_as_str(5));
|
||||
|
||||
println!("▶️ Retrieving Luchtmeetnet data from: {url}");
|
||||
let response = reqwest::get(url).await?;
|
||||
let root: Container = response.error_for_status()?.json().await?;
|
||||
|
||||
// Filter items that are older than one hour before now. They seem to occur sometimes?
|
||||
let too_old = Utc::now() - Duration::hours(1);
|
||||
let items = root
|
||||
.data
|
||||
.into_iter()
|
||||
.filter(|item| item.time > too_old)
|
||||
.collect();
|
||||
|
||||
Ok(items)
|
||||
}
|
Loading…
Reference in New Issue