Compare commits


No commits in common. "main" and "v0.2.1" have entirely different histories.
main ... v0.2.1

15 changed files with 1814 additions and 2198 deletions

View file

@ -1,45 +0,0 @@
name: "Check, lint and test using Cargo"
on:
- pull_request
- push
- workflow_dispatch
jobs:
check_lint:
name: Check, lint and test
runs-on: debian-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install Rust stable toolchain
uses: https://github.com/actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt, clippy
- name: Run cargo check
uses: https://github.com/actions-rs/cargo@v1
with:
command: check
- name: Run cargo clippy
uses: https://github.com/actions-rs/cargo@v1
with:
command: clippy
args: -- -D warnings
- name: Run cargo fmt
uses: https://github.com/actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: Run cargo test
uses: https://github.com/actions-rs/cargo@v1
with:
command: test
args: --all-features

View file

@ -1,112 +0,0 @@
name: "Release"
on:
push:
tags:
- "v*"
jobs:
release:
name: "Release"
runs-on: debian-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Determine the version of the release
run: |
VERSION=${GITHUB_REF_NAME#v}
echo "Releasing version: $VERSION"
echo "VERSION=$VERSION" >> $GITHUB_ENV
- name: Get the release notes from the changelog
run: |
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
RELEASE_NOTES=$(sed -n -e "/^## \[$VERSION\]/,/^## \[/{//"'!'"p;}" CHANGELOG.md | sed -e '1d;$d')
echo "Release notes:"
echo
echo "$RELEASE_NOTES"
echo "RELEASE_NOTES<<$EOF" >> "$GITHUB_ENV"
echo "$RELEASE_NOTES" >> "$GITHUB_ENV"
echo "$EOF" >> "$GITHUB_ENV"
- name: Install Go
uses: actions/setup-go@v4
with:
go-version: '>=1.20.1'
- name: Release to Gitea
uses: actions/release-action@main
with:
# This is available by default.
api_key: '${{ secrets.RELEASE_TOKEN }}'
files: FIXME
title: 'Release ${{ env.VERSION }}'
body: '${{ env.RELEASE_NOTES }}'
release-crate:
name: "Release Rust crate"
runs-on: debian-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Rust stable toolchain
uses: https://github.com/actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Use sparse Cargo index for crates.io
run: echo -e '[registries.crates-io]\nprotocol = "sparse"' >> /root/.cargo/config.toml
- name: Register the Gitea crate registry with Cargo
run: echo -e '[registries.luon]\nindex = "https://git.luon.net/paul/_cargo-index.git"' >> /root/.cargo/config.toml
- name: Run cargo publish
uses: https://github.com/actions-rs/cargo@v1
env:
# This needs to be provided for the repository; no login necessary as a result.
CARGO_REGISTRIES_LUON_TOKEN: '${{ secrets.CARGO_TOKEN }}'
with:
command: publish
args: --registry luon
release-deb:
name: "Release Debian package"
runs-on: debian-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install Rust stable toolchain
uses: https://github.com/actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Install cargo-deb
uses: https://github.com/brndnmtthws/rust-action-cargo-binstall@v1
with:
packages: cargo-deb
- name: Run cargo-deb
uses: https://github.com/actions-rs/cargo@v1
with:
command: deb
- name: Publish Debian package
env:
DEB_REPO_TOKEN: '${{ secrets.DEB_REPO_TOKEN }}'
run: |
curl --config <(printf "user=%s:%s" paul "${DEB_REPO_TOKEN}") \
--upload-file target/debian/sinoptik*.deb \
https://git.luon.net/api/packages/paul/debian/pool/bookworm/main/upload
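
For reference, the "Get the release notes from the changelog" step in the workflow above pulls the section for the tagged version out of `CHANGELOG.md` with `sed`. The sketch below shows the same extraction in Rust; it is a hypothetical helper for illustration only and is not part of the workflow or the crate:

```rust
use std::fs;

/// Hypothetical helper mirroring the workflow's `sed` pipeline: return the body
/// of the `## [<version>]` section of a Keep a Changelog file, i.e. everything
/// between that heading and the next `## [` heading.
fn release_notes(changelog: &str, version: &str) -> Option<String> {
    let heading = format!("## [{version}]");
    let mut lines = changelog.lines().skip_while(|l| !l.starts_with(&heading));
    lines.next()?; // Drop the matching heading line itself.
    let body: Vec<&str> = lines.take_while(|l| !l.starts_with("## [")).collect();
    Some(body.join("\n").trim().to_owned())
}

fn main() {
    let changelog = fs::read_to_string("CHANGELOG.md").expect("cannot read CHANGELOG.md");
    println!("{}", release_notes(&changelog, "0.2.1").unwrap_or_default());
}
```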

View file

@ -7,168 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
## [0.2.12] - 2024-05-09 ## [0.2.1] - 2022-05-08
### Security
* Updated dependencies, fixes security advisories:
* [RUSTSEC-2024-0019](https://rustsec.org/advisories/RUSTSEC-2024-0019)
* [RUSTSEC-2024-0332](https://rustsec.org/advisories/RUSTSEC-2024-0332)
### Changed
* Update dependency on `cached`, `chrono-tz`, `image` and `reqwest`
### Fixed
* Fix tests; reduce required accuracy for geocoded coordinates again and
don't run background map updates during tests
## [0.2.11] - 2024-02-27
### Security
* Updated dependencies, fixes security advisories:
* [RUSTSEC-2024-0003](https://rustsec.org/advisories/RUSTSEC-2024-0003)
* [RUSTSEC-2023-0072](https://rustsec.org/advisories/RUSTSEC-2023-0072)
### Fixed
* Fix clippy issue
* Tweak/fix tests; reduce required accuracy for geocoded coordinates
## [0.2.10] - 2023-11-03
### Security
* Update dependencies
([RUSTSEC-2020-0071](https://rustsec.org/advisories/RUSTSEC-2020-0071.html),
[RUSTSEC-2023-0044](https://rustsec.org/advisories/RUSTSEC-2023-0044.html))
### Changed
* Switch to Rocket 0.5 RC4
* Update dependency on `cached`
### Fixed
* Fix clippy issues
## [0.2.9] - 2023-08-25
### Changed
* Update release Gitea Actions workflow; add separate job to release Debian
package to the new repository
### Security
* Update dependencies ([RUSTSEC-2023-0044](https://rustsec.org/advisories/RUSTSEC-2023-0044))
## [0.2.8] - 2023-06-05
### Added
* Print the version on lift off (#30)
* Add a `/version` endpoint to the API (#30)
### Changed
* Update dependency on `cached`
### Fixed
* Properly attribute the PAQI metric in its description(s)
### Removed
* No longer provide a map for the PAQI metric; the map used is only for pollen
## [0.2.7] - 2023-05-26
### Fixed
* Switch back to the original Buienradar color scheme/maps key (#27)
* Fix the token used to publish the crate to the Cargo package index
## [0.2.6] - 2023-05-24
### Added
* Add full release Gitea Actions workflow
### Changed
* Simplify Gitea Actions check, lint and test workflow
* Improve no known map colors found error description
### Fixed
* Update coordinates of Eindhoven in tests (Nominatim changed its geocoding)
* Increase sampling area to 31×31 pixels (#26)
* Switch to new Buienradar color scheme/maps key (#27)
## [0.2.5] - 2023-03-24
### Added
* Add Gitea Actions workflow for cargo
### Changed
* Updated dependencies on `cached`, `chrono-tz` and `geocoding`
### Fixed
* Fix float comparison in tests
* Fix clippy issues
### Security
* Update dependencies ([RUSTSEC-2023-0018](https://rustsec.org/advisories/RUSTSEC-2023-0018.html))
## [0.2.4] - 2022-07-05
### Added
* Add proper error handling and show them via the API (#25)
### Changed
* Run map refresher as an ad hoc liftoff fairing in Rocket
* Changed emojis in log output
### Removed
* Removed `AQI_max` and `pollen_max` from the forecast JSON introduced in
version 0.2.0
### Fixed
* Verify sample coordinate bounds (#24)
* Default to current time if `Last-Modified` HTTP header is missing for
retrieved maps
## [0.2.3] - 2022-05-21
### Fixed
* Update the examples in `README.md`
* Fix tests by adding missing type
* Fix map key color code for level 8 used by map sampling
## [0.2.2] - 2022-05-10
### Changed
* Switch to Rocket 0.5 RC2
### Fixed
* Fix timestamps for map samples not being correct (AQI, PAQI, UVI metrics) (#22)
* Valid samples/items will no longer be discarded too early
## [0.2.1] - 2022-05-08
### Added ### Added
@ -189,18 +28,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
Initial release. Initial release.
[Unreleased]: https://git.luon.net/paul/sinoptik/compare/v0.2.12...HEAD [Unreleased]: https://git.luon.net/paul/sinoptik/compare/v0.2.1...HEAD
[0.2.12]: https://git.luon.net/paul/sinoptik/compare/v0.2.11...v0.2.12
[0.2.11]: https://git.luon.net/paul/sinoptik/compare/v0.2.10...v0.2.11
[0.2.10]: https://git.luon.net/paul/sinoptik/compare/v0.2.9...v0.2.10
[0.2.9]: https://git.luon.net/paul/sinoptik/compare/v0.2.8...v0.2.9
[0.2.8]: https://git.luon.net/paul/sinoptik/compare/v0.2.7...v0.2.8
[0.2.7]: https://git.luon.net/paul/sinoptik/compare/v0.2.6...v0.2.7
[0.2.6]: https://git.luon.net/paul/sinoptik/compare/v0.2.5...v0.2.6
[0.2.5]: https://git.luon.net/paul/sinoptik/compare/v0.2.4...v0.2.5
[0.2.4]: https://git.luon.net/paul/sinoptik/compare/v0.2.3...v0.2.4
[0.2.3]: https://git.luon.net/paul/sinoptik/compare/v0.2.2...v0.2.3
[0.2.2]: https://git.luon.net/paul/sinoptik/compare/v0.2.1...v0.2.2
[0.2.1]: https://git.luon.net/paul/sinoptik/compare/v0.2.0...v0.2.1 [0.2.1]: https://git.luon.net/paul/sinoptik/compare/v0.2.0...v0.2.1
[0.2.0]: https://git.luon.net/paul/sinoptik/compare/v0.1.0...v0.2.0 [0.2.0]: https://git.luon.net/paul/sinoptik/compare/v0.1.0...v0.2.0
[0.1.0]: https://git.luon.net/paul/sinoptik/commits/tag/v0.1.0 [0.1.0]: https://git.luon.net/paul/sinoptik/commits/tag/v0.1.0

Cargo.lock generated

File diff suppressed because it is too large

View file

@ -1,6 +1,6 @@
[package] [package]
name = "sinoptik" name = "sinoptik"
version = "0.2.12" version = "0.2.1"
authors = [ authors = [
"Admar Schoonen <admar@luon.net", "Admar Schoonen <admar@luon.net",
"Paul van Tilburg <paul@luon.net>" "Paul van Tilburg <paul@luon.net>"
@ -12,18 +12,15 @@ repository = "https://git.luon.net/paul/sinoptik"
license = "MIT" license = "MIT"
[dependencies] [dependencies]
cached = { version = "0.51.3", features = ["async"] } cached = { version = "0.34.0", features = ["async"] }
chrono = "0.4.19" chrono = "0.4.19"
chrono-tz = "0.9.0" chrono-tz = "0.6.1"
color-eyre = "0.6.1"
csv = "1.1.6" csv = "1.1.6"
geocoding = "0.4.0" geocoding = "0.3.1"
image = { version = "0.25.1", default-features = false, features = ["png"]} image = "0.24.1"
reqwest = { version = "0.12.4", features = ["json"] } reqwest = { version = "0.11.9", features = ["json"] }
rocket = { version = "0.5.0-rc.3", features = ["json"] } rocket = { version = "0.5.0-rc.1", features = ["json"] }
thiserror = "1.0.31"
[build-dependencies]
vergen = { version = "8.2.1", default-features = false, features = ["build", "git", "gitcl"] }
[dev-dependencies] [dev-dependencies]
assert_float_eq = "1.1.3" assert_float_eq = "1.1.3"
@ -45,8 +42,7 @@ Currently supported metrics are:
* O₃ concentration (per hour, from Luchtmeetnet) * O₃ concentration (per hour, from Luchtmeetnet)
* Particulate matter (PM10) concentration (per hour, from Luchtmeetnet) * Particulate matter (PM10) concentration (per hour, from Luchtmeetnet)
* Pollen (per hour, from Buienradar) * Pollen (per hour, from Buienradar)
* Pollen/air quality index (per hour, combined from Buienradar and Luchtmeetnet) * Pollen/air quality index (per hour, from Buienradar)
* Precipitation (per 5 minutes, from Buienradar) * Precipitation (per 5 minutes, from Buienradar)
* UV index (per day, from Buienradar) * UV index (per day, from Buienradar)

README.md
View file

@ -11,8 +11,7 @@ Currently supported metrics are:
* O₃ concentration (per hour, from [Luchtmeetnet]) * O₃ concentration (per hour, from [Luchtmeetnet])
* Particulate matter (PM10) concentration (per hour, from [Luchtmeetnet]) * Particulate matter (PM10) concentration (per hour, from [Luchtmeetnet])
* Pollen (per hour, from [Buienradar]) * Pollen (per hour, from [Buienradar])
* Pollen/air quality index (per hour, combined from [Buienradar] and [Luchtmeetnet]) * Pollen/air quality index (per hour, from [Buienradar])
* Precipitation (per 5 minutes, from [Buienradar]) * Precipitation (per 5 minutes, from [Buienradar])
* UV index (per day, from [Buienradar]) * UV index (per day, from [Buienradar])
@ -68,6 +67,7 @@ GET /forecast?address=Stationsplein,Utrecht&metrics[]=all
or directly by using its geocoded position: or directly by using its geocoded position:
```http ```http
GET /forecast?lat=52.0902&lon=5.1114&metrics[]=all GET /forecast?lat=52.0902&lon=5.1114&metrics[]=all
``` ```
@ -75,8 +75,8 @@ GET /forecast?lat=52.0902&lon=5.1114&metrics[]=all
### Metrics ### Metrics
When querying, the metrics need to be selected. It can be one of: `AQI`, `NO2`, When querying, the metrics need to be selected. It can be one of: `AQI`, `NO2`,
`O3`, `PAQI`, `PM10`, `pollen`, `precipitation` or `UVI`. If you use metric `O3`, `PAQI`, `PM10`, `pollen`, `precipitation` or `UVI`. If you use metric `all`, or
`all`, or `all` is part of the selected metrics, all metrics will be retrieved. `all` is part of the selected metrics, all metrics will be retrieved.
Note that the parameter "array" notation as well as the repeated parameter Note that the parameter "array" notation as well as the repeated parameter
notation are supported. For example: notation are supported. For example:
@ -86,7 +86,7 @@ GET /forecast?address=Stationsplein,Utrecht&metrics=AQI&metrics=pollen
GET /forecast?address=Stationsplein,Utrecht&metrics=all GET /forecast?address=Stationsplein,Utrecht&metrics=all
``` ```
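
Both notations arrive at the handler as the same `Vec<Metric>`: in the `src/lib.rs` diff further down, the routes take `metrics: Vec<Metric>` and `Metric` derives `rocket::FromFormField`. Below is a rough, self-contained sketch of that mapping, using a simplified hypothetical route rather than the project's actual handler (which also takes the maps handle and returns JSON):

```rust
use rocket::{get, FromFormField};

/// Simplified stand-in for the project's `Metric` enum.
#[derive(Copy, Clone, Debug, FromFormField)]
enum Metric {
    #[field(value = "all")]
    All,
    AQI,
    #[field(value = "pollen")]
    Pollen,
}

/// Both `?metrics[]=AQI&metrics[]=pollen` and `?metrics=AQI&metrics=pollen`
/// collect into the same `Vec<Metric>` argument.
#[get("/forecast?<address>&<metrics>")]
fn forecast_address(address: String, metrics: Vec<Metric>) -> String {
    format!("{address}: {metrics:?}")
}

#[rocket::launch]
fn rocket() -> _ {
    rocket::build().mount("/", rocket::routes![forecast_address])
}
```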
### Forecast responses ### Response
The response of the API is a JSON object that contains three fixed fields: The response of the API is a JSON object that contains three fixed fields:
@ -107,27 +107,27 @@ position:
{ {
"lat": 52.0905169, "lat": 52.0905169,
"lon": 5.1109709, "lon": 5.1109709,
"time": 1652188682, "time": 1645800043,
"UVI": [ "UVI": [
{ {
"time": 1652140800, "time": 1645799526,
"value": 4 "value": 1
}, },
{ {
"time": 1652227200, "time": 1645885926,
"value": 4 "value": 2
}, },
{ {
"time": 1652313600, "time": 1645972326,
"value": 4 "value": 3
}, },
{ {
"time": 1652400000, "time": 1646058726,
"value": 4 "value": 2
}, },
{ {
"time": 1652486400, "time": 1646145126,
"value": 5 "value": 1
} }
] ]
} }
@ -136,56 +136,34 @@ position:
#### Combined metric PAQI #### Combined metric PAQI
The PAQI (pollen/air quality index) metric is a special combined metric. The PAQI (pollen/air quality index) metric is a special combined metric.
If selected, it merges items from the AQI and pollen metric into `PAQI` by If selected, it not only merges items from the AQI and pollen metric into
selecting the maximum value for each hour: `PAQI` by selecting the maximum value for each hour, but it also yields the
24-hour maximum forecast item for air quality index in `AQI_max` and for
pollen in `pollen_max` separately:
```json ``` json
{ {
"lat": 52.0905169, "lat": 52.0905169,
"lon": 5.1109709, "lon": 5.1109709,
"time": 1652189065, "time": 1645800043,
"AQI_max": {
"time": 1652022000,
"value": 6.65
},
"PAQI": [ "PAQI": [
{ {
"time": 1652187600, "time": 1651951457,
"value": 6.09 "value": 6.04
}, },
{ {
"time": 1652191200, "time": 1651955057,
"value": 6.09 "value": 6.04
}, },
... ...
] ],
} "pollen_max": {
``` "time": 1652034257,
"value": 6
#### Errors
If geocoding of an address is requested but fails, a not found error is
returned (HTTP 404) with the following body (this will change in the future):
```json
{
"error": {
"code": 404,
"reason": "Not Found",
"description": "The requested resource could not be found."
}
}
```
If for any specific metric an error occurs, the list with forecast items will
be absent. However, the `errors` field will contain the error message for each
failed metric. For example, say Buienradar is down and precipitation forecast
items can not be retrieved:
```json
{
"lat": 52.0905169,
"lon": 5.1109709,
"time": 1654524574,
...
"errors": {
"precipitation": "HTTP request error: error sending request for url (https://gpsgadget.buienradar.nl/data/raintext?lat=52.09&lon=5.11): error trying to connect: tcp connect error: Connection refused (os error 111)"
} }
} }
``` ```
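
In other words, `PAQI` is built by lining up the hourly AQI and pollen items and keeping the larger value for each timestamp (the v0.2.1 side additionally reports the 24-hour maxima as `AQI_max` and `pollen_max`, which were removed again in 0.2.4). A minimal sketch of that hour-wise maximum merge, using hypothetical `(timestamp, value)` pairs rather than the crate's actual item types:

```rust
use std::collections::BTreeMap;

/// Merge two hourly series into one by keeping, per timestamp, the maximum of
/// the values present. The tuple type is a simplification for illustration.
fn merge_paqi(aqi: &[(i64, f64)], pollen: &[(i64, f64)]) -> Vec<(i64, f64)> {
    let mut merged: BTreeMap<i64, f64> = BTreeMap::new();
    for &(time, value) in aqi.iter().chain(pollen) {
        merged
            .entry(time)
            .and_modify(|v| *v = v.max(value))
            .or_insert(value);
    }
    merged.into_iter().collect()
}

fn main() {
    let aqi = [(1_652_187_600, 6.09), (1_652_191_200, 5.80)];
    let pollen = [(1_652_187_600, 4.00), (1_652_191_200, 6.09)];
    // Each hour ends up with the larger of the AQI and pollen values.
    println!("{:?}", merge_paqi(&aqi, &pollen));
}
```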
@ -210,37 +188,11 @@ or directly by using its geocoded position:
GET /map?lat=52.0902&lon=5.1114&metric=pollen GET /map?lat=52.0902&lon=5.1114&metric=pollen
``` ```
### Map responses ### Response
The response is a PNG image with a crosshair drawn on the map. If geocoding of The response is a PNG image with a crosshair drawn on the map. If geocoding of
an address fails or if the position is out of bounds of the map, nothing is an address fails or if the position is out of bounds of the map, nothing is
returned (HTTP 404). If the maps cannot/have not been downloaded or cached yet, returned (HTTP 404).
a service unavailable error is returned (HTTP 503).
## Version API endpoint
The `/version` API endpoint provides information about the current version and
build of the service. This can be used to check if it needs to be updated.
Again, there are no path or query parameters, just:
```http
GET /version
```
### Version responses
The response uses the JSON format and typically looks like this:
```json
{
"version": "0.2.7",
"timestamp": "2023-05-29T13:34:34.701323159Z",
"git_sha": "bb5962d",
"git_timestamp": "2023-05-29T15:32:17.000000000+02:00"
}
```
(Build and git information in example output may be out of date.)
## License ## License

View file

@ -1,9 +0,0 @@
use std::error::Error;
use vergen::EmitBuilder;
fn main() -> Result<(), Box<dyn Error>> {
// Generate the `cargo:` instructions to fill the appropriate environment variables.
EmitBuilder::builder().all_build().all_git().emit()?;
Ok(())
}

View file

@ -3,21 +3,20 @@
//! This module is used to construct a [`Forecast`] for the given position by retrieving data for //! This module is used to construct a [`Forecast`] for the given position by retrieving data for
//! the requested metrics from their providers. //! the requested metrics from their providers.
use std::collections::BTreeMap;
use std::fmt;
use rocket::serde::Serialize; use rocket::serde::Serialize;
use crate::maps::MapsHandle; use crate::maps::MapsHandle;
use crate::position::Position; use crate::position::Position;
use crate::providers;
use crate::providers::buienradar::{Item as BuienradarItem, Sample as BuienradarSample}; use crate::providers::buienradar::{Item as BuienradarItem, Sample as BuienradarSample};
use crate::providers::combined::Item as CombinedItem; use crate::providers::combined::Item as CombinedItem;
use crate::providers::luchtmeetnet::Item as LuchtmeetnetItem; use crate::providers::luchtmeetnet::Item as LuchtmeetnetItem;
use crate::{providers, Error};
/// The current forecast for a specific location. /// The current forecast for a specific location.
/// ///
/// Only the metrics asked for are included as well as the position and current time. /// Only the metrics asked for are included as well as the position and current time.
///
// TODO: Fill in missing data (#4)
#[derive(Debug, Default, Serialize)] #[derive(Debug, Default, Serialize)]
#[serde(crate = "rocket::serde")] #[serde(crate = "rocket::serde")]
pub(crate) struct Forecast { pub(crate) struct Forecast {
@ -34,6 +33,10 @@ pub(crate) struct Forecast {
#[serde(rename = "AQI", skip_serializing_if = "Option::is_none")] #[serde(rename = "AQI", skip_serializing_if = "Option::is_none")]
aqi: Option<Vec<LuchtmeetnetItem>>, aqi: Option<Vec<LuchtmeetnetItem>>,
/// The maximum air quality index value (when asked for PAQI).
#[serde(rename = "AQI_max", skip_serializing_if = "Option::is_none")]
aqi_max: Option<LuchtmeetnetItem>,
/// The NO₂ concentration (when asked for). /// The NO₂ concentration (when asked for).
#[serde(rename = "NO2", skip_serializing_if = "Option::is_none")] #[serde(rename = "NO2", skip_serializing_if = "Option::is_none")]
no2: Option<Vec<LuchtmeetnetItem>>, no2: Option<Vec<LuchtmeetnetItem>>,
@ -54,6 +57,10 @@ pub(crate) struct Forecast {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pollen: Option<Vec<BuienradarSample>>, pollen: Option<Vec<BuienradarSample>>,
/// The maximum pollen in the air (when asked for PAQI).
#[serde(skip_serializing_if = "Option::is_none")]
pollen_max: Option<BuienradarSample>,
/// The precipitation (when asked for). /// The precipitation (when asked for).
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
precipitation: Option<Vec<BuienradarItem>>, precipitation: Option<Vec<BuienradarItem>>,
@ -61,10 +68,6 @@ pub(crate) struct Forecast {
/// The UV index (when asked for). /// The UV index (when asked for).
#[serde(rename = "UVI", skip_serializing_if = "Option::is_none")] #[serde(rename = "UVI", skip_serializing_if = "Option::is_none")]
uvi: Option<Vec<BuienradarSample>>, uvi: Option<Vec<BuienradarSample>>,
/// Any errors that occurred.
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
errors: BTreeMap<Metric, String>,
} }
impl Forecast { impl Forecast {
@ -77,21 +80,13 @@ impl Forecast {
..Default::default() ..Default::default()
} }
} }
fn log_error(&mut self, metric: Metric, error: Error) {
eprintln!("💥 Encountered error during forecast: {}", error);
self.errors.insert(metric, error.to_string());
}
} }
/// The supported forecast metrics. /// The supported forecast metrics.
/// ///
/// This is used for selecting which metrics should be calculated & returned. /// This is used for selecting which metrics should be calculated & returned.
#[allow(clippy::upper_case_acronyms)] #[allow(clippy::upper_case_acronyms)]
#[derive( #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, rocket::FromFormField)]
Copy, Clone, Debug, Eq, Hash, Ord, PartialOrd, PartialEq, Serialize, rocket::FromFormField,
)]
#[serde(crate = "rocket::serde")]
pub(crate) enum Metric { pub(crate) enum Metric {
/// All metrics. /// All metrics.
#[field(value = "all")] #[field(value = "all")]
@ -107,9 +102,7 @@ pub(crate) enum Metric {
/// The particulate matter in the air. /// The particulate matter in the air.
PM10, PM10,
/// The pollen in the air. /// The pollen in the air.
#[serde(rename(serialize = "pollen"))]
Pollen, Pollen,
#[serde(rename(serialize = "precipitation"))]
/// The precipitation. /// The precipitation.
Precipitation, Precipitation,
/// The UV index. /// The UV index.
@ -125,22 +118,6 @@ impl Metric {
} }
} }
impl fmt::Display for Metric {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Metric::All => write!(f, "All"),
Metric::AQI => write!(f, "AQI"),
Metric::NO2 => write!(f, "NO2"),
Metric::O3 => write!(f, "O3"),
Metric::PAQI => write!(f, "PAQI"),
Metric::PM10 => write!(f, "PM10"),
Metric::Pollen => write!(f, "pollen"),
Metric::Precipitation => write!(f, "precipitation"),
Metric::UVI => write!(f, "UVI"),
}
}
}
/// Calculates and returns the forecast. /// Calculates and returns the forecast.
/// ///
/// The provided list `metrics` determines what will be included in the forecast. /// The provided list `metrics` determines what will be included in the forecast.
@ -163,53 +140,29 @@ pub(crate) async fn forecast(
match metric { match metric {
// This should have been expanded to all the metrics matched below. // This should have been expanded to all the metrics matched below.
Metric::All => unreachable!("The all metric should have been expanded"), Metric::All => unreachable!("The all metric should have been expanded"),
Metric::AQI => { Metric::AQI => forecast.aqi = providers::luchtmeetnet::get(position, metric).await,
forecast.aqi = providers::luchtmeetnet::get(position, metric) Metric::NO2 => forecast.no2 = providers::luchtmeetnet::get(position, metric).await,
.await Metric::O3 => forecast.o3 = providers::luchtmeetnet::get(position, metric).await,
.map_err(|err| forecast.log_error(metric, err))
.ok()
}
Metric::NO2 => {
forecast.no2 = providers::luchtmeetnet::get(position, metric)
.await
.map_err(|err| forecast.log_error(metric, err))
.ok()
}
Metric::O3 => {
forecast.o3 = providers::luchtmeetnet::get(position, metric)
.await
.map_err(|err| forecast.log_error(metric, err))
.ok()
}
Metric::PAQI => { Metric::PAQI => {
forecast.paqi = providers::combined::get(position, metric, maps_handle) if let Some((paqi, pollen_max, aqi_max)) =
.await providers::combined::get(position, metric, maps_handle).await
.map_err(|err| forecast.log_error(metric, err)) {
.ok() forecast.paqi = Some(paqi);
} forecast.aqi_max = Some(aqi_max);
Metric::PM10 => { forecast.pollen_max = Some(pollen_max);
forecast.pm10 = providers::luchtmeetnet::get(position, metric) }
.await
.map_err(|err| forecast.log_error(metric, err))
.ok()
} }
Metric::PM10 => forecast.pm10 = providers::luchtmeetnet::get(position, metric).await,
Metric::Pollen => { Metric::Pollen => {
forecast.pollen = providers::buienradar::get_samples(position, metric, maps_handle) forecast.pollen =
.await providers::buienradar::get_samples(position, metric, maps_handle).await
.map_err(|err| forecast.log_error(metric, err))
.ok()
} }
Metric::Precipitation => { Metric::Precipitation => {
forecast.precipitation = providers::buienradar::get_items(position, metric) forecast.precipitation = providers::buienradar::get_items(position, metric).await
.await
.map_err(|err| forecast.log_error(metric, err))
.ok()
} }
Metric::UVI => { Metric::UVI => {
forecast.uvi = providers::buienradar::get_samples(position, metric, maps_handle) forecast.uvi =
.await providers::buienradar::get_samples(position, metric, maps_handle).await
.map_err(|err| forecast.log_error(metric, err))
.ok()
} }
} }
} }

View file

@ -1,31 +1,23 @@
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
#![warn( #![warn(
clippy::all, clippy::all,
missing_copy_implementations,
missing_debug_implementations, missing_debug_implementations,
rust_2018_idioms, rust_2018_idioms,
rustdoc::broken_intra_doc_links, rustdoc::broken_intra_doc_links
trivial_casts,
trivial_numeric_casts,
renamed_and_removed_lints,
unsafe_code,
unstable_features,
unused_import_braces,
unused_qualifications
)] )]
#![deny(missing_docs)] #![deny(missing_docs)]
use std::future::Future;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use rocket::fairing::AdHoc; use rocket::http::ContentType;
use rocket::http::Status; use rocket::response::content::Custom;
use rocket::response::Responder;
use rocket::serde::json::Json; use rocket::serde::json::Json;
use rocket::serde::Serialize; use rocket::{get, routes, Build, Rocket, State};
use rocket::{get, routes, Build, Request, Rocket, State};
use self::forecast::{forecast, Forecast, Metric}; pub(crate) use self::forecast::Metric;
use self::maps::{mark_map, Error as MapsError, Maps, MapsHandle}; use self::forecast::{forecast, Forecast};
pub(crate) use self::maps::{mark_map, Maps, MapsHandle};
use self::position::{resolve_address, Position}; use self::position::{resolve_address, Position};
pub(crate) mod forecast; pub(crate) mod forecast;
@ -33,104 +25,17 @@ pub(crate) mod maps;
pub(crate) mod position; pub(crate) mod position;
pub(crate) mod providers; pub(crate) mod providers;
/// The possible provider errors that can occur.
#[derive(Debug, thiserror::Error)]
pub(crate) enum Error {
/// A CSV parse error occurred.
#[error("CSV parse error: {0}")]
CsvParse(#[from] csv::Error),
/// A geocoding error occurred.
#[error("Geocoding error: {0}")]
Geocoding(#[from] geocoding::GeocodingError),
/// An HTTP request error occurred.
#[error("HTTP request error: {0}")]
HttpRequest(#[from] reqwest::Error),
/// Failed to join a task.
#[error("Failed to join a task: {0}")]
Join(#[from] rocket::tokio::task::JoinError),
/// Failed to merge AQI & pollen items.
#[error("Failed to merge AQI & pollen items: {0}")]
Merge(#[from] providers::combined::MergeError),
/// Failed to retrieve or sample the maps.
#[error("Failed to retrieve or sample the maps: {0}")]
Maps(#[from] maps::Error),
/// No geocoded position could be found.
#[error("No geocoded position could be found")]
NoPositionFound,
/// Encountered an unsupported metric.
#[error("Encountered an unsupported metric: {0}")]
UnsupportedMetric(Metric),
}
impl<'r, 'o: 'r> Responder<'r, 'o> for Error {
fn respond_to(self, _request: &'r Request<'_>) -> rocket::response::Result<'o> {
eprintln!("💥 Encountered error during request: {}", self);
let status = match self {
Error::NoPositionFound => Status::NotFound,
Error::Maps(MapsError::NoMapsYet) => Status::ServiceUnavailable,
Error::Maps(MapsError::OutOfBoundCoords(_, _)) => Status::NotFound,
Error::Maps(MapsError::OutOfBoundOffset(_)) => Status::NotFound,
_ => Status::InternalServerError,
};
Err(status)
}
}
#[derive(Responder)]
#[response(content_type = "image/png")]
struct PngImageData(Vec<u8>);
/// Result type that defaults to [`Error`] as the default error type.
pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
/// The version information as JSON response.
#[derive(Debug, Serialize)]
#[serde(crate = "rocket::serde")]
struct VersionInfo {
/// The version of the build.
version: String,
/// The timestamp of the build.
timestamp: String,
/// The (most recent) git SHA used for the build.
git_sha: String,
/// The timestamp of the last git commit used for the build.
git_timestamp: String,
}
impl VersionInfo {
/// Retrieves the version information from the environment variables.
fn new() -> Self {
Self {
version: String::from(env!("CARGO_PKG_VERSION")),
timestamp: String::from(env!("VERGEN_BUILD_TIMESTAMP")),
git_sha: String::from(&env!("VERGEN_GIT_SHA")[0..7]),
git_timestamp: String::from(env!("VERGEN_GIT_COMMIT_TIMESTAMP")),
}
}
}
/// Handler for retrieving the forecast for an address. /// Handler for retrieving the forecast for an address.
#[get("/forecast?<address>&<metrics>")] #[get("/forecast?<address>&<metrics>")]
async fn forecast_address( async fn forecast_address(
address: String, address: String,
metrics: Vec<Metric>, metrics: Vec<Metric>,
maps_handle: &State<MapsHandle>, maps_handle: &State<MapsHandle>,
) -> Result<Json<Forecast>> { ) -> Option<Json<Forecast>> {
let position = resolve_address(address).await?; let position = resolve_address(address).await?;
let forecast = forecast(position, metrics, maps_handle).await; let forecast = forecast(position, metrics, maps_handle).await;
Ok(Json(forecast)) Some(Json(forecast))
} }
/// Handler for retrieving the forecast for a geocoded position. /// Handler for retrieving the forecast for a geocoded position.
@ -156,11 +61,11 @@ async fn map_address(
address: String, address: String,
metric: Metric, metric: Metric,
maps_handle: &State<MapsHandle>, maps_handle: &State<MapsHandle>,
) -> Result<PngImageData> { ) -> Option<Custom<Vec<u8>>> {
let position = resolve_address(address).await?; let position = resolve_address(address).await?;
let image_data = mark_map(position, metric, maps_handle).await; let image_data = mark_map(position, metric, maps_handle).await;
image_data.map(PngImageData) image_data.map(|id| Custom(ContentType::PNG, id))
} }
/// Handler for showing the current map with the geocoded position for a specific metric. /// Handler for showing the current map with the geocoded position for a specific metric.
@ -172,64 +77,29 @@ async fn map_geo(
lon: f64, lon: f64,
metric: Metric, metric: Metric,
maps_handle: &State<MapsHandle>, maps_handle: &State<MapsHandle>,
) -> Result<PngImageData> { ) -> Option<Custom<Vec<u8>>> {
let position = Position::new(lat, lon); let position = Position::new(lat, lon);
let image_data = mark_map(position, metric, maps_handle).await; let image_data = mark_map(position, metric, maps_handle).await;
image_data.map(PngImageData) image_data.map(|id| Custom(ContentType::PNG, id))
}
/// Returns the version information.
#[get("/version", format = "application/json")]
async fn version() -> Result<Json<VersionInfo>> {
Ok(Json(VersionInfo::new()))
}
/// Sets up Rocket without fairings.
fn rocket_core(maps_handle: MapsHandle) -> Rocket<Build> {
rocket::build()
.mount(
"/",
routes![
forecast_address,
forecast_geo,
map_address,
map_geo,
version
],
)
.manage(maps_handle)
} }
/// Sets up Rocket. /// Sets up Rocket.
fn rocket(maps_handle: MapsHandle) -> Rocket<Build> { fn rocket(maps_handle: MapsHandle) -> Rocket<Build> {
let rocket = rocket_core(Arc::clone(&maps_handle)); rocket::build().manage(maps_handle).mount(
let maps_refresher = maps::run(maps_handle); "/",
routes![forecast_address, forecast_geo, map_address, map_geo],
rocket )
.attach(AdHoc::on_liftoff("Maps refresher", |_| {
Box::pin(async move {
// We don't care about the join handle nor error results?
let _refresher = rocket::tokio::spawn(maps_refresher);
})
}))
.attach(AdHoc::on_liftoff("Version", |_| {
Box::pin(async move {
let name = env!("CARGO_PKG_NAME");
let version = env!("CARGO_PKG_VERSION");
let git_sha = &env!("VERGEN_GIT_SHA")[0..7];
println!("🌁 Started {name} v{version} (git @{git_sha})");
})
}))
} }
/// Sets up Rocket and the maps cache refresher task. /// Sets up Rocket and the maps cache refresher task.
pub fn setup() -> Rocket<Build> { pub fn setup() -> (Rocket<Build>, impl Future<Output = ()>) {
let maps = Maps::new(); let maps = Maps::new();
let maps_handle = Arc::new(Mutex::new(maps)); let maps_handle = Arc::new(Mutex::new(maps));
let maps_refresher = maps::run(Arc::clone(&maps_handle));
let rocket = rocket(maps_handle);
rocket(maps_handle) (rocket, maps_refresher)
} }
#[cfg(test)] #[cfg(test)]
@ -237,19 +107,16 @@ mod tests {
use assert_float_eq::*; use assert_float_eq::*;
use assert_matches::assert_matches; use assert_matches::assert_matches;
use image::{DynamicImage, Rgba, RgbaImage}; use image::{DynamicImage, Rgba, RgbaImage};
use rocket::http::{ContentType, Status}; use rocket::http::Status;
use rocket::local::blocking::Client; use rocket::local::blocking::Client;
use rocket::serde::json::Value as JsonValue; use rocket::serde::json::Value as JsonValue;
use super::maps::RetrievedMaps;
use super::*; use super::*;
fn maps_stub(map_count: u32) -> RetrievedMaps { fn maps_stub(map_count: u32) -> DynamicImage {
let map_color = Rgba::from([73, 218, 33, 255]); // First color from map key. let map_color = Rgba::from([73, 218, 33, 255]); // First color from map key.
let image =
DynamicImage::ImageRgba8(RgbaImage::from_pixel(820 * map_count, 988, map_color));
RetrievedMaps::new(image) DynamicImage::ImageRgba8(RgbaImage::from_pixel(820 * map_count, 988, map_color))
} }
fn maps_handle_stub() -> MapsHandle { fn maps_handle_stub() -> MapsHandle {
@ -269,15 +136,17 @@ mod tests {
let response = client.get("/forecast?address=eindhoven").dispatch(); let response = client.get("/forecast?address=eindhoven").dispatch();
assert_eq!(response.status(), Status::Ok); assert_eq!(response.status(), Status::Ok);
let json = response.into_json::<JsonValue>().expect("Not valid JSON"); let json = response.into_json::<JsonValue>().expect("Not valid JSON");
assert_float_absolute_eq!(json["lat"].as_f64().unwrap(), 51.448557, 1e-1); assert_f64_near!(json["lat"].as_f64().unwrap(), 51.4392648);
assert_float_absolute_eq!(json["lon"].as_f64().unwrap(), 5.450123, 1e-1); assert_f64_near!(json["lon"].as_f64().unwrap(), 5.478633);
assert_matches!(json["time"], JsonValue::Number(_)); assert_matches!(json["time"], JsonValue::Number(_));
assert_matches!(json.get("AQI"), None); assert_matches!(json.get("AQI"), None);
assert_matches!(json.get("AQI_max"), None);
assert_matches!(json.get("NO2"), None); assert_matches!(json.get("NO2"), None);
assert_matches!(json.get("O3"), None); assert_matches!(json.get("O3"), None);
assert_matches!(json.get("PAQI"), None); assert_matches!(json.get("PAQI"), None);
assert_matches!(json.get("PM10"), None); assert_matches!(json.get("PM10"), None);
assert_matches!(json.get("pollen"), None); assert_matches!(json.get("pollen"), None);
assert_matches!(json.get("pollen_max"), None);
assert_matches!(json.get("precipitation"), None); assert_matches!(json.get("precipitation"), None);
assert_matches!(json.get("UVI"), None); assert_matches!(json.get("UVI"), None);
@ -287,15 +156,17 @@ mod tests {
.dispatch(); .dispatch();
assert_eq!(response.status(), Status::Ok); assert_eq!(response.status(), Status::Ok);
let json = response.into_json::<JsonValue>().expect("Not valid JSON"); let json = response.into_json::<JsonValue>().expect("Not valid JSON");
assert_float_absolute_eq!(json["lat"].as_f64().unwrap(), 51.448557, 1e-1); assert_f64_near!(json["lat"].as_f64().unwrap(), 51.4392648);
assert_float_absolute_eq!(json["lon"].as_f64().unwrap(), 5.450123, 1e-1); assert_f64_near!(json["lon"].as_f64().unwrap(), 5.478633);
assert_matches!(json["time"], JsonValue::Number(_)); assert_matches!(json["time"], JsonValue::Number(_));
assert_matches!(json.get("AQI"), Some(JsonValue::Array(_))); assert_matches!(json.get("AQI"), Some(JsonValue::Array(_)));
assert_matches!(json.get("AQI_max"), Some(JsonValue::Object(_)));
assert_matches!(json.get("NO2"), Some(JsonValue::Array(_))); assert_matches!(json.get("NO2"), Some(JsonValue::Array(_)));
assert_matches!(json.get("O3"), Some(JsonValue::Array(_))); assert_matches!(json.get("O3"), Some(JsonValue::Array(_)));
assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_))); assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_)));
assert_matches!(json.get("PM10"), Some(JsonValue::Array(_))); assert_matches!(json.get("PM10"), Some(JsonValue::Array(_)));
assert_matches!(json.get("pollen"), Some(JsonValue::Array(_))); assert_matches!(json.get("pollen"), Some(JsonValue::Array(_)));
assert_matches!(json.get("pollen_max"), Some(JsonValue::Object(_)));
assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_))); assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_)));
assert_matches!(json.get("UVI"), Some(JsonValue::Array(_))); assert_matches!(json.get("UVI"), Some(JsonValue::Array(_)));
} }
@ -313,11 +184,13 @@ mod tests {
assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5); assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5);
assert_matches!(json["time"], JsonValue::Number(_)); assert_matches!(json["time"], JsonValue::Number(_));
assert_matches!(json.get("AQI"), None); assert_matches!(json.get("AQI"), None);
assert_matches!(json.get("AQI_max"), None);
assert_matches!(json.get("NO2"), None); assert_matches!(json.get("NO2"), None);
assert_matches!(json.get("O3"), None); assert_matches!(json.get("O3"), None);
assert_matches!(json.get("PAQI"), None); assert_matches!(json.get("PAQI"), None);
assert_matches!(json.get("PM10"), None); assert_matches!(json.get("PM10"), None);
assert_matches!(json.get("pollen"), None); assert_matches!(json.get("pollen"), None);
assert_matches!(json.get("pollen_max"), None);
assert_matches!(json.get("precipitation"), None); assert_matches!(json.get("precipitation"), None);
assert_matches!(json.get("UVI"), None); assert_matches!(json.get("UVI"), None);
@ -331,11 +204,13 @@ mod tests {
assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5); assert_f64_near!(json["lon"].as_f64().unwrap(), 5.5);
assert_matches!(json["time"], JsonValue::Number(_)); assert_matches!(json["time"], JsonValue::Number(_));
assert_matches!(json.get("AQI"), Some(JsonValue::Array(_))); assert_matches!(json.get("AQI"), Some(JsonValue::Array(_)));
assert_matches!(json.get("AQI_max"), Some(JsonValue::Object(_)));
assert_matches!(json.get("NO2"), Some(JsonValue::Array(_))); assert_matches!(json.get("NO2"), Some(JsonValue::Array(_)));
assert_matches!(json.get("O3"), Some(JsonValue::Array(_))); assert_matches!(json.get("O3"), Some(JsonValue::Array(_)));
assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_))); assert_matches!(json.get("PAQI"), Some(JsonValue::Array(_)));
assert_matches!(json.get("PM10"), Some(JsonValue::Array(_))); assert_matches!(json.get("PM10"), Some(JsonValue::Array(_)));
assert_matches!(json.get("pollen"), Some(JsonValue::Array(_))); assert_matches!(json.get("pollen"), Some(JsonValue::Array(_)));
assert_matches!(json.get("pollen_max"), Some(JsonValue::Object(_)));
assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_))); assert_matches!(json.get("precipitation"), Some(JsonValue::Array(_)));
assert_matches!(json.get("UVI"), Some(JsonValue::Array(_))); assert_matches!(json.get("UVI"), Some(JsonValue::Array(_)));
} }
@ -344,14 +219,13 @@ mod tests {
fn map_address() { fn map_address() {
let maps_handle = Arc::new(Mutex::new(Maps::new())); let maps_handle = Arc::new(Mutex::new(Maps::new()));
let maps_handle_clone = Arc::clone(&maps_handle); let maps_handle_clone = Arc::clone(&maps_handle);
let client = let client = Client::tracked(rocket(maps_handle)).expect("Not a valid Rocket instance");
Client::tracked(rocket_core(maps_handle)).expect("Not a valid Rocket instance");
// No maps available yet. // No maps available yet.
let response = client let response = client
.get("/map?address=eindhoven&metric=pollen") .get("/map?address=eindhoven&metric=pollen")
.dispatch(); .dispatch();
assert_eq!(response.status(), Status::ServiceUnavailable); assert_eq!(response.status(), Status::NotFound);
// Load some dummy map. // Load some dummy map.
let mut maps = maps_handle_clone let mut maps = maps_handle_clone
@ -373,19 +247,22 @@ mod tests {
// No metric selected, don't know which map to show? // No metric selected, don't know which map to show?
let response = client.get("/map?address=eindhoven").dispatch(); let response = client.get("/map?address=eindhoven").dispatch();
assert_eq!(response.status(), Status::UnprocessableEntity); assert_eq!(response.status(), Status::NotFound);
} }
#[test] #[test]
fn map_geo() { fn map_geo() {
let maps_handle = Arc::new(Mutex::new(Maps::new())); let maps_handle = Arc::new(Mutex::new(Maps::new()));
let maps_handle_clone = Arc::clone(&maps_handle); let maps_handle_clone = Arc::clone(&maps_handle);
let client = let client = Client::tracked(rocket(maps_handle)).expect("Not a valid Rocket instance");
Client::tracked(rocket_core(maps_handle)).expect("Not a valid Rocket instance");
// No metric passed, don't know which map to show?
let response = client.get("/map?lat=51.4&lon=5.5").dispatch();
assert_eq!(response.status(), Status::NotFound);
// No maps available yet. // No maps available yet.
let response = client.get("/map?lat=51.4&lon=5.5&metric=pollen").dispatch(); let response = client.get("/map?lat=51.4&lon=5.5&metric=pollen").dispatch();
assert_eq!(response.status(), Status::ServiceUnavailable); assert_eq!(response.status(), Status::NotFound);
// Load some dummy map. // Load some dummy map.
let mut maps = maps_handle_clone let mut maps = maps_handle_clone
@ -399,12 +276,8 @@ mod tests {
assert_eq!(response.status(), Status::Ok); assert_eq!(response.status(), Status::Ok);
assert_eq!(response.content_type(), Some(ContentType::PNG)); assert_eq!(response.content_type(), Some(ContentType::PNG));
// ... but not if it is out of bounds.
let response = client.get("/map?lat=0.0&lon=0.0&metric=pollen").dispatch();
assert_eq!(response.status(), Status::NotFound);
// No metric passed, don't know which map to show? // No metric passed, don't know which map to show?
let response = client.get("/map?lat=51.4&lon=5.5").dispatch(); let response = client.get("/map?lat=51.4&lon=5.5").dispatch();
assert_eq!(response.status(), Status::UnprocessableEntity); assert_eq!(response.status(), Status::NotFound);
} }
} }

View file

@ -1,22 +1,34 @@
#![doc = include_str!("../README.md")] #![doc = include_str!("../README.md")]
#![warn( #![warn(
clippy::all, clippy::all,
missing_copy_implementations,
missing_debug_implementations, missing_debug_implementations,
rust_2018_idioms, rust_2018_idioms,
rustdoc::broken_intra_doc_links, rustdoc::broken_intra_doc_links
trivial_casts,
trivial_numeric_casts,
renamed_and_removed_lints,
unsafe_code,
unstable_features,
unused_import_braces,
unused_qualifications
)] )]
#![deny(missing_docs)] #![deny(missing_docs)]
use color_eyre::Result;
use rocket::tokio::{self, select};
/// Starts the main maps refresh task and sets up and launches Rocket. /// Starts the main maps refresh task and sets up and launches Rocket.
#[rocket::launch] #[rocket::main]
async fn rocket() -> _ { async fn main() -> Result<()> {
sinoptik::setup() color_eyre::install()?;
let (rocket, maps_refresher) = sinoptik::setup();
let rocket = rocket.ignite().await?;
let shutdown = rocket.shutdown();
let maps_refresher = tokio::spawn(maps_refresher);
select! {
result = rocket.launch() => {
result?
}
result = maps_refresher => {
shutdown.notify();
result?
}
}
Ok(())
} }

View file

@ -8,10 +8,8 @@ use std::f64::consts::PI;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use chrono::serde::ts_seconds; use chrono::serde::ts_seconds;
use chrono::{DateTime, Duration, NaiveDateTime, TimeZone, Utc}; use chrono::{DateTime, Duration, Utc};
use image::{ use image::{DynamicImage, GenericImage, GenericImageView, ImageFormat, Pixel, Rgb, Rgba};
DynamicImage, GenericImage, GenericImageView, ImageError, ImageFormat, Pixel, Rgb, Rgba,
};
use reqwest::Url; use reqwest::Url;
use rocket::serde::Serialize; use rocket::serde::Serialize;
use rocket::tokio; use rocket::tokio;
@ -20,53 +18,6 @@ use rocket::tokio::time::sleep;
use crate::forecast::Metric; use crate::forecast::Metric;
use crate::position::Position; use crate::position::Position;
/// The possible maps errors that can occur.
#[derive(Debug, thiserror::Error)]
pub(crate) enum Error {
/// A timestamp parse error occurred.
#[error("Timestamp parse error: {0}")]
ChronoParse(#[from] chrono::ParseError),
/// A HTTP request error occurred.
#[error("HTTP request error: {0}")]
HttpRequest(#[from] reqwest::Error),
/// Failed to represent HTTP header as a string.
#[error("Failed to represent HTTP header as a string")]
HttpHeaderToStr(#[from] reqwest::header::ToStrError),
/// An image error occurred.
#[error("Image error: {0}")]
Image(#[from] ImageError),
/// Encountered an invalid image file path.
#[error("Invalid image file path: {0}")]
InvalidImagePath(String),
/// Failed to join a task.
#[error("Failed to join a task: {0}")]
Join(#[from] tokio::task::JoinError),
/// Did not find any known (map key) colors in samples.
#[error("Did not find any known colors in samples")]
NoKnownColorsInSamples,
/// No maps found (yet).
#[error("No maps found (yet)")]
NoMapsYet,
/// Got out of bound coordinates for a map.
#[error("Got out of bound coordinates for a map: ({0}, {1})")]
OutOfBoundCoords(u32, u32),
/// Got out of bound offset for a map.
#[error("Got out of bound offset for a map: {0}")]
OutOfBoundOffset(u32),
}
/// Result type that defaults to [`Error`] as the default error type.
pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
/// A handle to access the in-memory cached maps. /// A handle to access the in-memory cached maps.
pub(crate) type MapsHandle = Arc<Mutex<Maps>>; pub(crate) type MapsHandle = Arc<Mutex<Maps>>;
@ -78,22 +29,22 @@ type MapKeyHistogram = HashMap<Rgb<u8>, u32>;
/// Note that the actual score starts from 1, not 0 as per this array. /// Note that the actual score starts from 1, not 0 as per this array.
#[rustfmt::skip] #[rustfmt::skip]
const MAP_KEY: [[u8; 3]; 10] = [ const MAP_KEY: [[u8; 3]; 10] = [
[0x49, 0xDA, 0x21], // #49DA21 [ 73, 218, 33],
[0x30, 0xD2, 0x00], // #30D200 [ 48, 210, 0],
[0xFF, 0xF8, 0x8B], // #FFF88B [255, 248, 139],
[0xFF, 0xF6, 0x42], // #FFF642 [255, 246, 66],
[0xFD, 0xBB, 0x31], // #FDBB31 [253, 187, 49],
[0xFD, 0x8E, 0x24], // #FD8E24 [253, 142, 36],
[0xFC, 0x10, 0x3E], // #FC103E [252, 16, 62],
[0x97, 0x0A, 0x33], // #970A33 [150, 10, 51],
[0xA6, 0x6D, 0xBC], // #A66DBC [166, 109, 188],
[0xB3, 0x30, 0xA1], // #B330A1 [179, 48, 161],
]; ];
/// The Buienradar map sample size. /// The Buienradar map sample size.
/// ///
/// Determines the number of pixels in width/height that is sampled around the sampling coordinate. /// Determiess the number of pixels in width/height that is samples around the sampling coordinate.
const MAP_SAMPLE_SIZE: [u32; 2] = [31, 31]; const MAP_SAMPLE_SIZE: [u32; 2] = [11, 11];
/// The interval between map refreshes (in seconds). /// The interval between map refreshes (in seconds).
const REFRESH_INTERVAL: tokio::time::Duration = tokio::time::Duration::from_secs(60); const REFRESH_INTERVAL: tokio::time::Duration = tokio::time::Duration::from_secs(60);
@ -162,20 +113,26 @@ trait MapsRefresh {
fn is_uvi_stale(&self) -> bool; fn is_uvi_stale(&self) -> bool;
/// Updates the pollen maps. /// Updates the pollen maps.
fn set_pollen(&self, result: Result<RetrievedMaps>); fn set_pollen(&self, result: Option<(DynamicImage, DateTime<Utc>)>);
/// Updates the UV index maps. /// Updates the UV index maps.
fn set_uvi(&self, result: Result<RetrievedMaps>); fn set_uvi(&self, result: Option<(DynamicImage, DateTime<Utc>)>);
} }
/// Container type for all in-memory cached maps. /// Container type for all in-memory cached maps.
#[derive(Debug, Default)] #[derive(Debug)]
pub(crate) struct Maps { pub(crate) struct Maps {
/// The pollen maps (from Buienradar). /// The pollen maps (from Buienradar).
pub(crate) pollen: Option<RetrievedMaps>, pub(crate) pollen: Option<DynamicImage>,
/// The timestamp the pollen maps were last refreshed.
pollen_stamp: DateTime<Utc>,
/// The UV index maps (from Buienradar). /// The UV index maps (from Buienradar).
pub(crate) uvi: Option<RetrievedMaps>, pub(crate) uvi: Option<DynamicImage>,
/// The timestamp the UV index maps were last refreshed.
uvi_stamp: DateTime<Utc>,
} }
impl Maps { impl Maps {
@ -184,60 +141,91 @@ impl Maps {
/// It contains an [`DynamicImage`] per maps type, if downloaded, and the timestamp of the last /// It contains an [`DynamicImage`] per maps type, if downloaded, and the timestamp of the last
/// update. /// update.
pub(crate) fn new() -> Self { pub(crate) fn new() -> Self {
let now = Utc::now();
Self { Self {
pollen: None, pollen: None,
pollen_stamp: now,
uvi: None, uvi: None,
uvi_stamp: now,
} }
} }
/// Returns a current pollen map that marks the provided position. /// Returns a current pollen map that marks the provided position.
pub(crate) fn pollen_mark(&self, position: Position) -> Result<DynamicImage> { ///
let maps = self.pollen.as_ref().ok_or(Error::NoMapsYet)?; /// This returns [`None`] if the maps are not in the cache yet, there is no matching map for
let image = &maps.image; /// the current moment or if the provided position is not within the bounds of the map.
let stamp = maps.timestamp_base; pub(crate) fn pollen_mark(&self, position: Position) -> Option<DynamicImage> {
let marked_image = map_at( self.pollen.as_ref().and_then(|maps| {
image, let map = map_at(
stamp, maps,
POLLEN_MAP_INTERVAL, self.pollen_stamp,
POLLEN_MAP_COUNT, POLLEN_MAP_INTERVAL,
Utc::now(), POLLEN_MAP_COUNT,
)?; Utc::now(),
let coords = project(&marked_image, POLLEN_MAP_REF_POINTS, position)?; )?;
let coords = project(&map, POLLEN_MAP_REF_POINTS, position)?;
Ok(mark(marked_image, coords)) Some(mark(map, coords))
})
} }
/// Samples the pollen maps for the given position. /// Samples the pollen maps for the given position.
pub(crate) fn pollen_samples(&self, position: Position) -> Result<Vec<Sample>> { ///
let maps = self.pollen.as_ref().ok_or(Error::NoMapsYet)?; /// This returns [`None`] if the maps are not in the cache yet.
let image = &maps.image; /// Otherwise, it returns [`Some`] with a list of pollen sample, one for each map
let map = image.view(0, 0, image.width() / UVI_MAP_COUNT, image.height()); /// in the series of maps.
let coords = project(&*map, POLLEN_MAP_REF_POINTS, position)?; pub(crate) fn pollen_samples(&self, position: Position) -> Option<Vec<Sample>> {
let stamp = maps.timestamp_base; self.pollen.as_ref().and_then(|maps| {
let map = maps.view(0, 0, maps.width() / UVI_MAP_COUNT, maps.height());
let coords = project(&*map, POLLEN_MAP_REF_POINTS, position)?;
sample(image, stamp, POLLEN_MAP_INTERVAL, POLLEN_MAP_COUNT, coords) sample(
maps,
self.pollen_stamp,
POLLEN_MAP_INTERVAL,
POLLEN_MAP_COUNT,
coords,
)
})
} }
/// Returns a current UV index map that marks the provided position. /// Returns a current UV index map that marks the provided position.
pub(crate) fn uvi_mark(&self, position: Position) -> Result<DynamicImage> { ///
let maps = self.uvi.as_ref().ok_or(Error::NoMapsYet)?; /// This returns [`None`] if the maps are not in the cache yet, there is no matching map for
let image = &maps.image; /// the current moment or if the provided position is not within the bounds of the map.
let stamp = maps.timestamp_base; pub(crate) fn uvi_mark(&self, position: Position) -> Option<DynamicImage> {
let marked_image = map_at(image, stamp, UVI_MAP_INTERVAL, UVI_MAP_COUNT, Utc::now())?; self.uvi.as_ref().and_then(|maps| {
let coords = project(&marked_image, POLLEN_MAP_REF_POINTS, position)?; let map = map_at(
maps,
self.uvi_stamp,
UVI_MAP_INTERVAL,
UVI_MAP_COUNT,
Utc::now(),
)?;
let coords = project(&map, POLLEN_MAP_REF_POINTS, position)?;
Ok(mark(marked_image, coords)) Some(mark(map, coords))
})
} }
/// Samples the UV index maps for the given position. /// Samples the UV index maps for the given position.
pub(crate) fn uvi_samples(&self, position: Position) -> Result<Vec<Sample>> { ///
let maps = self.uvi.as_ref().ok_or(Error::NoMapsYet)?; /// This returns [`None`] if the maps are not in the cache yet.
let image = &maps.image; /// Otherwise, it returns [`Some`] with a list of UV index sample, one for each map
let map = image.view(0, 0, image.width() / UVI_MAP_COUNT, image.height()); /// in the series of maps.
let coords = project(&*map, UVI_MAP_REF_POINTS, position)?; pub(crate) fn uvi_samples(&self, position: Position) -> Option<Vec<Sample>> {
let stamp = maps.timestamp_base; self.uvi.as_ref().and_then(|maps| {
let map = maps.view(0, 0, maps.width() / UVI_MAP_COUNT, maps.height());
let coords = project(&*map, UVI_MAP_REF_POINTS, position)?;
sample(image, stamp, UVI_MAP_INTERVAL, UVI_MAP_COUNT, coords) sample(
maps,
self.uvi_stamp,
UVI_MAP_INTERVAL,
UVI_MAP_COUNT,
coords,
)
})
} }
} }
@ -245,66 +233,60 @@ impl MapsRefresh for MapsHandle {
fn is_pollen_stale(&self) -> bool { fn is_pollen_stale(&self) -> bool {
let maps = self.lock().expect("Maps handle mutex was poisoned"); let maps = self.lock().expect("Maps handle mutex was poisoned");
match &maps.pollen { Utc::now().signed_duration_since(maps.pollen_stamp)
Some(pollen_maps) => { > Duration::seconds(POLLEN_MAP_COUNT as i64 * POLLEN_MAP_INTERVAL)
Utc::now().signed_duration_since(pollen_maps.mtime)
> Duration::seconds(POLLEN_MAP_COUNT as i64 * POLLEN_MAP_INTERVAL)
}
None => false,
}
} }
fn is_uvi_stale(&self) -> bool { fn is_uvi_stale(&self) -> bool {
let maps = self.lock().expect("Maps handle mutex was poisoned"); let maps = self.lock().expect("Maps handle mutex was poisoned");
match &maps.uvi { Utc::now().signed_duration_since(maps.uvi_stamp)
Some(uvi_maps) => { > Duration::seconds(UVI_MAP_COUNT as i64 * UVI_MAP_INTERVAL)
Utc::now().signed_duration_since(uvi_maps.mtime)
> Duration::seconds(UVI_MAP_COUNT as i64 * UVI_MAP_INTERVAL)
}
None => false,
}
} }
fn needs_pollen_refresh(&self) -> bool { fn needs_pollen_refresh(&self) -> bool {
let maps = self.lock().expect("Maps handle mutex was poisoned"); let maps = self.lock().expect("Maps handle mutex was poisoned");
match &maps.pollen { maps.pollen.is_none()
Some(pollen_maps) => { || Utc::now()
Utc::now() .signed_duration_since(maps.pollen_stamp)
.signed_duration_since(pollen_maps.mtime) .num_seconds()
.num_seconds() > POLLEN_INTERVAL
> POLLEN_INTERVAL
}
None => true,
}
} }
fn needs_uvi_refresh(&self) -> bool { fn needs_uvi_refresh(&self) -> bool {
let maps = self.lock().expect("Maps handle mutex was poisoned"); let maps = self.lock().expect("Maps handle mutex was poisoned");
match &maps.uvi { maps.uvi.is_none()
Some(uvi_maps) => { || Utc::now()
Utc::now() .signed_duration_since(maps.uvi_stamp)
.signed_duration_since(uvi_maps.mtime) .num_seconds()
.num_seconds() > UVI_INTERVAL
> UVI_INTERVAL }
fn set_pollen(&self, result: Option<(DynamicImage, DateTime<Utc>)>) {
if result.is_some() || self.is_pollen_stale() {
let mut maps = self.lock().expect("Maps handle mutex was poisoned");
if let Some((pollen, pollen_stamp)) = result {
maps.pollen = Some(pollen);
maps.pollen_stamp = pollen_stamp
} else {
maps.pollen = None
} }
None => true,
} }
} }
fn set_pollen(&self, retrieved_maps: Result<RetrievedMaps>) { fn set_uvi(&self, result: Option<(DynamicImage, DateTime<Utc>)>) {
if retrieved_maps.is_ok() || self.is_pollen_stale() { if result.is_some() || self.is_uvi_stale() {
let mut maps = self.lock().expect("Maps handle mutex was poisoned"); let mut maps = self.lock().expect("Maps handle mutex was poisoned");
maps.pollen = retrieved_maps.ok();
}
}
fn set_uvi(&self, retrieved_maps: Result<RetrievedMaps>) { if let Some((uvi, uvi_stamp)) = result {
if retrieved_maps.is_ok() || self.is_uvi_stale() { maps.uvi = Some(uvi);
let mut maps = self.lock().expect("Maps handle mutex was poisoned"); maps.uvi_stamp = uvi_stamp
maps.uvi = retrieved_maps.ok(); } else {
maps.uvi = None
}
} }
} }
} }
@ -346,27 +328,26 @@ fn map_key_histogram() -> MapKeyHistogram {
 /// Samples the provided maps at the given (map-relative) coordinates and starting timestamp.
 /// It assumes the provided coordinates are within bounds of at least one map.
 /// The interval is the number of seconds the timestamp is bumped for each map.
+///
+/// Returns [`None`] if it encounters no known colors in any of the samples.
 fn sample<I: GenericImageView<Pixel = Rgba<u8>>>(
-    image: &I,
+    maps: &I,
     stamp: DateTime<Utc>,
     interval: i64,
     count: u32,
     coords: (u32, u32),
-) -> Result<Vec<Sample>> {
+) -> Option<Vec<Sample>> {
     let (x, y) = coords;
-    let width = image.width() / count;
-    let height = image.height();
-    if x > width || y > height {
-        return Err(Error::OutOfBoundCoords(x, y));
-    }
+    let width = maps.width() / count;
+    let height = maps.height();
     let max_sample_width = (width - x).min(MAP_SAMPLE_SIZE[0]);
     let max_sample_height = (height - y).min(MAP_SAMPLE_SIZE[1]);
     let mut samples = Vec::with_capacity(count as usize);
     let mut time = stamp;
     let mut offset = 0;
-    while offset < image.width() {
-        let map = image.view(
+    while offset < maps.width() {
+        let map = maps.view(
             x.saturating_sub(MAP_SAMPLE_SIZE[0] / 2) + offset,
             y.saturating_sub(MAP_SAMPLE_SIZE[1] / 2),
             max_sample_width,
@ -383,7 +364,7 @@ fn sample<I: GenericImageView<Pixel = Rgba<u8>>>(
             .max_by_key(|(_color, count)| *count)
             .expect("Map key is never empty");
         if count == 0 {
-            return Err(Error::NoKnownColorsInSamples);
+            return None;
         }
         let score = MAP_KEY
@ -393,144 +374,112 @@ fn sample<I: GenericImageView<Pixel = Rgba<u8>>>(
             .expect("Maximum color is always a map key color") as u8;
         samples.push(Sample { time, score });
-        time += Duration::seconds(interval);
+        time = time + chrono::Duration::seconds(interval as i64);
         offset += width;
     }

-    Ok(samples)
+    Some(samples)
 }
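The heart of `sample` is a histogram over a small pixel window: every pixel that matches one of the map-key colors is counted, the most frequent key color wins, and a window without any known color yields no sample. A reduced sketch of that counting step over plain RGBA tuples; the key colors below are made up for illustration:

use std::collections::HashMap;

type Rgba = (u8, u8, u8, u8);

fn dominant_key_color(window: &[Rgba], map_key: &[Rgba]) -> Option<Rgba> {
    // Start every known key color at a count of zero, like the map key histogram.
    let mut histogram: HashMap<Rgba, usize> = map_key.iter().map(|&color| (color, 0)).collect();
    for pixel in window {
        if let Some(count) = histogram.get_mut(pixel) {
            *count += 1;
        }
    }
    let (&color, &count) = histogram.iter().max_by_key(|entry| *entry.1)?;
    // No known color in this window at all: treat the sample as unusable.
    (count > 0).then_some(color)
}

fn main() {
    let map_key = [(255, 0, 0, 255), (0, 255, 0, 255)];
    let window = [(0, 255, 0, 255), (0, 255, 0, 255), (10, 10, 10, 255)];
    println!("{:?}", dominant_key_color(&window, &map_key)); // Some((0, 255, 0, 255))
}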
-/// A retrieved image with some metadata.
-#[derive(Debug)]
-pub(crate) struct RetrievedMaps {
-    /// The image data.
-    pub(crate) image: DynamicImage,
-    /// The date/time the image was last modified.
-    pub(crate) mtime: DateTime<Utc>,
-    /// The starting date/time the image corresponds with.
-    pub(crate) timestamp_base: DateTime<Utc>,
-}
-
-impl RetrievedMaps {
-    #[cfg(test)]
-    pub(crate) fn new(image: DynamicImage) -> Self {
-        let mtime = Utc::now();
-        let timestamp_base = Utc::now();
-        Self {
-            image,
-            mtime,
-            timestamp_base,
-        }
-    }
-}
 /// Retrieves an image from the provided URL.
-async fn retrieve_image(url: Url) -> Result<RetrievedMaps> {
-    let response = reqwest::get(url).await?;
-    let mtime = match response.headers().get(reqwest::header::LAST_MODIFIED) {
-        Some(mtime_header) => {
-            let mtime_header_str = mtime_header.to_str()?;
-            DateTime::from(DateTime::parse_from_rfc2822(mtime_header_str)?)
-        }
-        None => Utc::now(),
-    };
-    let timestamp_base = {
-        let path = response.url().path();
-        let (_, filename) = path
-            .rsplit_once('/')
-            .ok_or_else(|| Error::InvalidImagePath(path.to_owned()))?;
-        let (timestamp_str, _) = filename
-            .split_once("__")
-            .ok_or_else(|| Error::InvalidImagePath(path.to_owned()))?;
-        let timestamp = NaiveDateTime::parse_from_str(timestamp_str, "%Y%m%d%H%M")?;
-        Utc.from_utc_datetime(&timestamp)
-    };
-    let bytes = response.bytes().await?;
+///
+/// This returns [`None`] if it fails in either performing the request, parsing the `Last-Modified`
+/// response HTTP header, retrieving the bytes of the image, or loading and decoding the data
+/// into a [`DynamicImage`].
+async fn retrieve_image(url: Url) -> Option<(DynamicImage, DateTime<Utc>)> {
+    // TODO: Handle or log errors!
+    let response = reqwest::get(url).await.ok()?;
+    let mtime = response
+        .headers()
+        .get(reqwest::header::LAST_MODIFIED)
+        .and_then(|dt| dt.to_str().ok())
+        .map(chrono::DateTime::parse_from_rfc2822)?
+        .map(DateTime::<Utc>::from)
+        .ok()?;
+    let bytes = response.bytes().await.ok()?;
     tokio::task::spawn_blocking(move || {
-        image::load_from_memory_with_format(&bytes, ImageFormat::Png)
-            .map(|image| RetrievedMaps {
-                image,
-                mtime,
-                timestamp_base,
-            })
-            .map_err(Error::from)
+        if let Ok(image) = image::load_from_memory_with_format(&bytes, ImageFormat::Png) {
+            Some((image, mtime))
+        } else {
+            None
+        }
     })
-    .await?
+    .await
+    .ok()?
 }
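Both versions derive the maps' modification time from the HTTP `Last-Modified` header, which carries an RFC 2822 date. A small sketch of just that conversion with chrono; the header value is a made-up example:

use chrono::{DateTime, Utc};

fn parse_last_modified(value: &str) -> Option<DateTime<Utc>> {
    // RFC 2822 is the date format used by the Last-Modified HTTP header.
    DateTime::parse_from_rfc2822(value).ok().map(DateTime::<Utc>::from)
}

fn main() {
    let header = "Tue, 07 Mar 2023 11:20:00 GMT";
    println!("{:?}", parse_last_modified(header)); // Some(2023-03-07T11:20:00Z)
}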
 /// Retrieves the pollen maps from Buienradar.
 ///
 /// See [`POLLEN_BASE_URL`] for the base URL and [`retrieve_image`] for the retrieval function.
-async fn retrieve_pollen_maps() -> Result<RetrievedMaps> {
+async fn retrieve_pollen_maps() -> Option<(DynamicImage, DateTime<Utc>)> {
     let timestamp = format!("{}", chrono::Local::now().format("%y%m%d%H%M"));
     let mut url = Url::parse(POLLEN_BASE_URL).unwrap();
     url.query_pairs_mut().append_pair("timestamp", &timestamp);

-    println!("🗺️ Refreshing pollen maps from: {}", url);
+    println!("🔽 Refreshing pollen maps from: {}", url);

     retrieve_image(url).await
 }

 /// Retrieves the UV index maps from Buienradar.
 ///
 /// See [`UVI_BASE_URL`] for the base URL and [`retrieve_image`] for the retrieval function.
-async fn retrieve_uvi_maps() -> Result<RetrievedMaps> {
+async fn retrieve_uvi_maps() -> Option<(DynamicImage, DateTime<Utc>)> {
     let timestamp = format!("{}", chrono::Local::now().format("%y%m%d%H%M"));
     let mut url = Url::parse(UVI_BASE_URL).unwrap();
     url.query_pairs_mut().append_pair("timestamp", &timestamp);

-    println!("🗺️ Refreshing UV index maps from: {}", url);
+    println!("🔽 Refreshing UV index maps from: {}", url);

     retrieve_image(url).await
 }
 /// Returns the map for the given instant.
+///
+/// This returns [`None`] if `instant` is too far in the future with respect to the number of
+/// cached maps.
 fn map_at(
-    image: &DynamicImage,
-    stamp: DateTime<Utc>,
+    maps: &DynamicImage,
+    maps_stamp: DateTime<Utc>,
     interval: i64,
     count: u32,
     instant: DateTime<Utc>,
-) -> Result<DynamicImage> {
-    let duration = instant.signed_duration_since(stamp);
+) -> Option<DynamicImage> {
+    let duration = instant.signed_duration_since(maps_stamp);
     let offset = (duration.num_seconds() / interval) as u32;
     // Check if out of bounds.
     if offset >= count {
-        return Err(Error::OutOfBoundOffset(offset));
+        return None;
     }

-    let width = image.width() / count;
-    Ok(image.crop_imm(offset * width, 0, width, image.height()))
+    let width = maps.width() / count;
+    Some(maps.crop_imm(offset * width, 0, width, maps.height()))
 }
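`map_at` is plain integer arithmetic: how many whole intervals lie between the series' base timestamp and the requested instant, and whether that index still falls within the strip of maps. A minimal sketch of the bounds check:

use chrono::{DateTime, Duration, Utc};

/// Index of the sub-map covering `instant`, or `None` when it falls outside the strip.
fn map_index(stamp: DateTime<Utc>, interval: i64, count: u32, instant: DateTime<Utc>) -> Option<u32> {
    let offset = (instant.signed_duration_since(stamp).num_seconds() / interval) as u32;
    (offset < count).then_some(offset)
}

fn main() {
    let stamp = Utc::now();
    // With 24 hourly maps, an instant three hours out falls on index 3...
    println!("{:?}", map_index(stamp, 3_600, 24, stamp + Duration::hours(3))); // Some(3)
    // ...while two days out is beyond the strip.
    println!("{:?}", map_index(stamp, 3_600, 24, stamp + Duration::hours(48))); // None
}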
 /// Marks the provided coordinates on the map using a horizontal and vertical line.
-fn mark(mut image: DynamicImage, coords: (u32, u32)) -> DynamicImage {
+fn mark(mut map: DynamicImage, coords: (u32, u32)) -> DynamicImage {
     let (x, y) = coords;

-    for py in 0..image.height() {
-        image.put_pixel(x, py, Rgba::from([0x00, 0x00, 0x00, 0x70]));
+    for py in 0..map.height() {
+        map.put_pixel(x, py, Rgba::from([0x00, 0x00, 0x00, 0x70]));
     }
-    for px in 0..image.width() {
-        image.put_pixel(px, y, Rgba::from([0x00, 0x00, 0x00, 0x70]));
+    for px in 0..map.width() {
+        map.put_pixel(px, y, Rgba::from([0x00, 0x00, 0x00, 0x70]));
     }

-    image
+    map
 }
 /// Projects the provided geocoded position to a coordinate on a map.
 ///
 /// This uses two reference points and a Mercator projection on the y-coordinates of those points
 /// to calculate how the map scales with respect to the provided position.
+///
+/// Returns [`None`] if the resulting coordinate is not within the bounds of the map.
 fn project<I: GenericImageView>(
-    image: &I,
+    map: &I,
     ref_points: [(Position, (u32, u32)); 2],
     pos: Position,
-) -> Result<(u32, u32)> {
+) -> Option<(u32, u32)> {
     // Get the data from the reference points.
     let (ref1, (ref1_y, ref1_x)) = ref_points[0];
     let (ref2, (ref2_y, ref2_x)) = ref_points[1];
@ -546,10 +495,10 @@ fn project<I: GenericImageView>(
     let scale_y = ((ref1_y - ref2_y) as f64) / (ref2_merc_y - ref1_merc_y);
     let y = ((ref2_merc_y - mercator_y(pos.lat_as_rad())) * scale_y + ref2_y as f64).round() as u32;

-    if image.in_bounds(x, y) {
-        Ok((x, y))
+    if map.in_bounds(x, y) {
+        Some((x, y))
     } else {
-        Err(Error::OutOfBoundCoords(x, y))
+        None
     }
 }
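The projection interpolates between two reference points with known pixel coordinates; longitudes scale linearly while latitudes first pass through the Mercator function. A sketch of the latitude half, with `mercator_y` written out as the standard formula (an assumption: the crate's own helper is not shown in this diff) and made-up reference points:

use std::f64::consts::PI;

/// Standard Mercator y for a latitude in radians (assumed to match the crate's helper).
fn mercator_y(lat_rad: f64) -> f64 {
    (lat_rad / 2.0 + PI / 4.0).tan().ln()
}

fn main() {
    // Two made-up reference latitudes with their pixel rows on the map.
    let (ref1_lat, ref1_y) = (53.5_f64, 20.0_f64); // north, near the top edge
    let (ref2_lat, ref2_y) = (50.7_f64, 520.0_f64); // south, near the bottom edge
    let ref1_merc_y = mercator_y(ref1_lat.to_radians());
    let ref2_merc_y = mercator_y(ref2_lat.to_radians());

    // Same shape as the code above: derive the scale, then project a latitude in between.
    let scale_y = (ref1_y - ref2_y) / (ref2_merc_y - ref1_merc_y);
    let y = (ref2_merc_y - mercator_y(52.1_f64.to_radians())) * scale_y + ref2_y;
    println!("pixel row = {}", y.round());
}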
@ -560,28 +509,34 @@ pub(crate) async fn mark_map(
     position: Position,
     metric: Metric,
     maps_handle: &MapsHandle,
-) -> crate::Result<Vec<u8>> {
+) -> Option<Vec<u8>> {
     use std::io::Cursor;

     let maps_handle = Arc::clone(maps_handle);
     tokio::task::spawn_blocking(move || {
         let maps = maps_handle.lock().expect("Maps handle lock was poisoned");
         let image = match metric {
-            Metric::PAQI => maps.pollen_mark(position),
             Metric::Pollen => maps.pollen_mark(position),
             Metric::UVI => maps.uvi_mark(position),
-            _ => return Err(crate::Error::UnsupportedMetric(metric)),
+            _ => return None, // Unsupported metric
         }?;
         drop(maps);

         // Encode the image as PNG image data.
         let mut image_data = Cursor::new(Vec::new());
-        match image.write_to(&mut image_data, ImageFormat::Png) {
-            Ok(()) => Ok(image_data.into_inner()),
-            Err(err) => Err(crate::Error::from(Error::from(err))),
-        }
+        image
+            .write_to(
+                &mut image_data,
+                image::ImageOutputFormat::from(image::ImageFormat::Png),
+            )
+            .ok()?;
+        Some(image_data.into_inner())
     })
     .await
-    .map_err(Error::from)?
+    .ok()
+    .flatten()
 }
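Both versions encode the marked map into PNG bytes in memory by writing into a `Cursor<Vec<u8>>`; only the way the format argument is spelled differs between the image crate releases they use. A stripped-down sketch of that step, where the 1×1 test image is just a placeholder:

use std::io::Cursor;

use image::{DynamicImage, ImageFormat, Rgba, RgbaImage};

fn encode_png(image: &DynamicImage) -> Option<Vec<u8>> {
    // `write_to` needs a writer that is also `Seek`, which `Cursor` provides.
    let mut image_data = Cursor::new(Vec::new());
    image.write_to(&mut image_data, ImageFormat::Png).ok()?;
    Some(image_data.into_inner())
}

fn main() {
    let image = DynamicImage::ImageRgba8(RgbaImage::from_pixel(1, 1, Rgba([0, 0, 0, 0x70])));
    println!("PNG is {} bytes", encode_png(&image).expect("encoding failed").len());
}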
 /// Runs a loop that keeps refreshing the maps when necessary.
@ -593,19 +548,13 @@ pub(crate) async fn run(maps_handle: MapsHandle) {
         println!("🕔 Refreshing the maps (if necessary)...");

         if maps_handle.needs_pollen_refresh() {
-            let retrieved_maps = retrieve_pollen_maps().await;
-            if let Err(e) = retrieved_maps.as_ref() {
-                eprintln!("💥 Encountered error during pollen maps refresh: {}", e);
-            }
-            maps_handle.set_pollen(retrieved_maps);
+            let result = retrieve_pollen_maps().await;
+            maps_handle.set_pollen(result);
         }
         if maps_handle.needs_uvi_refresh() {
-            let retrieved_maps = retrieve_uvi_maps().await;
-            if let Err(e) = retrieved_maps.as_ref() {
-                eprintln!("💥 Encountered error during UVI maps refresh: {}", e);
-            }
-            maps_handle.set_uvi(retrieved_maps);
+            let result = retrieve_uvi_maps().await;
+            maps_handle.set_uvi(result);
         }
         sleep(REFRESH_INTERVAL).await;

View file

@ -2,14 +2,13 @@
 //!
 //! This module contains everything related to geographic coordinate system functionality.

-use std::f64::consts::PI;
 use std::hash::Hash;

 use cached::proc_macro::cached;
 use geocoding::{Forward, Openstreetmap, Point};
 use rocket::tokio;
-use crate::{Error, Result};
+use std::f64::consts::PI;

 /// A (geocoded) position.
 ///
@ -99,19 +98,21 @@ impl Eq for Position {}
 /// Resolves the geocoded position for a given address.
 ///
-/// If the result is [`Ok`], it will be cached.
+/// Returns [`None`] if the address could not be geocoded or the OpenStreetMap Nominatim API could
+/// not be contacted.
+///
+/// If the result is [`Some`], it will be cached.
 /// Note that only the 100 least recently used addresses will be cached.
-#[cached(size = 100, result = true)]
-pub(crate) async fn resolve_address(address: String) -> Result<Position> {
+#[cached(size = 100)]
+pub(crate) async fn resolve_address(address: String) -> Option<Position> {
     println!("🌍 Geocoding the position of the address: {}", address);
     tokio::task::spawn_blocking(move || {
         let osm = Openstreetmap::new();
-        let points: Vec<Point<f64>> = osm.forward(&address)?;
+        let points: Vec<Point<f64>> = osm.forward(&address).ok()?;

-        points
-            .first()
-            .ok_or(Error::NoPositionFound)
-            .map(Position::from)
+        points.get(0).map(Position::from)
     })
-    .await?
+    .await
+    .ok()
+    .flatten()
 }
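`resolve_address` defers to the geocoding crate's blocking OpenStreetMap client, which is why it is wrapped in `spawn_blocking`. A bare synchronous sketch of the lookup itself; note that it performs a real Nominatim request when run:

use geocoding::{Forward, Openstreetmap, Point};

fn main() {
    let osm = Openstreetmap::new();
    let address = String::from("Amsterdam");
    // Forward geocoding returns zero or more candidate points (x is longitude, y is latitude).
    let points: Vec<Point<f64>> = osm.forward(&address).unwrap_or_default();
    match points.first() {
        Some(point) => println!("lat {:.2}, lon {:.2}", point.y(), point.x()),
        None => println!("no position found"),
    }
}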

View file

@ -4,8 +4,9 @@
 //! and <https://www.buienradar.nl/overbuienradar/gratis-weerdata>.

 use cached::proc_macro::cached;
+use chrono::offset::TimeZone;
 use chrono::serde::ts_seconds;
-use chrono::{DateTime, Datelike, Duration, NaiveTime, ParseError, TimeZone, Utc};
+use chrono::{DateTime, Datelike, Duration, NaiveTime, ParseError, Utc};
 use chrono_tz::Europe;
 use csv::ReaderBuilder;
 use reqwest::Url;
@ -13,7 +14,7 @@ use rocket::serde::{Deserialize, Serialize};
 use crate::maps::MapsHandle;
 use crate::position::Position;
-use crate::{Error, Metric, Result};
+use crate::Metric;

 /// The base URL for the Buienradar API.
 const BUIENRADAR_BASE_URL: &str = "https://gpsgadget.buienradar.nl/data/raintext";
@ -65,10 +66,10 @@ impl TryFrom<Row> for Item {
 /// time zone.
 fn parse_time(t: &str) -> Result<DateTime<Utc>, ParseError> {
     // First, get the current date in the Europe/Amsterdam time zone.
-    let today = Utc::now().with_timezone(&Europe::Amsterdam).date_naive();
+    let today = Utc::now().with_timezone(&Europe::Amsterdam).date();
     // Then, parse the time and interpret it relative to "today".
     let ntime = NaiveTime::parse_from_str(t, "%H:%M")?;
-    let ndtime = today.and_time(ntime);
+    let ndtime = today.naive_local().and_time(ntime);
     // Finally, interpret the naive date/time in the Europe/Amsterdam time zone and convert it to
     // the UTC time zone.
     let ldtime = Europe::Amsterdam.from_local_datetime(&ndtime).unwrap();
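`parse_time` treats a bare "HH:MM" as Amsterdam wall-clock time on today's date and then expresses it in UTC. A compact sketch of that chain of calls, mirroring the main side (`date_naive` requires a chrono release that provides it):

use chrono::{DateTime, NaiveTime, ParseError, TimeZone, Utc};
use chrono_tz::Europe;

fn parse_time(t: &str) -> Result<DateTime<Utc>, ParseError> {
    // Today's date as seen from the Europe/Amsterdam time zone.
    let today = Utc::now().with_timezone(&Europe::Amsterdam).date_naive();
    // The bare clock time, attached to that date.
    let ndtime = today.and_time(NaiveTime::parse_from_str(t, "%H:%M")?);
    // Interpret the naive date/time as Amsterdam local time, then express it in UTC.
    let ldtime = Europe::Amsterdam.from_local_datetime(&ndtime).unwrap();
    Ok(ldtime.with_timezone(&Utc))
}

fn main() {
    println!("{:?}", parse_time("14:30"));
}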
@ -97,9 +98,8 @@ fn fix_items_day_boundary(items: Vec<Item>) -> Vec<Item> {
     let now = Utc::now().with_timezone(&Europe::Amsterdam);
     // Use noon on the same day as "now" as a comparison moment.
     let noon = Europe::Amsterdam
-        .with_ymd_and_hms(now.year(), now.month(), now.day(), 12, 0, 0)
-        .single()
-        .expect("Invalid date: input date is invalid or not unambiguous");
+        .ymd(now.year(), now.month(), now.day())
+        .and_hms(12, 0, 0);

     if now < noon {
         // It is still before noon, so bump timestamps after noon a day back.
@ -107,7 +107,7 @@ fn fix_items_day_boundary(items: Vec<Item>) -> Vec<Item> {
             .into_iter()
             .map(|mut item| {
                 if item.time > noon {
-                    item.time -= Duration::days(1)
+                    item.time = item.time - Duration::days(1)
                 }
                 item
             })
@ -118,7 +118,7 @@ fn fix_items_day_boundary(items: Vec<Item>) -> Vec<Item> {
             .into_iter()
             .map(|mut item| {
                 if item.time < noon {
-                    item.time += Duration::days(1)
+                    item.time = item.time + Duration::days(1)
                 }
                 item
             })
@ -128,23 +128,28 @@ fn fix_items_day_boundary(items: Vec<Item>) -> Vec<Item> {
 /// Retrieves the Buienradar forecasted precipitation items for the provided position.
 ///
-/// If the result is [`Ok`] it will be cached for 5 minutes for the given position.
-#[cached(time = 300, result = true)]
-async fn get_precipitation(position: Position) -> Result<Vec<Item>> {
+/// Returns [`None`] if retrieval or deserialization fails.
+///
+/// If the result is [`Some`] it will be cached for 5 minutes for the given position.
+#[cached(time = 300, option = true)]
+async fn get_precipitation(position: Position) -> Option<Vec<Item>> {
     let mut url = Url::parse(BUIENRADAR_BASE_URL).unwrap();
     url.query_pairs_mut()
         .append_pair("lat", &position.lat_as_str(2))
         .append_pair("lon", &position.lon_as_str(2));

     println!("▶️ Retrieving Buienradar data from: {url}");
-    let response = reqwest::get(url).await?;
-    let output = response.error_for_status()?.text().await?;
+    let response = reqwest::get(url).await.ok()?;
+    let output = match response.error_for_status() {
+        Ok(res) => res.text().await.ok()?,
+        Err(_err) => return None,
+    };

     let mut rdr = ReaderBuilder::new()
         .has_headers(false)
         .delimiter(b'|')
         .from_reader(output.as_bytes());
-    let items: Vec<Item> = rdr.deserialize().collect::<Result<_, _>>()?;
+    let items: Vec<Item> = rdr.deserialize().collect::<Result<_, _>>().ok()?;

     // Check if the first item stamp is (timewise) later than the last item stamp.
     // In this case `parse_time` interpreted e.g. 23:00 and later 0:30 in the same day and some
@ -155,44 +160,46 @@ async fn get_precipitation(position: Position) -> Result<Vec<Item>> {
         .map(|(it1, it2)| it1.time > it2.time)
         == Some(true)
     {
-        Ok(fix_items_day_boundary(items))
+        Some(fix_items_day_boundary(items))
     } else {
-        Ok(items)
+        Some(items)
     }
 }
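The Buienradar raintext endpoint answers with pipe-separated `value|HH:MM` rows, which is why the reader is configured without headers and with `|` as the delimiter. A self-contained sketch of that deserialization step with a hypothetical row type; the crate's own `Row` and `Item` types live elsewhere:

use csv::ReaderBuilder;
use serde::Deserialize;

// Illustrative stand-in for the crate's own row type.
#[derive(Debug, Deserialize)]
struct Row {
    value: u16,
    time: String,
}

fn main() {
    let output = "000|14:30\n113|14:35\n";
    let mut rdr = ReaderBuilder::new()
        .has_headers(false)
        .delimiter(b'|')
        .from_reader(output.as_bytes());
    let rows: Result<Vec<Row>, _> = rdr.deserialize().collect();
    println!("{:?}", rows);
}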
 /// Retrieves the Buienradar forecasted pollen samples for the provided position.
 ///
-/// If the result is [`Ok`] it will be cached for 1 hour for the given position.
+/// Returns [`None`] if the sampling fails.
+///
+/// If the result is [`Some`] it will be cached for 1 hour for the given position.
 #[cached(
     time = 3_600,
     key = "Position",
     convert = r#"{ position }"#,
-    result = true
+    option = true
 )]
-async fn get_pollen(position: Position, maps_handle: &MapsHandle) -> Result<Vec<Sample>> {
+async fn get_pollen(position: Position, maps_handle: &MapsHandle) -> Option<Vec<Sample>> {
     maps_handle
         .lock()
         .expect("Maps handle mutex was poisoned")
         .pollen_samples(position)
-        .map_err(Into::into)
 }

 /// Retrieves the Buienradar forecasted UV index samples for the provided position.
 ///
-/// If the result is [`Ok`] it will be cached for 1 day for the given position.
+/// Returns [`None`] if the sampling fails.
+///
+/// If the result is [`Some`] it will be cached for 1 day for the given position.
 #[cached(
     time = 86_400,
     key = "Position",
     convert = r#"{ position }"#,
-    result = true
+    option = true
 )]
-async fn get_uvi(position: Position, maps_handle: &MapsHandle) -> Result<Vec<Sample>> {
+async fn get_uvi(position: Position, maps_handle: &MapsHandle) -> Option<Vec<Sample>> {
     maps_handle
         .lock()
         .expect("Maps handle mutex was poisoned")
         .uvi_samples(position)
-        .map_err(Into::into)
 }
 /// Retrieves the Buienradar forecasted map samples for the provided position.
@ -200,15 +207,18 @@ async fn get_uvi(position: Position, maps_handle: &MapsHandle) -> Result<Vec<Sam
 /// It only supports the following metric:
 /// * [`Metric::Pollen`]
 /// * [`Metric::UVI`]
+///
+/// Returns [`None`] if retrieval or deserialization fails, or if the metric is not supported by
+/// this provider.
 pub(crate) async fn get_samples(
     position: Position,
     metric: Metric,
     maps_handle: &MapsHandle,
-) -> Result<Vec<Sample>> {
+) -> Option<Vec<Sample>> {
     match metric {
         Metric::Pollen => get_pollen(position, maps_handle).await,
         Metric::UVI => get_uvi(position, maps_handle).await,
-        _ => Err(Error::UnsupportedMetric(metric)),
+        _ => None,
     }
 }
@ -217,9 +227,11 @@ pub(crate) async fn get_samples(
 /// It only supports the following metric:
 /// * [`Metric::Precipitation`]
 ///
-pub(crate) async fn get_items(position: Position, metric: Metric) -> Result<Vec<Item>> {
+/// Returns [`None`] if retrieval or deserialization fails, or if the metric is not supported by
+/// this provider.
+pub(crate) async fn get_items(position: Position, metric: Metric) -> Option<Vec<Item>> {
     match metric {
         Metric::Precipitation => get_precipitation(position).await,
-        _ => Err(Error::UnsupportedMetric(metric)),
+        _ => None,
     }
 }

View file

@ -11,28 +11,7 @@ pub(crate) use super::buienradar::{self, Sample as BuienradarSample};
pub(crate) use super::luchtmeetnet::{self, Item as LuchtmeetnetItem}; pub(crate) use super::luchtmeetnet::{self, Item as LuchtmeetnetItem};
use crate::maps::MapsHandle; use crate::maps::MapsHandle;
use crate::position::Position; use crate::position::Position;
-use crate::{Error, Metric};
+use crate::Metric;
/// The possible merge errors that can occur.
#[allow(clippy::enum_variant_names)]
#[derive(Debug, thiserror::Error, PartialEq)]
pub(crate) enum MergeError {
/// No AQI item found.
#[error("No AQI item found")]
NoAqiItemFound,
/// No pollen item found.
#[error("No pollen item found")]
NoPollenItemFound,
/// No AQI item found within 30 minutes of first pollen item.
#[error("No AQI item found within 30 minutes of first pollen item")]
NoCloseAqiItemFound,
/// No pollen item found within 30 minutes of first AQI item.
#[error("No pollen item found within 30 minutes of first AQI item")]
NoClosePollenItemFound,
}
/// The combined data item. /// The combined data item.
#[derive(Clone, Debug, PartialEq, Serialize)] #[derive(Clone, Debug, PartialEq, Serialize)]
@ -56,59 +35,70 @@ impl Item {
 /// Merges pollen samples and AQI items into combined items.
 ///
 /// The merging drops items from either the pollen samples or from the AQI items if they are not
 /// stamped within half an hour of the first item of the latest starting series, thus lining them
 /// up before they are combined.
+///
+/// This function also finds the maximum pollen sample and AQI item.
+///
+/// Returns [`None`] if there are no pollen samples, if there are no AQI items, or if
+/// lining them up fails. Returns [`None`] for the maximum pollen sample or maximum AQI item
+/// if there are no samples or items.
 fn merge(
     pollen_samples: Vec<BuienradarSample>,
     aqi_items: Vec<LuchtmeetnetItem>,
-) -> Result<Vec<Item>, MergeError> {
+) -> Option<(Vec<Item>, BuienradarSample, LuchtmeetnetItem)> {
     let mut pollen_samples = pollen_samples;
     let mut aqi_items = aqi_items;

-    // Only retain samples/items that are no more than an hour old.
+    // Only retain samples/items that are no more than half an hour old.
     let now = Utc::now();
-    pollen_samples.retain(|smp| smp.time.signed_duration_since(now).num_seconds() > -3600);
-    aqi_items.retain(|item| item.time.signed_duration_since(now).num_seconds() > -3600);
+    pollen_samples.retain(|smp| smp.time.signed_duration_since(now).num_seconds() > -1800);
+    aqi_items.retain(|item| item.time.signed_duration_since(now).num_seconds() > -1800);

     // Align the iterators based on the (hourly) timestamps!
-    let pollen_first_time = pollen_samples
-        .first()
-        .ok_or(MergeError::NoPollenItemFound)?
-        .time;
-    let aqi_first_time = aqi_items.first().ok_or(MergeError::NoAqiItemFound)?.time;
+    let pollen_first_time = pollen_samples.first()?.time;
+    let aqi_first_time = aqi_items.first()?.time;

     if pollen_first_time < aqi_first_time {
         // Drain one or more pollen samples to line up.
-        let idx = pollen_samples
-            .iter()
-            .position(|smp| {
-                smp.time
-                    .signed_duration_since(aqi_first_time)
-                    .num_seconds()
-                    .abs()
-                    < 1800
-            })
-            .ok_or(MergeError::NoCloseAqiItemFound)?;
+        let idx = pollen_samples.iter().position(|smp| {
+            smp.time
+                .signed_duration_since(aqi_first_time)
+                .num_seconds()
+                .abs()
+                < 1800
+        })?;
         pollen_samples.drain(..idx);
     } else {
         // Drain one or more AQI items to line up.
-        let idx = aqi_items
-            .iter()
-            .position(|item| {
-                item.time
-                    .signed_duration_since(pollen_first_time)
-                    .num_seconds()
-                    .abs()
-                    < 1800
-            })
-            .ok_or(MergeError::NoClosePollenItemFound)?;
+        let idx = aqi_items.iter().position(|item| {
+            item.time
+                .signed_duration_since(pollen_first_time)
+                .num_seconds()
+                .abs()
+                < 1800
+        })?;
         aqi_items.drain(..idx);
     }

+    // Find the maximum sample/item of each series.
+    // Note: Unwrapping is possible because each series has at least an item, otherwise `.first`
+    // would have failed above.
+    let pollen_max = pollen_samples
+        .iter()
+        .max_by_key(|sample| sample.score)
+        .cloned()
+        .unwrap();
+    let aqi_max = aqi_items
+        .iter()
+        .max_by_key(|item| (item.value * 1_000.0) as u32)
+        .cloned()
+        .unwrap();
+
     // Combine the samples with items by taking the maximum of pollen sample score and AQI item
     // value.
     let items = pollen_samples
         .into_iter()
-        .zip(aqi_items)
+        .zip(aqi_items.into_iter())
         .map(|(pollen_sample, aqi_item)| {
             let time = pollen_sample.time;
             let value = (pollen_sample.score as f32).max(aqi_item.value);
@ -117,32 +107,42 @@ fn merge(
         })
         .collect();

-    Ok(items)
+    Some((items, pollen_max, aqi_max))
 }
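The alignment step only has to find the first entry of the earlier-starting series that lies within 30 minutes of the other series' first entry, and drop everything before it. A reduced sketch of that trimming on plain timestamp vectors:

use chrono::{DateTime, Duration, Utc};

/// Drops leading entries of `series` until one lies within 30 minutes of `anchor`.
/// Returns `None` when no entry is close enough, mirroring a failed merge.
fn align(series: &mut Vec<DateTime<Utc>>, anchor: DateTime<Utc>) -> Option<()> {
    let idx = series
        .iter()
        .position(|time| time.signed_duration_since(anchor).num_seconds().abs() < 1800)?;
    series.drain(..idx);
    Some(())
}

fn main() {
    let start = Utc::now();
    let mut pollen: Vec<_> = (0..4).map(|hour| start + Duration::hours(hour)).collect();
    let aqi_first = start + Duration::hours(2);
    // The first two pollen stamps are more than half an hour before the first AQI stamp.
    align(&mut pollen, aqi_first).expect("series too far apart");
    assert_eq!(pollen.len(), 2);
    println!("aligned, {} pollen entries left", pollen.len());
}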
 /// Retrieves the combined forecasted items for the provided position and metric.
 ///
+/// Besides the combined items, it also yields the maximum pollen sample and AQI item.
+/// Note that the maximum values are calculated before combining them, so the time stamp
+/// corresponds to the one in the original series, not to a timestamp of an item after merging.
+///
 /// It supports the following metric:
 /// * [`Metric::PAQI`]
+///
+/// Returns [`None`] for the combined items if retrieving data from either the Buienradar or the
+/// Luchtmeetnet provider fails or if they cannot be combined. Returns [`None`] for the maximum
+/// pollen sample or AQI item if there are no samples or items.
+///
+/// If the result is [`Some`], it will be cached for 30 minutes for the given position and
+/// metric.
 #[cached(
     time = 1800,
     key = "(Position, Metric)",
     convert = r#"{ (position, metric) }"#,
-    result = true
+    option = true
 )]
 pub(crate) async fn get(
     position: Position,
     metric: Metric,
     maps_handle: &MapsHandle,
-) -> Result<Vec<Item>, Error> {
+) -> Option<(Vec<Item>, BuienradarSample, LuchtmeetnetItem)> {
     if metric != Metric::PAQI {
-        return Err(Error::UnsupportedMetric(metric));
+        return None;
     };
-    let pollen_items = buienradar::get_samples(position, Metric::Pollen, maps_handle).await?;
-    let aqi_items = luchtmeetnet::get(position, Metric::AQI).await?;
-    let items = merge(pollen_items, aqi_items)?;
-    Ok(items)
+    let pollen_items = buienradar::get_samples(position, Metric::Pollen, maps_handle).await;
+    let aqi_items = luchtmeetnet::get(position, Metric::AQI).await;

+    merge(pollen_items?, aqi_items?)
 }
 #[cfg(test)]
@ -181,8 +181,8 @@ mod tests {
         // Perform a normal merge.
         let merged = super::merge(pollen_samples.clone(), aqi_items.clone());
-        assert!(merged.is_ok());
-        let paqi = merged.unwrap();
+        assert!(merged.is_some());
+        let (paqi, max_pollen, max_aqi) = merged.unwrap();
         assert_eq!(
             paqi,
             Vec::from([
@ -191,6 +191,8 @@ mod tests {
                 Item::new(t_2, 2.4),
             ])
         );
+        assert_eq!(max_pollen, BuienradarSample::new(t_1, 3));
+        assert_eq!(max_aqi, LuchtmeetnetItem::new(t_1, 2.9));

         // The pollen samples are shifted, i.e. one hour in the future.
         let shifted_pollen_samples = pollen_samples[2..]
@ -202,9 +204,11 @@ mod tests {
             })
             .collect::<Vec<_>>();
         let merged = super::merge(shifted_pollen_samples, aqi_items.clone());
-        assert!(merged.is_ok());
-        let paqi = merged.unwrap();
-        assert_eq!(paqi, Vec::from([Item::new(t_1, 2.9), Item::new(t_2, 3.0)]));
+        assert!(merged.is_some());
+        let (paqi, max_pollen, max_aqi) = merged.unwrap();
+        assert_eq!(paqi, Vec::from([Item::new(t_1, 2.9), Item::new(t_2, 3.0),]));
+        assert_eq!(max_pollen, BuienradarSample::new(t_2, 3));
+        assert_eq!(max_aqi, LuchtmeetnetItem::new(t_1, 2.9));

         // The AQI items are shifted, i.e. one hour in the future.
         let shifted_aqi_items = aqi_items[2..]
@ -216,20 +220,11 @@ mod tests {
             })
             .collect::<Vec<_>>();
         let merged = super::merge(pollen_samples.clone(), shifted_aqi_items);
-        assert!(merged.is_ok());
-        let paqi = merged.unwrap();
-        assert_eq!(paqi, Vec::from([Item::new(t_1, 3.0), Item::new(t_2, 2.9)]));
-
-        // The maximum sample/item should not be later than the interval the PAQI items cover.
-        let merged = super::merge(pollen_samples[..3].to_vec(), aqi_items.clone());
-        assert!(merged.is_ok());
-        let paqi = merged.unwrap();
-        assert_eq!(paqi, Vec::from([Item::new(t_0, 1.1)]));
-        let merged = super::merge(pollen_samples.clone(), aqi_items[..3].to_vec());
-        assert!(merged.is_ok());
-        let paqi = merged.unwrap();
-        assert_eq!(paqi, Vec::from([Item::new(t_0, 1.1)]));
+        assert!(merged.is_some());
+        let (paqi, max_pollen, max_aqi) = merged.unwrap();
+        assert_eq!(paqi, Vec::from([Item::new(t_1, 3.0), Item::new(t_2, 2.9),]));
+        assert_eq!(max_pollen, BuienradarSample::new(t_1, 3));
+        assert_eq!(max_aqi, LuchtmeetnetItem::new(t_2, 2.9));

         // Merging fails because the samples/items are too far (6 hours) apart.
         let shifted_aqi_items = aqi_items
@ -241,29 +236,18 @@ mod tests {
             })
             .collect::<Vec<_>>();
         let merged = super::merge(pollen_samples.clone(), shifted_aqi_items);
-        assert_eq!(merged, Err(MergeError::NoCloseAqiItemFound));
-
-        let shifted_pollen_samples = pollen_samples
-            .iter()
-            .cloned()
-            .map(|mut item| {
-                item.time = item.time.checked_add_signed(Duration::hours(6)).unwrap();
-                item
-            })
-            .collect::<Vec<_>>();
-        let merged = super::merge(shifted_pollen_samples, aqi_items.clone());
-        assert_eq!(merged, Err(MergeError::NoClosePollenItemFound));
+        assert_eq!(merged, None);

         // The pollen samples list is empty, or everything is too old.
         let merged = super::merge(Vec::new(), aqi_items.clone());
-        assert_eq!(merged, Err(MergeError::NoPollenItemFound));
+        assert_eq!(merged, None);
         let merged = super::merge(pollen_samples[0..2].to_vec(), aqi_items.clone());
-        assert_eq!(merged, Err(MergeError::NoPollenItemFound));
+        assert_eq!(merged, None);

         // The AQI items list is empty, or everything is too old.
         let merged = super::merge(pollen_samples.clone(), Vec::new());
-        assert_eq!(merged, Err(MergeError::NoAqiItemFound));
+        assert_eq!(merged, None);
         let merged = super::merge(pollen_samples, aqi_items[0..2].to_vec());
-        assert_eq!(merged, Err(MergeError::NoAqiItemFound));
+        assert_eq!(merged, None);
     }
 }

View file

@ -9,7 +9,7 @@ use reqwest::Url;
 use rocket::serde::{Deserialize, Serialize};

 use crate::position::Position;
-use crate::{Error, Metric, Result};
+use crate::Metric;

 /// The base URL for the Luchtmeetnet API.
 const LUCHTMEETNET_BASE_URL: &str = "https://api.luchtmeetnet.nl/open_api/concentrations";
@ -54,14 +54,20 @@ impl Item {
 /// * [`Metric::NO2`]
 /// * [`Metric::O3`]
 /// * [`Metric::PM10`]
-#[cached(time = 1800, result = true)]
-pub(crate) async fn get(position: Position, metric: Metric) -> Result<Vec<Item>> {
+///
+/// Returns [`None`] if retrieval or deserialization fails, or if the metric is not supported by
+/// this provider.
+///
+/// If the result is [`Some`] it will be cached for 30 minutes for the given position and
+/// metric.
+#[cached(time = 1800, option = true)]
+pub(crate) async fn get(position: Position, metric: Metric) -> Option<Vec<Item>> {
     let formula = match metric {
         Metric::AQI => "lki",
         Metric::NO2 => "no2",
         Metric::O3 => "o3",
         Metric::PM10 => "pm10",
-        _ => return Err(Error::UnsupportedMetric(metric)),
+        _ => return None, // Unsupported metric
     };

     let mut url = Url::parse(LUCHTMEETNET_BASE_URL).unwrap();
     url.query_pairs_mut()
@ -70,8 +76,11 @@ pub(crate) async fn get(position: Position, metric: Metric) -> Result<Vec<Item>>
         .append_pair("longitude", &position.lon_as_str(5));

     println!("▶️ Retrieving Luchtmeetnet data from: {url}");
-    let response = reqwest::get(url).await?;
-    let root: Container = response.error_for_status()?.json().await?;
+    let response = reqwest::get(url).await.ok()?;
+    let root: Container = match response.error_for_status() {
+        Ok(res) => res.json().await.ok()?,
+        Err(_err) => return None,
+    };

     // Filter items that are older than one hour before now. They seem to occur sometimes?
     let too_old = Utc::now() - Duration::hours(1);
@ -81,5 +90,5 @@ pub(crate) async fn get(position: Position, metric: Metric) -> Result<Vec<Item>>
         .filter(|item| item.time > too_old)
         .collect();

-    Ok(items)
+    Some(items)
 }
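The final filter guards against Luchtmeetnet occasionally returning readings from the past: anything stamped more than an hour before now is dropped. A one-function sketch of that cutoff:

use chrono::{DateTime, Duration, Utc};

fn drop_stale(items: Vec<DateTime<Utc>>) -> Vec<DateTime<Utc>> {
    let too_old = Utc::now() - Duration::hours(1);
    items.into_iter().filter(|time| *time > too_old).collect()
}

fn main() {
    let now = Utc::now();
    let kept = drop_stale(vec![now, now - Duration::hours(3)]);
    assert_eq!(kept.len(), 1);
    println!("{} item(s) kept", kept.len());
}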