Compare commits
No commits in common. "main" and "v0.2.0" have entirely different histories.
16 changed files with 1334 additions and 2618 deletions
|
@ -1,26 +0,0 @@
|
|||
name: "Audit dependencies"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- '.forgejo/workflows/audit.yml'
|
||||
- '**/Cargo.toml'
|
||||
- '**/Cargo.lock'
|
||||
- '**/audit.toml'
|
||||
schedule:
|
||||
- cron: '0 23 * * 6'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
audit:
|
||||
runs-on: rust-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Run cargo audit
|
||||
run: cargo audit
|
|
@ -1,29 +0,0 @@
|
|||
name: "Check and lint"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check_lint:
|
||||
name: Check and lint
|
||||
runs-on: rust-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Run cargo check
|
||||
run: cargo check --all-features
|
||||
|
||||
- name: Run cargo clippy
|
||||
run: cargo clippy -- -D warnings
|
||||
|
||||
- name: Run cargo fmt
|
||||
run: cargo fmt --all -- --check
|
||||
|
||||
# TODO: Add a test suite first!
|
||||
# - name: Run cargo test
|
||||
# run: cargo test --all-features
|
|
@ -1,91 +0,0 @@
|
|||
name: "Release"
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: "Release"
|
||||
runs-on: rust-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Determine the repository name and version of the release
|
||||
run: |
|
||||
REPO_NAME=$(basename $GITHUB_REPOSITORY)
|
||||
VERSION=${GITHUB_REF_NAME#v}
|
||||
echo "Releasing version of $REPO_NAME: $VERSION"
|
||||
echo "REPO_NAME=$REPO_NAME" >> $GITHUB_ENV
|
||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||
|
||||
- name: Get the release notes from the changelog
|
||||
run: |
|
||||
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
|
||||
RELEASE_NOTES=$(sed -n -e "/^## \[$VERSION\]/,/^## \[/{//"'!'"p;}" CHANGELOG.md | sed -e '1d;$d')
|
||||
echo "Release notes:"
|
||||
echo
|
||||
echo "$RELEASE_NOTES"
|
||||
echo "RELEASE_NOTES<<$EOF" >> "$GITHUB_ENV"
|
||||
echo "$RELEASE_NOTES" >> "$GITHUB_ENV"
|
||||
echo "$EOF" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Build a release binary
|
||||
run: |
|
||||
# FIXME: This should be figured out in a better manner!
|
||||
BIN_NAME=${REPO_NAME}-x86_64-unknown-linux-gnu
|
||||
cargo build --release
|
||||
mkdir -p dist
|
||||
cp target/release/${REPO_NAME} dist/${BIN_NAME}
|
||||
shasum -a 256 dist/${BIN_NAME} > dist/${BIN_NAME}.sha256sum
|
||||
|
||||
- name: Release to Forgejo
|
||||
uses: paul/forgejo-release@main
|
||||
with:
|
||||
direction: upload
|
||||
release-dir: dist
|
||||
release-notes: '${{ env.RELEASE_NOTES }}'
|
||||
title: 'Release ${{ env.VERSION }}'
|
||||
token: '${{ secrets.RELEASE_TOKEN }}'
|
||||
|
||||
release-crate:
|
||||
name: "Release Rust crate"
|
||||
runs-on: rust-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Run cargo publish
|
||||
run: cargo publish --registry luon
|
||||
env:
|
||||
CARGO_REGISTRIES_LUON_INDEX: 'sparse+${{ github.server_url }}/api/packages/${{ github.repository_owner }}/cargo/'
|
||||
CARGO_REGISTRIES_LUON_TOKEN: 'Bearer ${{ secrets.CARGO_TOKEN }}'
|
||||
|
||||
release-deb:
|
||||
name: "Release Debian package"
|
||||
runs-on: rust-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install and run cargo-deb
|
||||
run: |
|
||||
unset GITHUB_TOKEN
|
||||
cargo binstall --only-signed -y cargo-deb
|
||||
cargo deb
|
||||
|
||||
- name: Publish Debian package
|
||||
env:
|
||||
DEB_REPO_TOKEN: '${{ secrets.DEB_REPO_TOKEN }}'
|
||||
run: |
|
||||
curl --config <(printf "user=%s:%s" paul "${DEB_REPO_TOKEN}") \
|
||||
--upload-file target/debian/*.deb \
|
||||
${{ github.server_url }}/api/packages/${{ github.repository_owner }}/debian/pool/bookworm/main/upload
|
147
CHANGELOG.md
147
CHANGELOG.md
|
@ -7,141 +7,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
|
||||
## [Unreleased]
|
||||
|
||||
## [0.5.4] - 2024-07-26
|
||||
|
||||
### Changed
|
||||
|
||||
* Switch to Forgejo Actions; add audit workflow
|
||||
* Update dependency on `rocket_dyn_templates`
|
||||
* Update dependency on `youtube_dl`
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies, fixes security advisories:
|
||||
* [RUSTSEC-2024-0019](https://rustsec.org/advisories/RUSTSEC-2024-0019)
|
||||
* [RUSTSEC-2024-0332](https://rustsec.org/advisories/RUSTSEC-2024-0332)
|
||||
* [RUSTSEC-2024-0336](https://rustsec.org/advisories/RUSTSEC-2024-0336)
|
||||
* [RUSTSEC-2024-0357](https://rustsec.org/advisories/RUSTSEC-2024-0357)
|
||||
|
||||
## [0.5.3] - 2024-02-27
|
||||
|
||||
### Changed
|
||||
|
||||
* Update dependency on `cached`
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies, fixes security advisories:
|
||||
* [RUSTSEC-2024-0003](https://rustsec.org/advisories/RUSTSEC-2024-0003)
|
||||
* [RUSTSEC-2023-0072](https://rustsec.org/advisories/RUSTSEC-2023-0072)
|
||||
* [RUSTSEC-2023-0074](https://rustsec.org/advisories/RUSTSEC-2023-0074)
|
||||
|
||||
### Fixed
|
||||
|
||||
* Handle paging information being absent; fixes short feeds for Mixcloud (#17)
|
||||
|
||||
## [0.5.2] - 2023-11-03
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies
|
||||
([RUSTSEC-2020-0071](https://rustsec.org/advisories/RUSTSEC-2020-0071.html))
|
||||
|
||||
### Changed
|
||||
|
||||
* Switch to Rocket 0.5 RC4
|
||||
* Update dependency on `cached`
|
||||
|
||||
## [0.5.1] - 2023-08-25
|
||||
|
||||
### Changed
|
||||
|
||||
* Bump the dependency on `youtube_dl`
|
||||
* Update release Gitea Actions workflow; add separate job to release Debian
|
||||
package to the new repository
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies
|
||||
([RUSTSEC-2023-0034](https://rustsec.org/advisories/RUSTSEC-2023-0034),
|
||||
[RUSTSEC-2023-0044](https://rustsec.org/advisories/RUSTSEC-2023-0044),
|
||||
[RUSTSEC-2023-0052](https://rustsec.org/advisories/RUSTSEC-2023-0052))
|
||||
|
||||
## [0.5.0] - 2023-06-08
|
||||
|
||||
### Added
|
||||
|
||||
* Add full release Gitea Actions workflow
|
||||
|
||||
### Changed
|
||||
|
||||
* Simplify Gitea Actions check and lint workflow
|
||||
|
||||
### Fixed
|
||||
|
||||
* Differentiate between publish and update time for items
|
||||
|
||||
## [0.4.1] - 2023-04-11
|
||||
|
||||
### Changed
|
||||
|
||||
* Select only direct HTTP MP4 audio streams for the Mixcloud back-end
|
||||
|
||||
## [0.4.0] - 2023-03-24
|
||||
|
||||
### Added
|
||||
|
||||
* Add Gitea Actions workflow for cargo
|
||||
|
||||
### Changed
|
||||
|
||||
* Update dependencies on `cached` and `youtube_dl`
|
||||
* Update to `rocket` version 0.5.0-rc.3
|
||||
* Select only MP4 audio streams for the YouTube back-end (experimental)
|
||||
* Remove parameters from MIME types to prevent clients tripping over them
|
||||
|
||||
### Fixed
|
||||
|
||||
* Bump the dependency on `ytextract` (#14)
|
||||
* Fix typo in the documentation
|
||||
|
||||
### Security
|
||||
|
||||
* Update dependencies
|
||||
([RUSTSEC-2021-0145](https://rustsec.org/advisories/RUSTSEC-2021-0145.html),
|
||||
[RUSTSEC-2020-0016](https://rustsec.org/advisories/RUSTSEC-2020-0016.html),
|
||||
[RUSTSEC-2023-0001](https://rustsec.org/advisories/RUSTSEC-2023-0001.html),
|
||||
[RUSTSEC-2023-0005](https://rustsec.org/advisories/RUSTSEC-2023-0005.html),
|
||||
[RUSTSEC-2023-0018](https://rustsec.org/advisories/RUSTSEC-2023-0018.html),
|
||||
[RUSTSEC-2023-0022](https://rustsec.org/advisories/RUSTSEC-2023-0022.html),
|
||||
[RUSTSEC-2023-0023](https://rustsec.org/advisories/RUSTSEC-2023-0023.html),
|
||||
[RUSTSEC-2023-0024](https://rustsec.org/advisories/RUSTSEC-2023-0024.html))
|
||||
|
||||
## [0.3.0] - 2022-12-24
|
||||
|
||||
### Added
|
||||
|
||||
* Add abstraction that will support multiple back-ends
|
||||
* Add YouTube back-end for generating feeds of YouTube channels and
|
||||
playlists (#5)
|
||||
|
||||
### Changed
|
||||
|
||||
* Change the name of the `url` to `public_url` in the configuration file
|
||||
`Rocket.toml`
|
||||
* Make feed channel and item images optional
|
||||
* Simplify how Rocket is launched
|
||||
* Split off feed generation to a separate module
|
||||
* Improve documentation
|
||||
|
||||
### Fixed
|
||||
|
||||
* Some code refactoring
|
||||
|
||||
### Security
|
||||
|
||||
* Update/bump dependencies
|
||||
|
||||
## [0.2.0] - 2022-05-27
|
||||
|
||||
### Added
|
||||
|
@ -166,14 +31,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
|
||||
Initial release.
|
||||
|
||||
[Unreleased]: https://git.luon.net/paul/podbringer/compare/v0.5.4...HEAD
|
||||
[0.5.4]: https://git.luon.net/paul/podbringer/compare/v0.5.3..v0.5.4
|
||||
[0.5.3]: https://git.luon.net/paul/podbringer/compare/v0.5.2..v0.5.3
|
||||
[0.5.2]: https://git.luon.net/paul/podbringer/compare/v0.5.1..v0.5.2
|
||||
[0.5.1]: https://git.luon.net/paul/podbringer/compare/v0.5.0..v0.5.1
|
||||
[0.5.0]: https://git.luon.net/paul/podbringer/compare/v0.4.1..v0.5.0
|
||||
[0.4.1]: https://git.luon.net/paul/podbringer/compare/v0.4.0..v0.4.1
|
||||
[0.4.0]: https://git.luon.net/paul/podbringer/compare/v0.3.0..v0.4.0
|
||||
[0.3.0]: https://git.luon.net/paul/podbringer/compare/v0.2.0..v0.3.0
|
||||
[0.2.0]: https://git.luon.net/paul/podbringer/compare/v0.1.0..v0.2.0
|
||||
[Unreleased]: https://git.luon.net/paul/podbringer/compare/v0.2.0...HEAD
|
||||
[0.2.0]: https://git.luon.net/paul/podbringer/compare/v0.1.0..v0.2.0
|
||||
[0.1.0]: https://git.luon.net/paul/podbringer/commits/tag/v0.1.0
|
||||
|
|
2184
Cargo.lock
generated
2184
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
20
Cargo.toml
20
Cargo.toml
|
@ -1,27 +1,22 @@
|
|||
[package]
|
||||
name = "podbringer"
|
||||
version = "0.5.4"
|
||||
version = "0.2.0"
|
||||
authors = ["Paul van Tilburg <paul@luon.net>"]
|
||||
edition = "2021"
|
||||
description = "Web service that provides podcasts for services that don't offer them (anymore)"
|
||||
readme = "README.md"
|
||||
repository = "https://git.luon.net/paul/podbringer"
|
||||
license = "MIT"
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.57"
|
||||
cached = { version = "0.49.2", features = ["async"] }
|
||||
cached = { version = "0.34.0", features = ["async"] }
|
||||
chrono = { version = "0.4.19", features = ["serde"] }
|
||||
enum_dispatch = "0.3.8"
|
||||
mime-db = "1.6.0"
|
||||
reqwest = { version = "0.11.10", features = ["json"] }
|
||||
rocket = { version = "0.5.0-rc.3", features = ["json"] }
|
||||
rocket_dyn_templates = { version = "0.2.0", features = ["tera"] }
|
||||
rocket = { version = "0.5.0-rc.2", features = ["json"] }
|
||||
rocket_dyn_templates = { version = "0.1.0-rc.2", features = ["tera"] }
|
||||
rss = "2.0.1"
|
||||
thiserror = "1.0.31"
|
||||
url = { version = "2.2.2", features = ["serde"] }
|
||||
youtube_dl = { version = "0.10.0", features = ["tokio"] }
|
||||
ytextract = "0.11.2"
|
||||
url = "2.2.2"
|
||||
youtube_dl = { version = "0.7.0", features = ["tokio"] }
|
||||
|
||||
[package.metadata.deb]
|
||||
maintainer = "Paul van Tilburg <paul@luon.net>"
|
||||
|
@ -32,8 +27,7 @@ Podbringer is a web service that provides podcasts for services that don't
|
|||
offer them (anymore). It provides a way to get the RSS feed for your podcast
|
||||
client and it facilites the downloads of the pods (enclosures).
|
||||
|
||||
It currently only supports [Mixcloud](https://www.mixcloud.com) and
|
||||
[YouTube](https://www.youtube.com).
|
||||
It currently only supports [Mixcloud](https://mixcloud.com).
|
||||
Other back-ends might be added in the future.
|
||||
"""
|
||||
section = "net"
|
||||
|
|
63
README.md
63
README.md
|
@ -4,8 +4,7 @@ Podbringer is a web service that provides podcasts for services that don't
|
|||
offer them (anymore). It provides a way to get the RSS feed for your podcast
|
||||
client and it facilitates the downloads of the pods (enclosures).
|
||||
|
||||
It currently only supports [Mixcloud](https://www.mixcloud.com) and
|
||||
[YouTube](https://www.youtube.com).
|
||||
It currently only supports [Mixcloud](https://mixcloud.com).
|
||||
Other back-ends might be added in the future.
|
||||
|
||||
## Building & running
|
||||
|
@ -26,8 +25,8 @@ builds when you don't add `--release`.)
|
|||
### Configuration
|
||||
|
||||
For now, you will need to provide Rocket with configuration to tell it at which
|
||||
public URL Podbringer is hosted. This needs to be done even if you are not using
|
||||
a reverse proxy, in which case you need to provide it with the proxied URL. You
|
||||
URL Podbringer is hosted. This needs to be done even if you are not using a
|
||||
reverse proxy, in which case you need to provide it with the proxied URL. You
|
||||
can also use the configuration to configure a different address and/or port.
|
||||
Just create a `Rocket.toml` file that contains (or copy `Rocket.toml.example`):
|
||||
|
||||
|
@ -35,7 +34,7 @@ Just create a `Rocket.toml` file that contains (or copy `Rocket.toml.example`):
|
|||
[default]
|
||||
address = "0.0.0.0"
|
||||
port = 7062
|
||||
public_url = "https://my.domain.tld/podbringer"
|
||||
url = "https://my.domain.tld/podbringer"
|
||||
```
|
||||
|
||||
This will work independent of the type of build. For more about Rocket's
|
||||
|
@ -45,16 +44,16 @@ configuration, see: <https://rocket.rs/v0.5-rc/guide/configuration/>.
|
|||
|
||||
Podbringer currently has no front-end or web interface yet that can help you
|
||||
use it. Until then, you just have to enter the right service-specific RSS feed
|
||||
URL in your favorite podcast client to start using it. For example:
|
||||
URL in your favorite podcast client to start using it.
|
||||
|
||||
Given the Mixcloud URL <https://www.mixcloud.com/myfavouriteband/>, the URL you
|
||||
need to use for Podbringer is comprised of the following parts:
|
||||
|
||||
```text
|
||||
https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband
|
||||
|------------------------------| |------| |-------------|
|
||||
The Podbringer public URL Service Service ID
|
||||
```
|
||||
|
||||
So, the URL consists of the location of Podbringer, the fact that you want the feed,
|
||||
the name of the service and the ID that identifies something list on that service.
|
||||
https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband
|
||||
|------------------------------| |-------||--------------|
|
||||
The Podbringer location URL Service User @ service
|
||||
```
|
||||
|
||||
### Feed item limit
|
||||
|
||||
|
@ -63,43 +62,7 @@ contains at most 50 items by default. If you want to have more (or less) items,
|
|||
provide the limit in the URL by setting the `limit` parameter.
|
||||
|
||||
For example, to get up until 1000 items the URL becomes:
|
||||
|
||||
```text
|
||||
https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband?limit=1000
|
||||
```
|
||||
|
||||
### Service: Mixcloud
|
||||
|
||||
For Mixcloud, a feed can be constructed of everything that a user posted.
|
||||
Given the Mixcloud URL like <https://www.mixcloud.com/myfavouriteband/>, the
|
||||
`myfavouriteband` part of the URL is the Mixcloud username and can be used as
|
||||
the service ID.
|
||||
|
||||
```text
|
||||
https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband
|
||||
|------------------------------| |------| |-------------|
|
||||
The Podbringer public URL Service Username
|
||||
```
|
||||
|
||||
### Service: YouTube
|
||||
|
||||
For YouTube, a feed can either be constructed of a channel or a playlist.
|
||||
Given the YouTube channel URL like <https://www.youtube.com/c/favouritechannel>,
|
||||
the `favouritechannel` part of the URL is the YouTube channel ID.
|
||||
Given the YouTube playlist URL
|
||||
<https://www.youtube.com/playlist?list=PLsomeplaylistidentifier>, the
|
||||
`PLsomeplaylistidentifier` part of the URL is the YouTube playlist ID.
|
||||
Either the channel or playlist ID can be used as the service ID.
|
||||
|
||||
```text
|
||||
https://my.domain.tld/podbringer/feed/youtube/favouritechannel
|
||||
|------------------------------| |-----| |--------------|
|
||||
The Podbringer public URL Service Channel ID
|
||||
|
||||
https://my.domain.tld/podbringer/feed/youtube/PLsomeplaylistidentifier
|
||||
|------------------------------| |-----| |----------------------|
|
||||
The Podbringer public URL Service Playlist ID
|
||||
```
|
||||
`https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband?limit=1000`
|
||||
|
||||
## License
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
[default]
|
||||
address = "0.0.0.0"
|
||||
port = 7062
|
||||
public_url = "https://my.domain.tld/podbringer"
|
||||
url = "https://my.domain.tld/podbringer"
|
||||
|
|
134
src/backends.rs
134
src/backends.rs
|
@ -1,134 +0,0 @@
|
|||
//! The supported content back-ends.
|
||||
//!
|
||||
//! A content back-end should provide two kinds of objects: channels and their (content) items.
|
||||
//! It must provide a methods to retrieve a channel and its items and a method to return the
|
||||
//! redirect URL for some path that points to media within context of the back-end.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime, Utc};
|
||||
use enum_dispatch::enum_dispatch;
|
||||
use reqwest::Url;
|
||||
|
||||
use crate::{Error, Result};
|
||||
|
||||
pub(crate) mod mixcloud;
|
||||
pub(crate) mod youtube;
|
||||
|
||||
/// Retrieves the back-end for the provided ID (if supported).
|
||||
pub(crate) fn get(backend: &str) -> Result<Backends> {
|
||||
match backend {
|
||||
"mixcloud" => Ok(Backends::Mixcloud(mixcloud::backend())),
|
||||
"youtube" => Ok(Backends::YouTube(youtube::backend())),
|
||||
_ => Err(Error::UnsupportedBackend(backend.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported back-ends.
|
||||
#[enum_dispatch(Backend)]
|
||||
pub(crate) enum Backends {
|
||||
/// Mixcloud (<https://www.mixcloud.com>)
|
||||
Mixcloud(mixcloud::Backend),
|
||||
|
||||
/// YouTube (<https://www.youtube.com>)
|
||||
YouTube(youtube::Backend),
|
||||
}
|
||||
|
||||
/// Functionality of a content back-end.
|
||||
#[async_trait]
|
||||
#[enum_dispatch]
|
||||
pub(crate) trait Backend {
|
||||
/// Returns the name of the backend.
|
||||
#[allow(unused)] // For future use!
|
||||
fn name(&self) -> &'static str;
|
||||
|
||||
/// Returns the channel with its currently contained content items.
|
||||
async fn channel(&self, channel_id: &str, item_limit: Option<usize>) -> Result<Channel>;
|
||||
|
||||
/// Returns the redirect URL for the provided download file path.
|
||||
async fn redirect_url(&self, file: &Path) -> Result<String>;
|
||||
}
|
||||
|
||||
/// The metadata of a collection of content items.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct Channel {
|
||||
/// The title of the channel.
|
||||
pub(crate) title: String,
|
||||
|
||||
/// The link to the channel.
|
||||
pub(crate) link: Url,
|
||||
|
||||
/// The description of the channel.
|
||||
pub(crate) description: String,
|
||||
|
||||
/// The author/composer/creator of the channel.
|
||||
pub(crate) author: Option<String>,
|
||||
|
||||
/// The categories associated with the channel.
|
||||
///
|
||||
/// The first category is considered to be the "main" category.
|
||||
pub(crate) categories: Vec<String>,
|
||||
|
||||
/// The URL of the image/logo/avatar of a channel.
|
||||
pub(crate) image: Option<Url>,
|
||||
|
||||
/// The contained content items.
|
||||
pub(crate) items: Vec<Item>,
|
||||
}
|
||||
|
||||
/// A content item belonging to a channel.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct Item {
|
||||
/// The title of the item.
|
||||
pub(crate) title: String,
|
||||
|
||||
/// The direct link to the item.
|
||||
pub(crate) link: Url,
|
||||
|
||||
/// The description of the item.
|
||||
pub(crate) description: Option<String>,
|
||||
|
||||
/// The categories of the items (and their domain URLs).
|
||||
pub(crate) categories: HashMap<String, Url>,
|
||||
|
||||
/// The enclosed media content of the item,
|
||||
pub(crate) enclosure: Enclosure,
|
||||
|
||||
/// The duration of the media content (in seconds).
|
||||
pub(crate) duration: Option<u32>,
|
||||
|
||||
/// The global UID of the item.
|
||||
///
|
||||
/// This GUID is not considered nor needs to be a permalink.
|
||||
pub(crate) guid: String,
|
||||
|
||||
/// The keywords associated with the item.
|
||||
pub(crate) keywords: Vec<String>,
|
||||
|
||||
/// The URL of the image of the item.
|
||||
pub(crate) image: Option<Url>,
|
||||
|
||||
/// The timestamp the item was published.
|
||||
pub(crate) published_at: DateTime<Utc>,
|
||||
|
||||
/// The timestamp the item was last updated.
|
||||
pub(crate) updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
/// The enclosed media content of an item.
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct Enclosure {
|
||||
/// The path of the download file associated with the item enclosure.
|
||||
///
|
||||
/// This is used as a part of the enclosure URL of the item and will be passed to
|
||||
/// [`Backend::redirect_url`] later when a client wants to download the media content.
|
||||
pub(crate) file: PathBuf,
|
||||
|
||||
/// The MIME type of the download file path associated with the item enclosure.
|
||||
pub(crate) mime_type: String,
|
||||
|
||||
/// The length of the enclosed media content (in bytes).
|
||||
pub(crate) length: u64,
|
||||
}
|
|
@ -1,323 +0,0 @@
|
|||
//! The Mixcloud back-end.
|
||||
//!
|
||||
//! It uses the Mixcloud API to retrieve the feed (user) and items (cloudcasts)).
|
||||
//! See also: <https://www.mixcloud.com/developers/>
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::{DateTime, Utc};
|
||||
use reqwest::Url;
|
||||
use rocket::serde::Deserialize;
|
||||
use youtube_dl::{YoutubeDl, YoutubeDlOutput};
|
||||
|
||||
use super::{Channel, Enclosure, Item};
|
||||
use crate::{Error, Result};
|
||||
|
||||
/// The base URL for the Mixcloud API.
|
||||
const API_BASE_URL: &str = "https://api.mixcloud.com";
|
||||
|
||||
/// The base URL for downloading Mixcloud files.
|
||||
const FILES_BASE_URL: &str = "https://www.mixcloud.com";
|
||||
|
||||
/// The default bitrate used by Mixcloud.
|
||||
const DEFAULT_BITRATE: u64 = 64 * 1024;
|
||||
|
||||
/// The default file (MIME) type used by Mixcloud.
|
||||
const DEFAULT_FILE_TYPE: &str = "audio/mp4";
|
||||
|
||||
/// The default page size.
|
||||
const DEFAULT_PAGE_SIZE: usize = 50;
|
||||
|
||||
/// Creates a Mixcloud back-end.
|
||||
pub(crate) fn backend() -> Backend {
|
||||
Backend
|
||||
}
|
||||
|
||||
/// The Mixcloud back-end.
|
||||
pub struct Backend;
|
||||
|
||||
#[async_trait]
|
||||
impl super::Backend for Backend {
|
||||
fn name(&self) -> &'static str {
|
||||
"Mixcloud"
|
||||
}
|
||||
|
||||
async fn channel(&self, channel_id: &str, item_limit: Option<usize>) -> Result<Channel> {
|
||||
// For Mixcloud a channel ID is some user name.
|
||||
let mut user_url = Url::parse(API_BASE_URL).expect("URL can always be parsed");
|
||||
user_url.set_path(channel_id);
|
||||
|
||||
println!("⏬ Retrieving user {channel_id} from {user_url}...");
|
||||
let user = fetch_user(user_url).await?;
|
||||
|
||||
// The items of a channel are the user's cloudcasts.
|
||||
let mut limit = item_limit.unwrap_or(DEFAULT_PAGE_SIZE);
|
||||
let mut offset = 0;
|
||||
let mut cloudcasts_url = Url::parse(API_BASE_URL).expect("URL can always be parsed");
|
||||
cloudcasts_url.set_path(&format!("{channel_id}/cloudcasts/"));
|
||||
println!("⏬ Retrieving cloudcasts of user {channel_id} from {cloudcasts_url}...");
|
||||
|
||||
set_paging_query(&mut cloudcasts_url, limit, offset);
|
||||
let mut cloudcasts = Vec::with_capacity(50); // The initial limit
|
||||
loop {
|
||||
let cloudcasts_res: CloudcastsResponse = fetch_cloudcasts(cloudcasts_url).await?;
|
||||
let count = cloudcasts_res.items.len();
|
||||
cloudcasts.extend(cloudcasts_res.items);
|
||||
|
||||
// Check if any paging information is present.
|
||||
let Some(paging) = cloudcasts_res.paging else {
|
||||
break;
|
||||
};
|
||||
|
||||
// Continue onto the next URL in the paging, if there is one and the limit was not
|
||||
// reached.
|
||||
limit = limit.saturating_sub(count);
|
||||
offset += count;
|
||||
match (limit, paging.next) {
|
||||
(0, Some(_)) => break,
|
||||
(_, Some(next_url)) => {
|
||||
cloudcasts_url = Url::parse(&next_url)?;
|
||||
set_paging_query(&mut cloudcasts_url, limit, offset);
|
||||
}
|
||||
(_, None) => break,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Channel::from(UserWithCloudcasts(user, cloudcasts)))
|
||||
}
|
||||
|
||||
async fn redirect_url(&self, file: &Path) -> Result<String> {
|
||||
let key = format!("/{}/", file.with_extension("").to_string_lossy());
|
||||
|
||||
retrieve_redirect_url(&key).await
|
||||
}
|
||||
}
|
||||
|
||||
/// A Mixcloud user with its cloudcasts.
|
||||
pub(crate) struct UserWithCloudcasts(User, Vec<Cloudcast>);
|
||||
|
||||
/// A Mixcloud user (response).
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct User {
|
||||
/// The name of the user.
|
||||
pub(crate) name: String,
|
||||
|
||||
/// The bio (description) of the user.
|
||||
pub(crate) biog: String,
|
||||
|
||||
/// The picture URLs associated with the user.
|
||||
pub(crate) pictures: Pictures,
|
||||
|
||||
/// The original URL of the user.
|
||||
pub(crate) url: Url,
|
||||
}
|
||||
|
||||
/// A collection of different sizes/variants of a picture.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct Pictures {
|
||||
/// The URL of a large picture of the user.
|
||||
pub(crate) large: Url,
|
||||
}
|
||||
|
||||
/// The Mixcloud cloudcasts response.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct CloudcastsResponse {
|
||||
/// The contained cloudcast items.
|
||||
#[serde(rename = "data")]
|
||||
items: Vec<Cloudcast>,
|
||||
|
||||
/// The paging information (if any).
|
||||
paging: Option<CloudcastsPaging>,
|
||||
}
|
||||
|
||||
/// The Mixcloud paging info.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct CloudcastsPaging {
|
||||
/// The API URL of the next page.
|
||||
next: Option<String>,
|
||||
}
|
||||
|
||||
/// A Mixcloud cloudcast.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct Cloudcast {
|
||||
/// The key of the cloudcast.
|
||||
pub(crate) key: String,
|
||||
|
||||
/// The name of the cloudcast.
|
||||
pub(crate) name: String,
|
||||
|
||||
/// The slug of the cloudcast (used for the enclosure).
|
||||
pub(crate) slug: String,
|
||||
|
||||
/// The picture URLs associated with the cloudcast.
|
||||
pub(crate) pictures: Pictures,
|
||||
|
||||
/// The tags of the cloudcast.
|
||||
pub(crate) tags: Vec<Tag>,
|
||||
|
||||
/// The time the feed was created.
|
||||
pub(crate) created_time: DateTime<Utc>,
|
||||
|
||||
/// The time the feed was updated.
|
||||
pub(crate) updated_time: DateTime<Utc>,
|
||||
|
||||
/// The original URL of the cloudcast.
|
||||
pub(crate) url: Url,
|
||||
|
||||
/// The length of the cloudcast (in seconds).
|
||||
pub(crate) audio_length: u32,
|
||||
}
|
||||
|
||||
/// A Mixcloud cloudcast tag.
|
||||
#[derive(Clone, Debug, Deserialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct Tag {
|
||||
/// The name of the tag.
|
||||
pub(crate) name: String,
|
||||
|
||||
/// The URL of the tag.
|
||||
pub(crate) url: Url,
|
||||
}
|
||||
|
||||
impl From<UserWithCloudcasts> for Channel {
|
||||
fn from(UserWithCloudcasts(user, cloudcasts): UserWithCloudcasts) -> Self {
|
||||
// FIXME: Don't hardcode the category!
|
||||
let categories = Vec::from([String::from("Music")]);
|
||||
let items = cloudcasts.into_iter().map(From::from).collect();
|
||||
|
||||
Channel {
|
||||
title: format!("{0} (via Mixcloud)", user.name),
|
||||
link: user.url,
|
||||
description: user.biog,
|
||||
author: Some(user.name),
|
||||
categories,
|
||||
image: Some(user.pictures.large),
|
||||
items,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Cloudcast> for Item {
|
||||
fn from(cloudcast: Cloudcast) -> Self {
|
||||
let mut file = PathBuf::from(cloudcast.key.trim_end_matches('/'));
|
||||
let extension = mime_db::extension(DEFAULT_FILE_TYPE).expect("MIME type has extension");
|
||||
file.set_extension(extension);
|
||||
|
||||
// FIXME: Don't hardcode the description!
|
||||
let description = Some(format!("Taken from Mixcloud: {0}", cloudcast.url));
|
||||
let categories = cloudcast
|
||||
.tags
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|tag| (tag.name, tag.url))
|
||||
.collect();
|
||||
let enclosure = Enclosure {
|
||||
file,
|
||||
mime_type: String::from(DEFAULT_FILE_TYPE),
|
||||
length: estimated_file_size(cloudcast.audio_length),
|
||||
};
|
||||
let keywords = cloudcast.tags.into_iter().map(|tag| tag.name).collect();
|
||||
|
||||
Item {
|
||||
title: cloudcast.name,
|
||||
link: cloudcast.url,
|
||||
description,
|
||||
categories,
|
||||
enclosure,
|
||||
duration: Some(cloudcast.audio_length),
|
||||
guid: cloudcast.slug,
|
||||
keywords,
|
||||
image: Some(cloudcast.pictures.large),
|
||||
published_at: cloudcast.created_time,
|
||||
updated_at: cloudcast.updated_time,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the estimated file size in bytes for a given duration.
|
||||
///
|
||||
/// This uses the default bitrate (see [`DEFAULT_BITRATE`]) which is in B/s.
|
||||
fn estimated_file_size(duration: u32) -> u64 {
|
||||
DEFAULT_BITRATE * duration as u64 / 8
|
||||
}
|
||||
|
||||
/// Fetches the user from the URL.
|
||||
///
|
||||
/// If the result is [`Ok`], the user will be cached for 24 hours for the given URL.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ url.to_string() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
///
|
||||
/// If the result is [`Ok`], the user will be cached for 24 hours for the given username.
|
||||
async fn fetch_user(url: Url) -> Result<User> {
|
||||
let response = reqwest::get(url).await?.error_for_status()?;
|
||||
let user = response.json().await?;
|
||||
|
||||
Ok(user)
|
||||
}
|
||||
|
||||
/// Fetches cloudcasts from the URL.
|
||||
///
|
||||
/// If the result is [`Ok`], the cloudcasts will be cached for 24 hours for the given URL.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ url.to_string() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
async fn fetch_cloudcasts(url: Url) -> Result<CloudcastsResponse> {
|
||||
let response = reqwest::get(url).await?.error_for_status()?;
|
||||
let cloudcasts_res = response.json().await?;
|
||||
|
||||
Ok(cloudcasts_res)
|
||||
}
|
||||
|
||||
/// Set paging query pairs for URL.
|
||||
///
|
||||
/// The limit is capped to the default page size. Another request will be necessary to retrieve
|
||||
/// more.
|
||||
fn set_paging_query(url: &mut Url, limit: usize, offset: usize) {
|
||||
url.query_pairs_mut()
|
||||
.clear()
|
||||
.append_pair(
|
||||
"limit",
|
||||
&format!("{}", std::cmp::min(limit, DEFAULT_PAGE_SIZE)),
|
||||
)
|
||||
.append_pair("offset", &format!("{}", offset));
|
||||
}
|
||||
|
||||
/// Retrieves the redirect URL for the provided Mixcloud cloudcast key.
|
||||
///
|
||||
/// If the result is [`Ok`], the redirect URL will be cached for 24 hours for the given cloudcast
|
||||
/// key.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ download_key.to_owned() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
async fn retrieve_redirect_url(download_key: &str) -> Result<String> {
|
||||
let mut url = Url::parse(FILES_BASE_URL).expect("URL can always be parsed");
|
||||
url.set_path(download_key);
|
||||
|
||||
println!("🌍 Determining direct URL for {download_key}...");
|
||||
// Select the well-supported, almost always available MP4 container format that is directly
|
||||
// available (so no HLS or DASH). This unfortunately does reduce the bitrate to 64 kbps.
|
||||
let output = YoutubeDl::new(url).format("http").run_async().await?;
|
||||
|
||||
if let YoutubeDlOutput::SingleVideo(yt_item) = output {
|
||||
yt_item.url.ok_or(Error::NoRedirectUrlFound)
|
||||
} else {
|
||||
Err(Error::NoRedirectUrlFound)
|
||||
}
|
||||
}
|
|
@ -1,352 +0,0 @@
|
|||
//! The YouTube back-end.
|
||||
//!
|
||||
//! It uses the `ytextract` crate to retrieve the feed (channel or playlist) and items (videos).
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::{TimeZone, Utc};
|
||||
use reqwest::Url;
|
||||
use rocket::futures::StreamExt;
|
||||
use ytextract::playlist::video::{Error as YouTubeVideoError, Video as YouTubePlaylistVideo};
|
||||
use ytextract::{
|
||||
Channel as YouTubeChannel, Client, Playlist as YouTubePlaylist, Stream as YouTubeStream,
|
||||
Video as YouTubeVideo,
|
||||
};
|
||||
|
||||
use super::{Channel, Enclosure, Item};
|
||||
use crate::{Error, Result};
|
||||
|
||||
/// The base URL for YouTube channels.
|
||||
const CHANNEL_BASE_URL: &str = "https://www.youtube.com/channel";
|
||||
|
||||
/// The default item limit.
|
||||
const DEFAULT_ITEM_LIMIT: usize = 50;
|
||||
|
||||
/// The base URL for YouTube playlists.
///
/// Playlist links are formed by appending a `list` query parameter, so this must be the
/// `/playlist` endpoint (it was previously a copy of the channel base URL).
const PLAYLIST_BASE_URL: &str = "https://www.youtube.com/playlist";
|
||||
|
||||
/// The base URL for YouTube videos.
|
||||
const VIDEO_BASE_URL: &str = "https://www.youtube.com/watch";
|
||||
|
||||
/// Creates a YouTube back-end.
///
/// This constructs a new [`Backend`] with a fresh `ytextract` client (see [`Backend::new`]).
pub(crate) fn backend() -> Backend {
    Backend::new()
}
|
||||
|
||||
/// The YouTube back-end.
///
/// Implements [`super::Backend`] to serve YouTube channels and playlists as feeds.
pub struct Backend {
    /// The client capable of interacting with YouTube.
    client: Client,
}
|
||||
|
||||
impl Backend {
|
||||
/// Creates a new YouTube back-end.
|
||||
fn new() -> Self {
|
||||
let client = Client::new();
|
||||
|
||||
Self { client }
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl super::Backend for Backend {
|
||||
fn name(&self) -> &'static str {
|
||||
"YouTube"
|
||||
}
|
||||
|
||||
async fn channel(&self, channel_id: &str, item_limit: Option<usize>) -> Result<Channel> {
|
||||
// We assume it is a YouTube playlist ID if the channel ID starts with
|
||||
// "PL"/"OLAK"/"RDCLAK"; it is considered to be a YouTube channel ID otherwise.
|
||||
if channel_id.starts_with("PL")
|
||||
|| channel_id.starts_with("OLAK")
|
||||
|| channel_id.starts_with("RDCLAK")
|
||||
{
|
||||
let (yt_playlist, yt_videos_w_streams) =
|
||||
fetch_playlist_videos(&self.client, channel_id, item_limit).await?;
|
||||
|
||||
Ok(Channel::from(YouTubePlaylistWithVideos(
|
||||
yt_playlist,
|
||||
yt_videos_w_streams,
|
||||
)))
|
||||
} else {
|
||||
let (yt_channel, yt_videos_w_streams) =
|
||||
fetch_channel_videos(&self.client, channel_id, item_limit).await?;
|
||||
|
||||
Ok(Channel::from(YouTubeChannelWithVideos(
|
||||
yt_channel,
|
||||
yt_videos_w_streams,
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
async fn redirect_url(&self, file: &Path) -> Result<String> {
|
||||
let id_part = file.with_extension("");
|
||||
let video_id = id_part.to_string_lossy();
|
||||
|
||||
retrieve_redirect_url(&self.client, &video_id).await
|
||||
}
|
||||
}
|
||||
|
||||
/// A YouTube playlist with its videos.
///
/// Pairs the playlist metadata with the selected videos and their streams, so it can be
/// converted into a [`Channel`].
#[derive(Clone, Debug)]
pub(crate) struct YouTubePlaylistWithVideos(YouTubePlaylist, Vec<YouTubeVideoWithStream>);
|
||||
|
||||
/// A YouTube channel with its videos.
///
/// Pairs the channel metadata with the selected videos and their streams, so it can be
/// converted into a [`Channel`].
#[derive(Clone, Debug)]
pub(crate) struct YouTubeChannelWithVideos(YouTubeChannel, Vec<YouTubeVideoWithStream>);
|
||||
|
||||
/// A YouTube video with its stream.
#[derive(Clone, Debug)]
struct YouTubeVideoWithStream {
    /// The information of the YouTube video.
    video: YouTubeVideo,

    /// The metadata of the selected YouTube stream.
    stream: YouTubeStream,

    /// The content length of the selected YouTube stream (used as the enclosure length).
    content_length: u64,
}
|
||||
|
||||
impl From<YouTubeChannelWithVideos> for Channel {
    /// Converts a YouTube channel plus its videos into a generic feed [`Channel`].
    fn from(
        YouTubeChannelWithVideos(yt_channel, yt_videos_w_streams): YouTubeChannelWithVideos,
    ) -> Self {
        let mut link = Url::parse(CHANNEL_BASE_URL).expect("valid URL");
        // Mark the feed title as originating from YouTube.
        let title = format!("{0} (via YouTube)", yt_channel.name());
        let description = yt_channel.description().to_string();
        // The channel link is the channel base URL with the channel ID appended as a path segment.
        link.path_segments_mut()
            .expect("valid URL")
            .push(&yt_channel.id());
        let author = Some(yt_channel.name().to_string());
        // FIXME: Don't hardcode the category!
        let categories = Vec::from([String::from("Channel")]);
        // Use the avatar with the largest area as the channel image.
        let image = yt_channel
            .avatar()
            .max_by_key(|av| av.width * av.height)
            .map(|av| av.url.clone());
        let items = yt_videos_w_streams.into_iter().map(Item::from).collect();

        Channel {
            title,
            link,
            description,
            author,
            categories,
            image,
            items,
        }
    }
}
|
||||
|
||||
impl From<YouTubePlaylistWithVideos> for Channel {
    /// Converts a YouTube playlist plus its videos into a generic feed [`Channel`].
    fn from(
        YouTubePlaylistWithVideos(yt_playlist, yt_videos_w_streams): YouTubePlaylistWithVideos,
    ) -> Self {
        // Mark the feed title as originating from YouTube.
        let title = format!("{0} (via YouTube)", yt_playlist.title());
        let mut link = Url::parse(PLAYLIST_BASE_URL).expect("valid URL");
        let description = yt_playlist.description().to_string();
        // The playlist link carries the playlist ID in the `list` query parameter.
        link.query_pairs_mut()
            .append_pair("list", &yt_playlist.id().to_string());
        let author = yt_playlist.channel().map(|chan| chan.name().to_string());
        // FIXME: Don't hardcode the category!
        let categories = Vec::from([String::from("Playlist")]);
        // Use the thumbnail with the largest area as the channel image.
        let image = yt_playlist
            .thumbnails()
            .iter()
            .max_by_key(|tn| tn.width * tn.height)
            .map(|tn| tn.url.clone());
        let items = yt_videos_w_streams.into_iter().map(Item::from).collect();

        Channel {
            title,
            link,
            description,
            author,
            categories,
            image,
            items,
        }
    }
}
|
||||
|
||||
impl From<YouTubeVideoWithStream> for Item {
    /// Converts a YouTube video and its selected stream into a generic feed [`Item`].
    fn from(
        YouTubeVideoWithStream {
            video,
            stream,
            content_length: length,
        }: YouTubeVideoWithStream,
    ) -> Self {
        let id = video.id().to_string();

        // Strip parameters from MIME type; some clients are scared of them and they are not
        // necessary.
        let mut mime_type = stream.mime_type().to_string();
        if let Some(sep_idx) = mime_type.find(';') {
            mime_type.truncate(sep_idx);
        }
        // Derive the enclosure file name from the video ID plus the MIME type's extension.
        let extension = mime_db::extension(&mime_type).unwrap_or_default();
        let file = PathBuf::from(&id).with_extension(extension);
        let enclosure = Enclosure {
            file,
            mime_type,
            length,
        };

        // The item link is the watch URL with the video ID in the `v` query parameter.
        let mut link = Url::parse(VIDEO_BASE_URL).expect("valid URL");
        link.query_pairs_mut().append_pair("v", &id);
        let video_description = video.description();
        let description = Some(format!("{video_description}\n\nTaken from YouTube: {link}"));
        // Turn non-empty hashtags into (name, URL) category pairs.
        let categories = video
            .hashtags()
            .filter(|hashtag| !hashtag.trim().is_empty())
            .map(|hashtag| {
                let url = Url::parse(&format!(
                    "https://www.youtube.com/hashtag/{}",
                    hashtag.trim_start_matches('#')
                ))
                .expect("valid URL");

                (hashtag.to_string(), url)
            })
            .collect();
        let duration = Some(video.duration().as_secs() as u32);
        let keywords = video.keywords().clone();
        // Use the thumbnail with the largest area as the item image.
        let image = video
            .thumbnails()
            .iter()
            .max_by_key(|tn| tn.width * tn.height)
            .map(|tn| tn.url.clone());
        // Only a date is available, so fix the publication time to noon (12:00:00).
        let timestamp = video
            .date()
            .and_hms_opt(12, 0, 0)
            .expect("Invalid hour, minute and/or second");
        let published_at = Utc.from_utc_datetime(&timestamp);
        // There is no updated at timestamp available, really.
        let updated_at = published_at;

        Item {
            title: video.title().to_string(),
            link,
            description,
            categories,
            enclosure,
            duration,
            guid: id,
            keywords,
            image,
            published_at,
            updated_at,
        }
    }
}
|
||||
|
||||
/// Fetches the YouTube playlist videos for the given ID.
///
/// If the result is [`Ok`], the playlist will be cached for 24 hours for the given playlist ID.
#[cached(
    key = "(String, Option<usize>)",
    convert = r#"{ (playlist_id.to_owned(), item_limit) }"#,
    time = 86400,
    result = true
)]
async fn fetch_playlist_videos(
    client: &Client,
    playlist_id: &str,
    item_limit: Option<usize>,
) -> Result<(YouTubePlaylist, Vec<YouTubeVideoWithStream>)> {
    let id = playlist_id.parse()?;
    let limit = item_limit.unwrap_or(DEFAULT_ITEM_LIMIT);
    let yt_playlist = client.playlist(id).await?;
    // Videos whose stream/metadata cannot be retrieved are silently dropped by `fetch_stream`.
    let yt_videos_w_streams = yt_playlist
        .videos()
        .filter_map(fetch_stream)
        .take(limit)
        .collect()
        .await;

    Ok((yt_playlist, yt_videos_w_streams))
}
|
||||
|
||||
/// Fetches the YouTube channel videos for the given ID.
///
/// If the result is [`Ok`], the channel will be cached for 24 hours for the given channel ID.
#[cached(
    key = "(String, Option<usize>)",
    convert = r#"{ (channel_id.to_owned(), item_limit) }"#,
    time = 86400,
    result = true
)]
async fn fetch_channel_videos(
    client: &Client,
    channel_id: &str,
    item_limit: Option<usize>,
) -> Result<(YouTubeChannel, Vec<YouTubeVideoWithStream>)> {
    let id = channel_id.parse()?;
    let limit = item_limit.unwrap_or(DEFAULT_ITEM_LIMIT);
    let yt_channel = client.channel(id).await?;
    // Videos whose stream/metadata cannot be retrieved are silently dropped by `fetch_stream`.
    let yt_videos_w_streams = yt_channel
        .uploads()
        .await?
        .filter_map(fetch_stream)
        .take(limit)
        .collect()
        .await;

    Ok((yt_channel, yt_videos_w_streams))
}
|
||||
|
||||
/// Fetches the stream and relevant metadata for a YouTube video result.
|
||||
///
|
||||
/// If there is a error retrieving the metadata, the video is discarded/ignored.
|
||||
/// If there are problems retrieving the streams or metadata, the video is also discarded.
|
||||
async fn fetch_stream(
|
||||
yt_video: Result<YouTubePlaylistVideo, YouTubeVideoError>,
|
||||
) -> Option<YouTubeVideoWithStream> {
|
||||
match yt_video {
|
||||
Ok(video) => {
|
||||
let video = video.upgrade().await.ok()?;
|
||||
let stream = video
|
||||
.streams()
|
||||
.await
|
||||
.ok()?
|
||||
// Select the well-supported, almost always available MP4 container format with
|
||||
// only an audio stream and then the one with the highest bitrate.
|
||||
.filter(|v| v.is_audio() && v.mime_type().contains("mp4"))
|
||||
.max_by_key(|v| v.bitrate())?;
|
||||
let content_length = stream.content_length().await.ok()?;
|
||||
|
||||
Some(YouTubeVideoWithStream {
|
||||
video,
|
||||
stream,
|
||||
content_length,
|
||||
})
|
||||
}
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Retrieves the redirect URL for the provided YouTube video ID.
|
||||
///
|
||||
/// If the result is [`Ok`], the redirect URL will be cached for 24 hours for the given video ID.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ video_id.to_owned() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
async fn retrieve_redirect_url(client: &Client, video_id: &str) -> Result<String> {
|
||||
let video_id = video_id.parse()?;
|
||||
let video = client.video(video_id).await?;
|
||||
let stream = video
|
||||
.streams()
|
||||
.await?
|
||||
// Select the well-supported, almost always available MP4 container format with only an
|
||||
// audio stream and then the one with the highest bitrate.
|
||||
.filter(|v| v.is_audio() && v.mime_type().contains("mp4"))
|
||||
.max_by_key(|v| v.bitrate())
|
||||
.ok_or(Error::NoRedirectUrlFound)?;
|
||||
|
||||
Ok(stream.url().to_string())
|
||||
}
|
124
src/feed.rs
124
src/feed.rs
|
@ -1,124 +0,0 @@
|
|||
//! Helper functions for constructing RSS feeds.
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use rocket::http::uri::Absolute;
|
||||
use rocket::uri;
|
||||
use rss::extension::itunes::{
|
||||
ITunesCategoryBuilder, ITunesChannelExtensionBuilder, ITunesItemExtensionBuilder,
|
||||
};
|
||||
use rss::{
|
||||
CategoryBuilder, ChannelBuilder, EnclosureBuilder, GuidBuilder, ImageBuilder, ItemBuilder,
|
||||
};
|
||||
|
||||
use crate::backends::{Channel, Item};
|
||||
use crate::Config;
|
||||
|
||||
/// Constructs an RSS channel from a back-end channel using the `rss` crate.
///
/// It requires the backend and configuration to be able to construct download URLs.
pub(crate) fn construct(backend_id: &str, config: &Config, channel: Channel) -> rss::Channel {
    // Use the first category (if any) as the main RSS category.
    let category = CategoryBuilder::default()
        .name(channel.categories.first().cloned().unwrap_or_default())
        .build();
    // Start at the Unix epoch; `construct_item` bumps this to the latest item update time.
    let mut last_build =
        DateTime::from_timestamp(0, 0).expect("Out-of-range seconds or invalid nanoseconds");
    // Identify this application (name and version) as the feed generator.
    let generator = String::from(concat!(
        env!("CARGO_PKG_NAME"),
        " ",
        env!("CARGO_PKG_VERSION")
    ));
    let image = channel
        .image
        .clone()
        .map(|url| ImageBuilder::default().link(url.clone()).url(url).build());
    let items = channel
        .items
        .into_iter()
        .map(|item| construct_item(backend_id, config, item, &mut last_build))
        .collect::<Vec<_>>();
    let itunes_ext = ITunesChannelExtensionBuilder::default()
        .author(channel.author)
        .categories(
            channel
                .categories
                .into_iter()
                .map(|cat| ITunesCategoryBuilder::default().text(cat).build())
                .collect::<Vec<_>>(),
        )
        .image(channel.image.map(String::from))
        .explicit(Some(String::from("no")))
        .summary(Some(channel.description.clone()))
        .build();

    ChannelBuilder::default()
        .title(channel.title)
        .link(channel.link)
        .description(channel.description)
        .category(category)
        .last_build_date(Some(last_build.to_rfc2822()))
        .generator(Some(generator))
        .image(image)
        .items(items)
        .itunes_ext(Some(itunes_ext))
        .build()
}
|
||||
|
||||
/// Constructs an RSS feed item from a back-end item using the `rss` crate.
///
/// It requires the backend and configuration to be able to construct download URLs.
/// It also bumps the last build timestamp if the last updated timestamp is later than the current
/// value.
fn construct_item(
    backend_id: &str,
    config: &Config,
    item: Item,
    last_build: &mut DateTime<Utc>,
) -> rss::Item {
    let categories = item
        .categories
        .into_iter()
        .map(|(cat_name, cat_url)| {
            CategoryBuilder::default()
                .name(cat_name)
                .domain(Some(cat_url.to_string()))
                .build()
        })
        .collect::<Vec<_>>();
    // Build the enclosure download URL that routes back through this application.
    let url = uri!(
        Absolute::parse(&config.public_url).expect("valid URL"),
        crate::get_download(backend_id = backend_id, file = item.enclosure.file)
    );
    let enclosure = EnclosureBuilder::default()
        .url(url.to_string())
        .length(item.enclosure.length.to_string())
        .mime_type(item.enclosure.mime_type)
        .build();
    // The GUID is the back-end item ID, which is not a permalink.
    let guid = GuidBuilder::default()
        .value(item.guid)
        .permalink(false)
        .build();
    let keywords = item.keywords.join(", ");
    let itunes_ext = ITunesItemExtensionBuilder::default()
        .image(item.image.map(String::from))
        .duration(item.duration.map(|dur| format!("{dur}")))
        .subtitle(item.description.clone())
        .keywords(Some(keywords))
        .build();

    // Bump the channel's last build timestamp if this item was updated later.
    if item.updated_at > *last_build {
        *last_build = item.updated_at;
    }

    ItemBuilder::default()
        .title(Some(item.title))
        .link(Some(item.link.to_string()))
        .description(item.description)
        .categories(categories)
        .enclosure(Some(enclosure))
        .guid(Some(guid))
        .pub_date(Some(item.published_at.to_rfc2822()))
        .itunes_ext(Some(itunes_ext))
        .build()
}
|
190
src/lib.rs
190
src/lib.rs
|
@ -5,75 +5,49 @@
|
|||
missing_debug_implementations,
|
||||
rust_2018_idioms,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
trivial_numeric_casts,
|
||||
renamed_and_removed_lints,
|
||||
unsafe_code,
|
||||
unstable_features,
|
||||
unused_import_braces,
|
||||
unused_qualifications
|
||||
trivial_numeric_casts
|
||||
)]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use chrono::{DateTime, NaiveDateTime, Utc};
|
||||
use rocket::fairing::AdHoc;
|
||||
use rocket::http::uri::Absolute;
|
||||
use rocket::http::Status;
|
||||
use rocket::response::Redirect;
|
||||
use rocket::serde::{Deserialize, Serialize};
|
||||
use rocket::{get, routes, Build, Request, Responder, Rocket, State};
|
||||
use rocket::{get, routes, uri, Build, Request, Responder, Rocket, State};
|
||||
use rocket_dyn_templates::{context, Template};
|
||||
use rss::extension::itunes::{
|
||||
ITunesCategoryBuilder, ITunesChannelExtensionBuilder, ITunesItemExtensionBuilder,
|
||||
};
|
||||
use rss::{
|
||||
CategoryBuilder, ChannelBuilder, EnclosureBuilder, GuidBuilder, ImageBuilder, ItemBuilder,
|
||||
};
|
||||
|
||||
use crate::backends::Backend;
|
||||
|
||||
pub(crate) mod backends;
|
||||
pub(crate) mod feed;
|
||||
pub(crate) mod mixcloud;
|
||||
|
||||
/// The possible errors that can occur.
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub(crate) enum Error {
|
||||
/// A standard I/O error occurred.
|
||||
#[error("IO error: {0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
|
||||
/// No redirect URL found in item metadata.
|
||||
#[error("No redirect URL found")]
|
||||
NoRedirectUrlFound,
|
||||
|
||||
/// A (reqwest) HTTP error occurred.
|
||||
#[error("HTTP error: {0}")]
|
||||
Request(#[from] reqwest::Error),
|
||||
|
||||
/// Unsupported back-end encountered.
|
||||
#[error("Unsupported back-end: {0}")]
|
||||
#[error("Unknown supported back-end: {0}")]
|
||||
UnsupportedBackend(String),
|
||||
|
||||
/// A URL parse error occurred.
|
||||
#[error("URL parse error: {0}")]
|
||||
UrlParse(#[from] url::ParseError),
|
||||
|
||||
/// An error occurred in youtube-dl.
|
||||
#[error("Youtube-dl failed: {0}")]
|
||||
#[error("Youtube_dl failed: {0}")]
|
||||
YoutubeDl(#[from] youtube_dl::Error),
|
||||
|
||||
/// An YouTube extract error occured.
|
||||
#[error("YouTube extract error: {0}")]
|
||||
YtExtract(#[from] ytextract::Error),
|
||||
|
||||
/// An YouTube extract ID parsing error occured.
|
||||
#[error("YouTube extract ID parsing error: {0}")]
|
||||
YtExtractId0(#[from] ytextract::error::Id<0>),
|
||||
|
||||
/// An YouTube extract ID parsing error occured.
|
||||
#[error("YouTube extract ID parsing error: {0}")]
|
||||
YtExtractId11(#[from] ytextract::error::Id<11>),
|
||||
|
||||
/// An YouTube extract ID parsing error occured.
|
||||
#[error("YouTube extract ID parsing error: {0}")]
|
||||
YtExtractId24(#[from] ytextract::error::Id<24>),
|
||||
|
||||
/// An YouTube extract playlist video error occured.
|
||||
#[error("YouTube extract playlist video error: {0}")]
|
||||
YtExtractPlaylistVideo(#[from] ytextract::playlist::video::Error),
|
||||
}
|
||||
|
||||
impl<'r, 'o: 'r> rocket::response::Responder<'r, 'o> for Error {
|
||||
|
@ -94,9 +68,9 @@ pub(crate) type Result<T, E = Error> = std::result::Result<T, E>;
|
|||
#[derive(Debug, Deserialize, Serialize)]
|
||||
#[serde(crate = "rocket::serde")]
|
||||
pub(crate) struct Config {
|
||||
/// The public URL at which the application is hosted or proxied from.
|
||||
/// The URL at which the application is hosted or proxied from.
|
||||
#[serde(default)]
|
||||
public_url: String,
|
||||
url: String,
|
||||
}
|
||||
|
||||
/// A Rocket responder wrapper type for RSS feeds.
|
||||
|
@ -105,40 +79,142 @@ pub(crate) struct Config {
|
|||
struct RssFeed(String);
|
||||
|
||||
/// Retrieves a download by redirecting to the URL resolved by the selected back-end.
|
||||
#[get("/download/<backend_id>/<file..>")]
|
||||
pub(crate) async fn get_download(file: PathBuf, backend_id: &str) -> Result<Redirect> {
|
||||
let backend = backends::get(backend_id)?;
|
||||
#[get("/download/<backend>/<file..>")]
|
||||
pub(crate) async fn download(file: PathBuf, backend: &str) -> Result<Redirect> {
|
||||
match backend {
|
||||
"mixcloud" => {
|
||||
let key = format!("/{}/", file.with_extension("").to_string_lossy());
|
||||
|
||||
backend.redirect_url(&file).await.map(Redirect::to)
|
||||
mixcloud::redirect_url(&key).await.map(Redirect::to)
|
||||
}
|
||||
_ => Err(Error::UnsupportedBackend(backend.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
/// Handler for retrieving the RSS feed of a channel on a certain back-end.
|
||||
/// Handler for retrieving the RSS feed of user on a certain back-end.
|
||||
///
|
||||
/// The limit parameter determines the maximum of items that can be in the feed.
|
||||
#[get("/feed/<backend_id>/<channel_id>?<limit>")]
|
||||
async fn get_feed(
|
||||
backend_id: &str,
|
||||
channel_id: &str,
|
||||
#[get("/feed/<backend>/<username>?<limit>")]
|
||||
async fn feed(
|
||||
backend: &str,
|
||||
username: &str,
|
||||
limit: Option<usize>,
|
||||
config: &State<Config>,
|
||||
) -> Result<RssFeed> {
|
||||
let backend = backends::get(backend_id)?;
|
||||
let channel = backend.channel(channel_id, limit).await?;
|
||||
let feed = feed::construct(backend_id, config, channel);
|
||||
let user = mixcloud::user(username).await?;
|
||||
let cloudcasts = mixcloud::cloudcasts(username, limit).await?;
|
||||
let mut last_build = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc);
|
||||
|
||||
Ok(RssFeed(feed.to_string()))
|
||||
let category = CategoryBuilder::default()
|
||||
.name(String::from("Music")) // FIXME: Don't hardcode the category!
|
||||
.build();
|
||||
let generator = String::from(concat!(
|
||||
env!("CARGO_PKG_NAME"),
|
||||
" ",
|
||||
env!("CARGO_PKG_VERSION")
|
||||
));
|
||||
let image = ImageBuilder::default()
|
||||
.link(user.pictures.large.clone())
|
||||
.url(user.pictures.large.clone())
|
||||
.build();
|
||||
let items = cloudcasts
|
||||
.into_iter()
|
||||
.map(|cloudcast| {
|
||||
let mut file = PathBuf::from(cloudcast.key.trim_end_matches('/'));
|
||||
file.set_extension("m4a"); // FIXME: Don't hardcode the extension!
|
||||
let url = uri!(
|
||||
Absolute::parse(&config.url).expect("valid URL"),
|
||||
download(backend = backend, file = file)
|
||||
);
|
||||
// FIXME: Don't hardcode the description!
|
||||
let description = format!("Taken from Mixcloud: {}", cloudcast.url);
|
||||
let keywords = cloudcast
|
||||
.tags
|
||||
.iter()
|
||||
.map(|tag| &tag.name)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
let categories = cloudcast
|
||||
.tags
|
||||
.into_iter()
|
||||
.map(|tag| {
|
||||
CategoryBuilder::default()
|
||||
.name(tag.name)
|
||||
.domain(Some(tag.url))
|
||||
.build()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let length = mixcloud::estimated_file_size(cloudcast.audio_length);
|
||||
let enclosure = EnclosureBuilder::default()
|
||||
.url(url.to_string())
|
||||
.length(format!("{}", length))
|
||||
.mime_type(String::from(mixcloud::default_file_type()))
|
||||
.build();
|
||||
let guid = GuidBuilder::default()
|
||||
.value(cloudcast.slug)
|
||||
.permalink(false)
|
||||
.build();
|
||||
let itunes_ext = ITunesItemExtensionBuilder::default()
|
||||
.image(Some(cloudcast.pictures.large))
|
||||
.duration(Some(format!("{}", cloudcast.audio_length)))
|
||||
.subtitle(Some(description.clone()))
|
||||
.keywords(Some(keywords))
|
||||
.build();
|
||||
|
||||
if cloudcast.updated_time > last_build {
|
||||
last_build = cloudcast.updated_time;
|
||||
}
|
||||
|
||||
ItemBuilder::default()
|
||||
.title(Some(cloudcast.name))
|
||||
.link(Some(cloudcast.url))
|
||||
.description(Some(description))
|
||||
.categories(categories)
|
||||
.enclosure(Some(enclosure))
|
||||
.guid(Some(guid))
|
||||
.pub_date(Some(cloudcast.updated_time.to_rfc2822()))
|
||||
.itunes_ext(Some(itunes_ext))
|
||||
.build()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let itunes_ext = ITunesChannelExtensionBuilder::default()
|
||||
.author(Some(user.name.clone()))
|
||||
.categories(Vec::from([ITunesCategoryBuilder::default()
|
||||
.text("Music")
|
||||
.build()])) // FIXME: Don't hardcode the category!
|
||||
.image(Some(user.pictures.large))
|
||||
.explicit(Some(String::from("no")))
|
||||
.summary(Some(user.biog.clone()))
|
||||
.build();
|
||||
|
||||
let channel = ChannelBuilder::default()
|
||||
.title(&format!("{} (via Mixcloud)", user.name))
|
||||
.link(&user.url)
|
||||
.description(&user.biog)
|
||||
.category(category)
|
||||
.last_build_date(Some(last_build.to_rfc2822()))
|
||||
.generator(Some(generator))
|
||||
.image(Some(image))
|
||||
.items(items)
|
||||
.itunes_ext(Some(itunes_ext))
|
||||
.build();
|
||||
let feed = RssFeed(channel.to_string());
|
||||
|
||||
Ok(feed)
|
||||
}
|
||||
|
||||
/// Returns a simple index page that explains the usage.
|
||||
#[get("/")]
|
||||
pub(crate) async fn get_index(config: &State<Config>) -> Template {
|
||||
Template::render("index", context! { url: &config.public_url })
|
||||
pub(crate) async fn index(config: &State<Config>) -> Template {
|
||||
Template::render("index", context! { url: &config.url })
|
||||
}
|
||||
|
||||
/// Sets up Rocket.
|
||||
pub fn setup() -> Rocket<Build> {
|
||||
rocket::build()
|
||||
.mount("/", routes![get_download, get_feed, get_index])
|
||||
.mount("/", routes![download, feed, index])
|
||||
.attach(AdHoc::config::<Config>())
|
||||
.attach(Template::fairing())
|
||||
}
|
||||
|
|
16
src/main.rs
16
src/main.rs
|
@ -5,17 +5,15 @@
|
|||
missing_debug_implementations,
|
||||
rust_2018_idioms,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
trivial_numeric_casts,
|
||||
renamed_and_removed_lints,
|
||||
unsafe_code,
|
||||
unstable_features,
|
||||
unused_import_braces,
|
||||
unused_qualifications
|
||||
trivial_numeric_casts
|
||||
)]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
/// Sets up and launches Rocket.
|
||||
#[rocket::launch]
|
||||
fn rocket() -> _ {
|
||||
podbringer::setup()
|
||||
#[rocket::main]
|
||||
async fn main() -> Result<(), rocket::Error> {
|
||||
let rocket = podbringer::setup();
|
||||
let _ = rocket.ignite().await?.launch().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
235
src/mixcloud.rs
Normal file
235
src/mixcloud.rs
Normal file
|
@ -0,0 +1,235 @@
|
|||
//! The Mixcloud back-end.
|
||||
//!
|
||||
//! It uses the Mixcloud API to retrieve the feed (user) and items (cloudcasts)).
|
||||
//! See also: <https://www.mixcloud.com/developers/>
|
||||
|
||||
use cached::proc_macro::cached;
|
||||
use chrono::{DateTime, Utc};
|
||||
use reqwest::Url;
|
||||
use rocket::serde::Deserialize;
|
||||
use youtube_dl::{YoutubeDl, YoutubeDlOutput};
|
||||
|
||||
use super::{Error, Result};
|
||||
|
||||
/// A Mixcloud user (response).
///
/// Deserialized from the JSON returned by the Mixcloud API user endpoint.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct User {
    /// The name of the user.
    pub(crate) name: String,

    /// The bio (description) of the user.
    pub(crate) biog: String,

    /// The picture URLs associated with the user.
    pub(crate) pictures: Pictures,

    /// The original URL of the user.
    pub(crate) url: String,
}
|
||||
|
||||
/// A collection of different sizes/variants of a picture.
///
/// Only the variant actually used by the feed construction is deserialized.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Pictures {
    /// The large picture of the user.
    pub(crate) large: String,
}
|
||||
|
||||
/// The Mixcloud cloudcasts response.
///
/// One page of cloudcasts plus the paging information used to fetch the next page.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct CloudcastsResponse {
    /// The contained cloudcast items.
    #[serde(rename = "data")]
    items: Vec<Cloudcast>,

    /// The paging information.
    paging: CloudcastsPaging,
}
|
||||
|
||||
/// The Mixcloud paging info.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct CloudcastsPaging {
    /// The API URL of the next page; `None` when this is the last page.
    next: Option<String>,
}
|
||||
|
||||
/// A Mixcloud cloudcast.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Cloudcast {
    /// The key of the cloudcast.
    pub(crate) key: String,

    /// The name of the cloudcast.
    pub(crate) name: String,

    /// The slug of the cloudcast (used for the enclosure).
    pub(crate) slug: String,

    /// The picture URLs associated with the cloudcast.
    pub(crate) pictures: Pictures,

    /// The tags of the cloudcast.
    pub(crate) tags: Vec<Tag>,

    /// The time the cloudcast was last updated (used as the publication date).
    pub(crate) updated_time: DateTime<Utc>,

    /// The original URL of the cloudcast.
    pub(crate) url: String,

    /// The length of the cloudcast (in seconds).
    pub(crate) audio_length: u32,
}
|
||||
|
||||
/// A Mixcloud cloudcast tag.
#[derive(Clone, Debug, Deserialize)]
#[serde(crate = "rocket::serde")]
pub(crate) struct Tag {
    /// The name of the tag.
    pub(crate) name: String,

    /// The URL of the tag.
    pub(crate) url: String,
}
|
||||
|
||||
/// The base URL for the Mixcloud API.
const API_BASE_URL: &str = "https://api.mixcloud.com";

/// The base URL for downloading Mixcloud files.
const FILES_BASE_URL: &str = "https://www.mixcloud.com";

/// The default bitrate used by Mixcloud, in bits per second (64 kbps).
const DEFAULT_BITRATE: u32 = 64 * 1024;

/// The default file (MIME) type used by Mixcloud.
///
/// NOTE(review): feed items are given an `.m4a` extension elsewhere, which usually maps to
/// `audio/mp4` rather than `audio/mpeg` — confirm the served files really are MPEG audio.
const DEFAULT_FILE_TYPE: &str = "audio/mpeg";

/// The default page size (maximum number of cloudcasts per API request).
const DEFAULT_PAGE_SIZE: usize = 50;
|
||||
|
||||
/// Returns the default file type used by Mixcloud (see [`DEFAULT_FILE_TYPE`]).
pub(crate) const fn default_file_type() -> &'static str {
    DEFAULT_FILE_TYPE
}
|
||||
|
||||
/// Returns the estimated file size in bytes for a given duration.
|
||||
///
|
||||
/// This uses the default bitrate (see [`DEFAULT_BITRATE`]) which is in B/s.
|
||||
pub(crate) fn estimated_file_size(duration: u32) -> u32 {
|
||||
DEFAULT_BITRATE * duration / 8
|
||||
}
|
||||
|
||||
/// Retrieves the user data using the Mixcloud API.
///
/// The actual request is performed (and cached for 24 hours) by [`fetch_user`].
pub(crate) async fn user(username: &str) -> Result<User> {
    let mut url = Url::parse(API_BASE_URL).expect("URL can always be parsed");
    url.set_path(username);

    println!("⏬ Retrieving user {username} from {url}...");
    fetch_user(url).await
}
|
||||
|
||||
/// Fetches the user from the URL.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ url.to_string() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
///
|
||||
/// If the result is [`Ok`], the user will be cached for 24 hours for the given username.
|
||||
async fn fetch_user(url: Url) -> Result<User> {
|
||||
let response = reqwest::get(url).await?.error_for_status()?;
|
||||
let user = response.json().await?;
|
||||
|
||||
Ok(user)
|
||||
}
|
||||
|
||||
/// Retrieves the cloudcasts data of the user using the Mixcloud API.
|
||||
pub(crate) async fn cloudcasts(username: &str, limit: Option<usize>) -> Result<Vec<Cloudcast>> {
|
||||
let mut limit = limit.unwrap_or(DEFAULT_PAGE_SIZE);
|
||||
let mut offset = 0;
|
||||
let mut url = Url::parse(API_BASE_URL).expect("URL can always be parsed");
|
||||
url.set_path(&format!("{username}/cloudcasts/"));
|
||||
println!("⏬ Retrieving cloudcasts of user {username} from {url}...");
|
||||
|
||||
set_paging_query(&mut url, limit, offset);
|
||||
let mut cloudcasts = Vec::with_capacity(50); // The initial limit
|
||||
loop {
|
||||
let cloudcasts_res: CloudcastsResponse = fetch_cloudcasts(url).await?;
|
||||
let count = cloudcasts_res.items.len();
|
||||
cloudcasts.extend(cloudcasts_res.items);
|
||||
|
||||
// Continue onto the next URL in the paging, if there is one.
|
||||
limit = limit.saturating_sub(count);
|
||||
offset += count;
|
||||
match cloudcasts_res.paging.next {
|
||||
Some(next_url) => {
|
||||
url = Url::parse(&next_url)?;
|
||||
set_paging_query(&mut url, limit, offset);
|
||||
}
|
||||
None => break,
|
||||
}
|
||||
|
||||
// We have reached the limit.
|
||||
if limit == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cloudcasts)
|
||||
}
|
||||
|
||||
/// Fetches cloudcasts from the URL.
|
||||
///
|
||||
/// If the result is [`Ok`], the cloudcasts will be cached for 24 hours for the given username.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ url.to_string() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
async fn fetch_cloudcasts(url: Url) -> Result<CloudcastsResponse> {
|
||||
let response = reqwest::get(url).await?.error_for_status()?;
|
||||
let cloudcasts_res = response.json().await?;
|
||||
|
||||
Ok(cloudcasts_res)
|
||||
}
|
||||
|
||||
/// Set paging query pairs for URL.
|
||||
///
|
||||
/// The limit is capped to the default page size. Another request will be necessary to retrieve
|
||||
/// more.
|
||||
fn set_paging_query(url: &mut Url, limit: usize, offset: usize) {
|
||||
url.query_pairs_mut()
|
||||
.clear()
|
||||
.append_pair(
|
||||
"limit",
|
||||
&format!("{}", std::cmp::min(limit, DEFAULT_PAGE_SIZE)),
|
||||
)
|
||||
.append_pair("offset", &format!("{}", offset));
|
||||
}
|
||||
|
||||
/// Retrieves the redirect URL for the provided Mixcloud cloudcast key.
|
||||
#[cached(
|
||||
key = "String",
|
||||
convert = r#"{ download_key.to_owned() }"#,
|
||||
time = 86400,
|
||||
result = true
|
||||
)]
|
||||
pub(crate) async fn redirect_url(download_key: &str) -> Result<String> {
|
||||
let mut url = Url::parse(FILES_BASE_URL).expect("URL can always be parsed");
|
||||
url.set_path(download_key);
|
||||
|
||||
println!("🌍 Determining direct URL for {download_key}...");
|
||||
let output = YoutubeDl::new(url).run_async().await?;
|
||||
|
||||
if let YoutubeDlOutput::SingleVideo(yt_item) = output {
|
||||
yt_item.url.ok_or(Error::NoRedirectUrlFound)
|
||||
} else {
|
||||
Err(Error::NoRedirectUrlFound)
|
||||
}
|
||||
}
|
|
@ -5,21 +5,15 @@
|
|||
URL in your favorite podcast client to start using it.
|
||||
</p>
|
||||
<p>
|
||||
The URL you need to use for Podbringer is comprised of the following parts:
|
||||
Given the Mixcloud URL <https://www.mixcloud.com/myfavouriteband/>, the URL you
|
||||
need to use for Podbringer is composed of the following parts:
|
||||
|
||||
<pre>
|
||||
https://my.domain.tld/podbringer/feed/mixcloud/myfavouriteband
|
||||
|------------------------------| |------| |-------------|
|
||||
The Podbringer public URL Service Service ID
|
||||
|------------------------------| |-------||--------------|
|
||||
The Podbringer location URL Service User @ service
|
||||
</pre>
|
||||
</p>
|
||||
<p>
|
||||
Supported services are:
|
||||
<ul>
|
||||
<li>Mixcloud (service ID is Mixcloud username)</li>
|
||||
<li>YouTube (service ID is YouTube channel or playlist ID)</li>
|
||||
</ul>
|
||||
</p>
|
||||
<p>
|
||||
The Podbringer location URL of this instance is: <a href="{{ url }}">{{ url }}</a>.
|
||||
The Podbringer location URL of this instance is: {{ url }}
|
||||
</p>
|
||||
|
|
Loading…
Reference in a new issue