Add source files

Joel Wachsler 2022-07-10 17:06:19 +02:00
parent 33d7e5a04d
commit 721571b240
43 changed files with 30386 additions and 0 deletions

20
.devcontainer/Dockerfile Normal file

@ -0,0 +1,20 @@
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.238.0/containers/rust/.devcontainer/base.Dockerfile
# [Choice] Debian OS version (use bullseye on local arm64/Apple Silicon): buster, bullseye
ARG VARIANT="buster"
FROM mcr.microsoft.com/vscode/devcontainers/rust:0-${VARIANT}
# [Optional] Uncomment this section to install additional packages.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends fish
USER vscode
# RUN rustup default nightly \
# && cargo install cargo-expand \
# && rustup component add rustfmt \
# && rustup component add clippy
RUN cargo install cargo-expand \
&& rustup component add rustfmt \
&& rustup component add clippy


@ -0,0 +1,40 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.238.0/containers/rust
{
"name": "Rust",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspace",
// Configure tool-specific properties.
"customizations": {
// Configure properties specific to VS Code.
"vscode": {
// Set *default* container specific settings.json values on container create.
"settings": {
"lldb.executable": "/usr/bin/lldb",
// Don't let VS Code watch files under ./target
"files.watcherExclude": {
"**/target/**": true
},
"rust-analyzer.checkOnSave.command": "clippy"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"vadimcn.vscode-lldb",
"mutantdino.resourcemonitor",
"rust-lang.rust-analyzer",
"tamasfe.even-better-toml",
"serayuzgur.crates",
"redhat.vscode-yaml"
]
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
8080
],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "./.devcontainer/setup.sh",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode"
}


@ -0,0 +1,35 @@
version: '3.8'
services:
app:
build:
context: .
dockerfile: Dockerfile
args:
VARIANT: buster
CARGO_HOME: /workspace/.cargo
volumes:
- ..:/workspace:cached
- /var/run/docker.sock:/var/run/docker.sock
# Overrides default command so things don't shut down after the process ends.
command: sleep infinity
security_opt:
- seccomp:unconfined
cap_add:
- SYS_PTRACE
# Runs app on the same network as the database container, allows "forwardPorts" in devcontainer.json function.
network_mode: service:qbittorrent
# Uncomment the next line to use a non-root user for all processes.
# user: node
# Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)
qbittorrent:
image: linuxserver/qbittorrent:4.4.3
restart: unless-stopped


@ -0,0 +1,17 @@
{
"folders": [
{
"path": ".."
},
{
"path": "../parser"
},
{
"path": "../md-parser"
},
{
"path": "../api-gen"
}
],
"settings": {}
}

12
.devcontainer/setup.sh Executable file

@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -ex
git config pull.rebase true
mkdir -p /home/vscode/.config/fish
cat <<EOF > /home/vscode/.config/fish/config.fish
set fish_greeting ""
set -gx FORCE_COLOR true
fish_vi_key_bindings
EOF

2
.gitignore vendored Normal file

@ -0,0 +1,2 @@
target
.env

1134
Cargo.lock generated Normal file

File diff suppressed because it is too large

16
Cargo.toml Normal file

@ -0,0 +1,16 @@
[package]
name = "qbittorrent-web-api"
version = "0.1.0"
edition = "2021"
[lib]
name = "qbittorrent_web_api"
path = "src/lib.rs"
[dependencies]
reqwest = { version = "0.11.11", features = ["json", "multipart"] }
tokio = { version = "1.19.2", features = ["full"] }
api-gen = { path = "./api-gen" }
serde = { version = "1.0.138", features = ["derive"] }
serde_json = "1.0.82"
thiserror = "1.0.31"

1187
api-gen/Cargo.lock generated Normal file

File diff suppressed because it is too large

24
api-gen/Cargo.toml Normal file

@ -0,0 +1,24 @@
[package]
name = "api-gen"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
parser = { path = "../parser" }
syn = { version = "1.0.98", features = ["extra-traits"]}
quote = "1.0.20"
proc-macro2 = "1.0.40"
case = "1.0.0"
thiserror = "1.0.31"
serde = { version = "1.0.138", features = ["derive"] }
serde_json = "1.0.82"
regex = "1.6.0"
[dev-dependencies]
trybuild = { version = "1.0.63", features = ["diff"] }
anyhow = "1.0.58"
tokio = { version = "1.19.2", features = ["full"] }
reqwest = { version = "0.11.11", features = ["json", "multipart"] }

3345
api-gen/src/api-4_1.md Normal file

File diff suppressed because it is too large

455
api-gen/src/group.rs Normal file

@ -0,0 +1,455 @@
use std::{collections::HashMap, vec::Vec};
use case::CaseExt;
use parser::types::TypeInfo;
use quote::{format_ident, quote};
use regex::Regex;
use crate::{skeleton::auth_ident, util};
pub fn generate_groups(groups: Vec<parser::ApiGroup>) -> proc_macro2::TokenStream {
let gr = groups
.iter()
// implemented manually
.filter(|group| group.name != "authentication")
.map(generate_group);
quote! {
#(#gr)*
}
}
fn generate_group(group: &parser::ApiGroup) -> proc_macro2::TokenStream {
let group_name_camel = util::to_ident(&group.name.to_camel());
let group_name_snake = util::to_ident(&group.name.to_snake());
let auth = auth_ident();
let methods = generate_methods(group, &auth, &group_name_camel);
let group_method = util::add_docs(
&group.description,
quote! {
pub fn #group_name_snake(&self) -> #group_name_camel {
#group_name_camel::new(self)
}
},
);
quote! {
pub struct #group_name_camel<'a> {
auth: &'a #auth,
}
#methods
impl #auth {
#group_method
}
}
}
fn generate_methods(
group: &parser::ApiGroup,
auth: &syn::Ident,
group_name_camel: &syn::Ident,
) -> proc_macro2::TokenStream {
let methods_and_param_structs = group
.methods
.iter()
.map(|method| generate_method(group, method));
let methods = methods_and_param_structs.clone().map(|(method, ..)| method);
let structs = methods_and_param_structs.flat_map(|(_, s)| s);
quote! {
impl <'a> #group_name_camel<'a> {
pub fn new(auth: &'a #auth) -> Self {
Self { auth }
}
#(#methods)*
}
#(#structs)*
}
}
fn generate_method(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
let method_name = util::to_ident(&method.name.to_snake());
let url = format!("/api/v2/{}/{}", group.url, method.url);
match &method.parameters {
Some(params) => create_method_with_params(group, method, params, &method_name, &url),
None => create_method_without_params(group, method, method_name, &url),
}
}
fn create_method_without_params(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
method_name: proc_macro2::Ident,
url: &str,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
match create_return_type(group, method) {
Some((return_type_name, return_type)) => (
util::add_docs(
&method.description,
quote! {
pub async fn #method_name(&self) -> Result<#return_type_name> {
let res = self.auth
.authenticated_client(#url)
.send()
.await?
.json::<#return_type_name>()
.await?;
Ok(res)
}
},
),
Some(return_type),
),
None => (
util::add_docs(
&method.description,
quote! {
pub async fn #method_name(&self) -> Result<String> {
let res = self.auth
.authenticated_client(#url)
.send()
.await?
.text()
.await?;
Ok(res)
}
},
), // assume that all methods without a return type return a string
None,
),
}
}
fn create_method_with_params(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
params: &[parser::types::Type],
method_name: &proc_macro2::Ident,
url: &str,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
let parameter_type = util::to_ident(&format!(
"{}{}Parameters",
group.name.to_camel(),
method.name.to_camel()
));
let mandatory_params = params
.iter()
.filter(|param| !param.get_type_info().is_optional);
let mandatory_param_args = mandatory_params.clone().map(|param| {
let name = util::to_ident(&param.get_type_info().name.to_snake());
let t = util::to_ident(&param.to_borrowed_type());
if param.should_borrow() {
quote! {
#name: &#t
}
} else {
quote! {
#name: #t
}
}
});
let mandatory_param_names = mandatory_params.clone().map(|param| {
let name = util::to_ident(&param.get_type_info().name.to_snake());
quote! {
#name
}
});
let mandatory_param_args_clone = mandatory_param_args.clone();
let mandatory_param_form_build = mandatory_params.map(|param| {
let n = &param.get_type_info().name;
let name = util::to_ident(&n.to_snake());
quote! {
let form = form.text(#n, #name.to_string());
}
});
let optional_params = params
.iter()
.filter(|param| param.get_type_info().is_optional)
.map(|param| {
let n = &param.get_type_info().name;
let name = util::to_ident(&n.to_snake());
let t = util::to_ident(&param.to_borrowed_type());
let method = if param.should_borrow() {
quote! {
pub fn #name(mut self, value: &#t) -> Self {
self.form = self.form.text(#n, value.to_string());
self
}
}
} else {
quote! {
pub fn #name(mut self, value: #t) -> Self {
self.form = self.form.text(#n, value.to_string());
self
}
}
};
util::add_docs(&param.get_type_info().description, method)
});
let group_name = util::to_ident(&group.name.to_camel());
let send = match create_return_type(group, method) {
Some((return_type_name, return_type)) => {
quote! {
impl<'a> #parameter_type<'a> {
fn new(group: &'a #group_name, #(#mandatory_param_args),*) -> Self {
let form = reqwest::multipart::Form::new();
#(#mandatory_param_form_build)*
Self { group, form }
}
#(#optional_params)*
pub async fn send(self) -> Result<#return_type_name> {
let res = self.group
.auth
.authenticated_client(#url)
.multipart(self.form)
.send()
.await?
.json::<#return_type_name>()
.await?;
Ok(res)
}
}
#return_type
}
}
None => {
quote! {
impl<'a> #parameter_type<'a> {
fn new(group: &'a #group_name, #(#mandatory_param_args),*) -> Self {
let form = reqwest::multipart::Form::new();
#(#mandatory_param_form_build)*
Self { group, form }
}
#(#optional_params)*
pub async fn send(self) -> Result<String> {
let res = self.group
.auth
.authenticated_client(#url)
.multipart(self.form)
.send()
.await?
.text()
.await?;
Ok(res)
}
}
}
}
};
(
util::add_docs(
&method.description,
quote! {
pub fn #method_name(&self, #(#mandatory_param_args_clone),*) -> #parameter_type {
#parameter_type::new(self, #(#mandatory_param_names),*)
}
},
),
Some(quote! {
pub struct #parameter_type<'a> {
group: &'a #group_name<'a>,
form: reqwest::multipart::Form,
}
#send
}),
)
}
fn create_return_type(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
) -> Option<(proc_macro2::TokenStream, proc_macro2::TokenStream)> {
let return_type = match &method.return_type {
Some(t) => t,
None => return None,
};
let to_enum_name = |name: &str| {
format!(
"{}{}{}",
group.name.to_camel(),
method.name.to_camel(),
name.to_camel()
)
};
let enum_types_with_names =
return_type
.parameters
.iter()
.flat_map(|parameter| match &parameter.return_type {
parser::types::Type::Number(TypeInfo {
ref name,
type_description: Some(type_description),
..
}) => {
let enum_fields = type_description.values.iter().map(|value| {
let v = &value.value;
let re = Regex::new(r#"\(.*\)"#).unwrap();
let desc = &value
.description
.replace(' ', "_")
.replace('-', "_")
.replace(',', "_");
let desc_without_parentheses = re.replace_all(desc, "");
let ident = util::to_ident(&desc_without_parentheses.to_camel());
util::add_docs(
&Some(value.description.clone()),
quote! {
#[serde(rename = #v)]
#ident
},
)
});
let enum_name = util::to_ident(&to_enum_name(name));
Some((
name,
quote! {
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Deserialize, PartialEq, Eq)]
pub enum #enum_name {
#(#enum_fields,)*
}
},
))
}
parser::types::Type::String(TypeInfo {
ref name,
type_description: Some(type_description),
..
}) => {
let enum_fields = type_description.values.iter().map(|type_description| {
let value = &type_description.value;
let value_as_ident = util::to_ident(&value.to_camel());
util::add_docs(
&Some(type_description.description.clone()),
quote! {
#[serde(rename = #value)]
#value_as_ident
},
)
});
let enum_name = util::to_ident(&to_enum_name(name));
Some((
name,
quote! {
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Deserialize, PartialEq, Eq)]
pub enum #enum_name {
#(#enum_fields,)*
}
},
))
}
_ => None,
});
let enum_names: HashMap<&String, String> = enum_types_with_names
.clone()
.map(|(enum_name, _)| (enum_name, to_enum_name(enum_name)))
.collect();
let enum_types = enum_types_with_names.map(|(_, enum_type)| enum_type);
let parameters = return_type.parameters.iter().map(|parameter| {
let namestr = &parameter.name;
let name = util::to_ident(&namestr.to_snake().replace("__", "_"));
let rtype = if let Some(enum_type) = enum_names.get(namestr) {
util::to_ident(enum_type)
} else {
util::to_ident(&parameter.return_type.to_owned_type())
};
let type_info = parameter.return_type.get_type_info();
let rtype_as_quote = if type_info.is_list {
quote! {
std::vec::Vec<#rtype>
}
} else {
quote! {
#rtype
}
};
// "type" is a reserved keyword in Rust, so we use a different name.
if namestr == "type" {
let non_reserved_name = format_ident!("t_{}", name);
quote! {
#[serde(rename = #namestr)]
pub #non_reserved_name: #rtype_as_quote
}
} else {
quote! {
#[serde(rename = #namestr)]
pub #name: #rtype_as_quote
}
}
});
let return_type_name = util::to_ident(&format!(
"{}{}Result",
&group.name.to_camel(),
&method.name.to_camel()
));
let result_type = if return_type.is_list {
quote! {
std::vec::Vec<#return_type_name>
}
} else {
quote! {
#return_type_name
}
};
Some((
result_type,
quote! {
#[derive(Debug, Deserialize)]
pub struct #return_type_name {
#(#parameters,)*
}
#(#enum_types)*
},
))
}
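To make the output of these templates concrete, below is a rough, hand-written sketch of what generate_method would emit for a hypothetical group "example" (group URL "example") with a method "doThing" that takes a mandatory string parameter "name", an optional integer parameter "limit", and declares no return type. All names are invented for illustration; the real output is driven by api-4_1.md and lives inside the generated module next to the skeleton items (Authenticated, Result).

pub struct Example<'a> {
    auth: &'a Authenticated,
}

impl<'a> Example<'a> {
    pub fn new(auth: &'a Authenticated) -> Self {
        Self { auth }
    }

    pub fn do_thing(&self, name: &str) -> ExampleDoThingParameters {
        ExampleDoThingParameters::new(self, name)
    }
}

pub struct ExampleDoThingParameters<'a> {
    group: &'a Example<'a>,
    form: reqwest::multipart::Form,
}

impl<'a> ExampleDoThingParameters<'a> {
    fn new(group: &'a Example, name: &str) -> Self {
        let form = reqwest::multipart::Form::new();
        let form = form.text("name", name.to_string());
        Self { group, form }
    }

    // one builder method is generated per optional parameter
    pub fn limit(mut self, value: i32) -> Self {
        self.form = self.form.text("limit", value.to_string());
        self
    }

    // methods without a declared return type hand back the raw response body
    pub async fn send(self) -> Result<String> {
        let res = self
            .group
            .auth
            .authenticated_client("/api/v2/example/doThing")
            .multipart(self.form)
            .send()
            .await?
            .text()
            .await?;
        Ok(res)
    }
}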

33
api-gen/src/lib.rs Normal file

@ -0,0 +1,33 @@
mod group;
mod skeleton;
mod util;
use case::CaseExt;
use proc_macro::TokenStream;
use quote::quote;
use skeleton::generate_skeleton;
use syn::parse_macro_input;
use crate::group::generate_groups;
const API_CONTENT: &str = include_str!("api-4_1.md");
#[proc_macro_derive(QBittorrentApiGen, attributes(api_gen))]
pub fn derive(input: TokenStream) -> TokenStream {
let ast = parse_macro_input!(input as syn::DeriveInput);
let ident = &ast.ident;
let api_groups = parser::parse_api_groups(API_CONTENT);
let skeleton = generate_skeleton(ident);
let groups = generate_groups(api_groups);
let impl_ident = syn::Ident::new(&format!("{}_impl", ident).to_snake(), ident.span());
quote! {
mod #impl_ident {
#skeleton
#groups
}
}
.into()
}

104
api-gen/src/skeleton.rs Normal file

@ -0,0 +1,104 @@
use quote::quote;
use crate::util;
pub const AUTH_IDENT: &str = "Authenticated";
pub fn auth_ident() -> proc_macro2::Ident {
util::to_ident(AUTH_IDENT)
}
pub fn generate_skeleton(ident: &syn::Ident) -> proc_macro2::TokenStream {
let auth = auth_ident();
quote! {
use reqwest::RequestBuilder;
use serde::Deserialize;
use thiserror::Error;
use super::#ident;
impl #ident {
/// Creates an authenticated client.
/// base_url is the url to the qbittorrent instance, e.g. http://localhost:8080
pub async fn login(
base_url: &str,
username: &str,
password: &str,
) -> Result<#auth> {
let client = reqwest::Client::new();
let form = reqwest::multipart::Form::new()
.text("username", username.to_string())
.text("password", password.to_string());
let auth_resp = client
.post(format!("{}/api/v2/auth/login", base_url))
.multipart(form)
.send()
.await?;
let cookie_header = match auth_resp.headers().get("set-cookie") {
Some(header) => header.to_str().unwrap(),
None => {
return Err(Error::InvalidUsernameOrPassword);
}
};
fn parse_cookie(input: &str) -> Result<&str> {
match input.split(';').next() {
Some(res) => Ok(res),
_ => Err(Error::AuthCookieParseError),
}
}
let auth_cookie = parse_cookie(cookie_header)?;
Ok(#auth {
client,
auth_cookie: auth_cookie.to_string(),
base_url: base_url.to_string(),
})
}
}
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Error)]
pub enum Error {
#[error("failed to parse auth cookie")]
AuthCookieParseError,
#[error("invalid username or password (failed to parse auth cookie)")]
InvalidUsernameOrPassword,
#[error("request error: {0}")]
HttpError(#[from] reqwest::Error),
}
type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
pub struct #auth {
auth_cookie: String,
base_url: String,
client: reqwest::Client,
}
impl #auth {
fn authenticated_client(&self, url: &str) -> RequestBuilder {
let url = format!("{}{}", self.base_url, url);
let cookie = self.auth_cookie.clone();
self.client
.post(url)
.header("cookie", cookie)
}
pub async fn logout(self) -> Result<()> {
self.authenticated_client("/api/v2/auth/logout")
.send()
.await?;
Ok(())
}
}
}
}
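For context, the cookie handling above keeps only the first ;-separated segment of the Set-Cookie header and replays it verbatim as the cookie header on every request built by authenticated_client. A minimal illustration with an invented header value:

// "SID=abc123; HttpOnly; path=/" is reduced to "SID=abc123" by parse_cookie
let header = "SID=abc123; HttpOnly; path=/";
assert_eq!(header.split(';').next(), Some("SID=abc123"));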

19
api-gen/src/util.rs Normal file

@ -0,0 +1,19 @@
use quote::quote;
use quote::ToTokens;
pub fn to_ident(name: &str) -> proc_macro2::Ident {
syn::Ident::new(name, proc_macro2::Span::call_site())
}
pub fn add_docs<T: ToTokens>(docs: &Option<String>, stream: T) -> proc_macro2::TokenStream {
if let Some(docs) = docs {
quote! {
#[doc = #docs]
#stream
}
} else {
quote! {
#stream
}
}
}
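A quick illustration of what add_docs produces (the doc string is invented; this would only compile as an internal test of this crate, with the function and quote in scope):

#[test]
fn add_docs_prefixes_a_doc_attribute() {
    let documented = add_docs(&Some("Returns the build info".to_string()), quote! { fn f() {} });
    assert_eq!(
        documented.to_string(),
        quote! { #[doc = "Returns the build info"] fn f() {} }.to_string()
    );
    // with None the stream is passed through unchanged
    assert_eq!(
        add_docs(&None, quote! { fn f() {} }).to_string(),
        quote! { fn f() {} }.to_string()
    );
}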


@ -0,0 +1,20 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
// assuming this torrent will exist for a while: http://www.legittorrents.info/index.php?page=torrent-details&id=5cc013e801095be61d768e609e3039da58616fd0
const TORRENT_URL: &str = "http://www.legittorrents.info/download.php?id=5cc013e801095be61d768e609e3039da58616fd0&f=Oddepoxy%20-%20Oddepoxy%20(2013)%20[OGG%20320%20CBR].torrent";
let _ = api.torrent_management().add(TORRENT_URL).send().await?;
Ok(())
}


@ -0,0 +1,16 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Foo {}
#[tokio::main]
async fn main() -> Result<()> {
let _ = Foo::login(BASE_URL, USERNAME, PASSWORD).await?;
Ok(())
}


@ -0,0 +1,26 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
let _ = api
.log()
.main()
.normal(true)
.info(false)
.warning(true)
.critical(false)
.send()
.await?;
Ok(())
}

16
api-gen/tests/login.rs Normal file

@ -0,0 +1,16 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let _ = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
Ok(())
}

17
api-gen/tests/logout.rs Normal file

@ -0,0 +1,17 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
api.logout().await?;
Ok(())
}


@ -0,0 +1,19 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
let build_info = api.application().build_info().await?;
assert!(!build_info.qt.is_empty());
Ok(())
}


@ -0,0 +1,24 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
// need a torrent in order for info to work
const TORRENT_URL: &str = "http://www.legittorrents.info/download.php?id=5cc013e801095be61d768e609e3039da58616fd0&f=Oddepoxy%20-%20Oddepoxy%20(2013)%20[OGG%20320%20CBR].torrent";
let _ = api.torrent_management().add(TORRENT_URL).send().await?;
let info = api.torrent_management().info().send().await?;
let first = &info[0];
assert_ne!(first.state, api_impl::TorrentManagementInfoState::Unknown);
Ok(())
}


@ -0,0 +1,25 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
// need a torrent in order for info to work
const TORRENT_URL: &str = "http://www.legittorrents.info/download.php?id=5cc013e801095be61d768e609e3039da58616fd0&f=Oddepoxy%20-%20Oddepoxy%20(2013)%20[OGG%20320%20CBR].torrent";
let _ = api.torrent_management().add(TORRENT_URL).send().await?;
let info = api.torrent_management().info().send().await?;
let first = &info[0];
// just check that something is there
assert_ne!(first.added_on, 0);
Ok(())
}

21
api-gen/tests/tests.rs Normal file

@ -0,0 +1,21 @@
#[test]
fn tests() {
let t = trybuild::TestCases::new();
// --- Auth ---
t.pass("tests/login.rs");
t.pass("tests/logout.rs");
// --- Parameters ---
t.pass("tests/without_parameters.rs");
// t.pass("tests/with_parameters.rs");
t.pass("tests/default_parameters.rs");
// --- Return types ---
t.pass("tests/return_type.rs");
t.pass("tests/return_type_with_optional_params.rs");
t.pass("tests/return_type_enum.rs");
// --- Misc ---
t.pass("tests/add_torrent.rs");
t.pass("tests/another_struct_name.rs");
}


@ -0,0 +1,20 @@
use anyhow::Result;
use api_gen::QBittorrentApiGen;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
let version = api.application().version().await?;
// don't be too specific
assert!(version.starts_with("v4.4"), "got: {}", version);
Ok(())
}

7
md-parser/Cargo.lock generated Normal file

@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "md-parser"
version = "0.1.0"

6
md-parser/Cargo.toml Normal file

@ -0,0 +1,6 @@
[package]
name = "md-parser"
version = "0.1.0"
edition = "2021"
[dependencies]

318
md-parser/src/lib.rs Normal file

@ -0,0 +1,318 @@
use std::{cell::RefCell, rc::Rc};
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MdContent {
Text(String),
Asterix(String),
Table(Table),
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Table {
pub header: TableRow,
pub split: String,
pub rows: Vec<TableRow>,
}
impl Table {
fn raw(&self) -> String {
let mut output = Vec::new();
output.push(self.header.raw.clone());
output.push(self.split.clone());
for row in self.rows.clone() {
output.push(row.raw);
}
output.join("\n")
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TableRow {
raw: String,
pub columns: Vec<String>,
}
impl MdContent {
pub fn inner_value_as_string(&self) -> String {
match self {
MdContent::Text(text) => text.into(),
MdContent::Asterix(text) => text.into(),
MdContent::Table(table) => table.raw(),
}
}
}
#[derive(Debug, Clone)]
pub struct Header {
level: i32,
content: String,
}
/// These are the only relevant tokens we need for the api generation.
#[derive(Debug)]
pub enum MdToken {
Header(Header),
Content(MdContent),
}
impl MdToken {
fn parse_token(line: &str) -> MdToken {
if line.starts_with('#') {
let mut level = 0;
for char in line.chars() {
if char != '#' {
break;
}
level += 1;
}
MdToken::Header(Header {
level,
content: line.trim_matches('#').trim().to_string(),
})
} else if line.starts_with('*') {
MdToken::Content(MdContent::Asterix(
line.trim_matches('*').trim().to_string(),
))
} else {
MdToken::Content(MdContent::Text(line.to_string()))
}
}
fn from(content: &str) -> Vec<MdToken> {
let mut output = Vec::new();
let mut iter = content.lines().peekable();
while let Some(line) = iter.next() {
// assume this is a table
if line.contains('|') {
let to_columns = |column_line: &str| {
column_line
.replace('`', "")
.split('|')
.map(|s| s.trim().to_string())
.collect()
};
let table_header = TableRow {
raw: line.into(),
columns: to_columns(line),
};
let table_split = iter.next().unwrap();
let mut table_rows = Vec::new();
while let Some(&row_line) = iter.peek() {
if !row_line.contains('|') {
// we've reached the end of the table; leave this line for the outer loop
break;
}
iter.next();
let table_row = TableRow {
raw: row_line.into(),
columns: to_columns(row_line),
};
table_rows.push(table_row);
}
output.push(MdToken::Content(MdContent::Table(Table {
header: table_header,
split: table_split.to_string(),
rows: table_rows,
})));
} else {
output.push(MdToken::parse_token(line));
}
}
output
}
}
#[derive(Debug)]
pub struct TokenTree {
pub title: Option<String>,
pub content: Vec<MdContent>,
pub children: Vec<TokenTree>,
}
impl From<Rc<TokenTreeFactory>> for TokenTree {
fn from(builder: Rc<TokenTreeFactory>) -> Self {
let children = builder
.children
.clone()
.into_inner()
.into_iter()
.map(|child| child.into())
.collect::<Vec<TokenTree>>();
let content = builder.content.clone().into_inner();
TokenTree {
title: builder.title.clone(),
content,
children,
}
}
}
#[derive(Debug, Default)]
pub struct TokenTreeFactory {
title: Option<String>,
content: RefCell<Vec<MdContent>>,
children: RefCell<Vec<Rc<TokenTreeFactory>>>,
level: i32,
}
impl TokenTreeFactory {
fn new(title: &str, level: i32) -> Self {
Self {
title: if title.is_empty() {
None
} else {
Some(title.to_string())
},
level,
..Default::default()
}
}
fn add_content(&self, content: MdContent) {
self.content.borrow_mut().push(content);
}
fn append(&self, child: &Rc<TokenTreeFactory>) {
self.children.borrow_mut().push(child.clone());
}
pub fn create(content: &str) -> TokenTree {
let tokens = MdToken::from(content);
let mut stack = Vec::new();
let root = Rc::new(TokenTreeFactory::default());
stack.push(root.clone());
for token in tokens {
match token {
MdToken::Header(Header { level, content }) => {
let new_header = Rc::new(TokenTreeFactory::new(&content, level));
// pop the stack until we find a header with a lower level (the parent of this one)
while let Some(current) = stack.pop() {
if current.level < level {
current.append(&new_header);
stack.push(current);
break;
}
}
stack.push(new_header.clone());
}
MdToken::Content(content) => {
let current = stack.pop().unwrap();
current.add_content(content);
stack.push(current);
}
}
}
root.into()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_remove_surrounding_asterix() {
// given
let input = r#"
# A
**B**
"#
.trim_matches('\n')
.trim();
// when
let tree = TokenTreeFactory::create(input);
// then
println!("{:#?}", tree);
let first = tree.children.first().unwrap();
let content = first.content.first().unwrap();
assert_eq!(*content, MdContent::Asterix("B".into()));
}
#[test]
fn should_remove_surrounding_hash() {
// given
let input = r#"
# A #
"#
.trim_matches('\n')
.trim();
// when
let tree = TokenTreeFactory::create(input);
// then
println!("{:#?}", tree);
assert_eq!(tree.children.first().unwrap().title, Some("A".into()));
}
#[test]
fn single_level() {
// given
let input = r#"
# A
Foo
"#
.trim_matches('\n')
.trim();
// when
let tree = TokenTreeFactory::create(input);
// then
println!("{:#?}", tree);
assert_eq!(tree.title, None);
let first_child = tree.children.first().unwrap();
assert_eq!(first_child.title, Some("A".into()));
}
#[test]
fn complex() {
// given
let input = r#"
# A
Foo
## B
# C
## D
Bar
"#
.trim_matches('\n')
.trim();
// when
let tree = TokenTreeFactory::create(input);
// then
println!("{:#?}", tree);
assert_eq!(tree.title, None);
assert_eq!(tree.children.len(), 2);
let first = tree.children.get(0).unwrap();
assert_eq!(first.title, Some("A".into()));
assert_eq!(first.children.len(), 1);
assert_eq!(first.children.first().unwrap().title, Some("B".into()));
let second = tree.children.get(1).unwrap();
assert_eq!(second.title, Some("C".into()));
assert_eq!(second.children.len(), 1);
assert_eq!(second.children.first().unwrap().title, Some("D".into()));
}
}
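As an illustrative sketch (not part of this commit, but it would slot into the tests module above), this is how a markdown table is tokenized:

#[test]
fn table_sketch() {
    // given
    let input = "Name | Type\n--- | ---\nhash | string";
    // when
    let tree = TokenTreeFactory::create(input);
    // then
    match tree.content.first() {
        Some(MdContent::Table(table)) => {
            assert_eq!(table.header.columns, vec!["Name", "Type"]);
            assert_eq!(table.rows[0].columns, vec!["hash", "string"]);
        }
        other => panic!("expected a table, got {:?}", other),
    }
}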

54
parser/Cargo.lock generated Normal file

@ -0,0 +1,54 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
"memchr",
]
[[package]]
name = "case"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6c0e7b807d60291f42f33f58480c0bfafe28ed08286446f45e463728cf9c1c"
[[package]]
name = "md-parser"
version = "0.1.0"
[[package]]
name = "memchr"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "parser"
version = "0.1.0"
dependencies = [
"case",
"md-parser",
"regex",
]
[[package]]
name = "regex"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"

9
parser/Cargo.toml Normal file

@ -0,0 +1,9 @@
[package]
name = "parser"
version = "0.1.0"
edition = "2021"
[dependencies]
md-parser = { path = "../md-parser" }
case = "1.0.0"
regex = "1.6.0"

5696
parser/groups.txt Normal file

File diff suppressed because one or more lines are too long

3345
parser/src/api-4_1.md Normal file

File diff suppressed because it is too large


@ -0,0 +1,51 @@
use md_parser::MdContent;
pub fn get_group_description(content: &[MdContent]) -> Option<String> {
let return_desc = content
.iter()
.map(|row| row.inner_value_as_string())
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
if return_desc.is_empty() {
None
} else {
Some(return_desc)
}
}
pub fn get_method_description(content: &[MdContent]) -> Option<String> {
let return_desc = content
.iter()
// skip until we get to the "Returns:" text
.skip_while(|row| match row {
MdContent::Asterix(text) => !text.starts_with("Returns:"),
_ => true,
})
// skip the "Returns:" line and the blank line before the table
.skip(2)
.skip_while(|row| match row {
MdContent::Text(text) => !text.is_empty(),
_ => true,
})
// and skip the blank line after the table
.skip(1)
// then what is left should be the description
.flat_map(|row| match row {
MdContent::Text(text) => Some(text),
_ => None,
})
.cloned()
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
if return_desc.is_empty() {
None
} else {
Some(return_desc)
}
}


@ -0,0 +1,65 @@
mod description;
mod parameters;
mod return_type;
mod url_parser;
use md_parser::TokenTree;
use crate::{util, ApiGroup, ApiMethod};
use self::{parameters::get_parameters, return_type::get_return_type};
pub fn parse_groups(trees: Vec<TokenTree>) -> Vec<ApiGroup> {
trees.into_iter().map(parse_api_group).collect()
}
fn parse_api_group(tree: TokenTree) -> ApiGroup {
let methods = tree
.children
.into_iter()
.flat_map(parse_api_method)
.collect();
let group_description = description::get_group_description(&tree.content);
let group_url = url_parser::get_group_url(&tree.content);
let name = tree
.title
.unwrap()
.to_lowercase()
.trim_end_matches("(experimental)")
.trim()
.replace(' ', "_");
ApiGroup {
name,
methods,
description: group_description,
url: group_url,
}
}
fn parse_api_method(child: TokenTree) -> Option<ApiMethod> {
util::find_content_starts_with(&child.content, "Name: ")
.map(|name| {
name.trim_start_matches("Name: ")
.trim_matches('`')
.to_string()
})
.map(|name| to_api_method(&child, &name))
}
fn to_api_method(child: &TokenTree, name: &str) -> ApiMethod {
let method_description = description::get_method_description(&child.content);
let return_type = get_return_type(&child.content);
let parameters = get_parameters(&child.content);
let method_url = url_parser::get_method_url(&child.content);
ApiMethod {
name: name.to_string(),
description: method_description,
parameters,
return_type,
url: method_url,
}
}


@ -0,0 +1,48 @@
use std::collections::HashMap;
use md_parser::MdContent;
use crate::types::{Type, OPTIONAL};
pub fn get_parameters(content: &[MdContent]) -> Option<Vec<Type>> {
let mut it = content
.iter()
.skip_while(|row| match row {
MdContent::Asterix(content) | MdContent::Text(content) => {
!content.starts_with("Parameters:")
}
_ => true,
})
// Parameters: <-- skip
// <-- skip
// table with parameters <-- take
.skip(2);
let parameter_table = match it.next() {
Some(MdContent::Table(table)) => table,
_ => return None,
};
// empty for now
let type_map = HashMap::default();
let table = parameter_table
.rows
.iter()
.flat_map(|row| {
let description = row.columns.get(2).cloned();
match &row.columns.get(2) {
// If the description contains a default value it means that the parameter is optional.
Some(desc) if desc.contains("default: ") => {
// Type::from marks a parameter as optional when its name contains _optional_, so append the marker here
let name_with_optional = format!("{} {}", row.columns[0], OPTIONAL);
Type::from(&row.columns[1], &name_with_optional, description, &type_map)
}
_ => Type::from(&row.columns[1], &row.columns[0], description, &type_map),
}
})
.collect();
Some(table)
}


@ -0,0 +1,51 @@
use md_parser::MdContent;
use crate::{object_types::get_object_types, types::Type, ReturnType, ReturnTypeParameter};
pub fn get_return_type(content: &[MdContent]) -> Option<ReturnType> {
let table = content
.iter()
// The response is a ... <-- Trying to find this line
// <-- The next line is empty
// Table with the return type <-- And then extract the following type table
.skip_while(|row| match row {
MdContent::Text(text) => !text.starts_with("The response is a"),
_ => true,
})
.find_map(|row| match row {
MdContent::Table(table) => Some(table),
_ => None,
})?;
let types = get_object_types(content);
let parameters = table
.rows
.iter()
.map(|parameter| ReturnTypeParameter {
name: parameter.columns[0].clone(),
description: parameter.columns[2].clone(),
return_type: Type::from(
&parameter.columns[1],
&parameter.columns[0],
Some(parameter.columns[2].clone()),
&types,
)
.unwrap_or_else(|| panic!("Failed to parse type {}", &parameter.columns[1])),
})
.collect();
let is_list = content
.iter()
.find_map(|row| match row {
MdContent::Text(text) if text.starts_with("The response is a") => Some(text),
_ => None,
})
.map(|found| found.contains("array"))
.unwrap_or(false);
Some(ReturnType {
parameters,
is_list,
})
}


@ -0,0 +1,23 @@
use md_parser::MdContent;
use regex::Regex;
use crate::util;
pub fn get_group_url(content: &[MdContent]) -> String {
let row = util::find_content_contains(content, "API methods are under")
.expect("Could not find api method");
let re = Regex::new(r#"All (?:\w+\s?)+ API methods are under "(\w+)", e.g."#)
.expect("Failed to create regex");
let res = re.captures(&row).expect("Failed find capture");
res[1].to_string()
}
pub fn get_method_url(content: &[MdContent]) -> String {
const START: &str = "Name: ";
util::find_content_starts_with(content, START)
.map(|text| text.trim_start_matches(START).trim_matches('`').to_string())
.expect("Could find method url")
}

85
parser/src/lib.rs Normal file

@ -0,0 +1,85 @@
mod group_parser;
mod object_types;
pub mod types;
mod util;
use group_parser::parse_groups;
use md_parser::{self, TokenTree};
use types::Type;
#[derive(Debug)]
pub struct ApiGroup {
pub name: String,
pub methods: Vec<ApiMethod>,
pub description: Option<String>,
pub url: String,
}
#[derive(Debug)]
pub struct ApiMethod {
pub name: String,
pub description: Option<String>,
pub parameters: Option<Vec<Type>>,
pub return_type: Option<ReturnType>,
pub url: String,
}
#[derive(Debug)]
pub struct ReturnType {
pub is_list: bool,
pub parameters: Vec<ReturnTypeParameter>,
}
#[derive(Debug)]
pub struct ReturnTypeParameter {
pub name: String,
pub description: String,
pub return_type: Type,
}
fn extract_relevant_parts(tree: TokenTree) -> Vec<TokenTree> {
let relevant: Vec<TokenTree> = tree
.children
.into_iter()
.skip_while(|row| match &row.title {
Some(title) => title != "Authentication",
None => false,
})
.filter(|row| match &row.title {
Some(title) => title != "WebAPI versioning",
None => false,
})
.collect();
relevant
}
pub fn parse_api_groups(content: &str) -> Vec<ApiGroup> {
parse_groups(extract_relevant_parts(md_parser::TokenTreeFactory::create(
content,
)))
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs;
fn parse() -> TokenTree {
let content = include_str!("api-4_1.md");
let md_tree = md_parser::TokenTreeFactory::create(content);
let output = format!("{:#?}", md_tree);
fs::write("token_tree.txt", output).unwrap();
md_tree
}
#[test]
fn it_works() {
let groups = parse_groups(extract_relevant_parts(parse()));
let groups_as_str = format!("{:#?}", groups);
fs::write("groups.txt", groups_as_str).unwrap();
}
}


@ -0,0 +1,38 @@
use std::collections::HashMap;
use md_parser::MdContent;
use crate::types::{TypeDescription, TypeDescriptions};
pub fn get_object_types(content: &[MdContent]) -> HashMap<String, TypeDescription> {
let mut output = HashMap::new();
let mut content_it = content.iter();
while let Some(entry) = content_it.next() {
if let MdContent::Text(content) = entry {
const POSSIBLE_VALUES_OF: &str = "Possible values of ";
if content.contains(POSSIBLE_VALUES_OF) {
// the next line is empty; skip it
content_it.next();
if let Some(MdContent::Table(table)) = content_it.next() {
let enum_types = table
.rows
.iter()
.map(|row| TypeDescriptions {
value: row.columns[0].to_string(),
description: row.columns[1].to_string(),
})
.collect();
let name = content
.trim_start_matches(POSSIBLE_VALUES_OF)
.replace('`', "")
.replace(':', "");
output.insert(name, TypeDescription { values: enum_types });
}
}
}
}
output
}

119
parser/src/types.rs Normal file

@ -0,0 +1,119 @@
use std::collections::HashMap;
#[derive(Debug, Clone)]
pub struct TypeDescriptions {
pub value: String,
pub description: String,
}
#[derive(Debug, Clone)]
pub struct TypeDescription {
pub values: Vec<TypeDescriptions>,
}
#[derive(Debug, Clone)]
pub struct TypeInfo {
pub name: String,
pub is_optional: bool,
pub is_list: bool,
pub description: Option<String>,
pub type_description: Option<TypeDescription>,
}
impl TypeInfo {
pub fn new(
name: &str,
is_optional: bool,
is_list: bool,
description: Option<String>,
type_description: Option<TypeDescription>,
) -> Self {
Self {
name: name.into(),
is_optional,
is_list,
description,
type_description,
}
}
}
pub const OPTIONAL: &str = "_optional_";
#[derive(Debug, Clone)]
pub enum Type {
Number(TypeInfo),
Float(TypeInfo),
Bool(TypeInfo),
String(TypeInfo),
StringArray(TypeInfo),
Object(TypeInfo),
}
impl Type {
pub fn to_owned_type(&self) -> String {
match self {
Type::Number(_) => "i128".into(),
Type::Float(_) => "f32".into(),
Type::Bool(_) => "bool".into(),
Type::String(_) => "String".into(),
// TODO: fixme
Type::StringArray(_) => "String".into(),
Type::Object(_) => "String".into(),
}
}
pub fn to_borrowed_type(&self) -> String {
match self {
Type::Number(_) => "i32".into(),
Type::Float(_) => "f32".into(),
Type::Bool(_) => "bool".into(),
Type::String(_) => "str".into(),
Type::StringArray(_) => "&[str]".into(),
Type::Object(_) => "str".into(),
}
}
pub fn should_borrow(&self) -> bool {
matches!(self, Type::String(_) | Type::Object(_))
}
pub fn get_type_info(&self) -> &TypeInfo {
match self {
Type::Number(t) => t,
Type::Float(t) => t,
Type::Bool(t) => t,
Type::String(t) => t,
Type::StringArray(t) => t,
Type::Object(t) => t,
}
}
pub fn from(
type_as_str: &str,
name: &str,
description: Option<String>,
types: &HashMap<String, TypeDescription>,
) -> Option<Type> {
let available_types = types.get(name).cloned();
let type_name = match name.split_once(OPTIONAL) {
Some((split, _)) => split,
None => name,
}
.trim();
let is_optional = name.contains(OPTIONAL);
let type_info = TypeInfo::new(type_name, is_optional, false, description, available_types);
match type_as_str {
"bool" => Some(Type::Bool(type_info)),
"integer" | "number" | "int" => Some(Type::Number(type_info)),
"string" => Some(Type::String(type_info)),
// This is probably not right but we don't have any information about the actual type.
"array" => Some(Type::StringArray(type_info)),
"object" => Some(Type::Object(type_info)),
"float" => Some(Type::Float(type_info)),
_ => None,
}
}
}
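As an illustrative sketch (not part of this commit; it assumes it lives in a tests module inside types.rs), this is how Type::from classifies a parameter column:

#[test]
fn optional_string_parameter_sketch() {
    use std::collections::HashMap;
    // a name column of "hash _optional_" with a type column of "string" yields
    // Type::String, marked optional, with the marker stripped from the name
    let t = Type::from("string", "hash _optional_", None, &HashMap::new()).unwrap();
    assert!(t.get_type_info().is_optional);
    assert_eq!(t.get_type_info().name, "hash");
    assert!(matches!(t, Type::String(_)));
}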

27
parser/src/util.rs Normal file

@ -0,0 +1,27 @@
use md_parser::MdContent;
pub fn find_content_starts_with(content: &[MdContent], starts_with: &str) -> Option<String> {
content.iter().find_map(|row| match row {
MdContent::Text(content) => {
if content.starts_with(starts_with) {
Some(content.into())
} else {
None
}
}
_ => None,
})
}
pub fn find_content_contains(content: &[MdContent], contains: &str) -> Option<String> {
content.iter().find_map(|row| match row {
MdContent::Text(content) => {
if content.contains(contains) {
Some(content.into())
} else {
None
}
}
_ => None,
})
}

13793
parser/token_tree.txt Normal file

File diff suppressed because it is too large

4
src/lib.rs Normal file

@ -0,0 +1,4 @@
use api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
pub struct Api;