30: Missing search r=JoelWachsler a=JoelWachsler



Co-authored-by: Joel Wachsler <JoelWachsler@users.noreply.github.com>
This commit is contained in:
bors[bot] 2022-07-23 00:45:03 +00:00 committed by GitHub
commit 2d7b0ddb3f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
82 changed files with 10121 additions and 26351 deletions

View File

@ -10,11 +10,6 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
USER vscode
# RUN rustup default nightly \
# && cargo install cargo-expand \
# && rustup component add rustfmt \
# && rustup component add clippy
RUN cargo install cargo-expand \
&& rustup component add rustfmt \
&& rustup component add clippy

View File

@ -26,7 +26,8 @@
"tamasfe.even-better-toml",
"serayuzgur.crates",
"redhat.vscode-yaml",
"eamodio.gitlens"
"eamodio.gitlens",
"streetsidesoftware.code-spell-checker"
]
}
},

47
Cargo.lock generated
View File

@ -11,6 +11,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi",
]
[[package]]
name = "anyhow"
version = "1.0.58"
@ -81,6 +90,22 @@ version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
[[package]]
name = "ctor"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "diff"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
[[package]]
name = "dissimilar"
version = "1.0.4"
@ -487,6 +512,15 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "output_vt100"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66"
dependencies = [
"winapi",
]
[[package]]
name = "parking_lot"
version = "0.12.1"
@ -534,6 +568,18 @@ version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
[[package]]
name = "pretty_assertions"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c89f989ac94207d048d92db058e4f6ec7342b0971fc58d1271ca148b799b3563"
dependencies = [
"ansi_term",
"ctor",
"diff",
"output_vt100",
]
[[package]]
name = "proc-macro2"
version = "1.0.40"
@ -561,6 +607,7 @@ version = "0.3.2"
dependencies = [
"anyhow",
"case",
"pretty_assertions",
"proc-macro2",
"quote",
"regex",

View File

@ -16,6 +16,4 @@ serde_json = "1.0.82"
thiserror = "1.0.31"
[workspace]
members = [
"qbittorrent-web-api-gen",
]
members = ["qbittorrent-web-api-gen"]

View File

@ -1,6 +1,6 @@
# qBittorrent web api for Rust
This is an automatic async implementation of the qBittorrent 4.1 web api. The api generation is based on the wiki markdown file which can be found [here](https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)).
This is an automatic async implementation of the qBittorrent 4.1 web api. The api generation is based on a forked wiki markdown file describing the api which can be found [here](https://github.com/JoelWachsler/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)) and the original [here](https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)).
## Example

View File

@ -6,11 +6,17 @@ license = "MIT"
keywords = ["qbittorrent"]
repository = "https://github.com/JoelWachsler/qbittorrent-web-api"
description = "Generated web api for qBittorrent"
exclude = ["*.txt", "tests"]
exclude = ["*.txt", "tests", "src/md_parser/token_tree_factory_tests"]
# we use trybuild instead
autotests = false
[lib]
proc-macro = true
[[test]]
name = "tests"
path = "tests/tests.rs"
[dependencies]
syn = { version = "1.0.98", features = ["extra-traits"]}
quote = "1.0.20"
@ -26,3 +32,4 @@ trybuild = { version = "1.0.63", features = ["diff"] }
anyhow = "1.0.58"
tokio = { version = "1.19.2", features = ["full"] }
reqwest = { version = "0.11.11", features = ["json", "multipart"] }
pretty_assertions = "1.2.1"

View File

@ -1002,17 +1002,17 @@ HTTP Status Code | Scenario
The response is a JSON object with the following possible fields
Property | Type | Description
------------------------------|---------|------------
`rid` | integer | Response ID
`full_update` | bool | Whether the response contains all the data or partial data
`torrents` | object | Property: torrent hash, value: same as [torrent list](#get-torrent-list)
`torrents_removed` | array | List of hashes of torrents removed since last request
`categories` | object | Info for categories added since last request
`categories_removed` | array | List of categories removed since last request
`tags` | array | List of tags added since last request
`tags_removed` | array | List of tags removed since last request
`server_state` | object | Global transfer info
Property | Type | Description
--------------------------------|-----------|------------
`rid` | integer | Response ID
`full_update`_optional_ | bool | Whether the response contains all the data or partial data
`torrents`_optional_ | object | Property: torrent hash, value: same as [torrent list](#get-torrent-list), map from string to torrents object
`torrents_removed`_optional_ | array | List of hashes of torrents removed since last request
`categories`_optional_ | object | Info for categories added since last request, map from string to categories object
`categories_removed`_optional_ | array | List of categories removed since last request
`tags`_optional_ | array | List of tags added since last request
`tags_removed`_optional_ | array | List of tags removed since last request
`server_state`_optional_ | object | `server_state` object see table below
Example:
@ -1029,6 +1029,74 @@ Example:
}
```
**ServerState object:**
Property | Type | Description
------------------------------|---------|------------
`average_time_queue` | integer | Average time queue
`dl_info_data` | number | Download info data
`dl_info_speed` | number | Download info speed
`queued_io_jobs` | integer | Queued io jobs
`total_buffers_size` | number | Total buffers size
`total_peer_connections` | integer | Total peer connections
**Categories object:**
Property | Type | Description
------------------------------|---------|------------
`name` | string | Category name
`savePath` | string | Save path
**Torrents object:**
Property | Type | Description
--------------------------------|-----------|------------
`added_on`_optional_ | integer | Time (Unix Epoch) when the torrent was added to the client
`amount_left`_optional_ | integer | Amount of data left to download (bytes)
`auto_tmm`_optional_ | bool | Whether this torrent is managed by Automatic Torrent Management
`availability`_optional_ | float | Percentage of file pieces currently available
`category`_optional_ | string | Category of the torrent
`completed`_optional_ | integer | Amount of transfer data completed (bytes)
`completion_on`_optional_ | integer | Time (Unix Epoch) when the torrent completed
`content_path`_optional_ | string | Absolute path of torrent content (root path for multifile torrents, absolute file path for singlefile torrents)
`dl_limit`_optional_ | integer | Torrent download speed limit (bytes/s). `-1` if unlimited.
`dlspeed`_optional_ | integer | Torrent download speed (bytes/s)
`downloaded`_optional_ | integer | Amount of data downloaded
`downloaded_session`_optional_ | integer | Amount of data downloaded this session
`eta`_optional_ | integer | Torrent ETA (seconds)
`f_l_piece_prio`_optional_ | bool | True if first/last piece are prioritized
`force_start`_optional_ | bool | True if force start is enabled for this torrent
`hash`_optional_ | string | Torrent hash
`last_activity`_optional_ | integer | Last time (Unix Epoch) when a chunk was downloaded/uploaded
`magnet_uri`_optional_ | string | Magnet URI corresponding to this torrent
`max_ratio`_optional_ | float | Maximum share ratio until torrent is stopped from seeding/uploading
`max_seeding_time`_optional_ | integer | Maximum seeding time (seconds) until torrent is stopped from seeding
`name`_optional_ | string | Torrent name
`num_complete`_optional_ | integer | Number of seeds in the swarm
`num_incomplete`_optional_ | integer | Number of leechers in the swarm
`num_leechs`_optional_ | integer | Number of leechers connected to
`num_seeds`_optional_ | integer | Number of seeds connected to
`priority`_optional_ | integer | Torrent priority. Returns -1 if queuing is disabled or torrent is in seed mode
`progress`_optional_ | float | Torrent progress (percentage/100)
`ratio`_optional_ | float | Torrent share ratio. Max ratio value: 9999.
`ratio_limit`_optional_ | float | TODO (what is different from `max_ratio`?)
`save_path`_optional_ | string | Path where this torrent's data is stored
`seeding_time`_optional_ | integer | Torrent elapsed time while complete (seconds)
`seeding_time_limit`_optional_ | integer | TODO (what is different from `max_seeding_time`?) `seeding_time_limit` is a per-torrent setting that applies when Automatic Torrent Management is disabled; in that case `max_seeding_time` is set to `seeding_time_limit` for this torrent. If Automatic Torrent Management is enabled, the value is -2, and if `max_seeding_time` is unset it has a default value of -1.
`seen_complete`_optional_ | integer | Time (Unix Epoch) when this torrent was last seen complete
`seq_dl`_optional_ | bool | True if sequential download is enabled
`size`_optional_ | integer | Total size (bytes) of files selected for download
`state`_optional_ | string | Torrent state. See table here below for the possible values
`super_seeding`_optional_ | bool | True if super seeding is enabled
`tags`_optional_ | string | Comma-concatenated tag list of the torrent
`time_active`_optional_ | integer | Total active time (seconds)
`total_size`_optional_ | integer | Total size (bytes) of all files in this torrent (including unselected ones)
`tracker`_optional_ | string | The first tracker with working status. Returns empty string if no tracker is working.
`up_limit`_optional_ | integer | Torrent upload speed limit (bytes/s). `-1` if unlimited.
`uploaded`_optional_ | integer | Amount of data uploaded
`uploaded_session`_optional_ | integer | Amount of data uploaded this session
`upspeed`_optional_ | integer | Torrent upload speed (bytes/s)
## Get torrent peers data ##
Name: `torrentPeers`
@ -1756,7 +1824,7 @@ Name: `delete`
Parameter | Type | Description
------------|----------|------------
`hashes` | string | The hashes of the torrents you want to delete. `hashes` can contain multiple hashes separated by `\|`, to delete multiple torrents, or set to `all`, to delete all torrents.
`deleteFiles` | If set to `true`, the downloaded data will also be deleted, otherwise has no effect.
`deleteFiles` | bool | If set to `true`, the downloaded data will also be deleted, otherwise has no effect.
Example:
@ -3275,6 +3343,13 @@ Field | Type | Description
]
```
**Category object:**
Field | Type | Description
---------------------------|---------|------------
`id` | string | Id
`name` | string | Name
## Install search plugin ##
Name: `installPlugin`

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,89 @@
use crate::parser;
use case::CaseExt;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use super::{group_method::GroupMethod, skeleton::auth_ident, util};
/// Code generation for one API group: emits a module containing the group
/// struct, its factory method on the auth type, and all group methods.
impl parser::ApiGroup {
/// Generates the complete module for this group: a `pub mod` named after
/// the group (snake_case) containing the group struct, a `new` constructor
/// taking the shared auth object, the factory impl, and all methods.
pub fn generate(&self) -> TokenStream {
let struct_name = self.struct_name();
let group_name_snake = self.name_snake();
let group_methods = self.generate_group_methods();
let group_struct = self.group_struct();
let group_factory = self.group_factory();
let auth = auth_ident();
quote! {
pub mod #group_name_snake {
impl <'a> #struct_name<'a> {
pub fn new(auth: &'a super::#auth) -> Self {
Self { auth }
}
}
#group_struct
#group_factory
#(#group_methods)*
}
}
}
// Token streams for every method in this group, in declaration order.
fn generate_group_methods(&self) -> Vec<TokenStream> {
let group_methods = self.group_methods();
group_methods
.iter()
.map(|group_method| group_method.generate_method())
.collect()
}
// Emits `impl super::Auth { fn <group>() -> Group }` so callers reach the
// group through the auth object; group docs are attached via util::add_docs.
fn group_factory(&self) -> TokenStream {
let struct_name = self.struct_name();
let name_snake = self.name_snake();
let auth = auth_ident();
util::add_docs(
&self.description,
quote! {
impl super::#auth {
pub fn #name_snake(&self) -> #struct_name {
#struct_name::new(self)
}
}
},
)
}
// The group struct itself: borrows the auth object for its lifetime.
fn group_struct(&self) -> TokenStream {
let struct_name = self.struct_name();
let auth = auth_ident();
quote! {
#[derive(Debug)]
pub struct #struct_name<'a> {
auth: &'a super::#auth,
}
}
}
// Pairs this group with each of its parsed methods.
fn group_methods(&self) -> Vec<GroupMethod> {
self.methods
.iter()
.map(|method| GroupMethod::new(self, method))
.collect()
}
/// Identifier used for the generated group struct (CamelCase group name).
pub fn struct_name(&self) -> Ident {
self.name_camel()
}
fn name_camel(&self) -> Ident {
util::to_ident(&self.name.to_camel())
}
fn name_snake(&self) -> Ident {
util::to_ident(&self.name.to_snake())
}
}

View File

@ -0,0 +1,31 @@
use case::CaseExt;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use crate::parser;
use super::util;
/// Code-generation helpers for a single parsed API method.
impl parser::ApiMethod {
    /// Emits one struct definition per object type declared by this method.
    pub fn structs(&self) -> TokenStream {
        let object_types = self.types.objects();
        let generated = object_types.iter().map(|object| object.generate_struct());
        quote! { #(#generated)* }
    }

    /// Emits one enum definition per enum type declared by this method.
    pub fn enums(&self) -> TokenStream {
        let enum_types = self.types.enums();
        let generated = enum_types.iter().map(|enum_type| enum_type.generate());
        quote! { #(#generated)* }
    }

    /// The method name converted to a snake_case identifier.
    pub fn name_snake(&self) -> Ident {
        util::to_ident(&self.name.to_snake())
    }
}

View File

@ -0,0 +1,184 @@
use crate::{parser, types};
use case::CaseExt;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use super::util;
/// Generates code for every API group except `authentication`,
/// which has a hand-written implementation.
pub fn generate_groups(groups: Vec<parser::ApiGroup>) -> TokenStream {
    let generated = groups
        .iter()
        // the authentication group is implemented manually
        .filter(|group| group.name != "authentication")
        .map(generate_group);
    quote! { #(#generated)* }
}
/// Generates the code for a single API group.
///
/// `ApiGroup::generate` already returns a complete `TokenStream`, so
/// re-interpolating it through another `quote! { #group }` invocation was
/// redundant; the stream is returned directly with identical tokens.
fn generate_group(group: &parser::ApiGroup) -> TokenStream {
    group.generate()
}
impl parser::TypeWithName {
    /// Generates a `serde`-deserializable struct for this named type,
    /// with one field per contained type.
    pub fn generate_struct(&self) -> TokenStream {
        let struct_ident = util::to_ident(&self.name);
        let struct_fields = self.types.iter().map(|field| field.generate_struct_field());
        quote! {
            #[derive(Debug, serde::Deserialize)]
            pub struct #struct_ident {
                #(#struct_fields,)*
            }
        }
    }
}
/// Struct-field generation for a parsed API type.
impl types::Type {
/// Emits one struct field: snake_case Rust name, `#[serde(rename)]` back
/// to the original wire name, and the owned Rust type for the value.
pub fn generate_struct_field(&self) -> TokenStream {
let name_snake = self.name_snake();
let type_ = self.owned_type_ident();
let orig_name = self.name();
util::add_docs(
&self.get_type_info().description,
quote! {
#[serde(rename = #orig_name)]
pub #name_snake: #type_
},
)
}
// Maps the parsed type to an owned Rust type; wrapped in Vec when the
// type is declared as a list. Note: API "number" maps to i128 and
// StringArray maps to String here (the list wrapping supplies the Vec).
fn owned_type_ident(&self) -> TokenStream {
let owned_type = match self {
types::Type::Number(_) => quote! { i128 },
types::Type::Float(_) => quote! { f32 },
types::Type::Bool(_) => quote! { bool },
types::Type::String(_) => quote! { String },
types::Type::StringArray(_) => quote! { String },
types::Type::Object(obj) => match &obj.ref_type {
types::RefType::String(str) => {
let str_ident = &util::to_ident(str);
quote! { #str_ident }
}
types::RefType::Map(key, value) => {
let key_ident = util::to_ident(key);
let value_ident = util::to_ident(value);
quote! { std::collections::HashMap<#key_ident, #value_ident> }
}
},
};
if self.is_list() {
quote! { std::vec::Vec<#owned_type> }
} else {
owned_type
}
}
// Original (wire) parameter name, cloned from the type info.
fn name(&self) -> String {
self.get_type_info().name.clone()
}
fn name_snake(&self) -> Ident {
util::to_ident(&self.name().to_snake())
}
}
impl parser::Enum {
    /// Generates a deserializable enum with one variant per parsed value.
    /// `enum_variant_names` is allowed because variant names mirror the API
    /// and may share a common prefix/suffix.
    pub fn generate(&self) -> TokenStream {
        let enum_ident = util::to_ident(&self.name);
        let variants = self.values.iter().map(|variant| variant.generate());
        quote! {
            #[allow(clippy::enum_variant_names)]
            #[derive(Debug, serde::Deserialize, PartialEq, Eq)]
            pub enum #enum_ident {
                #(#variants,)*
            }
        }
    }
}
/// Variant generation for one parsed enum value.
impl parser::EnumValue {
// Variant tokens with the value's description attached as docs.
fn generate(&self) -> TokenStream {
util::add_docs(&self.description, self.generate_field())
}
// Emits the variant itself: normally a unit variant renamed to the
// original wire value; one irregular value gets a tuple variant instead.
fn generate_field(&self) -> TokenStream {
let orig_name = self.original_value.clone();
// special enum value which does not follow conventions
if orig_name == "\"/path/to/download/to\"" {
quote! {
PathToDownloadTo(String)
}
} else {
let name_camel = self.name_camel();
quote! {
#[serde(rename = #orig_name)]
#name_camel
}
}
}
// CamelCase identifier for the variant name.
fn name_camel(&self) -> Ident {
util::to_ident(&self.value.to_camel())
}
}
/// Request-builder generation for a parsed API parameter type.
impl types::Type {
/// Optional-parameter builder method with the parameter's description
/// attached as doc comments.
pub fn generate_optional_builder_method_with_docs(&self) -> TokenStream {
util::add_docs(
&self.get_type_info().description,
self.generate_optional_builder_method(),
)
}
fn borrowed_type_ident(&self) -> Ident {
util::to_ident(&self.to_borrowed_type())
}
/// Function-parameter tokens: `name_snake: BorrowedType`.
pub fn to_parameter(&self) -> TokenStream {
let name_snake = self.name_snake();
let borrowed_type = self.borrowed_type();
quote! { #name_snake: #borrowed_type }
}
/// Emits a statement appending this parameter to a multipart form:
/// `#add_to = #add_to.text("<wire name>", value.to_string());`.
/// `add_to` is the expression holding the form (e.g. `self.form`).
pub fn generate_form_builder(&self, add_to: TokenStream) -> TokenStream {
let name_str = self.name();
let name_snake = self.name_snake();
quote! {
#add_to = #add_to.text(#name_str, #name_snake.to_string());
}
}
// Consuming builder setter: takes the value, appends it to the form,
// and returns Self for chaining.
fn generate_optional_builder_method(&self) -> TokenStream {
let name_snake = self.name_snake();
let borrowed_type = self.borrowed_type();
let form_builder = self.generate_form_builder(quote! { self.form });
quote! {
pub fn #name_snake(mut self, #name_snake: #borrowed_type) -> Self {
#form_builder;
self
}
}
}
// Reference type when the parameter should be borrowed, otherwise by value.
fn borrowed_type(&self) -> TokenStream {
let type_ = self.borrowed_type_ident();
if self.should_borrow() {
quote! { &#type_ }
} else {
quote! { #type_ }
}
}
}

View File

@ -1,156 +0,0 @@
use case::CaseExt;
use quote::quote;
use crate::{generate::util, parser, types};
use super::{return_type::create_return_type, send_method_builder::SendMethodBuilder};
/// Generates an API method that takes parameters.
///
/// Returns `(builder_method, Some(param_struct_impl))`: the first stream is
/// the method on the group that constructs a `<Group><Method>Parameters`
/// builder from the mandatory parameters; the second declares that builder
/// struct, its optional-parameter setters, and its `send` method (plus the
/// return type definition when the method has one).
pub fn create_method_with_params(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
params: &[types::Type],
method_name: &proc_macro2::Ident,
url: &str,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
// Builder struct name: e.g. TorrentsAddParameters.
let param_type = util::to_ident(&format!(
"{}{}Parameters",
group.name.to_camel(),
method.name.to_camel()
));
let mandatory_params = mandatory_params(params);
let mandatory_param_args = generate_mandatory_params(&mandatory_params);
let mandatory_param_names = mandatory_params.iter().map(|param| {
let (name, ..) = param_name(param);
quote! { #name }
});
let group_name = util::to_ident(&group.name.to_camel());
// `send` posts the accumulated multipart form via the group's auth client.
let send_builder =
SendMethodBuilder::new(&util::to_ident("send"), url, quote! { self.group.auth })
.with_form();
// Shared shape of the builder impl; only the send method differs
// depending on whether the API method declares a return type.
let generate_send_impl = |send_method: proc_macro2::TokenStream| {
let optional_params = generate_optional_params(params);
let mandatory_param_form_build = generate_mandatory_param_builder(&mandatory_params);
quote! {
impl<'a> #param_type<'a> {
fn new(group: &'a #group_name, #(#mandatory_param_args),*) -> Self {
let form = reqwest::multipart::Form::new();
#(#mandatory_param_form_build)*
Self { group, form }
}
#(#optional_params)*
#send_method
}
}
};
let send = match create_return_type(group, method) {
Some((return_type_name, return_type)) => {
let send_impl = generate_send_impl(send_builder.return_type(&return_type_name).build());
quote! {
#send_impl
#return_type
}
}
None => generate_send_impl(send_builder.build()),
};
// The entry-point method on the group: forwards mandatory params to the
// builder's constructor; method description becomes its doc comment.
let builder = util::add_docs(
&method.description,
quote! {
pub fn #method_name(&self, #(#mandatory_param_args),*) -> #param_type {
#param_type::new(self, #(#mandatory_param_names),*)
}
},
);
let group_impl = quote! {
pub struct #param_type<'a> {
group: &'a #group_name<'a>,
form: reqwest::multipart::Form,
}
#send
};
(builder, Some(group_impl))
}
// `name: Type` argument tokens for each mandatory parameter.
fn generate_mandatory_params(mandatory_params: &[&types::Type]) -> Vec<proc_macro2::TokenStream> {
mandatory_params
.iter()
.map(|param| param_with_name(param))
.collect()
}
// Statements that fold each mandatory parameter into the multipart form
// inside the generated constructor (shadows `form` on each step).
fn generate_mandatory_param_builder(
mandatory_params: &[&types::Type],
) -> Vec<proc_macro2::TokenStream> {
mandatory_params
.iter()
.map(|param| {
let (name, name_as_str) = param_name(param);
quote! { let form = form.text(#name_as_str, #name.to_string()); }
})
.collect()
}
// Builder setter methods for every optional parameter.
fn generate_optional_params(params: &[types::Type]) -> Vec<proc_macro2::TokenStream> {
params
.iter()
.filter(|param| param.get_type_info().is_optional)
.map(generate_optional_param)
.collect()
}
// Subset of `params` that are not optional.
fn mandatory_params(params: &[types::Type]) -> Vec<&types::Type> {
params
.iter()
.filter(|param| !param.get_type_info().is_optional)
.collect()
}
// One chainable setter: appends the value to the form under the original
// wire name and returns Self; parameter docs are attached if present.
fn generate_optional_param(param: &types::Type) -> proc_macro2::TokenStream {
let n = &param.get_type_info().name;
let name = util::to_ident(&n.to_snake());
let t = util::to_ident(&param.to_borrowed_type());
let builder_param = if param.should_borrow() {
quote! { &#t }
} else {
quote! { #t }
};
util::add_docs(
&param.get_type_info().description,
quote! {
pub fn #name(mut self, value: #builder_param) -> Self {
self.form = self.form.text(#n, value.to_string());
self
}
},
)
}
// Snake_case ident plus the same name as a String (used for form keys).
fn param_name(param: &types::Type) -> (proc_macro2::Ident, String) {
let name_as_str = param.get_type_info().name.to_snake();
(util::to_ident(&name_as_str), name_as_str)
}
// `name: Type` tokens, borrowing the type when required.
fn param_with_name(param: &types::Type) -> proc_macro2::TokenStream {
let t = util::to_ident(&param.to_borrowed_type());
let (name, ..) = param_name(param);
let t = if param.should_borrow() {
quote! { &#t }
} else {
quote! { #t }
};
quote! { #name: #t }
}

View File

@ -1,26 +0,0 @@
use quote::quote;
use super::{return_type::create_return_type, send_method_builder::SendMethodBuilder};
use crate::parser;
/// Generates an API method that takes no parameters.
///
/// Returns `(method_tokens, Some(return_type_tokens))` when the method
/// declares a return type, otherwise `(method_tokens, None)` and the
/// generated send method yields the raw response body as a String.
pub fn create_method_without_params(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
method_name: proc_macro2::Ident,
url: &str,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
let builder = SendMethodBuilder::new(&method_name, url, quote! { self.auth })
.description(&method.description);
match create_return_type(group, method) {
Some((return_type_name, return_type)) => (
builder.return_type(&return_type_name).build(),
Some(return_type),
),
None => (
builder.build(),
// assume that all methods without a return type returns a string
None,
),
}
}

View File

@ -1,52 +0,0 @@
mod method_with_params;
mod method_without_params;
mod return_type;
mod send_method_builder;
use crate::{generate::util, parser};
use case::CaseExt;
use quote::quote;
use self::{
method_with_params::create_method_with_params,
method_without_params::create_method_without_params,
};
/// Generates the `impl` block for one group: a `new(auth)` constructor plus
/// every method, followed by any companion parameter structs the methods
/// need (emitted after the impl, at module level).
pub fn generate_methods(
group: &parser::ApiGroup,
auth: &syn::Ident,
group_name_camel: &syn::Ident,
) -> proc_macro2::TokenStream {
let methods_and_param_structs = group
.methods
.iter()
.map(|method| generate_method(group, method));
// The iterator is cloned so the (method, struct) pairs can be consumed
// twice: once for the methods, once for the optional structs.
let methods = methods_and_param_structs.clone().map(|(method, ..)| method);
let structs = methods_and_param_structs.flat_map(|(_, s)| s);
quote! {
impl <'a> #group_name_camel<'a> {
pub fn new(auth: &'a #auth) -> Self {
Self { auth }
}
#(#methods)*
}
#(#structs)*
}
}
/// Generates one API method: returns the method tokens plus an optional
/// companion parameter struct (present only for methods with parameters).
fn generate_method(
    group: &parser::ApiGroup,
    method: &parser::ApiMethod,
) -> (proc_macro2::TokenStream, Option<proc_macro2::TokenStream>) {
    let url = format!("/api/v2/{}/{}", group.url, method.url);
    let method_name = util::to_ident(&method.name.to_snake());
    if let Some(params) = &method.parameters {
        create_method_with_params(group, method, params, &method_name, &url)
    } else {
        create_method_without_params(group, method, method_name, &url)
    }
}

View File

@ -1,203 +0,0 @@
use std::collections::HashMap;
use case::CaseExt;
use quote::{format_ident, quote};
use regex::Regex;
use crate::{generate::util, parser, types};
/// Builds the return type for a method, if it declares one.
///
/// Returns `(result_type_reference, type_definitions)`: the first stream
/// names the type a send method returns (wrapped in `Vec` for list results),
/// the second defines the `<Group><Method>Result` struct plus any enums
/// derived from parameter value descriptions.
pub fn create_return_type(
group: &parser::ApiGroup,
method: &parser::ApiMethod,
) -> Option<(proc_macro2::TokenStream, proc_macro2::TokenStream)> {
let return_type = match &method.return_type {
Some(t) => t,
None => return None,
};
// Shadowing closure: qualifies a bare field name with group + method.
let to_enum_name = |name: &str| to_enum_name(&group.name, &method.name, name);
let enum_types_with_names: Vec<(String, proc_macro2::TokenStream)> =
create_enum_with_names(return_type, &group.name, &method.name);
// field name -> fully qualified enum type name, for field generation.
let enum_names: HashMap<String, String> = enum_types_with_names
.iter()
.map(|(enum_name, _)| (enum_name.clone(), to_enum_name(enum_name)))
.collect();
let enum_types = enum_types_with_names.iter().map(|(_, enum_type)| enum_type);
let builder_fields = return_type
.parameters
.iter()
.map(|parameter| generate_builder_field(parameter, &enum_names));
let return_type_name = util::to_ident(&format!(
"{}{}Result",
&group.name.to_camel(),
&method.name.to_camel()
));
// List results are returned as Vec<Result-struct>.
let result_type = if return_type.is_list {
quote! { std::vec::Vec<#return_type_name> }
} else {
quote! { #return_type_name }
};
Some((
result_type,
quote! {
#[derive(Debug, Deserialize)]
pub struct #return_type_name {
#(#builder_fields,)*
}
#(#enum_types)*
},
))
}
// For each return parameter whose type description lists discrete values,
// produces (field_name, enum_definition_tokens).
fn create_enum_with_names(
return_type: &parser::ReturnType,
group_name: &str,
method_name: &str,
) -> Vec<(String, proc_macro2::TokenStream)> {
return_type
.parameters
.iter()
.flat_map(create_enum_fields)
.map(|(name, enum_fields)| create_enum(enum_fields, group_name, method_name, name))
.collect()
}
// Wraps the given variants in an enum named <Group><Method><Field>;
// returns the original field name alongside the tokens.
fn create_enum(
enum_fields: Vec<proc_macro2::TokenStream>,
group_name: &str,
method_name: &str,
name: String,
) -> (String, proc_macro2::TokenStream) {
let enum_name = util::to_ident(&to_enum_name(group_name, method_name, &name));
(
name,
quote! {
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Deserialize, PartialEq, Eq)]
pub enum #enum_name {
#(#enum_fields,)*
}
},
)
}
// Variants for a parameter, derived from its type description.
// Only Number and String parameters with a description yield an enum.
fn create_enum_fields(
parameter: &parser::ReturnTypeParameter,
) -> Option<(String, Vec<proc_macro2::TokenStream>)> {
match &parameter.return_type {
types::Type::Number(types::TypeInfo {
ref name,
type_description: Some(type_description),
..
}) => create_enum_field_value(type_description, name, create_number_enum_value),
types::Type::String(types::TypeInfo {
ref name,
type_description: Some(type_description),
..
}) => create_enum_field_value(type_description, name, create_string_enum_value),
_ => None,
}
}
// One field of the generated Result struct: snake_case name (double
// underscores collapsed), serde-renamed to the wire name, typed either as
// the matching generated enum or the parameter's owned type.
fn generate_builder_field(
parameter: &parser::ReturnTypeParameter,
enum_names: &HashMap<String, String>,
) -> proc_macro2::TokenStream {
let namestr = &parameter.name;
let name = util::to_ident(&namestr.to_snake().replace("__", "_"));
let enum_name = match enum_names.get(namestr) {
Some(enum_type) => enum_type.to_owned(),
None => parameter.return_type.to_owned_type(),
};
let rtype = util::to_ident(&enum_name);
let rtype_as_quote = if parameter.return_type.get_type_info().is_list {
quote! { std::vec::Vec<#rtype> }
} else {
quote! { #rtype }
};
let generate_field = |field_name| {
quote! {
#[serde(rename = #namestr)]
pub #field_name: #rtype_as_quote
}
};
// "type" is a reserved keyword in Rust, so we just add "t_" to it.
if namestr == "type" {
generate_field(format_ident!("t_{}", name))
} else {
generate_field(name)
}
}
// Maps every described value of a parameter to variant tokens using `f`,
// returning them together with the parameter name. Always Some; the Option
// only mirrors the caller's flat_map signature.
fn create_enum_field_value<F>(
type_description: &types::TypeDescription,
name: &str,
f: F,
) -> Option<(String, Vec<proc_macro2::TokenStream>)>
where
F: Fn(&types::TypeDescriptions) -> proc_macro2::TokenStream,
{
let enum_fields: Vec<proc_macro2::TokenStream> = type_description
.values
.iter()
.map(f)
.collect::<Vec<proc_macro2::TokenStream>>();
let nn = name.to_string();
Some((nn, enum_fields))
}
// String-valued variant: CamelCase ident renamed back to the wire value.
fn create_string_enum_value(
type_description: &types::TypeDescriptions,
) -> proc_macro2::TokenStream {
let value = &type_description.value;
let value_as_ident = util::to_ident(&value.to_camel());
create_enum_field(&value_as_ident, value, &type_description.description)
}
// Number-valued variant: the ident is derived from the value's description
// with separators turned into underscores and parenthesised text removed.
// NOTE(review): the Regex is compiled on every call; hoisting it into a
// lazily-initialised static would avoid the repeated compilation.
fn create_number_enum_value(value: &types::TypeDescriptions) -> proc_macro2::TokenStream {
let v = &value.value;
let re = Regex::new(r#"\(.*\)"#).unwrap();
let desc = &value
.description
.replace(' ', "_")
.replace('-', "_")
.replace(',', "_");
let desc_without_parentheses = re.replace_all(desc, "");
let ident = util::to_ident(&desc_without_parentheses.to_camel());
create_enum_field(&ident, v, &value.description)
}
// Single variant with serde rename and its description attached as docs.
fn create_enum_field(
ident: &syn::Ident,
rename: &str,
description: &str,
) -> proc_macro2::TokenStream {
util::add_docs(
&Some(description.to_string()),
quote! {
#[serde(rename = #rename)]
#ident
},
)
}
// <Group><Method><Field> in CamelCase, e.g. TorrentsInfoState.
fn to_enum_name(group_name: &str, method_name: &str, name: &str) -> String {
format!(
"{}{}{}",
group_name.to_camel(),
method_name.to_camel(),
name.to_camel()
)
}

View File

@ -1,76 +0,0 @@
use quote::quote;
use crate::generate::util;
/// Builder for the generated async `send`-style method that performs the
/// actual HTTP request for an API endpoint.
pub struct SendMethodBuilder {
method_name: syn::Ident,
url: String,
// Token path to the auth object (e.g. `self.auth` or `self.group.auth`).
auth_module_path: proc_macro2::TokenStream,
// Parsed-response type; None means the body is returned as a String.
return_type: Option<proc_macro2::TokenStream>,
// Optional doc text attached to the generated method.
description: Option<String>,
// Whether to attach `self.form` as a multipart body.
form: bool,
}
impl SendMethodBuilder {
/// Starts a builder for a method with the given name, endpoint URL and
/// auth accessor; no return type, description or form by default.
pub fn new(
method_name: &syn::Ident,
url: &str,
auth_module_path: proc_macro2::TokenStream,
) -> Self {
Self {
method_name: method_name.clone(),
url: url.to_string(),
auth_module_path,
return_type: None,
description: None,
form: false,
}
}
/// Parses the response as JSON into `value` instead of plain text.
pub fn return_type(mut self, value: &proc_macro2::TokenStream) -> Self {
self.return_type = Some(value.clone());
self
}
/// Doc text for the generated method (None leaves it undocumented).
pub fn description(mut self, value: &Option<String>) -> Self {
self.description = value.clone();
self
}
/// Sends `self.form` as a multipart body with the request.
pub fn with_form(mut self) -> Self {
self.form = true;
self
}
/// Emits the async method: authenticated request to the URL, optional
/// multipart body, then `.json::<T>()` or `.text()` depending on whether
/// a return type was set.
pub fn build(&self) -> proc_macro2::TokenStream {
let method_name = &self.method_name;
let (return_type, parse_type) = match &self.return_type {
Some(t) => (t.clone(), quote! { .json::<#t>() }),
None => (quote! { String }, quote! { .text() }),
};
let url = &self.url;
let auth_module_path = &self.auth_module_path;
let form = if self.form {
quote! { .multipart(self.form) }
} else {
quote! {}
};
util::add_docs(
&self.description,
quote! {
pub async fn #method_name(self) -> Result<#return_type> {
let res = #auth_module_path
.authenticated_client(#url)
#form
.send()
.await?
#parse_type
.await?;
Ok(res)
}
},
)
}
}

View File

@ -1,49 +0,0 @@
mod method;
use crate::parser;
use case::CaseExt;
use quote::quote;
use self::method::generate_methods;
use super::{skeleton::auth_ident, util};
/// Generates code for every API group except `authentication`, which has a
/// hand-written implementation.
pub fn generate_groups(groups: Vec<parser::ApiGroup>) -> proc_macro2::TokenStream {
let gr = groups
.iter()
// implemented manually
.filter(|group| group.name != "authentication")
.map(generate_group);
quote! {
#(#gr)*
}
}
// One group: the group struct borrowing the auth object, all its methods,
// and a factory method on the auth type for reaching the group.
fn generate_group(group: &parser::ApiGroup) -> proc_macro2::TokenStream {
let group_name_camel = util::to_ident(&group.name.to_camel());
let group_name_snake = util::to_ident(&group.name.to_snake());
let auth = auth_ident();
let methods = generate_methods(group, &auth, &group_name_camel);
// Factory on the auth type; group description becomes its docs.
let group_method = util::add_docs(
&group.description,
quote! {
pub fn #group_name_snake(&self) -> #group_name_camel {
#group_name_camel::new(self)
}
},
);
quote! {
pub struct #group_name_camel<'a> {
auth: &'a #auth,
}
#methods
impl #auth {
#group_method
}
}
}

View File

@ -0,0 +1,186 @@
use crate::parser;
use proc_macro2::{Ident, TokenStream};
use quote::quote;
use super::util;
/// Pairs an API group with one of its methods for code generation.
#[derive(Debug)]
pub struct GroupMethod<'a> {
group: &'a parser::ApiGroup,
method: &'a parser::ApiMethod,
}
impl<'a> GroupMethod<'a> {
/// Binds a group and one of its methods together.
pub fn new(group: &'a parser::ApiGroup, method: &'a parser::ApiMethod) -> Self {
Self { group, method }
}
/// Emits the full module for this method: its structs and enums, the
/// optional request builder, the response struct, and the request method
/// itself, all inside `pub mod <method_name_snake>`.
pub fn generate_method(&self) -> TokenStream {
let method_name = self.method.name_snake();
let structs = self.method.structs();
let enums = self.method.enums();
let builder = self.generate_request_builder();
let response_struct = self.generate_response_struct();
let request_method = self.generate_request_method();
quote! {
pub mod #method_name {
#structs
#enums
#builder
#response_struct
#request_method
}
}
}
// Emits the entry-point method on the group struct. With no optional
// parameters it is a direct async send; otherwise it builds the multipart
// form from the mandatory parameters and returns a Builder for chaining
// the optional ones.
fn generate_request_method(&self) -> TokenStream {
let method_name = self.method.name_snake();
let parameters = self
.method
.types
.mandatory_params()
.iter()
.map(|param| param.to_parameter())
.collect();
let form_builder = self.mandatory_parameters_as_form_builder();
let method_impl = if self.method.types.optional_parameters().is_empty() {
// No optional params: the method itself is the send method and
// creates + fills the form inline.
self.generate_send_method(
&method_name,
parameters,
quote! { self.auth },
quote! { form },
quote! {
let form = reqwest::multipart::Form::new();
#form_builder
},
)
} else {
// Optional params exist: hand back a Builder holding the partially
// filled form; its own `send` fires the request.
quote! {
pub fn #method_name(&self, #(#parameters),*) -> Builder<'_> {
let form = reqwest::multipart::Form::new();
#form_builder
Builder { group: self, form }
}
}
};
let group_struct_name = self.group.struct_name();
let method_impl_with_docs = util::add_docs(&self.method.description, method_impl);
quote! {
impl<'a> super::#group_struct_name<'a> {
#method_impl_with_docs
}
}
}
fn generate_response_struct(&self) -> TokenStream {
let response = match self.method.types.response() {
Some(res) => res,
None => return quote! {},
};
let struct_fields = response
.types
.iter()
.map(|field| field.generate_struct_field());
quote! {
#[derive(Debug, serde::Deserialize)]
pub struct Response {
#(#struct_fields,)*
}
}
}
/// Returns a TokenStream containing a request builder if there are optional
/// parameters, otherwise an empty TokenStream is returned.
fn generate_request_builder(&self) -> TokenStream {
let optional_params = self.method.types.optional_parameters();
if optional_params.is_empty() {
return quote! {};
}
let builder_methods = optional_params
.iter()
.map(|param| param.generate_optional_builder_method_with_docs());
let group_name = self.group.struct_name();
let send_method = self.generate_send_method(
&util::to_ident("send"),
vec![],
quote! { self.group.auth },
quote! { self.form },
quote! {},
);
quote! {
pub struct Builder<'a> {
group: &'a super::#group_name<'a>,
form: reqwest::multipart::Form,
}
impl<'a> Builder<'a> {
#send_method
#(#builder_methods)*
}
}
}
fn generate_send_method(
&self,
method_name: &Ident,
parameters: Vec<TokenStream>,
auth_access: TokenStream,
form_access: TokenStream,
form_factory: TokenStream,
) -> TokenStream {
let method_url = format!("/api/v2/{}/{}", self.group.url, self.method.url);
let (response_type, response_parse) = match self.method.types.response() {
Some(resp) => {
if resp.is_list {
(
quote! { std::vec::Vec<Response> },
quote! { .json::<std::vec::Vec<Response>>() },
)
} else {
(quote! { Response }, quote! { .json::<Response>() })
}
}
None => (quote! { String }, quote! { .text() }),
};
quote! {
pub async fn #method_name(self, #(#parameters),*) -> super::super::Result<#response_type> {
#form_factory
let res = #auth_access
.authenticated_client(#method_url)
.multipart(#form_access)
.send()
.await?
#response_parse
.await?;
Ok(res)
}
}
}
fn mandatory_parameters_as_form_builder(&self) -> TokenStream {
let builder = self
.method
.types
.mandatory_params()
.into_iter()
.map(|param| param.generate_form_builder(quote! { form }));
quote! {
#(let #builder)*
}
}
}

View File

@ -1,4 +1,7 @@
mod api_group;
mod api_method;
mod group;
mod group_method;
mod skeleton;
mod util;

View File

@ -10,13 +10,7 @@ pub fn generate_skeleton(ident: &syn::Ident) -> proc_macro2::TokenStream {
let auth = auth_ident();
quote! {
use reqwest::RequestBuilder;
use serde::Deserialize;
use thiserror::Error;
use super::#ident;
impl #ident {
impl super::#ident {
/// Creates an authenticated client.
/// base_url is the url to the qbittorrent instance, i.e. http://localhost:8080
pub async fn login(
@ -61,7 +55,7 @@ pub fn generate_skeleton(ident: &syn::Ident) -> proc_macro2::TokenStream {
}
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Error)]
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("failed to parse auth cookie")]
AuthCookieParseError,
@ -81,7 +75,7 @@ pub fn generate_skeleton(ident: &syn::Ident) -> proc_macro2::TokenStream {
}
impl #auth {
fn authenticated_client(&self, url: &str) -> RequestBuilder {
fn authenticated_client(&self, url: &str) -> reqwest::RequestBuilder {
let url = format!("{}{}", self.base_url, url);
let cookie = self.auth_cookie.clone();

View File

@ -1,316 +0,0 @@
use std::{cell::RefCell, rc::Rc};
/// A piece of markdown content that belongs to a section.
// NOTE(review): "Asterix" looks like a misspelling of "Asterisk"; renaming
// would touch every consumer, so it is only flagged here.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MdContent {
    Text(String),
    Asterix(String),
    Table(Table),
}
/// A parsed markdown table: header row, the `---|---` separator line, and
/// the data rows.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Table {
    pub header: TableRow,
    pub split: String,
    pub rows: Vec<TableRow>,
}
impl Table {
    /// Reconstructs the table's original markdown text: header line,
    /// separator line, then every row, joined with newlines.
    fn raw(&self) -> String {
        let mut output = vec![self.header.raw.clone(), self.split.clone()];
        for row in self.rows.clone() {
            output.push(row.raw);
        }
        output.join("\n")
    }
}
/// One row of a markdown table: the raw line plus its `|`-split columns.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TableRow {
    // Original line text, kept so the table can be rendered back verbatim.
    raw: String,
    pub columns: Vec<String>,
}
impl MdContent {
    /// Returns this content item's textual payload; tables are rendered back
    /// to their raw markdown form.
    pub fn inner_value_as_string(&self) -> String {
        match self {
            MdContent::Text(text) => text.into(),
            MdContent::Asterix(text) => text.into(),
            MdContent::Table(table) => table.raw(),
        }
    }
}
/// A markdown header: its nesting depth (number of leading '#') and text.
#[derive(Debug, Clone)]
pub struct Header {
    level: i32,
    content: String,
}
/// These are the only relevant tokens we need for the api generation.
#[derive(Debug)]
pub enum MdToken {
    // A section header ('#'-prefixed line).
    Header(Header),
    // Anything else: text, asterix-wrapped text, or a table.
    Content(MdContent),
}
impl MdToken {
    /// Classifies a single non-table line as a header, an asterix-wrapped
    /// line, or plain text.
    fn parse_token(line: &str) -> MdToken {
        if line.starts_with('#') {
            // Header level is the number of leading '#' characters.
            let level = line.chars().take_while(|&c| c == '#').count() as i32;
            MdToken::Header(Header {
                level,
                content: line.trim_matches('#').trim().to_string(),
            })
        } else if line.starts_with('*') {
            MdToken::Content(MdContent::Asterix(
                line.trim_matches('*').trim().to_string(),
            ))
        } else {
            MdToken::Content(MdContent::Text(line.to_string()))
        }
    }
    /// Tokenizes markdown `content` line by line, grouping consecutive
    /// `|`-containing lines into `MdContent::Table` values.
    fn from(content: &str) -> Vec<MdToken> {
        let mut output = Vec::new();
        let mut iter = content.lines().peekable();
        while let Some(line) = iter.next() {
            // assume this is a table
            if line.contains('|') {
                let to_columns = |column_line: &str| {
                    column_line
                        .replace('`', "")
                        .split('|')
                        .map(|s| s.trim().to_string())
                        .collect()
                };
                let table_header = TableRow {
                    raw: line.into(),
                    columns: to_columns(line),
                };
                // Separator line is assumed to directly follow the header;
                // a table header as the last input line would panic here.
                let table_split = iter.next().unwrap();
                let mut table_rows = Vec::new();
                // BUGFIX: the previous version consumed the terminating line
                // and then called `iter.next_back()`, which pops the LAST
                // line of the whole document rather than rewinding one step —
                // losing both lines. Peeking consumes nothing, so the line
                // that ends the table is re-processed by the outer loop.
                while let Some(peeked) = iter.peek() {
                    if !peeked.contains('|') {
                        // end of the table; leave this line for the outer loop
                        break;
                    }
                    let row_line = iter.next().unwrap();
                    table_rows.push(TableRow {
                        raw: row_line.into(),
                        columns: to_columns(row_line),
                    });
                }
                output.push(MdToken::Content(MdContent::Table(Table {
                    header: table_header,
                    split: table_split.to_string(),
                    rows: table_rows,
                })));
            } else {
                output.push(MdToken::parse_token(line));
            }
        }
        output
    }
}
/// Immutable tree of markdown sections: each node is a header with its
/// content and nested sub-headers as children.
#[derive(Debug)]
pub struct TokenTree {
    pub title: Option<String>,
    pub content: Vec<MdContent>,
    pub children: Vec<TokenTree>,
}
impl From<Rc<TokenTreeFactory>> for TokenTree {
    /// Finalizes a factory node (and, recursively, its children) into the
    /// immutable `TokenTree` representation.
    fn from(builder: Rc<TokenTreeFactory>) -> Self {
        // Contents are cloned out of the RefCells rather than moved, since
        // other Rc handles may still reference this factory node.
        let children = builder
            .children
            .clone()
            .into_inner()
            .into_iter()
            .map(|child| child.into())
            .collect::<Vec<TokenTree>>();
        let content = builder.content.clone().into_inner();
        TokenTree {
            title: builder.title.clone(),
            content,
            children,
        }
    }
}
/// Mutable builder node used while nesting headers into a `TokenTree`.
/// Interior mutability (RefCell) lets nodes on the parse stack be extended
/// while shared via Rc.
#[derive(Debug, Default)]
pub struct TokenTreeFactory {
    title: Option<String>,
    content: RefCell<Vec<MdContent>>,
    children: RefCell<Vec<Rc<TokenTreeFactory>>>,
    level: i32,
}
impl TokenTreeFactory {
    /// Creates a node for a header with the given `title` and `level`;
    /// an empty title is stored as `None`.
    fn new(title: &str, level: i32) -> Self {
        Self {
            title: if title.is_empty() {
                None
            } else {
                Some(title.to_string())
            },
            level,
            ..Default::default()
        }
    }
    /// Appends a piece of content to this node.
    fn add_content(&self, content: MdContent) {
        self.content.borrow_mut().push(content);
    }
    /// Attaches `child` as a nested sub-section of this node.
    fn append(&self, child: &Rc<TokenTreeFactory>) {
        self.children.borrow_mut().push(child.clone());
    }
    /// Builds a `TokenTree` from raw markdown by tokenizing it and nesting
    /// headers by level using an explicit stack of open sections.
    pub fn create(content: &str) -> TokenTree {
        let tokens = MdToken::from(content);
        let mut stack = Vec::new();
        // Synthetic root at level 0 so every real header (level >= 1)
        // always finds a parent and the stack never fully drains.
        let root = Rc::new(TokenTreeFactory::default());
        stack.push(root.clone());
        for token in tokens {
            match token {
                MdToken::Header(Header { level, content }) => {
                    let new_header = Rc::new(TokenTreeFactory::new(&content, level));
                    // go back until we're at the same or lower level.
                    while let Some(current) = stack.pop() {
                        if current.level < level {
                            current.append(&new_header);
                            stack.push(current);
                            break;
                        }
                    }
                    stack.push(new_header.clone());
                }
                MdToken::Content(content) => {
                    // Content always belongs to the deepest open section.
                    let current = stack.pop().unwrap();
                    current.add_content(content);
                    stack.push(current);
                }
            }
        }
        root.into()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Asterix-wrapped lines should be stored without the surrounding '*'.
    #[test]
    fn should_remove_surrounding_asterix() {
        // given
        let input = r#"
# A
**B**
"#
        .trim_matches('\n')
        .trim();
        // when
        let tree = TokenTreeFactory::create(input);
        // then
        println!("{:#?}", tree);
        let first = tree.children.first().unwrap();
        let content = first.content.first().unwrap();
        assert_eq!(*content, MdContent::Asterix("B".into()));
    }
    // Trailing '#' on a header line should not end up in the title.
    #[test]
    fn should_remove_surrounding_hash() {
        // given
        let input = r#"
# A #
"#
        .trim_matches('\n')
        .trim();
        // when
        let tree = TokenTreeFactory::create(input);
        // then
        println!("{:#?}", tree);
        assert_eq!(tree.children.first().unwrap().title, Some("A".into()));
    }
    // A single header becomes one child of the untitled root.
    #[test]
    fn single_level() {
        // given
        let input = r#"
# A
Foo
"#
        .trim_matches('\n')
        .trim();
        // when
        let tree = TokenTreeFactory::create(input);
        // then
        println!("{:#?}", tree);
        assert_eq!(tree.title, None);
        let first_child = tree.children.first().unwrap();
        assert_eq!(first_child.title, Some("A".into()));
    }
    // Mixed levels: '##' sections nest under the preceding '#' section.
    #[test]
    fn complex() {
        // given
        let input = r#"
# A
Foo
## B
# C
## D
Bar
"#
        .trim_matches('\n')
        .trim();
        // when
        let tree = TokenTreeFactory::create(input);
        // then
        println!("{:#?}", tree);
        assert_eq!(tree.title, None);
        assert_eq!(tree.children.len(), 2);
        let first = tree.children.get(0).unwrap();
        assert_eq!(first.title, Some("A".into()));
        assert_eq!(first.children.len(), 1);
        assert_eq!(first.children.first().unwrap().title, Some("B".into()));
        let second = tree.children.get(1).unwrap();
        assert_eq!(second.title, Some("C".into()));
        assert_eq!(second.children.len(), 1);
        assert_eq!(second.children.first().unwrap().title, Some("D".into()));
    }
}

View File

@ -0,0 +1,179 @@
/// A piece of markdown content belonging to a section: plain text, a line
/// wrapped in asterisks, or a parsed table.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MdContent {
    Text(String),
    Asterisk(String),
    Table(Table),
}
/// A parsed markdown table: header row, the `---|---` separator line, and
/// the data rows.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Table {
    pub header: TableRow,
    pub split: String,
    pub rows: Vec<TableRow>,
}
impl Table {
    /// Reconstructs the table's original markdown text: header line,
    /// separator line, then every row, joined with newlines.
    fn raw(&self) -> String {
        let mut lines = Vec::with_capacity(self.rows.len() + 2);
        lines.push(self.header.raw.as_str());
        lines.push(self.split.as_str());
        lines.extend(self.rows.iter().map(|row| row.raw.as_str()));
        lines.join("\n")
    }
}
/// A markdown header: its nesting depth (number of leading '#') and text.
#[derive(Debug, Clone)]
pub struct Header {
    pub level: i32,
    pub content: String,
}
/// One row of a markdown table: the raw line plus its `|`-split columns.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TableRow {
    // Original line text, kept so the table can be rendered back verbatim.
    raw: String,
    pub columns: Vec<String>,
}
impl MdContent {
pub fn inner_value_as_string(&self) -> String {
match self {
MdContent::Text(text) => text.into(),
MdContent::Asterisk(text) => text.into(),
MdContent::Table(table) => table.raw(),
}
}
}
/// The only token kinds relevant for API generation.
#[derive(Debug)]
pub enum MdToken {
    // A section header ('#'-prefixed line).
    Header(Header),
    // Anything else: text, asterisk-wrapped text, or a table.
    Content(MdContent),
}
impl MdToken {
    /// Tokenizes raw markdown into headers and content, grouping table lines
    /// into `MdContent::Table` values.
    pub fn from(content: &str) -> Vec<MdToken> {
        // to prevent infinite loops
        let mut max_iterator_checker = MaxIteratorChecker::default();
        let mut output = Vec::new();
        let mut iter = content.lines().peekable();
        while let Some(line) = iter.next() {
            max_iterator_checker.decrease();
            // Heuristic table detection: ` | ` matches header/data rows and
            // `-|`/`|-` match separator rows, while a bare '|' inside prose
            // is NOT treated as a table.
            if line.contains(" | ") || line.contains("-|") || line.contains("|-") {
                let table = TableParser::new(&mut max_iterator_checker, &mut iter).parse(line);
                output.push(MdToken::Content(table));
            } else if line.starts_with('#') {
                output.push(parse_header(line));
            } else if line.starts_with('*') {
                let asterisk = MdContent::Asterisk(line.trim_matches('*').trim().to_string());
                output.push(MdToken::Content(asterisk));
            } else {
                output.push(MdToken::Content(MdContent::Text(line.to_string())));
            }
        }
        output
    }
}
/// Parses a '#'-prefixed markdown line into a header token; the header level
/// equals the number of leading '#' characters.
fn parse_header(line: &str) -> MdToken {
    let level = line.chars().take_while(|&c| c == '#').count() as i32;
    let content = line.trim_matches('#').trim().to_string();
    MdToken::Header(Header { level, content })
}
/// Consumes table lines from a shared, peekable line iterator, charging each
/// step against the shared iteration budget.
struct TableParser<'a, 'b> {
    max_iterator_checker: &'a mut MaxIteratorChecker,
    iter: &'a mut std::iter::Peekable<std::str::Lines<'b>>,
}
impl<'a, 'b> TableParser<'a, 'b> {
    fn new(
        max_iterator_checker: &'a mut MaxIteratorChecker,
        iter: &'a mut std::iter::Peekable<std::str::Lines<'b>>,
    ) -> Self {
        Self {
            max_iterator_checker,
            iter,
        }
    }
    /// Parses a table whose header is `line`; consumes the separator line and
    /// all data rows from the iterator.
    fn parse(&mut self, line: &str) -> MdContent {
        let table_header = TableRow {
            raw: line.into(),
            columns: Self::to_columns(line),
        };
        // The separator is assumed to directly follow the header; a header as
        // the very last input line would panic here.
        let table_split = self.iter.next().unwrap();
        let table_rows = self.table_rows();
        MdContent::Table(Table {
            header: table_header,
            split: table_split.to_string(),
            rows: table_rows,
        })
    }
    /// Consumes data rows until a line without '|' is peeked.
    fn table_rows(&mut self) -> Vec<TableRow> {
        let mut table_rows = Vec::new();
        while let Some(peeked_row_line) = self.iter.peek() {
            self.max_iterator_checker.decrease();
            if !peeked_row_line.contains('|') {
                // end of the table; the peeked line was never consumed, so it
                // stays in the iterator for the caller's loop to process
                break;
            }
            let next_row_line = self.iter.next().unwrap();
            table_rows.push(TableRow {
                raw: next_row_line.to_string(),
                columns: Self::to_columns(next_row_line),
            });
        }
        table_rows
    }
    /// Splits a table line on '|' into trimmed columns, stripping backticks.
    fn to_columns(column_line: &str) -> Vec<String> {
        column_line
            .replace('`', "")
            .split('|')
            .map(|s| s.trim().to_string())
            .collect()
    }
}
/// Iteration budget that panics when exhausted, guarding the parser loops
/// against accidental non-termination.
#[derive(Debug)]
struct MaxIteratorChecker {
    max_iterations: i32,
}
impl MaxIteratorChecker {
    /// Consumes one iteration from the budget, panicking once it runs out to
    /// surface a parser loop that fails to terminate.
    fn decrease(&mut self) {
        self.max_iterations -= 1;
        assert!(
            self.max_iterations > 0,
            "Max iterations reached, missing termination?"
        );
    }
}
impl Default for MaxIteratorChecker {
fn default() -> Self {
MaxIteratorChecker {
max_iterations: 10000,
}
}
}

View File

@ -0,0 +1,7 @@
mod md_token;
mod token_tree;
mod token_tree_factory;
pub use md_token::*;
pub use token_tree::TokenTree;
pub use token_tree_factory::TokenTreeFactory;

View File

@ -0,0 +1,30 @@
use std::rc::Rc;
use super::{md_token::MdContent, token_tree_factory::TokenTreeFactory};
/// Immutable tree of markdown sections: each node is a header with its
/// content and nested sub-headers as children.
#[derive(Debug)]
pub struct TokenTree {
    pub title: Option<String>,
    pub content: Vec<MdContent>,
    pub children: Vec<TokenTree>,
}
impl From<Rc<TokenTreeFactory>> for TokenTree {
fn from(builder: Rc<TokenTreeFactory>) -> Self {
let children = builder
.children
.clone()
.into_inner()
.into_iter()
.map(|child| child.into())
.collect::<Vec<TokenTree>>();
let content = builder.content.clone().into_inner();
TokenTree {
title: builder.title.clone(),
content,
children,
}
}
}

View File

@ -0,0 +1,154 @@
use std::{cell::RefCell, rc::Rc};
use super::{
md_token::{Header, MdContent, MdToken},
token_tree::TokenTree,
};
/// Mutable builder node used while nesting headers into a `TokenTree`.
/// Interior mutability (RefCell) lets nodes on the parse stack be extended
/// while shared via Rc.
#[derive(Debug, Default)]
pub struct TokenTreeFactory {
    pub title: Option<String>,
    pub content: RefCell<Vec<MdContent>>,
    pub children: RefCell<Vec<Rc<TokenTreeFactory>>>,
    pub level: i32,
}
impl TokenTreeFactory {
    /// Creates a node for a header with the given `title` and `level`;
    /// an empty title is stored as `None`.
    fn new(title: &str, level: i32) -> Self {
        Self {
            title: if title.is_empty() {
                None
            } else {
                Some(title.to_string())
            },
            level,
            ..Default::default()
        }
    }
    /// Appends a piece of content to this node.
    fn add_content(&self, content: MdContent) {
        self.content.borrow_mut().push(content);
    }
    /// Attaches `child` as a nested sub-section of this node.
    fn append(&self, child: &Rc<TokenTreeFactory>) {
        self.children.borrow_mut().push(child.clone());
    }
    /// Builds a `TokenTree` from raw markdown by tokenizing it and nesting
    /// headers by level using an explicit stack of open sections.
    pub fn create(content: &str) -> TokenTree {
        let tokens = MdToken::from(content);
        let mut stack = Vec::new();
        // Synthetic root at level 0 so every real header (level >= 1)
        // always finds a parent and the stack never fully drains.
        let root = Rc::new(TokenTreeFactory::default());
        stack.push(root.clone());
        for token in tokens {
            match token {
                MdToken::Header(Header { level, content }) => {
                    let new_header = Rc::new(TokenTreeFactory::new(&content, level));
                    // go back until we're at the same or lower level.
                    while let Some(current) = stack.pop() {
                        if current.level < level {
                            current.append(&new_header);
                            stack.push(current);
                            break;
                        }
                    }
                    stack.push(new_header.clone());
                }
                MdToken::Content(content) => {
                    // Content always belongs to the deepest open section.
                    let current = stack.pop().unwrap();
                    current.add_content(content);
                    stack.push(current);
                }
            }
        }
        root.into()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Directory (relative to this file) holding the .md inputs and their
    // expected .check debug dumps.
    macro_rules! TEST_DIR {
        () => {
            "token_tree_factory_tests"
        };
    }
    // Golden-file test: parse <name>.md and compare the tree's Debug output
    // against <name>.check.
    macro_rules! run_test {
        ($test_file:expr) => {
            use pretty_assertions::assert_eq;
            // given
            let input = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".md"));
            // when
            let tree = TokenTreeFactory::create(input);
            // then
            let tree_as_str = format!("{tree:#?}");
            let should_be = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".check"));
            assert_eq!(tree_as_str, should_be);
        };
    }
    // use this macro when creating/updating a test
    #[allow(unused_macros)]
    macro_rules! update_test {
        ($test_file:expr) => {
            use std::fs;
            use std::path::Path;
            let input = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".md"));
            let tree = TokenTreeFactory::create(input);
            let tree_as_str = format!("{tree:#?}");
            let file = concat!("src/md_parser/", TEST_DIR!(), "/", $test_file, ".check");
            // prevent user from accidentally leaving the current macro in a test
            if Path::new(file).exists() {
                panic!("Test case already exists: {file}");
            }
            fs::write(file, tree_as_str).unwrap();
        };
    }
    #[test]
    fn should_remove_surrounding_asterisk() {
        run_test!("should_remove_surrounding_asterisk");
    }
    #[test]
    fn should_remove_surrounding_hash() {
        run_test!("should_remove_surrounding_hash");
    }
    #[test]
    fn single_level() {
        run_test!("single_level");
    }
    #[test]
    fn complex() {
        run_test!("complex");
    }
    #[test]
    fn log() {
        run_test!("log");
    }
    #[test]
    fn multi_table() {
        run_test!("multi_table");
    }
    #[test]
    fn non_table_with_pipe() {
        run_test!("non_table_with_pipe");
    }
}

View File

@ -0,0 +1,44 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"A",
),
content: [
Text(
"Foo",
),
],
children: [
TokenTree {
title: Some(
"B",
),
content: [],
children: [],
},
],
},
TokenTree {
title: Some(
"C",
),
content: [],
children: [
TokenTree {
title: Some(
"D",
),
content: [
Text(
"Bar",
),
],
children: [],
},
],
},
],
}

View File

@ -0,0 +1,6 @@
# A
Foo
## B
# C
## D
Bar

View File

@ -0,0 +1,521 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Log",
),
content: [
Text(
"",
),
Text(
"All Log API methods are under \"log\", e.g.: `/api/v2/log/methodName`.",
),
Text(
"",
),
],
children: [
TokenTree {
title: Some(
"Get log",
),
content: [
Text(
"",
),
Text(
"Name: `main`",
),
Text(
"",
),
Asterisk(
"Parameters:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Parameter | Type | Description",
columns: [
"Parameter",
"Type",
"Description",
],
},
split: "----------------|---------|------------",
rows: [
TableRow {
raw: "`normal` | bool | Include normal messages (default: `true`)",
columns: [
"normal",
"bool",
"Include normal messages (default: true)",
],
},
TableRow {
raw: "`info` | bool | Include info messages (default: `true`)",
columns: [
"info",
"bool",
"Include info messages (default: true)",
],
},
TableRow {
raw: "`warning` | bool | Include warning messages (default: `true`)",
columns: [
"warning",
"bool",
"Include warning messages (default: true)",
],
},
TableRow {
raw: "`critical` | bool | Include critical messages (default: `true`)",
columns: [
"critical",
"bool",
"Include critical messages (default: true)",
],
},
TableRow {
raw: "`last_known_id` | integer | Exclude messages with \"message id\" <= `last_known_id` (default: `-1`)",
columns: [
"last_known_id",
"integer",
"Exclude messages with \"message id\" <= last_known_id (default: -1)",
],
},
],
},
),
Text(
"",
),
Text(
"Example:",
),
Text(
"",
),
Text(
"```http",
),
Text(
"/api/v2/log/main?normal=true&info=true&warning=true&critical=true&last_known_id=-1",
),
Text(
"```",
),
Text(
"",
),
Asterisk(
"Returns:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "HTTP Status Code | Scenario",
columns: [
"HTTP Status Code",
"Scenario",
],
},
split: "----------------------------------|---------------------",
rows: [
TableRow {
raw: "200 | All scenarios- see JSON below",
columns: [
"200",
"All scenarios- see JSON below",
],
},
],
},
),
Text(
"",
),
Text(
"The response is a JSON array in which each element is an entry of the log.",
),
Text(
"",
),
Text(
"Each element of the array has the following properties:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Property | Type | Description",
columns: [
"Property",
"Type",
"Description",
],
},
split: "------------|---------|------------",
rows: [
TableRow {
raw: "`id` | integer | ID of the message",
columns: [
"id",
"integer",
"ID of the message",
],
},
TableRow {
raw: "`message` | string | Text of the message",
columns: [
"message",
"string",
"Text of the message",
],
},
TableRow {
raw: "`timestamp` | integer | Milliseconds since epoch",
columns: [
"timestamp",
"integer",
"Milliseconds since epoch",
],
},
TableRow {
raw: "`type` | integer | Type of the message: Log::NORMAL: `1`, Log::INFO: `2`, Log::WARNING: `4`, Log::CRITICAL: `8`",
columns: [
"type",
"integer",
"Type of the message: Log::NORMAL: 1, Log::INFO: 2, Log::WARNING: 4, Log::CRITICAL: 8",
],
},
],
},
),
Text(
"",
),
Text(
"Example:",
),
Text(
"",
),
Text(
"```JSON",
),
Text(
"[",
),
Text(
" {",
),
Text(
" \"id\":0,",
),
Text(
" \"message\":\"qBittorrent v3.4.0 started\",",
),
Text(
" \"timestamp\":1507969127860,",
),
Text(
" \"type\":1",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":1,",
),
Text(
" \"message\":\"qBittorrent is trying to listen on any interface port: 19036\",",
),
Text(
" \"timestamp\":1507969127869,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":2,",
),
Text(
" \"message\":\"Peer ID: -qB3400-\",",
),
Text(
" \"timestamp\":1507969127870,",
),
Text(
" \"type\":1",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":3,",
),
Text(
" \"message\":\"HTTP User-Agent is 'qBittorrent/3.4.0'\",",
),
Text(
" \"timestamp\":1507969127870,",
),
Text(
" \"type\":1",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":4,",
),
Text(
" \"message\":\"DHT support [ON]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":5,",
),
Text(
" \"message\":\"Local Peer Discovery support [ON]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":6,",
),
Text(
" \"message\":\"PeX support [ON]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":7,",
),
Text(
" \"message\":\"Anonymous mode [OFF]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":8,",
),
Text(
" \"message\":\"Encryption support [ON]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":9,",
),
Text(
" \"message\":\"Embedded Tracker [OFF]\",",
),
Text(
" \"timestamp\":1507969127871,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":10,",
),
Text(
" \"message\":\"UPnP / NAT-PMP support [ON]\",",
),
Text(
" \"timestamp\":1507969127873,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":11,",
),
Text(
" \"message\":\"Web UI: Now listening on port 8080\",",
),
Text(
" \"timestamp\":1507969127883,",
),
Text(
" \"type\":1",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":12,",
),
Text(
" \"message\":\"Options were saved successfully.\",",
),
Text(
" \"timestamp\":1507969128055,",
),
Text(
" \"type\":1",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":13,",
),
Text(
" \"message\":\"qBittorrent is successfully listening on interface :: port: TCP/19036\",",
),
Text(
" \"timestamp\":1507969128270,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":14,",
),
Text(
" \"message\":\"qBittorrent is successfully listening on interface 0.0.0.0 port: TCP/19036\",",
),
Text(
" \"timestamp\":1507969128271,",
),
Text(
" \"type\":2",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"id\":15,",
),
Text(
" \"message\":\"qBittorrent is successfully listening on interface 0.0.0.0 port: UDP/19036\",",
),
Text(
" \"timestamp\":1507969128272,",
),
Text(
" \"type\":2",
),
Text(
" }",
),
Text(
"]",
),
Text(
"```",
),
],
children: [],
},
],
},
],
}

View File

@ -0,0 +1,143 @@
# Log #
All Log API methods are under "log", e.g.: `/api/v2/log/methodName`.
## Get log ##
Name: `main`
**Parameters:**
Parameter | Type | Description
----------------|---------|------------
`normal` | bool | Include normal messages (default: `true`)
`info` | bool | Include info messages (default: `true`)
`warning` | bool | Include warning messages (default: `true`)
`critical` | bool | Include critical messages (default: `true`)
`last_known_id` | integer | Exclude messages with "message id" <= `last_known_id` (default: `-1`)
Example:
```http
/api/v2/log/main?normal=true&info=true&warning=true&critical=true&last_known_id=-1
```
**Returns:**
HTTP Status Code | Scenario
----------------------------------|---------------------
200 | All scenarios- see JSON below
The response is a JSON array in which each element is an entry of the log.
Each element of the array has the following properties:
Property | Type | Description
------------|---------|------------
`id` | integer | ID of the message
`message` | string | Text of the message
`timestamp` | integer | Milliseconds since epoch
`type` | integer | Type of the message: Log::NORMAL: `1`, Log::INFO: `2`, Log::WARNING: `4`, Log::CRITICAL: `8`
Example:
```JSON
[
{
"id":0,
"message":"qBittorrent v3.4.0 started",
"timestamp":1507969127860,
"type":1
},
{
"id":1,
"message":"qBittorrent is trying to listen on any interface port: 19036",
"timestamp":1507969127869,
"type":2
},
{
"id":2,
"message":"Peer ID: -qB3400-",
"timestamp":1507969127870,
"type":1
},
{
"id":3,
"message":"HTTP User-Agent is 'qBittorrent/3.4.0'",
"timestamp":1507969127870,
"type":1
},
{
"id":4,
"message":"DHT support [ON]",
"timestamp":1507969127871,
"type":2
},
{
"id":5,
"message":"Local Peer Discovery support [ON]",
"timestamp":1507969127871,
"type":2
},
{
"id":6,
"message":"PeX support [ON]",
"timestamp":1507969127871,
"type":2
},
{
"id":7,
"message":"Anonymous mode [OFF]",
"timestamp":1507969127871,
"type":2
},
{
"id":8,
"message":"Encryption support [ON]",
"timestamp":1507969127871,
"type":2
},
{
"id":9,
"message":"Embedded Tracker [OFF]",
"timestamp":1507969127871,
"type":2
},
{
"id":10,
"message":"UPnP / NAT-PMP support [ON]",
"timestamp":1507969127873,
"type":2
},
{
"id":11,
"message":"Web UI: Now listening on port 8080",
"timestamp":1507969127883,
"type":1
},
{
"id":12,
"message":"Options were saved successfully.",
"timestamp":1507969128055,
"type":1
},
{
"id":13,
"message":"qBittorrent is successfully listening on interface :: port: TCP/19036",
"timestamp":1507969128270,
"type":2
},
{
"id":14,
"message":"qBittorrent is successfully listening on interface 0.0.0.0 port: TCP/19036",
"timestamp":1507969128271,
"type":2
},
{
"id":15,
"message":"qBittorrent is successfully listening on interface 0.0.0.0 port: UDP/19036",
"timestamp":1507969128272,
"type":2
}
]
```

View File

@ -0,0 +1,86 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Foo",
),
content: [
Text(
"",
),
],
children: [
TokenTree {
title: Some(
"Bar",
),
content: [
Table(
Table {
header: TableRow {
raw: "Parameter | Type | Description",
columns: [
"Parameter",
"Type",
"Description",
],
},
split: "----------------|---------|------------",
rows: [
TableRow {
raw: "`normal` | bool | Include normal messages (default: `true`)",
columns: [
"normal",
"bool",
"Include normal messages (default: true)",
],
},
],
},
),
Text(
"",
),
],
children: [],
},
TokenTree {
title: Some(
"Baz",
),
content: [
Table(
Table {
header: TableRow {
raw: "Parameter | Type | Description",
columns: [
"Parameter",
"Type",
"Description",
],
},
split: "----------------|---------|------------",
rows: [
TableRow {
raw: "`last_known_id` | integer | Exclude messages with \"message id\" <= `last_known_id` (default: `-1`)",
columns: [
"last_known_id",
"integer",
"Exclude messages with \"message id\" <= last_known_id (default: -1)",
],
},
],
},
),
Text(
"",
),
],
children: [],
},
],
},
],
}

View File

@ -0,0 +1,12 @@
# Foo
## Bar
Parameter | Type | Description
----------------|---------|------------
`normal` | bool | Include normal messages (default: `true`)
## Baz
Parameter | Type | Description
----------------|---------|------------
`last_known_id` | integer | Exclude messages with "message id" <= `last_known_id` (default: `-1`)

View File

@ -0,0 +1,17 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"A",
),
content: [
Text(
"a|b",
),
],
children: [],
},
],
}

View File

@ -0,0 +1,17 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"A",
),
content: [
Asterisk(
"B",
),
],
children: [],
},
],
}

View File

@ -0,0 +1,13 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"A",
),
content: [],
children: [],
},
],
}

View File

@ -0,0 +1,17 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"A",
),
content: [
Text(
"Foo",
),
],
children: [],
},
],
}

View File

@ -1,17 +1,20 @@
use crate::md_parser;
pub fn parse_group_description(content: &[md_parser::MdContent]) -> Option<String> {
let return_desc = content
.iter()
.map(|row| row.inner_value_as_string())
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
impl md_parser::TokenTree {
pub fn parse_group_description(&self) -> Option<String> {
let return_desc = self
.content
.iter()
.map(|row| row.inner_value_as_string())
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
if return_desc.is_empty() {
None
} else {
Some(return_desc)
if return_desc.is_empty() {
None
} else {
Some(return_desc)
}
}
}

View File

@ -1,35 +1,38 @@
use crate::md_parser::MdContent;
use crate::md_parser::{self, MdContent};
pub fn parse_method_description(content: &[MdContent]) -> Option<String> {
let return_desc = content
.iter()
// skip until we get to the "Returns:" text
.skip_while(|row| match row {
MdContent::Asterix(text) => !text.starts_with("Returns:"),
_ => true,
})
// there is one space before the table
.skip(2)
.skip_while(|row| match row {
MdContent::Text(text) => !text.is_empty(),
_ => true,
})
// and there is one space after the table
.skip(1)
// then what is left should be the description
.flat_map(|row| match row {
MdContent::Text(text) => Some(text),
_ => None,
})
.cloned()
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
impl md_parser::TokenTree {
pub fn parse_method_description(&self) -> Option<String> {
let return_desc = self
.content
.iter()
// skip until we get to the "Returns:" text
.skip_while(|row| match row {
MdContent::Asterisk(text) => !text.starts_with("Returns:"),
_ => true,
})
// there is one space before the table
.skip(2)
.skip_while(|row| match row {
MdContent::Text(text) => !text.is_empty(),
_ => true,
})
// and there is one space after the table
.skip(1)
// then what is left should be the description
.flat_map(|row| match row {
MdContent::Text(text) => Some(text),
_ => None,
})
.cloned()
.collect::<Vec<String>>()
.join("\n")
.trim()
.to_string();
if return_desc.is_empty() {
None
} else {
Some(return_desc)
if return_desc.is_empty() {
None
} else {
Some(return_desc)
}
}
}

View File

@ -0,0 +1,26 @@
ApiMethod {
name: "foo",
description: None,
url: "foo",
types: CompositeTypes {
composite_types: [
Response(
TypeWithoutName {
types: [
Number(
TypeInfo {
name: "amount_left",
description: Some(
"Amount of data left to download (bytes)",
),
is_optional: false,
is_list: true,
},
),
],
is_list: false,
},
),
],
},
}

View File

@ -0,0 +1,9 @@
## Testing
Name: `foo`
The response is a JSON object with the following fields
Property | Type | Description
---------------------|---------|------------
`amount_left` | integer array | Amount of data left to download (bytes)

View File

@ -0,0 +1,52 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Testing",
),
content: [
Text(
"",
),
Text(
"Name: `foo`",
),
Text(
"",
),
Text(
"The response is a JSON object with the following fields",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Property | Type | Description",
columns: [
"Property",
"Type",
"Description",
],
},
split: "---------------------|---------|------------",
rows: [
TableRow {
raw: "`amount_left` | integer array | Amount of data left to download (bytes)",
columns: [
"amount_left",
"integer array",
"Amount of data left to download (bytes)",
],
},
],
},
),
],
children: [],
},
],
}

View File

@ -0,0 +1,26 @@
ApiMethod {
name: "foo",
description: None,
url: "foo",
types: CompositeTypes {
composite_types: [
Response(
TypeWithoutName {
types: [
Number(
TypeInfo {
name: "added_on",
description: Some(
"Time (Unix Epoch) when the torrent was added to the client",
),
is_optional: false,
is_list: false,
},
),
],
is_list: true,
},
),
],
},
}

View File

@ -0,0 +1,9 @@
## Testing
Name: `foo`
The response is a JSON array with the following fields
Property | Type | Description
---------------------|---------|------------
`added_on` | integer | Time (Unix Epoch) when the torrent was added to the client

View File

@ -0,0 +1,52 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Testing",
),
content: [
Text(
"",
),
Text(
"Name: `foo`",
),
Text(
"",
),
Text(
"The response is a JSON array with the following fields",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Property | Type | Description",
columns: [
"Property",
"Type",
"Description",
],
},
split: "---------------------|---------|------------",
rows: [
TableRow {
raw: "`added_on` | integer | Time (Unix Epoch) when the torrent was added to the client",
columns: [
"added_on",
"integer",
"Time (Unix Epoch) when the torrent was added to the client",
],
},
],
},
),
],
children: [],
},
],
}

View File

@ -0,0 +1,37 @@
ApiMethod {
name: "foo",
description: None,
url: "foo",
types: CompositeTypes {
composite_types: [
Enum(
Enum {
name: "ScanDirs",
values: [
EnumValue {
description: Some(
"Download to the monitored folder",
),
value: "DownloadToTheMonitoredFolder",
original_value: "0",
},
EnumValue {
description: Some(
"Download to the default save path",
),
value: "DownloadToTheDefaultSavePath",
original_value: "1",
},
EnumValue {
description: Some(
"Download to this path",
),
value: "\"/path/to/download/to\"",
original_value: "\"/path/to/download/to\"",
},
],
},
),
],
},
}

View File

@ -0,0 +1,12 @@
## Testing
Name: `foo`
Possible values of `scan_dirs`:
Value | Description
----------------------------|------------
`0` | Download to the monitored folder
`1` | Download to the default save path
`"/path/to/download/to"` | Download to this path

View File

@ -0,0 +1,67 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Testing",
),
content: [
Text(
"",
),
Text(
"Name: `foo`",
),
Text(
"",
),
Text(
"Possible values of `scan_dirs`:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Value | Description",
columns: [
"Value",
"Description",
],
},
split: "----------------------------|------------",
rows: [
TableRow {
raw: "`0` | Download to the monitored folder",
columns: [
"0",
"Download to the monitored folder",
],
},
TableRow {
raw: "`1` | Download to the default save path",
columns: [
"1",
"Download to the default save path",
],
},
TableRow {
raw: "`\"/path/to/download/to\"` | Download to this path",
columns: [
"\"/path/to/download/to\"",
"Download to this path",
],
},
],
},
),
Text(
"",
),
],
children: [],
},
],
}

View File

@ -0,0 +1,110 @@
ApiMethod {
name: "plugins",
description: Some(
"The response is a JSON array of objects containing the following fields\n\n\n```JSON\n[\n {\n \"enabled\": true,\n \"fullName\": \"Legit Torrents\",\n \"name\": \"legittorrents\",\n \"supportedCategories\": [{\n \"id\": \"all\",\n \"name\": \"All categories\"\n }, {\n \"id\": \"anime\",\n \"name\": \"Anime\"\n }, {\n \"id\": \"books\",\n \"name\": \"Books\"\n }, {\n \"id\": \"games\",\n \"name\": \"Games\"\n }, {\n \"id\": \"movies\",\n \"name\": \"Movies\"\n }, {\n \"id\": \"music\",\n \"name\": \"Music\"\n }, {\n \"id\": \"tv\",\n \"name\": \"TV shows\"\n }],\n \"url\": \"http://www.legittorrents.info\",\n \"version\": \"2.3\"\n }\n]\n```",
),
url: "plugins",
types: CompositeTypes {
composite_types: [
Object(
TypeWithName {
name: "Category",
types: [
String(
TypeInfo {
name: "id",
description: Some(
"Id",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "name",
description: Some(
"Name",
),
is_optional: false,
is_list: false,
},
),
],
},
),
Response(
TypeWithoutName {
types: [
Bool(
TypeInfo {
name: "enabled",
description: Some(
"Whether the plugin is enabled",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "fullName",
description: Some(
"Full name of the plugin",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "name",
description: Some(
"Short name of the plugin",
),
is_optional: false,
is_list: false,
},
),
Object(
Object {
type_info: TypeInfo {
name: "supportedCategories",
description: Some(
"List of category objects",
),
is_optional: false,
is_list: true,
},
ref_type: String(
"Category",
),
},
),
String(
TypeInfo {
name: "url",
description: Some(
"URL of the torrent site",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "version",
description: Some(
"Installed version of the plugin",
),
is_optional: false,
is_list: false,
},
),
],
is_list: true,
},
),
],
},
}

View File

@ -0,0 +1,65 @@
## Get search plugins ##
Name: `plugins`
**Parameters:**
None
**Returns:**
HTTP Status Code | Scenario
----------------------------------|---------------------
200 | All scenarios- see JSON below
The response is a JSON array of objects containing the following fields
Field | Type | Description
----------------------------------|---------|------------
`enabled` | bool | Whether the plugin is enabled
`fullName` | string | Full name of the plugin
`name` | string | Short name of the plugin
`supportedCategories` | array | List of category objects
`url` | string | URL of the torrent site
`version` | string | Installed version of the plugin
```JSON
[
{
"enabled": true,
"fullName": "Legit Torrents",
"name": "legittorrents",
"supportedCategories": [{
"id": "all",
"name": "All categories"
}, {
"id": "anime",
"name": "Anime"
}, {
"id": "books",
"name": "Books"
}, {
"id": "games",
"name": "Games"
}, {
"id": "movies",
"name": "Movies"
}, {
"id": "music",
"name": "Music"
}, {
"id": "tv",
"name": "TV shows"
}],
"url": "http://www.legittorrents.info",
"version": "2.3"
}
]
```
**Category object:**
Field | Type | Description
---------------------------|---------|------------
`id` | string | Id
`name` | string | Name

View File

@ -0,0 +1,276 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Get search plugins",
),
content: [
Text(
"",
),
Text(
"Name: `plugins`",
),
Text(
"",
),
Asterisk(
"Parameters:",
),
Text(
"",
),
Text(
"None",
),
Text(
"",
),
Asterisk(
"Returns:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "HTTP Status Code | Scenario",
columns: [
"HTTP Status Code",
"Scenario",
],
},
split: "----------------------------------|---------------------",
rows: [
TableRow {
raw: "200 | All scenarios- see JSON below",
columns: [
"200",
"All scenarios- see JSON below",
],
},
],
},
),
Text(
"",
),
Text(
"The response is a JSON array of objects containing the following fields",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Field | Type | Description",
columns: [
"Field",
"Type",
"Description",
],
},
split: "----------------------------------|---------|------------",
rows: [
TableRow {
raw: "`enabled` | bool | Whether the plugin is enabled",
columns: [
"enabled",
"bool",
"Whether the plugin is enabled",
],
},
TableRow {
raw: "`fullName` | string | Full name of the plugin",
columns: [
"fullName",
"string",
"Full name of the plugin",
],
},
TableRow {
raw: "`name` | string | Short name of the plugin",
columns: [
"name",
"string",
"Short name of the plugin",
],
},
TableRow {
raw: "`supportedCategories` | array | List of category objects",
columns: [
"supportedCategories",
"array",
"List of category objects",
],
},
TableRow {
raw: "`url` | string | URL of the torrent site",
columns: [
"url",
"string",
"URL of the torrent site",
],
},
TableRow {
raw: "`version` | string | Installed version of the plugin",
columns: [
"version",
"string",
"Installed version of the plugin",
],
},
],
},
),
Text(
"",
),
Text(
"```JSON",
),
Text(
"[",
),
Text(
" {",
),
Text(
" \"enabled\": true,",
),
Text(
" \"fullName\": \"Legit Torrents\",",
),
Text(
" \"name\": \"legittorrents\",",
),
Text(
" \"supportedCategories\": [{",
),
Text(
" \"id\": \"all\",",
),
Text(
" \"name\": \"All categories\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"anime\",",
),
Text(
" \"name\": \"Anime\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"books\",",
),
Text(
" \"name\": \"Books\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"games\",",
),
Text(
" \"name\": \"Games\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"movies\",",
),
Text(
" \"name\": \"Movies\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"music\",",
),
Text(
" \"name\": \"Music\"",
),
Text(
" }, {",
),
Text(
" \"id\": \"tv\",",
),
Text(
" \"name\": \"TV shows\"",
),
Text(
" }],",
),
Text(
" \"url\": \"http://www.legittorrents.info\",",
),
Text(
" \"version\": \"2.3\"",
),
Text(
" }",
),
Text(
"]",
),
Text(
"```",
),
Text(
"",
),
Asterisk(
"Category object:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Field | Type | Description",
columns: [
"Field",
"Type",
"Description",
],
},
split: "---------------------------|---------|------------",
rows: [
TableRow {
raw: "`id` | string | Id",
columns: [
"id",
"string",
"Id",
],
},
TableRow {
raw: "`name` | string | Name",
columns: [
"name",
"string",
"Name",
],
},
],
},
),
],
children: [],
},
],
}

View File

@ -0,0 +1,167 @@
ApiMethod {
name: "results",
description: Some(
"The response is a JSON object with the following fields\n\n\n\n\nExample:\n\n```JSON\n{\n \"results\": [\n {\n \"descrLink\": \"http://www.legittorrents.info/index.php?page=torrent-details&id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41\",\n \"fileName\": \"Ubuntu-10.04-32bit-NeTV.ova\",\n \"fileSize\": -1,\n \"fileUrl\": \"http://www.legittorrents.info/download.php?id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41&f=Ubuntu-10.04-32bit-NeTV.ova.torrent\",\n \"nbLeechers\": 1,\n \"nbSeeders\": 0,\n \"siteUrl\": \"http://www.legittorrents.info\"\n },\n {\n \"descrLink\": \"http://www.legittorrents.info/index.php?page=torrent-details&id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475\",\n \"fileName\": \"mangOH-Legato-17_06-Ubuntu-16_04.ova\",\n \"fileSize\": -1,\n \"fileUrl\": \"http://www.legittorrents.info/download.php?id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475&f=mangOH-Legato-17_06-Ubuntu-16_04.ova.torrent\",\n \"nbLeechers\": 0,\n \"nbSeeders\": 59,\n \"siteUrl\": \"http://www.legittorrents.info\"\n }\n ],\n \"status\": \"Running\",\n \"total\": 2\n}\n```",
),
url: "results",
types: CompositeTypes {
composite_types: [
Parameters(
TypeWithoutName {
types: [
Number(
TypeInfo {
name: "id",
description: Some(
"ID of the search job",
),
is_optional: false,
is_list: false,
},
),
Number(
TypeInfo {
name: "limit",
description: Some(
"max number of results to return. 0 or negative means no limit",
),
is_optional: true,
is_list: false,
},
),
Number(
TypeInfo {
name: "offset",
description: Some(
"result to start at. A negative number means count backwards (e.g. -2 returns the 2 most recent results)",
),
is_optional: true,
is_list: false,
},
),
],
is_list: false,
},
),
Object(
TypeWithName {
name: "Result",
types: [
String(
TypeInfo {
name: "descrLink",
description: Some(
"URL of the torrent's description page",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "fileName",
description: Some(
"Name of the file",
),
is_optional: false,
is_list: false,
},
),
Number(
TypeInfo {
name: "fileSize",
description: Some(
"Size of the file in Bytes",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "fileUrl",
description: Some(
"Torrent download link (usually either .torrent file or magnet link)",
),
is_optional: false,
is_list: false,
},
),
Number(
TypeInfo {
name: "nbLeechers",
description: Some(
"Number of leechers",
),
is_optional: false,
is_list: false,
},
),
Number(
TypeInfo {
name: "nbSeeders",
description: Some(
"Number of seeders",
),
is_optional: false,
is_list: false,
},
),
String(
TypeInfo {
name: "siteUrl",
description: Some(
"URL of the torrent site",
),
is_optional: false,
is_list: false,
},
),
],
},
),
Response(
TypeWithoutName {
types: [
Object(
Object {
type_info: TypeInfo {
name: "results",
description: Some(
"Array of result objects- see table below",
),
is_optional: false,
is_list: true,
},
ref_type: String(
"Result",
),
},
),
String(
TypeInfo {
name: "status",
description: Some(
"Current status of the search job (either Running or Stopped)",
),
is_optional: false,
is_list: false,
},
),
Number(
TypeInfo {
name: "total",
description: Some(
"Total number of results. If the status is Running this number may continue to increase",
),
is_optional: false,
is_list: false,
},
),
],
is_list: false,
},
),
],
},
}

View File

@ -0,0 +1,68 @@
## Get search results ##
Name: `results`
**Parameters:**
Parameter | Type | Description
----------------------------------|---------|------------
`id` | number | ID of the search job
`limit` _optional_ | number | max number of results to return. 0 or negative means no limit
`offset` _optional_ | number | result to start at. A negative number means count backwards (e.g. `-2` returns the 2 most recent results)
**Returns:**
HTTP Status Code | Scenario
----------------------------------|---------------------
404 | Search job was not found
409 | Offset is too large, or too small (e.g. absolute value of negative number is greater than # results)
200 | All other scenarios- see JSON below
The response is a JSON object with the following fields
Field | Type | Description
----------------------------------|---------|------------
`results` | array | Array of `result` objects- see table below
`status` | string | Current status of the search job (either `Running` or `Stopped`)
`total` | number | Total number of results. If the status is `Running` this number may continue to increase
**Result object:**
Field | Type | Description
----------------------------------|---------|------------
`descrLink` | string | URL of the torrent's description page
`fileName` | string | Name of the file
`fileSize` | number | Size of the file in Bytes
`fileUrl` | string | Torrent download link (usually either .torrent file or magnet link)
`nbLeechers` | number | Number of leechers
`nbSeeders` | number | Number of seeders
`siteUrl` | string | URL of the torrent site
Example:
```JSON
{
"results": [
{
"descrLink": "http://www.legittorrents.info/index.php?page=torrent-details&id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41",
"fileName": "Ubuntu-10.04-32bit-NeTV.ova",
"fileSize": -1,
"fileUrl": "http://www.legittorrents.info/download.php?id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41&f=Ubuntu-10.04-32bit-NeTV.ova.torrent",
"nbLeechers": 1,
"nbSeeders": 0,
"siteUrl": "http://www.legittorrents.info"
},
{
"descrLink": "http://www.legittorrents.info/index.php?page=torrent-details&id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475",
"fileName": "mangOH-Legato-17_06-Ubuntu-16_04.ova",
"fileSize": -1,
"fileUrl": "http://www.legittorrents.info/download.php?id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475&f=mangOH-Legato-17_06-Ubuntu-16_04.ova.torrent",
"nbLeechers": 0,
"nbSeeders": 59,
"siteUrl": "http://www.legittorrents.info"
}
],
"status": "Running",
"total": 2
}
```

View File

@ -0,0 +1,327 @@
TokenTree {
title: None,
content: [],
children: [
TokenTree {
title: Some(
"Get search results",
),
content: [
Text(
"",
),
Text(
"Name: `results`",
),
Text(
"",
),
Asterisk(
"Parameters:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Parameter | Type | Description",
columns: [
"Parameter",
"Type",
"Description",
],
},
split: "----------------------------------|---------|------------",
rows: [
TableRow {
raw: "`id` | number | ID of the search job",
columns: [
"id",
"number",
"ID of the search job",
],
},
TableRow {
raw: "`limit` _optional_ | number | max number of results to return. 0 or negative means no limit",
columns: [
"limit _optional_",
"number",
"max number of results to return. 0 or negative means no limit",
],
},
TableRow {
raw: "`offset` _optional_ | number | result to start at. A negative number means count backwards (e.g. `-2` returns the 2 most recent results)",
columns: [
"offset _optional_",
"number",
"result to start at. A negative number means count backwards (e.g. -2 returns the 2 most recent results)",
],
},
],
},
),
Text(
"",
),
Asterisk(
"Returns:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "HTTP Status Code | Scenario",
columns: [
"HTTP Status Code",
"Scenario",
],
},
split: "----------------------------------|---------------------",
rows: [
TableRow {
raw: "404 | Search job was not found",
columns: [
"404",
"Search job was not found",
],
},
TableRow {
raw: "409 | Offset is too large, or too small (e.g. absolute value of negative number is greater than # results)",
columns: [
"409",
"Offset is too large, or too small (e.g. absolute value of negative number is greater than # results)",
],
},
TableRow {
raw: "200 | All other scenarios- see JSON below",
columns: [
"200",
"All other scenarios- see JSON below",
],
},
],
},
),
Text(
"",
),
Text(
"The response is a JSON object with the following fields",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Field | Type | Description",
columns: [
"Field",
"Type",
"Description",
],
},
split: "----------------------------------|---------|------------",
rows: [
TableRow {
raw: "`results` | array | Array of `result` objects- see table below",
columns: [
"results",
"array",
"Array of result objects- see table below",
],
},
TableRow {
raw: "`status` | string | Current status of the search job (either `Running` or `Stopped`)",
columns: [
"status",
"string",
"Current status of the search job (either Running or Stopped)",
],
},
TableRow {
raw: "`total` | number | Total number of results. If the status is `Running` this number may continue to increase",
columns: [
"total",
"number",
"Total number of results. If the status is Running this number may continue to increase",
],
},
],
},
),
Text(
"",
),
Asterisk(
"Result object:",
),
Text(
"",
),
Table(
Table {
header: TableRow {
raw: "Field | Type | Description",
columns: [
"Field",
"Type",
"Description",
],
},
split: "----------------------------------|---------|------------",
rows: [
TableRow {
raw: "`descrLink` | string | URL of the torrent's description page",
columns: [
"descrLink",
"string",
"URL of the torrent's description page",
],
},
TableRow {
raw: "`fileName` | string | Name of the file",
columns: [
"fileName",
"string",
"Name of the file",
],
},
TableRow {
raw: "`fileSize` | number | Size of the file in Bytes",
columns: [
"fileSize",
"number",
"Size of the file in Bytes",
],
},
TableRow {
raw: "`fileUrl` | string | Torrent download link (usually either .torrent file or magnet link)",
columns: [
"fileUrl",
"string",
"Torrent download link (usually either .torrent file or magnet link)",
],
},
TableRow {
raw: "`nbLeechers` | number | Number of leechers",
columns: [
"nbLeechers",
"number",
"Number of leechers",
],
},
TableRow {
raw: "`nbSeeders` | number | Number of seeders",
columns: [
"nbSeeders",
"number",
"Number of seeders",
],
},
TableRow {
raw: "`siteUrl` | string | URL of the torrent site",
columns: [
"siteUrl",
"string",
"URL of the torrent site",
],
},
],
},
),
Text(
"",
),
Text(
"Example:",
),
Text(
"",
),
Text(
"```JSON",
),
Text(
"{",
),
Text(
" \"results\": [",
),
Text(
" {",
),
Text(
" \"descrLink\": \"http://www.legittorrents.info/index.php?page=torrent-details&id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41\",",
),
Text(
" \"fileName\": \"Ubuntu-10.04-32bit-NeTV.ova\",",
),
Text(
" \"fileSize\": -1,",
),
Text(
" \"fileUrl\": \"http://www.legittorrents.info/download.php?id=8d5f512e1acb687029b8d7cc6c5a84dce51d7a41&f=Ubuntu-10.04-32bit-NeTV.ova.torrent\",",
),
Text(
" \"nbLeechers\": 1,",
),
Text(
" \"nbSeeders\": 0,",
),
Text(
" \"siteUrl\": \"http://www.legittorrents.info\"",
),
Text(
" },",
),
Text(
" {",
),
Text(
" \"descrLink\": \"http://www.legittorrents.info/index.php?page=torrent-details&id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475\",",
),
Text(
" \"fileName\": \"mangOH-Legato-17_06-Ubuntu-16_04.ova\",",
),
Text(
" \"fileSize\": -1,",
),
Text(
" \"fileUrl\": \"http://www.legittorrents.info/download.php?id=d5179f53e105dc2c2401bcfaa0c2c4936a6aa475&f=mangOH-Legato-17_06-Ubuntu-16_04.ova.torrent\",",
),
Text(
" \"nbLeechers\": 0,",
),
Text(
" \"nbSeeders\": 59,",
),
Text(
" \"siteUrl\": \"http://www.legittorrents.info\"",
),
Text(
" }",
),
Text(
" ],",
),
Text(
" \"status\": \"Running\",",
),
Text(
" \"total\": 2",
),
Text(
"}",
),
Text(
"```",
),
],
children: [],
},
],
}

View File

@ -1,47 +1,430 @@
mod description;
mod parameters;
mod return_type;
// mod return_type;
mod url;
use crate::{md_parser, parser::util, types};
pub use return_type::ReturnType;
use self::{
description::parse_method_description, parameters::parse_parameters,
return_type::parse_return_type, url::get_method_url,
};
use crate::{md_parser, types};
use case::CaseExt;
use regex::Regex;
use std::collections::BTreeMap;
#[derive(Debug)]
pub struct ApiMethod {
pub name: String,
pub description: Option<String>,
pub parameters: Option<Vec<types::Type>>,
pub return_type: Option<ReturnType>,
pub url: String,
pub types: CompositeTypes,
}
pub fn parse_api_method(child: &md_parser::TokenTree) -> Option<ApiMethod> {
util::find_content_starts_with(&child.content, "Name: ")
.map(|name| {
name.trim_start_matches("Name: ")
.trim_matches('`')
.to_string()
#[derive(Debug)]
pub struct CompositeTypes {
pub composite_types: Vec<CompositeType>,
}
impl CompositeTypes {
pub fn new(tables: &Tables) -> Self {
Self {
composite_types: tables.get_all_tables_as_types(),
}
}
pub fn parameters(&self) -> Vec<&types::Type> {
self.composite_types
.iter()
.find_map(|type_| match type_ {
CompositeType::Parameters(p) => Some(p.types.iter().collect()),
_ => None,
})
.unwrap_or_default()
}
pub fn optional_parameters(&self) -> Vec<&types::Type> {
self.parameters()
.iter()
.filter(|param| param.is_optional())
.copied()
.collect()
}
pub fn mandatory_params(&self) -> Vec<&types::Type> {
self.parameters()
.iter()
.filter(|param| !param.is_optional())
.copied()
.collect()
}
pub fn response(&self) -> Option<&TypeWithoutName> {
self.composite_types.iter().find_map(|type_| match type_ {
CompositeType::Response(p) => Some(p),
_ => None,
})
.map(|name| to_api_method(child, &name))
}
}
fn to_api_method(child: &md_parser::TokenTree, name: &str) -> ApiMethod {
let method_description = parse_method_description(&child.content);
let return_type = parse_return_type(&child.content);
let parameters = parse_parameters(&child.content);
let method_url = get_method_url(&child.content);
pub fn objects(&self) -> Vec<&TypeWithName> {
self.composite_types
.iter()
.filter_map(|type_| match type_ {
CompositeType::Object(p) => Some(p),
_ => None,
})
.collect()
}
ApiMethod {
name: name.to_string(),
description: method_description,
parameters,
return_type,
url: method_url,
pub fn enums(&self) -> Vec<&Enum> {
self.composite_types
.iter()
.filter_map(|type_| match type_ {
CompositeType::Enum(p) => Some(p),
_ => None,
})
.collect()
}
}
/// Parameters of an API method, split by whether the caller must supply them.
#[derive(Debug)]
pub struct ApiParameters {
    // Parameters that must always be supplied by the caller.
    pub mandatory: Vec<types::Type>,
    // Parameters that may be omitted (marked optional in the docs).
    pub optional: Vec<types::Type>,
}
/// An aggregate type extracted from a markdown documentation table.
#[derive(Debug)]
pub enum CompositeType {
    // Parsed from a "Possible values of `x`" table.
    Enum(Enum),
    // Parsed from a "`Name` object" table; carries its own name.
    Object(TypeWithName),
    // Parsed from a "The response is a ..." table; anonymous.
    Response(TypeWithoutName),
    // Parsed from a "Parameters ..." table; anonymous.
    Parameters(TypeWithoutName),
}
/// A composite type that carries its own name (e.g. an "`Name` object" table).
#[derive(Debug)]
pub struct TypeWithName {
    pub name: String,
    pub types: Vec<types::Type>,
}
/// An anonymous composite type (a response or parameter table).
#[derive(Debug)]
pub struct TypeWithoutName {
    pub types: Vec<types::Type>,
    // True when the surrounding prose describes a JSON array of this type.
    pub is_list: bool,
}
impl TypeWithoutName {
pub fn new(types: Vec<types::Type>, is_list: bool) -> Self {
Self { types, is_list }
}
}
impl TypeWithName {
pub fn new(name: &str, types: Vec<types::Type>) -> Self {
Self {
name: name.to_string(),
types,
}
}
}
/// An enumeration parsed from a "Possible values of `x`" table.
#[derive(Debug)]
pub struct Enum {
    pub name: String,
    pub values: Vec<EnumValue>,
}
/// A single variant of an [`Enum`], parsed from one table row.
#[derive(Debug)]
pub struct EnumValue {
    // Second table column, when present.
    pub description: Option<String>,
    // Camel-cased identifier derived from the value or its description.
    pub value: String,
    // The raw value exactly as it appears in the first table column.
    pub original_value: String,
}
impl Enum {
    /// Builds an enum named `name` with one value per row of `table`.
    fn new(name: &str, table: &md_parser::Table) -> Self {
        Self {
            name: name.to_string(),
            values: table.rows.iter().map(EnumValue::from).collect(),
        }
    }
}
impl From<&md_parser::TableRow> for EnumValue {
    /// Builds an enum value from a `value | description` table row.
    ///
    /// Purely numeric values carry no usable identifier, so the variant name
    /// is derived from the description instead: separator characters become
    /// underscores, any parenthesised remark is stripped, and the result is
    /// camel-cased. Non-numeric values are camel-cased directly.
    ///
    /// Panics when a numeric value has no description column, since no name
    /// could be derived for it.
    fn from(row: &md_parser::TableRow) -> Self {
        let description = row.columns.get(1).cloned();
        let original_value = row.columns[0].clone();
        let value = if original_value.parse::<i32>().is_ok() {
            // Borrow the description instead of cloning the whole
            // Option<String> just to unwrap it.
            let name = description
                .as_deref()
                .expect("numeric enum value requires a description to derive its name")
                .replace(' ', "_")
                .replace('-', "_")
                .replace(',', "_");
            let re = Regex::new(r#"\(.*\)"#).unwrap();
            re.replace_all(&name, "").to_camel()
        } else {
            original_value.to_camel()
        };

        EnumValue {
            description,
            value,
            original_value,
        }
    }
}
impl ApiMethod {
    /// Attempts to build an [`ApiMethod`] from a token tree; returns `None`
    /// when the tree has no "Name: `...`" line identifying the method.
    pub fn try_new(child: &md_parser::TokenTree) -> Option<Self> {
        const NAME: &str = "Name: ";
        let raw = child.find_content_starts_with(NAME)?;
        let name = raw.trim_start_matches(NAME).trim_matches('`').to_string();
        Some(Self::new(child, &name))
    }

    /// Assembles the method from its name plus the description, URL and
    /// composite types extracted from `child`.
    fn new(child: &md_parser::TokenTree, name: &str) -> Self {
        let tables = Tables::from(child);
        Self {
            name: name.to_string(),
            description: child.parse_method_description(),
            url: child.get_method_url(),
            types: CompositeTypes::new(&tables),
        }
    }
}
impl md_parser::TokenTree {
    /// Returns the first text row of this tree's content that begins with
    /// `starts_with`, as an owned string.
    fn find_content_starts_with(&self, starts_with: &str) -> Option<String> {
        self.content.iter().find_map(|row| {
            if let md_parser::MdContent::Text(text) = row {
                if text.starts_with(starts_with) {
                    return Some(text.into());
                }
            }
            None
        })
    }
}
impl<'a> From<&'a md_parser::TokenTree> for Tables<'a> {
    /// Collects every table in the tree's content, keyed by its title.
    ///
    /// A table's title is the content entry two positions before it
    /// (title line, blank line, table); trailing colons are stripped
    /// from the key.
    fn from(token_tree: &'a md_parser::TokenTree) -> Self {
        let mut tables = BTreeMap::new();
        for (index, content) in token_tree.content.iter().enumerate() {
            if let md_parser::MdContent::Table(table) = content {
                // Look back two entries for the title; None near the start.
                let preceding = index
                    .checked_sub(2)
                    .and_then(|title_index| token_tree.content.get(title_index));
                let title = match preceding {
                    Some(md_parser::MdContent::Text(text)) => text.clone(),
                    Some(md_parser::MdContent::Asterisk(text)) => text.clone(),
                    _ => panic!("Expected table title, found: {:?}", preceding),
                };
                tables.insert(title.replace(':', ""), table);
            }
        }
        Tables { tables }
    }
}
/// All tables of a token tree, indexed by their (colon-stripped) title text.
#[derive(Debug)]
pub struct Tables<'a> {
    tables: BTreeMap<String, &'a md_parser::Table>,
}
impl md_parser::Table {
    /// Parses a "Possible values of `name`" table into an enum type.
    fn to_enum(&self, input_name: &str) -> Option<CompositeType> {
        let re = Regex::new(r"^Possible values of `(\w+)`$").unwrap();
        let name = Self::regex_to_name(&re, input_name)?;
        Some(CompositeType::Enum(Enum::new(&name, self)))
    }

    /// Parses an "`Name` object" table into a named object type.
    fn to_object(&self, input_name: &str) -> Option<CompositeType> {
        let re = Regex::new(r"^(\w+) object$").unwrap();
        let name = Self::regex_to_name(&re, input_name)?;
        Some(CompositeType::Object(TypeWithName::new(
            &name,
            self.to_types(),
        )))
    }

    /// Parses a "The response is a ..." table into the response type;
    /// a mention of "array" marks the response as a list.
    fn to_response(&self, input_name: &str) -> Option<CompositeType> {
        if !input_name.starts_with("The response is a") {
            return None;
        }
        Some(CompositeType::Response(TypeWithoutName::new(
            self.to_types(),
            input_name.to_lowercase().contains("array"),
        )))
    }

    /// Parses a "Parameters ..." table into the parameter-list type.
    fn to_parameters(&self, input_name: &str) -> Option<CompositeType> {
        if !input_name.starts_with("Parameters") {
            return None;
        }
        Some(CompositeType::Parameters(TypeWithoutName::new(
            self.to_types(),
            input_name.to_lowercase().contains("array"),
        )))
    }

    /// Tries each table interpretation in turn, returning the first match.
    fn to_composite_type(&self, input_name: &str) -> Option<CompositeType> {
        self.to_enum(input_name)
            .or_else(|| self.to_response(input_name))
            .or_else(|| self.to_object(input_name))
            .or_else(|| self.to_parameters(input_name))
    }

    /// Extracts the first capture group as a camel-cased name, or `None`
    /// when the pattern does not match. The regex is applied once here,
    /// replacing the previous `is_match`-then-`captures` double pass and
    /// its unwraps.
    fn regex_to_name(re: &Regex, input_name: &str) -> Option<String> {
        re.captures(input_name)
            .and_then(|captures| captures.get(1))
            .map(|name| name.as_str().to_string().to_camel())
    }
}
impl<'a> Tables<'a> {
    /// Converts every collected table into a composite type, silently
    /// dropping tables that match no known interpretation.
    fn get_all_tables_as_types(&self) -> Vec<CompositeType> {
        self.tables
            .iter()
            .filter_map(|(title, table)| table.to_composite_type(title))
            .collect()
    }
}
impl md_parser::Table {
    /// Converts every row of the table into a type, skipping rows that
    /// cannot be parsed.
    fn to_types(&self) -> Vec<types::Type> {
        self.rows
            .iter()
            .filter_map(md_parser::TableRow::to_type)
            .collect()
    }
}
impl md_parser::TableRow {
    /// Converts a `name | type | description` row into a type, flagging
    /// the parameter as optional when its description mentions a default
    /// value.
    fn to_type(&self) -> Option<types::Type> {
        let columns = &self.columns;
        let description = columns.get(2).cloned();
        // A stated default value implies the parameter may be omitted.
        let has_default = description
            .as_deref()
            .map_or(false, |desc| desc.contains("default: "));
        if has_default {
            // Mark the name with the optional suffix understood downstream.
            let name_with_optional = format!("{} {}", columns[0], types::OPTIONAL);
            types::Type::from(&columns[1], &name_with_optional, description)
        } else {
            types::Type::from(&columns[1], &columns[0], description)
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use md_parser::TokenTreeFactory;

    // Directory (relative to this file) holding the markdown fixtures and
    // their expected-output snapshots.
    macro_rules! TEST_DIR {
        () => {
            "method_tests"
        };
    }

    // Snapshot test: parses `<TEST_DIR>/<name>.md`, builds an ApiMethod from
    // the first child of the token tree, and compares its Debug rendering
    // against the stored `<name>.check` file.
    #[allow(unused_macros)]
    macro_rules! run_test {
        ($test_file:expr) => {
            use pretty_assertions::assert_eq;

            // given
            let input = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".md"));

            // when
            let tree = TokenTreeFactory::create(input);
            let api_method = ApiMethod::try_new(&tree.children[0]).unwrap();

            // then
            let api_method_as_str = format!("{api_method:#?}");
            let should_be = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".check"));
            assert_eq!(api_method_as_str, should_be);
        };
    }

    // use this macro when creating/updating a test: it regenerates the
    // `.check` and `.tree` snapshot files instead of asserting against them.
    #[allow(unused_macros)]
    macro_rules! update_test {
        ($test_file:expr) => {
            use std::fs;
            use std::path::Path;

            let input = include_str!(concat!(TEST_DIR!(), "/", $test_file, ".md"));
            let tree = TokenTreeFactory::create(input);
            let api_method = ApiMethod::try_new(&tree.children[0]).unwrap();

            let tree_as_str = format!("{tree:#?}");
            let api_method_as_str = format!("{api_method:#?}");

            // Paths are relative to the crate root because fs::write runs at
            // test time, unlike the compile-time include_str! above.
            let tree_file = concat!(
                "src/parser/group/method/",
                TEST_DIR!(),
                "/",
                $test_file,
                ".tree"
            );
            let file = concat!(
                "src/parser/group/method/",
                TEST_DIR!(),
                "/",
                $test_file,
                ".check"
            );

            fs::write(file, api_method_as_str).unwrap();
            fs::write(tree_file, tree_as_str).unwrap();
        };
    }

    #[test]
    fn search_result() {
        run_test!("search_result");
    }

    #[test]
    fn enum_test() {
        run_test!("enum");
    }

    #[test]
    fn array_result() {
        run_test!("array_result");
    }

    #[test]
    fn array_field() {
        run_test!("array_field");
    }

    #[test]
    fn ref_type() {
        run_test!("ref_type");
    }
}

View File

@ -1,51 +0,0 @@
use std::collections::HashMap;
use crate::{md_parser, parser::types};
pub fn parse_parameters(content: &[md_parser::MdContent]) -> Option<Vec<types::Type>> {
let mut it = content
.iter()
.skip_while(|row| match row {
md_parser::MdContent::Asterix(content) | md_parser::MdContent::Text(content) => {
!content.starts_with("Parameters:")
}
_ => true,
})
// Parameters: <-- skip
// <-- skip
// table with parameters <-- take
.skip(2);
let parameter_table = match it.next() {
Some(md_parser::MdContent::Table(table)) => table,
_ => return None,
};
// empty for now
let type_map = HashMap::default();
let table = parameter_table
.rows
.iter()
.flat_map(|row| parse_parameter(row, &type_map))
.collect();
Some(table)
}
fn parse_parameter(
row: &md_parser::TableRow,
type_map: &HashMap<String, types::TypeDescription>,
) -> Option<types::Type> {
let description = row.columns.get(2).cloned();
match &row.columns.get(2) {
// If the description contains a default value it means that the parameter is optional.
Some(desc) if desc.contains("default: ") => {
// type defines a variable as default if it contains: _optional_
let name_with_optional = format!("{} {}", row.columns[0], types::OPTIONAL);
types::Type::from(&row.columns[1], &name_with_optional, description, type_map)
}
_ => types::Type::from(&row.columns[1], &row.columns[0], description, type_map),
}
}

View File

@ -1,100 +1,107 @@
use std::collections::HashMap;
use crate::{
md_parser::{self, MdContent},
md_parser,
parser::{types, ReturnTypeParameter},
};
use super::Tables;
#[derive(Debug)]
pub struct ReturnType {
pub is_list: bool,
pub parameters: Vec<ReturnTypeParameter>,
}
pub fn parse_return_type(content: &[MdContent]) -> Option<ReturnType> {
let table = content
.iter()
// The response is a ... <-- Trying to find this line
// <-- The next line is empty
// Table with the return type <-- And then extract the following type table
.skip_while(|row| match row {
MdContent::Text(text) => !text.starts_with("The response is a"),
_ => true,
})
.find_map(|row| match row {
MdContent::Table(table) => Some(table),
_ => None,
})?;
let types = parse_object_types(content);
let parameters = table
.rows
.iter()
.map(|parameter| ReturnTypeParameter {
name: parameter.columns[0].clone(),
description: parameter.columns[2].clone(),
return_type: types::Type::from(
&parameter.columns[1],
&parameter.columns[0],
Some(parameter.columns[2].clone()),
&types,
)
.unwrap_or_else(|| panic!("Failed to parse type {}", &parameter.columns[1])),
})
.collect();
let is_list = content
.iter()
.find_map(|row| match row {
MdContent::Text(text) if text.starts_with("The response is a") => Some(text),
_ => None,
})
.map(|found| found.contains("array"))
.unwrap_or_else(|| false);
Some(ReturnType {
parameters,
is_list,
})
impl md_parser::Table {
fn to_return_type_parameters(
&self,
types: &HashMap<String, types::TypeDescription>,
) -> Vec<ReturnTypeParameter> {
self.rows
.iter()
.map(|parameter| parameter.to_return_type_parameter(types))
.collect()
}
}
pub fn parse_object_types(
content: &[md_parser::MdContent],
) -> HashMap<String, types::TypeDescription> {
let mut output = HashMap::new();
let mut content_it = content.iter();
impl md_parser::TokenTree {
pub fn parse_return_type(&self) -> Option<ReturnType> {
let tables: Tables = self.into();
let table = tables
.get_type_containing_as_table("The response is a")
// these two are special cases not following a pattern
.or_else(|| tables.get_type_containing_as_table("Possible fields"))
.or_else(|| {
tables.get_type_containing_as_table(
"Each element of the array has the following properties",
)
})?;
while let Some(entry) = content_it.next() {
if let md_parser::MdContent::Text(content) = entry {
const POSSIBLE_VALUES_OF: &str = "Possible values of ";
if content.contains(POSSIBLE_VALUES_OF) {
// is empty
content_it.next();
if let Some(md_parser::MdContent::Table(table)) = content_it.next() {
let enum_types = to_type_descriptions(table);
let types = self.parse_object_types();
let name = content
.trim_start_matches(POSSIBLE_VALUES_OF)
.replace('`', "")
.replace(':', "");
Some(ReturnType {
parameters: table.to_return_type_parameters(&types),
is_list: self.is_list(),
})
}
output.insert(name, types::TypeDescription { values: enum_types });
}
}
fn is_list(&self) -> bool {
self.find_content_starts_with("The response is a")
.map(|found| found.contains("array"))
.unwrap_or_else(|| false)
}
pub fn parse_object_types(&self) -> HashMap<String, types::TypeDescription> {
let tables: Tables = self.into();
const POSSIBLE_VALUES_OF: &str = "Possible values of ";
tables
.get_all_type_containing_as_table(POSSIBLE_VALUES_OF)
.iter()
.map(|(k, table)| {
let name = k
.trim_start_matches(POSSIBLE_VALUES_OF)
.replace('`', "")
.replace(':', "");
(name, table.to_type_description())
})
.collect()
}
}
impl md_parser::Table {
pub fn to_type_description(&self) -> types::TypeDescription {
types::TypeDescription {
values: self.to_type_descriptions(),
}
}
output
pub fn to_type_descriptions(&self) -> Vec<types::TypeDescriptions> {
self.rows
.iter()
.map(|row| types::TypeDescriptions {
value: row.columns[0].to_string(),
description: row.columns[1].to_string(),
})
.collect()
}
}
fn to_type_descriptions(table: &md_parser::Table) -> Vec<types::TypeDescriptions> {
table
.rows
.iter()
.map(|row| types::TypeDescriptions {
value: row.columns[0].to_string(),
description: row.columns[1].to_string(),
})
.collect()
impl md_parser::TableRow {
fn to_return_type_parameter(
&self,
type_map: &HashMap<String, types::TypeDescription>,
) -> ReturnTypeParameter {
let columns = &self.columns;
ReturnTypeParameter {
name: columns[0].clone(),
description: columns[2].clone(),
return_type: self
.to_types_with_types(type_map)
.unwrap_or_else(|| panic!("Failed to parse type {}", &columns[1])),
}
}
}

View File

@ -1,9 +1,11 @@
use crate::{md_parser, parser::util};
use crate::md_parser;
pub fn get_method_url(content: &[md_parser::MdContent]) -> String {
const START: &str = "Name: ";
impl md_parser::TokenTree {
pub fn get_method_url(&self) -> String {
const START: &str = "Name: ";
util::find_content_starts_with(content, START)
.map(|text| text.trim_start_matches(START).trim_matches('`').to_string())
.expect("Could find method url")
self.find_content_starts_with(START)
.map(|text| text.trim_start_matches(START).trim_matches('`').to_string())
.expect("Could find method url")
}
}

View File

@ -4,8 +4,7 @@ mod url;
use crate::md_parser;
use self::{description::parse_group_description, method::parse_api_method, url::get_group_url};
pub use method::{ApiMethod, ReturnType};
pub use method::*;
#[derive(Debug)]
pub struct ApiGroup {
@ -15,25 +14,29 @@ pub struct ApiGroup {
pub url: String,
}
pub fn parse_api_group(tree: &md_parser::TokenTree) -> ApiGroup {
let methods = tree.children.iter().flat_map(parse_api_method).collect();
let group_description = parse_group_description(&tree.content);
let group_url = get_group_url(&tree.content);
let name = tree
.title
.clone()
.unwrap()
.to_lowercase()
.trim_end_matches("(experimental)")
.trim()
.replace(' ', "_");
ApiGroup {
name,
methods,
description: group_description,
url: group_url,
impl ApiGroup {
pub fn new(tree: &md_parser::TokenTree) -> ApiGroup {
ApiGroup {
name: tree.name(),
methods: tree.methods(),
description: tree.parse_group_description(),
url: tree.get_group_url(),
}
}
}
impl md_parser::TokenTree {
fn name(&self) -> String {
self.title
.clone()
.unwrap()
.to_lowercase()
.trim_end_matches("(experimental)")
.trim()
.replace(' ', "_")
}
fn methods(&self) -> Vec<ApiMethod> {
self.children.iter().flat_map(ApiMethod::try_new).collect()
}
}

View File

@ -1,14 +1,26 @@
use regex::Regex;
use crate::{md_parser, parser::util};
use crate::md_parser;
pub fn get_group_url(content: &[md_parser::MdContent]) -> String {
let row = util::find_content_contains(content, "API methods are under")
.expect("Could not find api method");
impl md_parser::TokenTree {
pub fn get_group_url(&self) -> String {
let row = self
.find_content_contains("API methods are under")
.expect("Could not find api method");
let re = Regex::new(r#"All (?:\w+\s?)+ API methods are under "(\w+)", e.g."#)
.expect("Failed to create regex");
let re = Regex::new(r#"All (?:\w+\s?)+ API methods are under "(\w+)", e.g."#)
.expect("Failed to create regex");
let res = re.captures(&row).expect("Failed find capture");
res[1].to_string()
let res = re.captures(&row).expect("Failed find capture");
res[1].to_string()
}
fn find_content_contains(&self, contains: &str) -> Option<String> {
self.content.iter().find_map(|row| match row {
md_parser::MdContent::Text(content) if content.contains(contains) => {
Some(content.into())
}
_ => None,
})
}
}

File diff suppressed because one or more lines are too long

View File

@ -1,9 +1,6 @@
use crate::{md_parser, types};
use self::group::parse_api_group;
mod group;
mod util;
#[derive(Debug)]
pub struct ReturnTypeParameter {
@ -12,17 +9,14 @@ pub struct ReturnTypeParameter {
pub return_type: types::Type,
}
pub use group::{ApiGroup, ApiMethod, ReturnType};
pub use group::*;
pub fn parse_api_groups(token_tree: md_parser::TokenTree) -> Vec<ApiGroup> {
parse_groups(extract_relevant_parts(token_tree))
}
pub fn parse_groups(trees: Vec<md_parser::TokenTree>) -> Vec<ApiGroup> {
trees
.into_iter()
.map(|tree| parse_api_group(&tree))
.collect()
trees.iter().map(ApiGroup::new).collect()
}
fn extract_relevant_parts(tree: md_parser::TokenTree) -> Vec<md_parser::TokenTree> {

File diff suppressed because it is too large Load Diff

View File

@ -1,15 +0,0 @@
use crate::md_parser::MdContent;
pub fn find_content_starts_with(content: &[MdContent], starts_with: &str) -> Option<String> {
content.iter().find_map(|row| match row {
MdContent::Text(content) if content.starts_with(starts_with) => Some(content.into()),
_ => None,
})
}
pub fn find_content_contains(content: &[MdContent], contains: &str) -> Option<String> {
content.iter().find_map(|row| match row {
MdContent::Text(content) if content.contains(contains) => Some(content.into()),
_ => None,
})
}

View File

@ -1,4 +1,5 @@
use std::collections::HashMap;
use case::CaseExt;
use regex::RegexBuilder;
#[derive(Debug, Clone)]
pub struct TypeDescriptions {
@ -14,30 +15,47 @@ pub struct TypeDescription {
#[derive(Debug, Clone)]
pub struct TypeInfo {
pub name: String,
pub is_optional: bool,
pub is_list: bool,
pub description: Option<String>,
pub type_description: Option<TypeDescription>,
is_optional: bool,
is_list: bool,
}
impl TypeInfo {
pub fn new(
name: &str,
is_optional: bool,
is_list: bool,
description: Option<String>,
type_description: Option<TypeDescription>,
) -> Self {
pub fn new(name: &str, is_optional: bool, is_list: bool, description: Option<String>) -> Self {
Self {
name: name.into(),
description,
is_optional,
is_list,
description,
type_description,
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum RefType {
String(String),
Map(String, String),
}
#[derive(Debug, Clone)]
pub struct Object {
pub type_info: TypeInfo,
pub ref_type: RefType,
}
#[derive(Debug, Clone)]
pub struct Enum {
pub type_info: TypeInfo,
pub values: Vec<EnumValue>,
}
#[derive(Debug, Clone)]
pub struct EnumValue {
pub description: Option<String>,
pub key: String,
pub value: String,
}
pub const OPTIONAL: &str = "_optional_";
#[derive(Debug, Clone)]
@ -47,22 +65,10 @@ pub enum Type {
Bool(TypeInfo),
String(TypeInfo),
StringArray(TypeInfo),
Object(TypeInfo),
Object(Object),
}
impl Type {
pub fn to_owned_type(&self) -> String {
match self {
Type::Number(_) => "i128".into(),
Type::Float(_) => "f32".into(),
Type::Bool(_) => "bool".into(),
Type::String(_) => "String".into(),
// TODO: fixme
Type::StringArray(_) => "String".into(),
Type::Object(_) => "String".into(),
}
}
pub fn to_borrowed_type(&self) -> String {
match self {
Type::Number(_) => "i32".into(),
@ -70,7 +76,7 @@ impl Type {
Type::Bool(_) => "bool".into(),
Type::String(_) => "str".into(),
Type::StringArray(_) => "&[str]".into(),
Type::Object(_) => "str".into(),
Type::Object(_) => todo!(),
}
}
@ -78,6 +84,14 @@ impl Type {
matches!(self, Type::String(_) | Type::Object(_))
}
pub fn is_optional(&self) -> bool {
self.get_type_info().is_optional
}
pub fn is_list(&self) -> bool {
self.get_type_info().is_list
}
pub fn get_type_info(&self) -> &TypeInfo {
match self {
Type::Number(t) => t,
@ -85,17 +99,11 @@ impl Type {
Type::Bool(t) => t,
Type::String(t) => t,
Type::StringArray(t) => t,
Type::Object(t) => t,
Type::Object(t) => &t.type_info,
}
}
pub fn from(
type_as_str: &str,
name: &str,
description: Option<String>,
types: &HashMap<String, TypeDescription>,
) -> Option<Type> {
let available_types = types.get(name).cloned();
pub fn from(type_as_str: &str, name: &str, description: Option<String>) -> Option<Type> {
let type_name = match name.split_once(OPTIONAL) {
Some((split, _)) => split,
None => name,
@ -103,17 +111,130 @@ impl Type {
.trim();
let is_optional = name.contains(OPTIONAL);
let type_info = TypeInfo::new(type_name, is_optional, false, description, available_types);
let is_list = description
.clone()
.map(|desc| desc.contains("array"))
.unwrap_or(false);
match type_as_str {
"bool" => Some(Type::Bool(type_info)),
"integer" | "number" | "int" => Some(Type::Number(type_info)),
"string" => Some(Type::String(type_info)),
// This is probably not right but we don't have any information about the actual type.
"array" => Some(Type::StringArray(type_info)),
"object" => Some(Type::Object(type_info)),
"float" => Some(Type::Float(type_info)),
_ => None,
let (type_without_array, type_contains_array) = if type_as_str.contains("array") {
(type_as_str.replace("array", ""), true)
} else {
(type_as_str.to_owned(), false)
};
let create_type_info = || {
TypeInfo::new(
type_name,
is_optional,
is_list || type_contains_array,
description.clone(),
)
};
let create_object_type = |ref_type: RefType| {
Some(Type::Object(Object {
type_info: create_type_info(),
ref_type,
}))
};
match type_without_array.trim() {
"raw" => None,
"bool" => Some(Type::Bool(create_type_info())),
"integer" | "number" | "int" => Some(Type::Number(create_type_info())),
"string" => Some(Type::String(create_type_info())),
"array" => description
.extract_type()
.and_then(create_object_type)
.or_else(|| Some(Type::StringArray(create_type_info()))),
"float" => Some(Type::Float(create_type_info())),
name => description
.extract_type()
.and_then(create_object_type)
.or_else(|| {
let n = if name.is_empty() {
"String".into()
} else {
name.into()
};
create_object_type(RefType::String(n))
}),
}
}
}
trait ExtractType {
fn extract_type(&self) -> Option<RefType>;
}
impl ExtractType for Option<String> {
fn extract_type(&self) -> Option<RefType> {
let list_type = || {
self.as_ref().and_then(|t| {
let re = RegexBuilder::new(r"(?:Array|List) of (\w+) objects")
.case_insensitive(true)
.build()
.unwrap();
let cap = re.captures(t)?;
cap.get(1)
.map(|m| m.as_str().to_camel())
.map(RefType::String)
})
};
let map_type = || {
self.as_ref().and_then(|t| {
let re = RegexBuilder::new(r"map from (\w+) to (\w+) object")
.case_insensitive(true)
.build()
.unwrap();
let cap = re.captures(t)?;
let key_type = match cap.get(1).map(|m| m.as_str().to_camel()) {
Some(k) => k,
None => return None,
};
let value_type = match cap.get(2).map(|m| m.as_str().to_camel()) {
Some(v) => v,
None => return None,
};
Some(RefType::Map(key_type, value_type))
})
};
let object_type = || {
self.as_ref().and_then(|t| {
let re = RegexBuilder::new(r"(\w+) object see table below")
.case_insensitive(true)
.build()
.unwrap();
let cap = re.captures(t)?;
let object_type = match cap.get(1).map(|m| m.as_str().to_camel()) {
Some(k) => k,
None => return None,
};
Some(RefType::String(object_type))
})
};
list_type().or_else(map_type).or_else(object_type)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_regex() {
let input = Some("Array of result objects- see table below".to_string());
let res = input.extract_type();
assert_eq!(res.unwrap(), RefType::String("Result".into()));
}
}

View File

@ -10,7 +10,7 @@ mod foo {
#[tokio::main]
async fn main() -> Result<()> {
let _ = foo::api_impl::ApplicationPreferencesBittorrentProtocol::TCP;
let _ = foo::api_impl::application::preferences::BittorrentProtocol::TCP;
Ok(())
}

View File

@ -0,0 +1,20 @@
mod common;
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
let _ = api.search().delete(1).send().await?;
let _ = api.search().plugins().await?;
let _ = api.search().plugins().await?;
let _ = api.search().install_plugin("https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/legittorrents.py").send().await?;
Ok(())
}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Foo {}

View File

@ -0,0 +1,3 @@
pub const USERNAME: &str = "admin";
pub const PASSWORD: &str = "adminadmin";
pub const BASE_URL: &str = "http://localhost:8080";

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,10 +1,9 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
use tokio::time::{sleep, Duration};
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
use tokio::time::*;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -1,10 +1,9 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
use tokio::time::{sleep, Duration};
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
use tokio::time::*;
#[derive(QBittorrentApiGen)]
struct Api {}

View File

@ -0,0 +1,19 @@
mod common;
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}
#[tokio::main]
async fn main() -> Result<()> {
let api = Api::login(BASE_URL, USERNAME, PASSWORD).await?;
let _ = api.search().install_plugin("https://raw.githubusercontent.com/qbittorrent/search-plugins/master/nova3/engines/legittorrents.py").await?;
// just check that the deserialization works
let _ = api.search().plugins().await?;
Ok(())
}

View File

@ -1,6 +1,7 @@
#[test]
fn tests() {
let t = trybuild::TestCases::new();
// --- Auth ---
t.pass("tests/login.rs");
t.pass("tests/logout.rs");
@ -19,4 +20,5 @@ fn tests() {
t.pass("tests/add_torrent.rs");
t.pass("tests/another_struct_name.rs");
t.pass("tests/access_impl_types.rs");
t.pass("tests/search_types.rs");
}

View File

@ -1,9 +1,8 @@
use anyhow::Result;
use qbittorrent_web_api_gen::QBittorrentApiGen;
mod common;
const USERNAME: &str = "admin";
const PASSWORD: &str = "adminadmin";
const BASE_URL: &str = "http://localhost:8080";
use anyhow::Result;
use common::*;
use qbittorrent_web_api_gen::QBittorrentApiGen;
#[derive(QBittorrentApiGen)]
struct Api {}

File diff suppressed because it is too large Load Diff