Try to parse the cli commands

parent 5426c6186e
commit d2a22d6ee1
@@ -1,7 +1,7 @@
 /target
 .vscode
 
 # Debug related directories that we don't want included
-/torrents
+/torrents*
 /output
 config.toml
File diff suppressed because it is too large
Cargo.toml (32 changed lines)
@@ -1,17 +1,17 @@
 [package]
 name = "cross-seed"
 version = "0.1.0"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
 tokio = { version = "1.19.2", features = ["full"] }
 toml = "0.5.9"
 lava_torrent = "0.7.0" # https://docs.rs/lava_torrent/0.7.0/lava_torrent/
 torznab = "0.7.2" # https://docs.rs/torznab/0.7.2/torznab/
 magnet-url = "2.0.0"
 serde = { version = "1.0", features = ["derive"] }
 figment = { version = "0.10", features = ["toml", "env"] }
 wild = "2.0.4"
 argmap = "1.1.2"
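Note: `wild` and `argmap` are the two crates the CLI parsing below leans on. A minimal sketch (not part of this diff) of how they are expected to fit together, using only calls that appear in this commit; the example flag is made up:

    // Collect argv; `wild` expands glob patterns on Windows and is a no-op elsewhere.
    let args: Vec<String> = wild::args().collect();
    // Split into positional arguments and a map of `--key value` / `--key=value` flags,
    // where each key maps to the list of values it was given.
    let (positional, flags) = argmap::parse(args.iter());
    // e.g. `cross-seed --indexers.example.url http://localhost:9117` (hypothetical flag)
    // should leave flags["indexers.example.url"] == vec!["http://localhost:9117"]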
@@ -1,94 +1,157 @@
 use std::sync::Arc;
+use std::slice::Iter;
 
 use figment::{Provider, Metadata, Profile, Error};
 use figment::value::{Map, Dict, Value, Tag};
 use serde::Deserialize;
 
 /// A provider that fetches its data from a given URL.
 pub struct CliProvider {
     /// The profile to emit data to if nesting is disabled.
     profile: Option<Profile>,
     args: Vec<std::string::String>,
 }
 
 impl CliProvider {
     pub fn new() -> CliProvider {
         CliProvider {
             profile: None,
             args: wild::args().collect(),
         }
     }
 }
 
 impl Provider for CliProvider {
     /// Returns metadata with kind `Network`, custom source `self.url`,
     /// and interpolator that returns a URL of `url/a/b/c` for key `a.b.c`.
     fn metadata(&self) -> Metadata {
         let args = &self.args;
         Metadata::named("CLI Flags")
             .source(args.join(" "))
             //.source(args.map(|args| args.collect::<Vec<_>>().join(" ")).unwrap_or(String::default()))
             /* .interpolater(move |profile, keys| match profile.is_custom() {
                 true => format!("{}/{}/{}", url, profile, keys.join("/")),
                 false => format!("{}/{}", url, keys.join("/")),
             }) */
     }
 
     /// Fetches the data from `self.url`. Note that `Dict`, `Map`, and
     /// `Profile` are `Deserialize`, so we can deserialized to them.
     fn data(&self) -> Result<Map<Profile, Dict>, Error> {
         // Parse a `Value` from a `String`
         fn parse_from_string(string: &String) -> Value {
             // TODO: Other integer types
             match string.parse::<i32>() {
                 Ok(i) => Value::Num(Tag::Default, figment::value::Num::I32(i)),
                 Err(_) => match string.parse::<bool>() {
                     Ok(b) => Value::Bool(Tag::Default, b),
                     Err(_) => Value::from(string.to_owned()),
                 },
             }
         }
 
-        fn fetch<'a, T: Deserialize<'a>>(args: &Vec<std::string::String>) -> Result<T, Error> {
-            let (args, argv) = argmap::parse(args.iter());
-
-            let mut dict = Dict::new();
-
-            for (key, vals) in argv {
-                let len = vals.len();
-                if len == 0 {
-                    continue;
-                }
-
-                let key_vec: Vec<&str> = key.split(".").collect();
-                for key in key_vec.iter() {
-                    dict.insert(key.to_owned(), Value::from(key.to_owned()));
-                }
-
-                if len == 1 {
-                    dict.insert(key, parse_from_string(&vals[0]));
-                } else {
-                    let mut values = Vec::new();
-                    for val in &vals {
-                        values.push(parse_from_string(val));
-                    }
-
-                    dict.insert(key, Value::Array(Tag::Default, values));
-                }
-            }
-
-            Ok(T::deserialize(dict).unwrap())
-
-            //Ok(T::deserialize(args.unwrap_or(&std::env::args()))?)
-
-            //Profile::default()
-        }
-
-        match &self.profile {
-            // Don't nest: `fetch` into a `Dict`.
-            Some(profile) => Ok(profile.collect(fetch(&self.args)?)),
-            // Nest: `fetch` into a `Map<Profile, Dict>`.
-            None => fetch(&self.args),
-        }
+        fn parse_keys(keys: &mut Iter<&str>, dict: &Dict, vals: &Vec<String>) -> Value {
+            let key = keys.next();
+
+            match key {
+                None => {
+                    if vals.len() == 1 {
+                        parse_from_string(&vals[0])
+                    } else {
+                        let mut values = Vec::new();
+                        for val in vals.iter() {
+                            values.push(parse_from_string(val));
+                        }
+
+                        Value::Array(Tag::Default, values)
+                    }
+                },
+                Some(key) => {
+                    let key = key.to_string();
+                    println!("Key is {}", key);
+
+                    println!("Dict is {:?}", dict);
+
+                    match dict.get(&key) {
+                        Some(val) => {
+                            println!("Val is {:?}", val);
+
+                            match val.as_dict() {
+                                Some(dict) => parse_keys(keys, &dict, vals),
+                                None => panic!("Expected a `Dict`, got some other value"),
+                            }
+                            //parse_keys(keys, &dict, vals)
+                        },
+                        None => {
+                            let mut current_dict = Dict::new();
+                            let val = parse_keys(keys, &current_dict, vals);
+
+                            current_dict.insert(key.to_string(), val);
+
+                            Value::from(current_dict)
+                        }
+                    }
+                    /* let mut current_dict = Dict::new();
+                    let val = parse_keys(keys, &current_dict, vals);
+
+                    current_dict.insert(key.to_string(), val);
+
+                    Value::from(current_dict) */
+                }
+            }
+        }
+
+        fn parse_cli(args: &Vec<std::string::String>) -> Result<Dict, Error> {
+            let (args, argv) = argmap::parse(args.iter());
+
+            let mut dict = Dict::new();
+
+            for (key, vals) in argv {
+                let len = vals.len();
+                if len == 0 {
+                    continue;
+                }
+
+                let key_vec = key.split(".").collect::<Vec<_>>();
+                if key_vec.len() > 1 {
+                    let mut key_iter = key_vec.iter();
+
+                    //let key = key_iter.next();
+                    let key = key_vec.first();
+                    let val = parse_keys(&mut key_iter, &dict, &vals);
+
+                    println!("Final val is {:?}", val);
+
+                    dict.insert(key.unwrap().to_string(), val);
+                } else {
+                    if len == 1 {
+                        dict.insert(key, parse_from_string(&vals[0]));
+                    } else {
+                        let mut values = Vec::new();
+                        for val in &vals {
+                            values.push(parse_from_string(val));
+                        }
+
+                        dict.insert(key, Value::Array(Tag::Default, values));
+                    }
+                }
+
+                println!("Dict: {:?}", dict);
+            }
+
+
+
+            Ok(dict)
+        }
+
+        match &self.profile {
+            // Don't nest: `fetch` into a `Dict`.
+            Some(profile) => Ok(profile.collect(parse_cli(&self.args)?)),
+            None => {
+                let mut map = Map::new();
+                map.insert(Profile::default(), parse_cli(&self.args)?);
+                Ok(map)
+            }
+        }
     }
 }
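Note: the new `parse_cli`/`parse_keys` pair is meant to fold dotted flag names into nested figment `Dict`s. A rough sketch (not part of this diff) of the intended mapping, assuming the recursion behaves as written; the flag names are hypothetical:

    // Hypothetical invocation:
    //   cross-seed --torrents_path ./torrents --indexers.example.api_key abc123
    //
    // argmap yields the flat keys "torrents_path" and "indexers.example.api_key";
    // parse_cli splits the second on '.' and parse_keys should nest it, so the
    // provider's data ends up roughly equivalent to this TOML:
    //
    //   torrents_path = "./torrents"
    //
    //   [indexers.example]
    //   api_key = "abc123"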
@@ -1,87 +1,87 @@
 use serde::{Deserialize,Serialize};
 use std::path::Path;
 use std::env;
 use std::collections::HashMap;
 use figment::{Figment, providers::{Format, Toml, Env}};
 use figment::value::Value as FigmentValue;
 
 use super::CliProvider;
 
 #[derive(Deserialize, Serialize)]
 pub struct Config {
     /// The path of the torrents to search.
     torrents_path: String,
     /// The output path of the torrents.
     output_path: Option<String>,
 
     //pub indexers: HashMap<String, Indexer>,
 
 
     /// Used for deserializing the indexers into a Vec<Indexer>.
     #[serde(rename = "indexers")]
     indexers_map: HashMap<String, FigmentValue>,
 
     /// The indexers to search.
     #[serde(skip)]
     pub indexers: Vec<Indexer>,
 }
 
 #[derive(Deserialize, Serialize)]
 pub struct Indexer {
     #[serde(skip_deserializing)]
     pub name: String,
     pub enabled: Option<bool>,
     pub url: String,
     pub api_key: String,
 }
 
 // Allow dead code for functions. We should probably remove this later on.
 #[allow(dead_code)]
 impl Config {
     pub fn new() -> Config {
         // The path of the config file without the file extension
         let path = match env::var("CROSS_SEED_CONFIG") {
             Ok(path) => path,
             Err(_) => "config".to_string(),
         };
 
         // TODO: Create a command line argument `Provider` (https://docs.rs/figment/0.10.6/figment/trait.Provider.html)
         // TODO: Figure out priority
         // Merge the config files
         let figment = Figment::new()
-            .join(Toml::file(format!("{}.toml", path)))
+            .join(CliProvider::new())
             .join(Env::prefixed("CROSS_SEED_"))
-            .join(CliProvider::new());
+            .join(Toml::file(format!("{}.toml", path)));
 
         let mut config: Config = figment.extract().unwrap();
 
         // Parse the indexers map into a vector.
         for (name, value) in &mut config.indexers_map {
             let mut indexer: Indexer = value.deserialize().unwrap();
             indexer.name = name.to_owned();
 
             config.indexers.push(indexer);
         }
 
         config
     }
 
     pub fn torrents_path(&self) -> &Path {
         Path::new(&self.torrents_path)
     }
 
     pub fn torrents_path_str(&self) -> &String {
         &self.torrents_path
     }
 
     pub fn output_path(&self) -> Option<&Path> {
         match self.output_path {
             Some(ref path) => Some(Path::new(path)),
             None => None,
         }
     }
 
     pub fn output_path_str(&self) -> Option<&String> {
         self.output_path.as_ref()
     }
 }
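Note: with figment, `join` keeps values supplied by earlier providers when keys conflict, so moving `CliProvider` to the front of the chain makes CLI flags take precedence over `CROSS_SEED_`-prefixed environment variables, which in turn take precedence over the TOML file. A minimal sketch (not part of this diff) of that ordering, assuming the same key were set in all three sources:

    // Highest priority first when chaining with `join`:
    let figment = Figment::new()
        .join(CliProvider::new())                    // --torrents_path ./from-cli
        .join(Env::prefixed("CROSS_SEED_"))          // CROSS_SEED_TORRENTS_PATH=./from-env
        .join(Toml::file(format!("{}.toml", path))); // torrents_path = "./from-toml"
    // figment.extract::<Config>() should then report torrents_path == "./from-cli"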
@@ -1,5 +1,5 @@
 pub mod config;
 pub use config::Config;
 
 pub mod cli_provider;
 pub use cli_provider::CliProvider;
src/main.rs (148 changed lines)
@@ -1,75 +1,75 @@
 mod config;
 
 use config::Config;
 
 use std::path::{Path, PathBuf};
 use std::error::Error;
 
 use lava_torrent::torrent::v1::Torrent;
 
 use torznab::Client as TorznabClient;
 
 fn read_torrents(path: &Path) -> Result<Vec<PathBuf>, Box<dyn Error>> {
     let mut torrents = Vec::new();
     for entry in path.read_dir()? {
         let entry = entry?;
         let path = entry.path();
         if path.is_file() {
             let filename = path.file_name().unwrap().to_str().unwrap();
             if filename.ends_with(".torrent") {
                 torrents.push(path);
             }
         } else {
             let mut inner = read_torrents(&path)?;
             torrents.append(&mut inner);
         }
     }
 
     return Ok(torrents);
 }
 
 fn main() {
     // Get config and debug the torrents
     let config = Config::new();//.expect("Failed to get config");
     println!("Searching torrents in: {}", config.torrents_path_str());
 
     println!("Searching {} trackers: ", config.indexers.len());
     for indexer in config.indexers.iter() {
         println!("  {}: {}", indexer.name, indexer.url);
     }
 
     let torrents = read_torrents(config.torrents_path()).unwrap();
 
     for torrent_path in torrents.iter() {
         let torrent = Torrent::read_from_file(torrent_path).unwrap();
         println!("{}:", torrent.name);
 
         /* for indexer in config.indexers.iter() {
             if indexer.enabled {
                 let client = TorznabClient::new(indexer.url.clone());
                 let results = client.search(&torrent).unwrap();
                 println!("{}", results);
             }
         } */
         //TorznabClient
 
         /*if let Some(announce) = torrent.announce {
             println!("  Announce: {}", announce);
         }
         if let Some(announce_list) = torrent.announce_list {
             println!("  Announce list:");
             for announce in announce_list {
                 for ann in announce {
                     println!("    {}", ann);
                 }
             }
         }
         println!("  Files:");
 
         if let Some(files) = torrent.files {
             for file in files.iter() {
                 println!("    {}", file.path.to_str().unwrap());
             }
         } */
     }
 }