Commit dd3b2f96 authored by Éloïs

[feat] create rust bin duniter-dbex (dex)

parent 2e2190d7
1 merge request: !1333 Feat/dex
@@ -49,6 +49,12 @@ version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b602bfe940d21c130f3895acd65221e8a61270debe89d628b9cb4e3ccb8569b"
[[package]]
name = "arc-swap"
version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d25d88fd6b8041580a654f9d0c581a047baee2b3efee13275f2fc392fc75034"
[[package]]
name = "arrayref"
version = "0.3.6"
@@ -389,6 +395,15 @@ dependencies = [
"vec_map",
]
[[package]]
name = "cloudabi"
version = "0.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
dependencies = [
"bitflags",
]
[[package]]
name = "cloudabi"
version = "0.1.0"
@@ -407,6 +422,17 @@ dependencies = [
"cc",
]
[[package]]
name = "comfy-table"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f97a418f1dee79b100875499e272ea81882c1b931a9c8432e165b595b461752"
dependencies = [
"crossterm",
"strum",
"strum_macros",
]
[[package]]
name = "concurrent-queue"
version = "1.2.2"
@@ -514,6 +540,31 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "crossterm"
version = "0.17.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f4919d60f26ae233e14233cc39746c8c8bb8cd7b05840ace83604917b51b6c7"
dependencies = [
"bitflags",
"crossterm_winapi",
"lazy_static",
"libc",
"mio",
"parking_lot 0.10.2",
"signal-hook",
"winapi",
]
[[package]]
name = "crossterm_winapi"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "057b7146d02fb50175fd7dbe5158f6097f33d02831f43b4ee8ae4ddf67b68f5c"
dependencies = [
"winapi",
]
[[package]]
name = "cryptoxide"
version = "0.2.1"
@@ -679,6 +730,22 @@ dependencies = [
"serde",
]
[[package]]
name = "duniter-dbex"
version = "0.1.0"
dependencies = [
"arrayvec",
"comfy-table",
"dirs",
"dubp-common",
"duniter-dbs",
"rayon",
"serde",
"serde_json",
"structopt",
"unwrap",
]
[[package]]
name = "duniter-dbs"
version = "0.1.0"
@@ -1065,7 +1132,7 @@ dependencies = [
"leveldb_minimal",
"maybe-async",
"mockall",
-"parking_lot",
+"parking_lot 0.11.0",
"rayon",
"regex",
"serde_json",
@@ -1118,6 +1185,15 @@ version = "0.2.78"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa7087f49d294270db4e1928fc110c976cd4b9e5a16348e0a1df09afa99e6c98"
[[package]]
name = "lock_api"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75"
dependencies = [
"scopeguard",
]
[[package]]
name = "lock_api"
version = "0.4.1"
@@ -1190,6 +1266,29 @@ dependencies = [
"autocfg",
]
[[package]]
name = "mio"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e53a6ea5f38c0a48ca42159868c6d8e1bd56c0451238856cc08d58563643bdc3"
dependencies = [
"libc",
"log",
"miow",
"ntapi",
"winapi",
]
[[package]]
name = "miow"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07b88fb9795d4d36d62a012dfbf49a8f5cf12751f36d31a9dbe66d528e58979e"
dependencies = [
"socket2",
"winapi",
]
[[package]]
name = "mockall"
version = "0.8.1"
@@ -1307,6 +1406,15 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be"
[[package]]
name = "ntapi"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a31937dea023539c72ddae0e3571deadc1414b300483fa7aaec176168cfa9d2"
dependencies = [
"winapi",
]
[[package]]
name = "num"
version = "0.2.1"
@@ -1411,6 +1519,16 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
[[package]]
name = "parking_lot"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e"
dependencies = [
"lock_api 0.3.4",
"parking_lot_core 0.7.2",
]
[[package]]
name = "parking_lot"
version = "0.11.0"
@@ -1418,8 +1536,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4893845fa2ca272e647da5d0e46660a314ead9c2fdd9a883aabc32e481a8733"
dependencies = [
"instant",
-"lock_api",
+"lock_api 0.4.1",
-"parking_lot_core",
+"parking_lot_core 0.8.0",
]
[[package]]
name = "parking_lot_core"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3"
dependencies = [
"cfg-if",
"cloudabi 0.0.3",
"libc",
"redox_syscall",
"smallvec",
"winapi",
]
[[package]]
@@ -1429,7 +1561,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c361aa727dd08437f2f1447be8b59a33b0edd15e0fcee698f935613d9efbca9b"
dependencies = [
"cfg-if",
-"cloudabi",
+"cloudabi 0.1.0",
"instant",
"libc",
"redox_syscall",
@@ -1846,6 +1978,27 @@ dependencies = [
"opaque-debug",
]
[[package]]
name = "signal-hook"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "604508c1418b99dfe1925ca9224829bb2a8a9a04dda655cc01fcad46f4ab05ed"
dependencies = [
"libc",
"mio",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-registry"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e12110bc539e657a646068aaf5eb5b63af9d0c1f7b29c97113fad80e15f035"
dependencies = [
"arc-swap",
"libc",
]
[[package]]
name = "slab"
version = "0.4.2"
@@ -1865,7 +2018,7 @@ dependencies = [
"fxhash",
"libc",
"log",
-"parking_lot",
+"parking_lot 0.11.0",
]
[[package]]
@@ -1898,6 +2051,18 @@ dependencies = [
"syn",
]
[[package]]
name = "socket2"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1fa70dc5c8104ec096f4fe7ede7a221d35ae13dcd19ba1ad9a81d2cab9a1c44"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"winapi",
]
[[package]]
name = "spin"
version = "0.5.2"
@@ -1934,6 +2099,24 @@ dependencies = [
"syn",
]
[[package]]
name = "strum"
version = "0.19.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b89a286a7e3b5720b9a477b23253bc50debac207c8d21505f8e70b36792f11b5"
[[package]]
name = "strum_macros"
version = "0.19.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e61bb0be289045cb80bfce000512e32d09f8337e54c186725da381377ad1f8d5"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "syn"
version = "1.0.42"
@@ -29,6 +29,7 @@ rusty-hook = "0.11.2"
[workspace]
members = [
"neon/native",
"rust-bins/duniter-dbex",
"rust-bins/xtask", "rust-bins/xtask",
"rust-libs/dubp-wot", "rust-libs/dubp-wot",
"rust-libs/duniter-dbs", "rust-libs/duniter-dbs",
......
[package]
name = "duniter-dbex"
version = "0.1.0"
authors = ["elois <elois@duniter.org>"]
description = "Duniter blockchain DB"
repository = "https://git.duniter.org/nodes/typescript/duniter/rust-bins/duniter-dbs-explorer"
readme = "README.md"
keywords = ["duniter", "database"]
license = "AGPL-3.0"
edition = "2018"
[[bin]]
bench = false
path = "src/main.rs"
name = "dex"
[build-dependencies]
structopt = "0.3.16"
[dependencies]
arrayvec = "0.5.1"
comfy-table = "1.0.0"
dirs = "3.0.1"
dubp-common = { version = "0.25.2", features = ["crypto_scrypt"] }
duniter-dbs = { path = "../../rust-libs/duniter-dbs", default-features = false, features = ["explorer", "leveldb_backend", "sync"] }
rayon = "1.3.1"
serde_json = "1.0.53"
structopt = "0.3.16"
[dev-dependencies]
serde = { version = "1.0.105", features = ["derive"] }
unwrap = "1.2.1"
# Duniter databases explorer (dex)
## Compile
git clone https://git.duniter.org/nodes/typescript/duniter.git
cd duniter
cargo build --release -p duniter-dbex
The binary executable is then located at `target/release/dex`.
## Use
See `dex --help`
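
For instance, here are a few possible invocations based on the subcommands defined in `src/cli.rs` (count, get, find, schema). The collection name `main_blocks` is only an illustrative placeholder; run the `schema` subcommand to list the real collections of your database.

```bash
# List the collections of the default database (bc_v1), with their key and value types
dex bc_v1 schema

# Count the entries of a collection
dex bc_v1 count main_blocks

# Get one value by key
dex bc_v1 get main_blocks 42

# Search entries whose value matches a regex, printed as pretty JSON
dex bc_v1 find main_blocks --value-regex "issuer" --output json --pretty
```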
## Autocompletion
The Bash autocompletion script is available here: `target/release/dex.bash`
**Several other shells are supported: Zsh, Fish, PowerShell and Elvish!**
To generate the autocompletion script for your shell, recompile with the env var `COMPLETION_SHELL` set.
For example, for fish: `COMPLETION_SHELL=fish cargo build --release -p duniter-dbex`
The autocompletion script can then be found in `target/release/`.
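
As a minimal sketch (assuming a Bash session and that the script has already been generated by the build as described above), the completions can be loaded with `source`:

```bash
# Load the generated Bash completion script into the current shell session
source target/release/dex.bash
```

To make this permanent, the same line can be added to `~/.bashrc`, or the script copied into your distribution's bash-completion directory.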
// Copyright (C) 2020 Éloïs SANCHEZ.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
extern crate structopt;
include!("src/cli.rs");
use std::env;
use structopt::clap::Shell;
fn main() {
// Define out dir
let current_dir = match env::current_dir() {
Err(_e) => return,
Ok(current_dir) => current_dir,
};
let out_dir = current_dir.as_path().join(format!(
"../../target/{}",
env::var("PROFILE").unwrap_or_else(|_| "debug".to_owned())
));
// Define shell
let shell = if let Some(shell_str) = option_env!("COMPLETION_SHELL") {
Shell::from_str(shell_str).expect("Unknown shell")
} else {
Shell::Bash
};
let mut app = Opt::clap();
app.gen_completions(
"dex", // We need to specify the bin name manually
shell, // Then say which shell to build completions for
out_dir,
); // Then say where to write the completions to
}
// Copyright (C) 2020 Éloïs SANCHEZ.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use std::{num::NonZeroUsize, path::PathBuf, str::FromStr};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "duniter-dbex", about = "Duniter databases explorer.")]
pub struct Opt {
/// Duniter profile name
#[structopt(short, long)]
pub profile: Option<String>,
/// Duniter home directory
#[structopt(short, long, parse(from_os_str))]
pub home: Option<PathBuf>,
/// Database to explore
#[structopt(default_value = "bc_v1", possible_values = &["bc_v1", "bc_v2", "mp_v1"])]
pub database: Database,
#[structopt(subcommand)]
pub cmd: SubCommand,
}
#[derive(Debug)]
pub enum Database {
BcV1,
}
impl FromStr for Database {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"bc_v1" => Ok(Self::BcV1),
"bc_v2" | "mp_v1" => unimplemented!(),
_ => unreachable!(),
}
}
}
#[derive(Debug, StructOpt)]
pub enum SubCommand {
/// Count collection entries
Count { collection: String },
/// Get one value
Get { collection: String, key: String },
/// Search values by criteria
Find {
collection: String,
#[structopt(short, long)]
/// Key min
start: Option<String>,
#[structopt(short, long)]
/// Key max
end: Option<String>,
/// Filter keys by a regular expression
#[structopt(short = "k", long)]
key_regex: Option<String>,
/// Show keys only
#[structopt(long)]
keys_only: bool,
/// Filter values by a regular expression
#[structopt(short = "v", long)]
value_regex: Option<String>,
/// Maximum number of entries to be found (slower, because it forces a sequential search)
#[structopt(short, long)]
limit: Option<usize>,
/// Browse the collection in reverse order
#[structopt(short, long)]
reverse: bool,
/// Step by
#[structopt(long, default_value = "1")]
step: NonZeroUsize,
/// Output format
#[structopt(short, long, default_value = "table-json", possible_values = &["csv", "json", "table-json", "table-properties"])]
output: OutputFormat,
/// Pretty json (Only for output format json or table-json)
#[structopt(long)]
pretty: bool,
/// Show only the specified properties
#[structopt(short, long)]
properties: Vec<String>,
/// Export found data to a file
#[structopt(short, long, parse(from_os_str))]
file: Option<PathBuf>,
},
/// Show database schema
Schema,
}
#[derive(Clone, Copy, Debug)]
pub enum OutputFormat {
Table,
TableJson,
Json,
Csv,
}
impl FromStr for OutputFormat {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"csv" => Ok(Self::Csv),
"json" => Ok(Self::Json),
"table-properties" => Ok(Self::Table),
"table-json" => Ok(Self::TableJson),
_ => unreachable!(),
}
}
}
// Copyright (C) 2020 Éloïs SANCHEZ.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
#![deny(
clippy::unwrap_used,
missing_copy_implementations,
trivial_casts,
trivial_numeric_casts,
unstable_features,
unused_import_braces
)]
mod cli;
mod print_found_data;
mod stringify_json_value;
use self::cli::{Database, Opt, OutputFormat, SubCommand};
use self::stringify_json_value::stringify_json_value;
use comfy_table::Table;
use duniter_dbs::kv_typed::prelude::*;
use duniter_dbs::prelude::*;
use duniter_dbs::regex::Regex;
use duniter_dbs::serde_json::{Map, Value};
use duniter_dbs::smallvec::{smallvec, SmallVec};
use duniter_dbs::BcV1Db;
use duniter_dbs::BcV1DbWritable;
use rayon::prelude::*;
use std::{
collections::{HashMap, HashSet},
fs::File,
io::{stdin, Write},
iter::FromIterator,
time::Instant,
};
use structopt::StructOpt;
const DATA_DIR: &str = "data";
const TOO_MANY_ENTRIES_ALERT: usize = 5_000;
fn main() -> Result<(), String> {
let opt = Opt::from_args();
let home = if let Some(home) = opt.home {
home
} else {
dirs::config_dir()
.ok_or_else(|| {
"Fail to auto find duniter's home directory, please specify it explicitly."
.to_owned()
})?
.as_path()
.join("duniter")
};
let profile_name = if let Some(profile_name) = opt.profile {
profile_name
} else {
"duniter_default".to_owned()
};
let profile_path = home.as_path().join(&profile_name);
let data_path = profile_path.as_path().join(DATA_DIR);
if !data_path.exists() {
return Err(format!(
"Path '{}' don't exist !",
data_path.to_str().expect("non-UTF-8 strings not supported")
));
}
let open_db_start_time = Instant::now();
match opt.database {
Database::BcV1 => apply_subcommand(
BcV1Db::<LevelDb>::open(LevelDbConf {
db_path: data_path.as_path().join("leveldb"),
..Default::default()
})
.map_err(|e| format!("{}", e))?,
opt.cmd,
open_db_start_time,
),
}
}
fn apply_subcommand<DB: DbExplorable>(
db: DB,
cmd: SubCommand,
open_db_start_time: Instant,
) -> Result<(), String> {
let duration = open_db_start_time.elapsed();
println!(
"Database opened in {}.{:06} seconds.",
duration.as_secs(),
duration.subsec_micros()
);
let start_time = Instant::now();
match cmd {
SubCommand::Count { collection } => {
if let ExplorerActionResponse::Count(count) = db
.explore(&collection, ExplorerAction::Count, stringify_json_value)
.map_err(|e| format!("{}", e))?
.map_err(|e| e.0)?
{
let duration = start_time.elapsed();
println!(
"Count operation performed in {}.{:06} seconds.",
duration.as_secs(),
duration.subsec_micros()
);
println!("\nThis collection contains {} entries.", count);
}
}
SubCommand::Get { collection, key } => {
if let ExplorerActionResponse::Get(value_opt) = db
.explore(
&collection,
ExplorerAction::Get { key: &key },
stringify_json_value,
)
.map_err(|e| format!("{}", e))?
.map_err(|e| e.0)?
{
if let Some(value) = value_opt {
println!("\n{}", value)
} else {
println!("\nThis collection not contains this key.")
}
}
}
SubCommand::Find {
collection,
start,
end,
key_regex,
keys_only,
value_regex,
limit,
reverse,
properties,
output: output_format,
pretty: pretty_json,
file: output_file,
step,
} => {
let value_regex_opt = opt_string_to_res_opt_regex(value_regex)?;
let captures_headers = if let Some(ref value_regex) = value_regex_opt {
value_regex
.capture_names()
.skip(1)
.enumerate()
.map(|(i, name_opt)| {
if let Some(name) = name_opt {
name.to_owned()
} else {
format!("CAP{}", i + 1)
}
})
.collect()
} else {
vec![]
};
if let ExplorerActionResponse::Find(entries) = db
.explore(
&collection,
ExplorerAction::Find {
key_min: start,
key_max: end,
key_regex: opt_string_to_res_opt_regex(key_regex)?,
value_regex: value_regex_opt,
limit,
reverse,
step,
},
stringify_json_value,
)
.map_err(|e| format!("{}", e))?
.map_err(|e| e.0)?
{
let duration = start_time.elapsed();
println!(
"Search performed in {}.{:06} seconds.\n\n{} entries found.",
duration.as_secs(),
duration.subsec_micros(),
entries.len()
);
if !too_many_entries(entries.len(), output_file.is_none())
.map_err(|e| format!("{}", e))?
{
return Ok(());
}
let start_print = Instant::now();
if let Some(output_file) = output_file {
let mut file =
File::create(output_file.as_path()).map_err(|e| format!("{}", e))?;
//let mut file_buffer = BufWriter::new(file);
print_found_data::print_found_data(
&mut file,
output_format,
pretty_json,
false,
print_found_data::DataToShow {
entries,
keys_only,
only_properties: properties,
},
captures_headers,
)
.map_err(|e| format!("{}", e))?;
//file_buffer.flush().map_err(|e| format!("{}", e))?;
let export_duration = start_print.elapsed();
println!(
"Search results were written to file: '{}' in {}.{:06} seconds.",
output_file
.to_str()
.expect("output-file contains invalid utf8 characters"),
export_duration.as_secs(),
export_duration.subsec_micros(),
);
} else {
print_found_data::print_found_data(
&mut std::io::stdout(),
output_format,
pretty_json,
true,
print_found_data::DataToShow {
entries,
keys_only,
only_properties: properties,
},
captures_headers,
)
.map_err(|e| format!("{}", e))?;
let print_duration = start_print.elapsed();
println!(
"Search results were displayed in {}.{:06} seconds.",
print_duration.as_secs(),
print_duration.subsec_micros(),
);
};
}
}
SubCommand::Schema => {
show_db_schema(db.list_collections());
}
};
Ok(())
}
fn too_many_entries(entries_len: usize, output_in_term: bool) -> std::io::Result<bool> {
if entries_len > TOO_MANY_ENTRIES_ALERT {
println!(
"{} all {} entries ? (Be careful, may crash your system!) [y/N]",
if output_in_term { "Display" } else { "Export" },
entries_len
);
let mut buffer = String::new();
stdin().read_line(&mut buffer)?;
Ok(buffer == "y\n")
} else {
Ok(true)
}
}
fn show_db_schema(collections_names: Vec<(&'static str, &'static str, &'static str)>) {
let mut table = Table::new();
table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
table.set_header(&["Collection name", "Key type", "Value type"]);
for (collection_name, key_full_type_name, value_full_type_name) in collections_names {
let key_type_name_opt = key_full_type_name.split(':').last();
let value_type_name_opt = value_full_type_name.split(':').last();
table.add_row(&[
collection_name,
key_type_name_opt.unwrap_or("?"),
value_type_name_opt.unwrap_or("?"),
]);
}
println!("{}", table);
}
#[inline]
fn opt_string_to_res_opt_regex(str_regex_opt: Option<String>) -> Result<Option<Regex>, String> {
if let Some(str_regex) = str_regex_opt {
Ok(Some(Regex::new(&str_regex).map_err(|e| format!("{}", e))?))
} else {
Ok(None)
}
}
// Copyright (C) 2020 Éloïs SANCHEZ.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::*;
const KEY_COLUMN_NAME: &str = "Key";
const VALUE_COLUMN_NAME: &str = "Value";
pub struct DataToShow {
pub entries: Vec<EntryFound>,
pub keys_only: bool,
pub only_properties: Vec<String>,
}
pub fn print_found_data<W: Write>(
output: &mut W,
output_format: OutputFormat,
pretty_json: bool,
dynamic_content_arrangement: bool,
data_to_show: DataToShow,
captures_names: Vec<String>,
) -> std::io::Result<()> {
let DataToShow {
entries,
keys_only,
only_properties,
} = data_to_show;
if entries.is_empty() {
return Ok(());
}
let only_properties_set = if !only_properties.is_empty() {
HashSet::from_iter(only_properties.into_iter())
} else {
HashSet::with_capacity(0)
};
match output_format {
OutputFormat::Json => {
let json_array = if keys_only {
entries
.into_par_iter()
.map(|entry| key_to_json(entry.key))
.collect()
} else {
entries
.into_par_iter()
.map(|entry| entry_to_json(&only_properties_set, &captures_names, entry))
.collect()
};
if pretty_json {
writeln!(output, "{:#}", Value::Array(json_array))
} else {
writeln!(output, "{}", Value::Array(json_array))
}
}
OutputFormat::Table => {
// If value is not an object or an array of objects, force raw output format
let mut entries_iter = entries.iter();
let first_object_opt = if keys_only {
None
} else {
loop {
if let Some(EntryFound { value, .. }) = entries_iter.next() {
if let Value::Array(ref json_array) = value {
if json_array.is_empty() {
continue;
} else {
break json_array[0].as_object();
}
} else {
break value.as_object();
}
} else {
// All values are empty arrays, force raw output format
break None;
}
}
};
let properties_names = if let Some(first_object) = first_object_opt {
if only_properties_set.is_empty() {
first_object.keys().cloned().collect::<HashSet<String>>()
} else {
first_object
.keys()
.filter(|property_name| {
only_properties_set.contains(property_name.as_str())
})
.cloned()
.collect::<HashSet<String>>()
}
} else {
return print_found_data(
output,
OutputFormat::TableJson,
pretty_json,
dynamic_content_arrangement,
print_found_data::DataToShow {
entries,
keys_only,
only_properties: vec![],
},
captures_names,
);
};
// Create table
let mut table = Table::new();
if dynamic_content_arrangement {
table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
}
// Map data by property
let entries_map: Vec<HashMap<String, String>> = entries
.into_par_iter()
.map(|entry| entry_to_rows(&only_properties_set, &captures_names, entry))
.flatten()
.collect();
// Define table headers
let mut headers = Vec::with_capacity(1 + properties_names.len() + captures_names.len());
headers.push(KEY_COLUMN_NAME.to_owned());
if !keys_only {
for property_name in properties_names {
headers.push(property_name);
}
headers.sort_by(|a, b| {
if a == KEY_COLUMN_NAME {
std::cmp::Ordering::Less
} else if b == KEY_COLUMN_NAME {
std::cmp::Ordering::Greater
} else {
a.cmp(b)
}
});
headers.extend(captures_names);
}
table.set_header(&headers);
// Fill table
for properties_values in entries_map {
let mut row = SmallVec::<[&str; 8]>::new();
for column_name in &headers {
if let Some(property_value) = properties_values.get(column_name) {
row.push(property_value.as_str());
} else {
row.push("")
}
}
table.add_row(row);
}
// Print table
writeln!(output, "{}", table)
}
OutputFormat::TableJson => {
let mut table = Table::new();
if dynamic_content_arrangement {
table.set_content_arrangement(comfy_table::ContentArrangement::Dynamic);
}
let mut headers = Vec::with_capacity(2 + captures_names.len());
headers.push(KEY_COLUMN_NAME);
if !keys_only {
headers.push(VALUE_COLUMN_NAME);
}
headers.extend(captures_names.iter().map(String::as_str));
table.set_header(headers);
for EntryFound {
key,
value,
captures: captures_opt,
} in entries
{
let mut row = Vec::with_capacity(2 + captures_names.len());
row.push(key);
if keys_only {
table.add_row(row);
} else {
if pretty_json {
row.push(format!("{:#}", value));
} else {
row.push(value.to_string());
}
let rows = caps_to_rows(row, &captures_names[..], captures_opt);
for row in rows {
table.add_row(row);
}
}
}
writeln!(output, "{}", table)
}
_ => todo!(),
}
}
#[inline(always)]
fn key_to_json(key: String) -> Value {
Value::String(key)
}
fn entry_to_json(
only_properties_set: &HashSet<String>,
captures_names: &[String],
entry: EntryFound,
) -> Value {
let EntryFound {
key,
mut value,
captures: captures_opt,
} = entry;
if !only_properties_set.is_empty() {
match value {
Value::Object(ref mut json_map) => {
let mut properties_to_rm = SmallVec::<[String; 64]>::new();
for property_name in json_map.keys() {
if !only_properties_set.contains(property_name) {
properties_to_rm.push(property_name.clone());
}
}
for property_name in properties_to_rm {
json_map.remove(&property_name);
}
}
Value::Array(ref mut json_array) => {
for sub_value in json_array {
if let Value::Object(ref mut json_map) = sub_value {
let mut properties_to_rm = SmallVec::<[String; 64]>::new();
for property_name in json_map.keys() {
if !only_properties_set.contains(property_name) {
properties_to_rm.push(property_name.clone());
}
}
for property_name in properties_to_rm {
json_map.remove(&property_name);
}
}
}
}
_ => (),
}
}
let mut json_map = Map::with_capacity(2);
json_map.insert("key".to_owned(), Value::String(key));
json_map.insert("value".to_owned(), value);
if !captures_names.is_empty() {
let mut captures_objects = Vec::new();
if let Some(ValueCaptures(captures)) = captures_opt {
for capture in captures {
let mut capture_object = Map::with_capacity(captures_names.len());
for (i, capture_group_value_opt) in capture.into_iter().enumerate() {
if let Some(capture_group_value) = capture_group_value_opt {
capture_object.insert(
captures_names[i].to_owned(),
Value::String(capture_group_value),
);
}
}
captures_objects.push(Value::Object(capture_object));
}
}
json_map.insert("captures".to_owned(), Value::Array(captures_objects));
}
Value::Object(json_map)
}
fn entry_to_rows(
only_properties_set: &HashSet<String>,
captures_names: &[String],
entry: EntryFound,
) -> Vec<HashMap<String, String>> {
let EntryFound {
key,
value,
captures: captures_opt,
} = entry;
match value {
Value::Object(value_json_map) => {
let row_map = map_entry_by_properties(&only_properties_set, key, value_json_map);
caps_to_rows_maps(row_map, captures_names, captures_opt)
}
Value::Array(json_array) => json_array
.into_iter()
.map(|sub_value| {
if let Value::Object(sub_value_json_map) = sub_value {
map_entry_by_properties(&only_properties_set, key.clone(), sub_value_json_map)
} else {
unreachable!()
}
})
.collect(),
_ => unreachable!(),
}
}
fn map_entry_by_properties(
only_properties_set: &HashSet<String>,
k: String,
value_json_map: Map<String, Value>,
) -> HashMap<String, String> {
let mut row_map = HashMap::with_capacity(1 + value_json_map.len());
row_map.insert(KEY_COLUMN_NAME.to_owned(), k);
for (property_name, property_value) in value_json_map {
if only_properties_set.is_empty() || only_properties_set.contains(&property_name) {
if let Value::String(property_value_string) = property_value {
row_map.insert(property_name, property_value_string);
} else {
row_map.insert(property_name, property_value.to_string());
}
}
}
row_map
}
fn caps_to_rows(
mut first_row_begin: Vec<String>,
captures_names: &[String],
captures_opt: Option<ValueCaptures>,
) -> SmallVec<[Vec<String>; 2]> {
if !captures_names.is_empty() {
if let Some(ValueCaptures(captures)) = captures_opt {
let first_row_begin_len = first_row_begin.len();
let mut rows = SmallVec::with_capacity(captures.len());
let mut current_row = first_row_begin;
for capture in captures {
for capture_group_value_opt in capture.into_iter() {
if let Some(capture_group_value) = capture_group_value_opt {
current_row.push(capture_group_value);
} else {
current_row.push(String::new());
}
}
rows.push(current_row);
current_row = (0..first_row_begin_len).map(|_| String::new()).collect();
}
rows
} else {
first_row_begin.extend((0..captures_names.len()).map(|_| String::new()));
smallvec![first_row_begin]
}
} else {
smallvec![first_row_begin]
}
}
fn caps_to_rows_maps(
first_row_map_begin: HashMap<String, String>,
captures_names: &[String],
captures_opt: Option<ValueCaptures>,
) -> Vec<HashMap<String, String>> {
if !captures_names.is_empty() {
if let Some(ValueCaptures(captures)) = captures_opt {
let mut rows = Vec::with_capacity(captures.len());
let mut current_row_map = first_row_map_begin;
for capture in captures {
for (i, capture_group_value_opt) in capture.into_iter().enumerate() {
if let Some(capture_group_value) = capture_group_value_opt {
current_row_map.insert(captures_names[i].to_owned(), capture_group_value);
}
}
rows.push(current_row_map);
current_row_map = HashMap::with_capacity(captures_names.len());
}
rows
} else {
vec![first_row_map_begin]
}
} else {
vec![first_row_map_begin]
}
}
use arrayvec::ArrayVec;
use dubp_common::crypto::bases::b58::ToBase58 as _;
use dubp_common::crypto::hashs::Hash;
use dubp_common::crypto::keys::ed25519::{PublicKey, Signature};
use dubp_common::crypto::keys::Signature as _;
use std::convert::TryFrom;
pub fn stringify_json_value(mut json_value: serde_json::Value) -> serde_json::Value {
match json_value {
serde_json::Value::Object(ref mut json_obj) => stringify_json_object(json_obj),
serde_json::Value::Array(ref mut json_array) => {
for json_array_cell in json_array {
if let serde_json::Value::Object(json_obj) = json_array_cell {
stringify_json_object(json_obj)
}
}
}
_ => (),
}
json_value
}
fn stringify_json_object(json_object: &mut serde_json::Map<String, serde_json::Value>) {
let mut stringified_values: Vec<(String, serde_json::Value)> = Vec::new();
for (k, v) in json_object.iter_mut() {
match k.as_str() {
"pub" | "pubkey" | "issuer" => {
if let serde_json::Value::Object(json_pubkey) = v {
let json_pubkey_data = json_pubkey.get("datas").expect("corrupted db");
if let serde_json::Value::Array(json_array) = json_pubkey_data {
let pubkey_string =
PublicKey::try_from(&json_array_to_32_bytes(json_array)[..])
.expect("corrupted db")
.to_base58();
stringified_values
.push((k.to_owned(), serde_json::Value::String(pubkey_string)));
} else {
panic!("corrupted db");
}
}
}
"hash" | "inner_hash" | "previous_hash" => {
if let serde_json::Value::Array(json_array) = v {
let hash_string = Hash(json_array_to_32_bytes(json_array)).to_hex();
stringified_values.push((k.to_owned(), serde_json::Value::String(hash_string)));
}
}
"sig" | "signature" => {
if let serde_json::Value::Array(json_array) = v {
let sig_string = Signature(json_array_to_64_bytes(json_array)).to_base64();
stringified_values.push((k.to_owned(), serde_json::Value::String(sig_string)));
}
}
_ => {
if let serde_json::Value::Object(ref mut json_sub_object) = v {
stringify_json_object(json_sub_object)
}
}
}
}
for (k, v) in stringified_values {
json_object.insert(k, v);
}
}
#[inline]
fn json_array_to_32_bytes(json_array: &[serde_json::Value]) -> [u8; 32] {
let bytes = json_array
.iter()
.map(|jv| {
if let serde_json::Value::Number(jn) = jv {
jn.as_u64().unwrap_or_default() as u8
} else {
panic!("corrupted db")
}
})
.collect::<ArrayVec<[u8; 32]>>();
bytes.into_inner().expect("corrupted db")
}
#[inline]
fn json_array_to_64_bytes(json_array: &[serde_json::Value]) -> [u8; 64] {
let bytes = json_array
.iter()
.map(|jv| {
if let serde_json::Value::Number(jn) = jv {
jn.as_u64().unwrap_or_default() as u8
} else {
panic!("corrupted db")
}
})
.collect::<ArrayVec<[u8; 64]>>();
bytes.into_inner().expect("corrupted db")
}
#[cfg(test)]
mod tests {
use super::*;
use dubp_common::crypto::keys::PublicKey as _;
use serde_json::Number;
use serde_json::Value;
use unwrap::unwrap;
#[derive(serde::Serialize)]
struct JsonObjectTest {
pubkey: PublicKey,
hash: Hash,
other: usize,
inner: JsonSubObjectTest,
}
#[derive(serde::Serialize)]
struct JsonSubObjectTest {
issuer: PublicKey,
}
#[test]
fn test_stringify_json_object() {
let mut json_value = unwrap!(serde_json::to_value(JsonObjectTest {
pubkey: unwrap!(PublicKey::from_base58(
"A2C6cVJnkkT2n4ivMPiLH2njQHeHSZcVf1cSTwZYScQ6"
)),
hash: unwrap!(Hash::from_hex(
"51DF2FCAB8809596253CD98594D0DBCEECAAF3A88A43C6EDD285B6B24FB9D50D"
)),
other: 3,
inner: JsonSubObjectTest {
issuer: unwrap!(PublicKey::from_base58(
"4agK3ycEQNahuRGoFJDXA2aQGt4iV2YSMPKcoMeR6ZfA"
))
}
}));
if let serde_json::Value::Object(ref mut json_obj) = json_value {
stringify_json_object(json_obj);
assert_eq!(
json_obj.get("pubkey"),
Some(Value::String(
"A2C6cVJnkkT2n4ivMPiLH2njQHeHSZcVf1cSTwZYScQ6".to_owned()
))
.as_ref()
);
assert_eq!(
json_obj.get("hash"),
Some(Value::String(
"51DF2FCAB8809596253CD98594D0DBCEECAAF3A88A43C6EDD285B6B24FB9D50D".to_owned()
))
.as_ref()
);
assert_eq!(
json_obj.get("other"),
Some(Value::Number(Number::from(3))).as_ref()
);
let json_sub_obj = unwrap!(json_obj.get("inner"));
if let serde_json::Value::Object(json_sub_obj) = json_sub_obj {
assert_eq!(
json_sub_obj.get("issuer"),
Some(Value::String(
"4agK3ycEQNahuRGoFJDXA2aQGt4iV2YSMPKcoMeR6ZfA".to_owned()
))
.as_ref()
);
} else {
panic!("json_sub_obj must be an abject");
}
} else {
panic!("json_value must be an abject");
}
}
#[test]
fn test_json_array_to_32_bytes() {
let json_array = vec![
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
Value::Number(Number::from(2)),
Value::Number(Number::from(0)),
Value::Number(Number::from(1)),
];
assert_eq!(
[
0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0, 1, 2, 0,
1, 2, 0, 1
],
json_array_to_32_bytes(json_array.as_ref()),
)
}
}