Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
Duniter v2S
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Container registry
Model registry
Monitor
Service Desk
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
nodes
rust
Duniter v2S
Commits
b328921f
Unverified
Commit
b328921f
authored
1 year ago
by
bgallois
Browse files
Options
Downloads
Patches
Plain Diff
add max_block_weight
parent
03320e1d
No related branches found
No related tags found
No related merge requests found
Pipeline
#35059
failed
1 year ago
Stage: labels
Stage: quality
Stage: build
Stage: tests
Changes
1
Pipelines
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
resources/weight_analyzer/src/lib.rs
+32
-9
32 additions, 9 deletions
resources/weight_analyzer/src/lib.rs
with
32 additions
and
9 deletions
resources/weight_analyzer/src/lib.rs
+
32
−
9
View file @
b328921f
use
glob
::
glob
;
use
glob
::
glob
;
use
std
::
collections
::
HashMap
;
use
std
::
collections
::
HashMap
;
use
std
::
ops
::
Div
;
use
std
::
path
::
Path
;
use
std
::
path
::
Path
;
use
subweight_core
::
parse
::
overhead
::
Weight
;
use
subweight_core
::
parse
::
overhead
::
Weight
;
use
subweight_core
::
parse
::
pallet
::
ChromaticExtrinsic
;
use
subweight_core
::
parse
::
pallet
::
ChromaticExtrinsic
;
...
@@ -8,8 +9,18 @@ use subweight_core::parse::storage::Weights;
...
@@ -8,8 +9,18 @@ use subweight_core::parse::storage::Weights;
use
subweight_core
::
scope
::
Scope
;
use
subweight_core
::
scope
::
Scope
;
use
subweight_core
::
term
::
Term
;
use
subweight_core
::
term
::
Term
;
/// Maximal `Weight` of a block.
///
/// Used to express an extrinsic's weight as a percentage of a full block.
// TODO: extract this automatically from the runtime metadata
// (the `frame_system` `BlockWeights` constant) instead of hard-coding it.
#[derive(Clone, Copy, Debug, PartialEq)]
pub struct MaxBlockWeight(f64);

impl MaxBlockWeight {
    /// Builds a `MaxBlockWeight` from an explicit value, for runtimes
    /// whose maximal block weight differs from the default.
    pub fn new(max_block_weight: f64) -> Self {
        MaxBlockWeight(max_block_weight)
    }
}

impl Default for MaxBlockWeight {
    /// Returns the previously hard-coded value `2_000_000_000_000.`
    /// (the former `MAX_BLOCK_WEIGHT` constant).
    fn default() -> Self {
        MaxBlockWeight(2_000_000_000_000.)
    }
}
impl
Div
<&
MaxBlockWeight
>
for
f64
{
type
Output
=
Self
;
fn
div
(
self
,
max_block_weight
:
&
MaxBlockWeight
)
->
Self
::
Output
{
self
/
max_block_weight
.0
}
}
#[derive(Debug)]
#[derive(Debug)]
pub
struct
WeightInfo
{
pub
struct
WeightInfo
{
...
@@ -25,16 +36,21 @@ pub struct WeightInfo {
...
@@ -25,16 +36,21 @@ pub struct WeightInfo {
/// * `folder_path` - A Path to a folder where the weight files are stored.
/// * `folder_path` - A Path to a folder where the weight files are stored.
/// `paritydb_weights.rs` is mandatory and pallet weights should start by
/// `paritydb_weights.rs` is mandatory and pallet weights should start by
/// `pallet_`.
/// `pallet_`.
/// *`max_block_weight` - The maximal weight of a block.
///
///
/// # Examples
/// # Examples
///
///
/// ```
/// ```
/// use weightanalyzer::analyze_weight;
/// use weightanalyzer::analyze_weight;
/// use std::path::Path;
/// use std::path::Path;
/// let weight_by_pallet = analyze_weight(Path::new("../../runtime/common/src/weights/"));
/// use weightanalyzer::MaxBlockWeight;
/// let weight_by_pallet = analyze_weight(Path::new("../../runtime/common/src/weights/"), &MaxBlockWeight::default());
/// println!("{:?}", weight_by_pallet);
/// println!("{:?}", weight_by_pallet);
/// ```
/// ```
pub
fn
analyze_weight
(
folder_path
:
&
Path
)
->
HashMap
<
String
,
HashMap
<
String
,
WeightInfo
>>
{
pub
fn
analyze_weight
(
folder_path
:
&
Path
,
max_block_weight
:
&
MaxBlockWeight
,
)
->
HashMap
<
String
,
HashMap
<
String
,
WeightInfo
>>
{
let
pallet_weights
=
read_pallet_weight
(
folder_path
);
let
pallet_weights
=
read_pallet_weight
(
folder_path
);
let
db_weight
=
read_db_weight
(
folder_path
);
let
db_weight
=
read_db_weight
(
folder_path
);
let
_overhead_weights
=
read_overhead_weight
(
folder_path
);
let
_overhead_weights
=
read_overhead_weight
(
folder_path
);
...
@@ -43,7 +59,7 @@ pub fn analyze_weight(folder_path: &Path) -> HashMap<String, HashMap<String, Wei
...
@@ -43,7 +59,7 @@ pub fn analyze_weight(folder_path: &Path) -> HashMap<String, HashMap<String, Wei
let
mut
scope
=
Scope
::
from_substrate
();
let
mut
scope
=
Scope
::
from_substrate
();
scope
=
scope
.with_storage_weights
(
db_weight
.weights.read
,
db_weight
.weights.write
);
scope
=
scope
.with_storage_weights
(
db_weight
.weights.read
,
db_weight
.weights.write
);
process
(
pallet_weights
,
scope
)
process
(
pallet_weights
,
scope
,
max_block_weight
)
}
}
fn
read_pallet_weight
(
folder_path
:
&
Path
)
->
Vec
<
Vec
<
ChromaticExtrinsic
>>
{
fn
read_pallet_weight
(
folder_path
:
&
Path
)
->
Vec
<
Vec
<
ChromaticExtrinsic
>>
{
...
@@ -81,6 +97,7 @@ fn read_overhead_weight(folder_path: &Path) -> Weight {
...
@@ -81,6 +97,7 @@ fn read_overhead_weight(folder_path: &Path) -> Weight {
fn
evaluate_weight
(
fn
evaluate_weight
(
extrinsic
:
ChromaticExtrinsic
,
extrinsic
:
ChromaticExtrinsic
,
scope
:
&
mut
Scope
<
Term
<
u128
>>
,
scope
:
&
mut
Scope
<
Term
<
u128
>>
,
max_block_weight
:
&
MaxBlockWeight
,
)
->
Result
<
(
String
,
String
,
WeightInfo
),
String
>
{
)
->
Result
<
(
String
,
String
,
WeightInfo
),
String
>
{
// Extend the scope with the maximum value of the complexity parameter.
// Extend the scope with the maximum value of the complexity parameter.
if
let
Some
(
params
)
=
extrinsic
.comp_ranges
{
if
let
Some
(
params
)
=
extrinsic
.comp_ranges
{
...
@@ -98,7 +115,7 @@ fn evaluate_weight(
...
@@ -98,7 +115,7 @@ fn evaluate_weight(
.expect
(
"Can't evaluate"
)
.expect
(
"Can't evaluate"
)
.eval
(
scope
)
.eval
(
scope
)
.unwrap
();
.unwrap
();
let
relative_weight
=
(
weight
as
f64
)
/
MAX_BLOCK_WEIGHT
*
100.
;
let
relative_weight
=
(
weight
as
f64
)
/
max_block_weight
*
100.
;
Ok
((
Ok
((
extrinsic
.pallet
,
extrinsic
.pallet
,
extrinsic
.name
,
extrinsic
.name
,
...
@@ -112,11 +129,13 @@ fn evaluate_weight(
...
@@ -112,11 +129,13 @@ fn evaluate_weight(
fn
process
(
fn
process
(
pallet_weights
:
Vec
<
Vec
<
ChromaticExtrinsic
>>
,
pallet_weights
:
Vec
<
Vec
<
ChromaticExtrinsic
>>
,
mut
scope
:
Scope
<
Term
<
u128
>>
,
mut
scope
:
Scope
<
Term
<
u128
>>
,
max_block_weight
:
&
MaxBlockWeight
,
)
->
HashMap
<
String
,
HashMap
<
String
,
WeightInfo
>>
{
)
->
HashMap
<
String
,
HashMap
<
String
,
WeightInfo
>>
{
let
mut
weight_by_pallet
=
HashMap
::
new
();
let
mut
weight_by_pallet
=
HashMap
::
new
();
for
i
in
pallet_weights
{
for
i
in
pallet_weights
{
for
j
in
i
{
for
j
in
i
{
let
(
pallet
,
extrinsic
,
weight
)
=
evaluate_weight
(
j
,
&
mut
scope
)
.unwrap
();
let
(
pallet
,
extrinsic
,
weight
)
=
evaluate_weight
(
j
,
&
mut
scope
,
max_block_weight
)
.unwrap
();
weight_by_pallet
.insert
(
pallet
.clone
(),
HashMap
::
from
([(
extrinsic
.clone
(),
weight
)]));
weight_by_pallet
.insert
(
pallet
.clone
(),
HashMap
::
from
([(
extrinsic
.clone
(),
weight
)]));
}
}
}
}
...
@@ -126,15 +145,19 @@ fn process(
...
@@ -126,15 +145,19 @@ fn process(
#[cfg(test)]
mod tests {
    use crate::{analyze_weight, MaxBlockWeight};
    use std::path::Path;

    /// Analyzing the committed weight files must succeed.
    /// Run with `cargo test -- --nocapture` to inspect the printed result.
    #[test]
    fn should_works() {
        let weights_dir = Path::new("../../runtime/common/src/weights/");
        let weight_by_pallet = analyze_weight(weights_dir, &MaxBlockWeight::default());
        println!("{:?}", weight_by_pallet);
    }

    /// An empty path holds no weight files, so the analysis must panic.
    #[test]
    #[should_panic]
    fn should_not_works() {
        let _ = analyze_weight(Path::new(""), &MaxBlockWeight::default());
    }
}
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment