hexsha
stringlengths 40
40
| size
int64 2
1.05M
| content
stringlengths 2
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
01b037f32b441daf3fa6d0039d68216b6e1cc3d7 | 7,438 | mod test_utils;
/// Instead of guarding every individual thing with `#[cfg(feature = "http")]`, use a module.
#[cfg(feature = "http")]
mod http_happy {
    use crate::test_utils::{read_to_end, test_data};
    use mockito::mock;
    use std::fs::File;
    use std::str::FromStr;
    use tough::{DefaultTransport, HttpTransport, RepositoryLoader, Transport};
    use url::Url;

    /// Create a path in a mock HTTP server which serves a file from `tuf-reference-impl`.
    /// The mock expects exactly one GET for the path and replies 200 with the file bytes.
    fn create_successful_get_mock(relative_path: &str) -> mockito::Mock {
        let repo_dir = test_data().join("tuf-reference-impl");
        // `join` already returns an owned `PathBuf`; borrowing the temporary was a
        // clippy::needless_borrow.
        let file_bytes = std::fs::read(repo_dir.join(relative_path)).unwrap();
        mock("GET", ("/".to_owned() + relative_path).as_str())
            .with_status(200)
            .with_header("content-type", "application/octet-stream")
            .with_body(file_bytes.as_slice())
            .expect(1)
            .create()
    }

    /// Test that `tough` works with a healthy HTTP server.
    #[test]
    fn test_http_transport_happy_case() {
        run_http_test(HttpTransport::default());
    }

    /// Test that `DefaultTransport` works over HTTP when the `http` feature is enabled.
    #[test]
    fn test_http_default_transport() {
        run_http_test(DefaultTransport::default());
    }

    /// Load the tuf-reference-impl repository through `transport` and verify target
    /// contents and custom metadata. Each mock asserts it was hit exactly once.
    fn run_http_test<T: Transport + 'static>(transport: T) {
        let repo_dir = test_data().join("tuf-reference-impl");
        let mock_timestamp = create_successful_get_mock("metadata/timestamp.json");
        let mock_snapshot = create_successful_get_mock("metadata/snapshot.json");
        let mock_targets = create_successful_get_mock("metadata/targets.json");
        let mock_role1 = create_successful_get_mock("metadata/role1.json");
        let mock_role2 = create_successful_get_mock("metadata/role2.json");
        let mock_file1_txt = create_successful_get_mock("targets/file1.txt");
        let mock_file2_txt = create_successful_get_mock("targets/file2.txt");
        let base_url = Url::from_str(mockito::server_url().as_str()).unwrap();
        let repo = RepositoryLoader::new(
            File::open(repo_dir.join("metadata").join("1.root.json")).unwrap(),
            base_url.join("metadata").unwrap(),
            base_url.join("targets").unwrap(),
        )
        .transport(transport)
        .load()
        .unwrap();
        // Verify that both targets can be read back and match their known contents.
        assert_eq!(
            read_to_end(repo.read_target("file1.txt").unwrap().unwrap()),
            &b"This is an example target file."[..]
        );
        assert_eq!(
            read_to_end(repo.read_target("file2.txt").unwrap().unwrap()),
            &b"This is an another example target file."[..]
        );
        // Custom (non-TUF-standard) metadata should round-trip as well.
        assert_eq!(
            repo.targets()
                .signed
                .targets
                .get("file1.txt")
                .unwrap()
                .custom
                .get("file_permissions")
                .unwrap(),
            "0644"
        );
        // Each mock asserts its expected hit count (exactly one GET apiece).
        mock_timestamp.assert();
        mock_snapshot.assert();
        mock_targets.assert();
        mock_role1.assert();
        mock_role2.assert();
        mock_file1_txt.assert();
        mock_file2_txt.assert();
    }
}
#[cfg(feature = "http")]
#[cfg(feature = "integ")]
mod http_integ {
    use crate::test_utils::test_data;
    use std::fs::File;
    use std::path::PathBuf;
    use std::process::{Command, Stdio};
    use tough::{HttpTransportBuilder, RepositoryLoader};
    use url::Url;

    /// Path of the `integ` directory, a sibling of this crate's manifest directory.
    pub fn integ_dir() -> PathBuf {
        let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        p.pop();
        p = p.join("integ");
        p
    }

    /// Returns a command object that runs the provided script under bash, whether we
    /// are under cygwin or unix.
    pub fn bash_base() -> Command {
        // if under cygwin, run the bash script under cygwin64 bash
        if cfg!(target_os = "windows") {
            let mut command = Command::new("c:\\cygwin64\\bin\\bash");
            command.arg("-l");
            command
        } else {
            Command::new("bash")
        }
    }

    /// Root of the `tuf-reference-impl` test repository.
    pub fn tuf_reference_impl() -> PathBuf {
        test_data().join("tuf-reference-impl")
    }

    /// `metadata` directory inside the reference repository.
    pub fn tuf_reference_impl_metadata() -> PathBuf {
        tuf_reference_impl().join("metadata")
    }

    /// Path to the trusted root (`1.root.json`) of the reference repository.
    pub fn tuf_reference_impl_root_json() -> PathBuf {
        tuf_reference_impl_metadata().join("1.root.json")
    }

    /// Test `tough` using faulty HTTP connections.
    ///
    /// This test requires `docker` and should be disabled for PRs because it will not work with our
    /// current CI setup. It works by starting HTTP services in containers which serve the tuf-
    /// reference-impl through fault-ridden HTTP. We load the repo many times in a loop, and
    /// statistically exercise many of the retry code paths. In particular, the server aborts during
    /// the send which exercises the range-header retry in the `Read` loop, and 5XX's are also sent
    /// triggering retries in the `fetch` loop.
    #[test]
    fn test_retries() {
        use std::ffi::OsString;
        // run docker images to create a faulty http representation of tuf-reference-impl
        // Get the "run.sh" path
        let script_path = integ_dir()
            .join("failure-server")
            .join("run.sh")
            .into_os_string()
            .into_string()
            .unwrap();
        // Run it under bash
        let output = bash_base()
            .arg(OsString::from(script_path))
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit())
            .output()
            .expect("failed to start server with docker containers");
        if !output.status.success() {
            panic!("Failed to run integration test HTTP servers, is docker running?");
        }
        // load the tuf-reference-impl repo via http repeatedly through faulty proxies
        for i in 0..5 {
            let transport = HttpTransportBuilder::new()
                // the service we have created is very toxic with many failures, so we will do a
                // large number of retries, enough that we can be reasonably assured that we will
                // always succeed.
                .tries(200)
                // we don't want the test to take forever so we use small pauses
                .initial_backoff(std::time::Duration::from_nanos(100))
                .max_backoff(std::time::Duration::from_millis(1))
                .build();
            let root_path = tuf_reference_impl_root_json();
            RepositoryLoader::new(
                File::open(&root_path).unwrap(),
                Url::parse("http://localhost:10103/metadata").unwrap(),
                Url::parse("http://localhost:10103/targets").unwrap(),
            )
            .transport(transport)
            .load()
            .unwrap();
            println!("{}:{} SUCCESSFULLY LOADED THE REPO {}", file!(), line!(), i);
        }
        // stop and delete the docker containers, images and network
        let output = bash_base()
            .arg(
                integ_dir()
                    .join("failure-server")
                    .join("teardown.sh")
                    .into_os_string(),
            )
            .stdout(Stdio::inherit())
            .stderr(Stdio::inherit())
            .output()
            .expect("failed to delete docker objects");
        assert!(output.status.success());
    }
}
| 37.376884 | 111 | 0.581608 |
ddc7f55d58343f172901d0ba5cdb57b50db1ff72 | 22,133 | use itertools::Itertools;
use syntect::highlighting::Style as SyntectStyle;
use unicode_segmentation::UnicodeSegmentation;
use crate::ansi;
use crate::cli;
use crate::config::Config;
use crate::delta::State;
use crate::features::line_numbers;
use crate::features::OptionValueFunction;
use crate::minusplus::*;
use crate::paint::Painter;
use crate::paint::{BgFillMethod, BgShouldFill};
use crate::style::Style;
use crate::wrapping::wrap_zero_block;
/// A styled line represented as a sequence of (style, substring) segments.
pub type LineSegments<'a, S> = Vec<(S, &'a str)>;
/// Build the option set implied by the `side-by-side` feature.
/// Enabling side-by-side also enables line numbers and centers each line number
/// (width 4) between separators in its panel's number field.
pub fn make_feature() -> Vec<(String, OptionValueFunction)> {
    builtin_feature!([
        (
            "side-by-side",
            bool,
            None,
            _opt => true
        ),
        ("features", bool, None, _opt => "line-numbers"),
        ("line-numbers-left-format", String, None, _opt => "β{nm:^4}β".to_string()),
        ("line-numbers-right-format", String, None, _opt => "β{np:^4}β".to_string())
    ])
}
// Aliases for Minus/Plus because Left/Right and PanelSide makes
// more sense in a side-by-side context.
pub use crate::minusplus::MinusPlusIndex as PanelSide;
pub use MinusPlusIndex::Minus as Left;
pub use MinusPlusIndex::Plus as Right;
#[derive(Debug)]
pub struct Panel {
    // Width of one side-by-side panel, in terminal columns.
    pub width: usize,
}
// Left/Right pairs reuse the generic Minus/Plus container.
pub type LeftRight<T> = MinusPlus<T>;
// The two panels (left = minus side, right = plus side) of side-by-side mode.
pub type SideBySideData = LeftRight<Panel>;
impl SideBySideData {
    /// Create a [`LeftRight<Panel>`](LeftRight<Panel>) named [`SideBySideData`].
    /// Each panel gets half the width: half of a fixed `--width` if one was
    /// given, otherwise half of the available terminal width.
    pub fn new_sbs(decorations_width: &cli::Width, available_terminal_width: &usize) -> Self {
        let panel_width = match decorations_width {
            cli::Width::Fixed(w) => w / 2,
            _ => available_terminal_width / 2,
        };
        SideBySideData::new(Panel { width: panel_width }, Panel { width: panel_width })
    }
}
/// Compute, per panel, the width available for line content: the panel width
/// minus the formatted line-number field and (if kept) the 1-wide "+/-/ " prefix.
/// Uses saturating subtraction so very narrow panels clamp to zero.
pub fn available_line_width(
    config: &Config,
    data: &line_numbers::LineNumbersData,
) -> line_numbers::SideBySideLineWidth {
    let linennumbers_width = data.formatted_width();
    // The width can be reduced by the line numbers and/or a possibly kept 1-wide "+/-/ " prefix.
    let line_width = |side: PanelSide| {
        config.side_by_side_data[side]
            .width
            .saturating_sub(linennumbers_width[side])
            .saturating_sub(config.keep_plus_minus_markers as usize)
    };
    LeftRight::new(line_width(Left), line_width(Right))
}
/// Report whether `line` overflows a panel that offers `line_width` columns.
pub fn line_is_too_long(line: &str, line_width: usize) -> bool {
    // The grapheme count includes a leading "+/-/ " marker and a trailing
    // newline, both counted but never printed, so the line only counts as too
    // long once it exceeds the width by more than those two characters.
    let budget = line_width + 2;
    line.graphemes(true).count() > budget
}
/// Return whether any of the input lines is too long, and a data
/// structure indicating which are too long. This avoids
/// calculating the length again later.
pub fn has_long_lines(
    lines: &LeftRight<&Vec<(String, State)>>,
    line_width: &line_numbers::SideBySideLineWidth,
) -> (bool, LeftRight<Vec<bool>>) {
    let mut wrap_any = LeftRight::default();
    let mut wrapping_lines = LeftRight::default();
    // For one side: compute a bool per line, OR-ing each into `wrap_any[side]`
    // as a side effect of the `inspect`.
    let mut check_if_too_long = |side| {
        let lines_side: &Vec<(String, State)> = lines[side];
        wrapping_lines[side] = lines_side
            .iter()
            .map(|(line, _)| line_is_too_long(line, line_width[side]))
            .inspect(|b| wrap_any[side] |= b)
            .collect();
    };
    check_if_too_long(Left);
    check_if_too_long(Right);
    (wrap_any[Left] || wrap_any[Right], wrapping_lines)
}
/// Emit a sequence of minus and plus lines in side-by-side mode.
///
/// Each entry of `line_alignment` pairs a minus-line index with a plus-line
/// index and yields one output row; `None` on either side makes that panel
/// render an empty counterpart half.
#[allow(clippy::too_many_arguments)]
pub fn paint_minus_and_plus_lines_side_by_side<'a>(
    syntax_left_right: LeftRight<Vec<LineSegments<'a, SyntectStyle>>>,
    diff_left_right: LeftRight<Vec<LineSegments<'a, Style>>>,
    states_left_right: LeftRight<Vec<State>>,
    line_alignment: Vec<(Option<usize>, Option<usize>)>,
    output_buffer: &mut String,
    config: &Config,
    line_numbers_data: &mut Option<&mut line_numbers::LineNumbersData>,
    background_color_extends_to_terminal_width: LeftRight<BgShouldFill>,
) {
    for (minus_line_index, plus_line_index) in line_alignment {
        output_buffer.push_str(&paint_left_panel_minus_line(
            minus_line_index,
            &syntax_left_right[Left],
            &diff_left_right[Left],
            // With no paired line on this side, fall back to a plain hunk state.
            match minus_line_index {
                Some(i) => &states_left_right[Left][i],
                None => &State::HunkMinus(None),
            },
            line_numbers_data,
            background_color_extends_to_terminal_width[Left],
            config,
        ));
        output_buffer.push_str(&paint_right_panel_plus_line(
            plus_line_index,
            &syntax_left_right[Right],
            &diff_left_right[Right],
            match plus_line_index {
                Some(i) => &states_left_right[Right][i],
                None => &State::HunkPlus(None),
            },
            line_numbers_data,
            background_color_extends_to_terminal_width[Right],
            config,
        ));
        output_buffer.push('\n');
    }
}
#[allow(clippy::too_many_arguments)]
/// Paint an unchanged ("zero") hunk line in side-by-side mode: the same content
/// is painted into both panels, after being wrapped if configuration requires.
pub fn paint_zero_lines_side_by_side<'a>(
    raw_line: &str,
    syntax_style_sections: Vec<LineSegments<'a, SyntectStyle>>,
    diff_style_sections: Vec<LineSegments<'a, Style>>,
    output_buffer: &mut String,
    config: &Config,
    line_numbers_data: &mut Option<&mut line_numbers::LineNumbersData>,
    painted_prefix: Option<ansi_term::ANSIString>,
    background_color_extends_to_terminal_width: BgShouldFill,
) {
    let states = vec![State::HunkZero];
    // Wrapping may split the line into several display lines, rebinding the
    // states and section vectors to the expanded versions.
    let (states, syntax_style_sections, diff_style_sections) = wrap_zero_block(
        config,
        raw_line,
        states,
        syntax_style_sections,
        diff_style_sections,
        line_numbers_data,
    );
    for (line_index, ((syntax_sections, diff_sections), state)) in syntax_style_sections
        .into_iter()
        .zip_eq(diff_style_sections.iter())
        .zip_eq(states.into_iter())
        .enumerate()
    {
        // The identical content goes into both panels, each padded to its own width.
        for panel_side in &[Left, Right] {
            let (mut panel_line, panel_line_is_empty) = Painter::paint_line(
                &syntax_sections,
                diff_sections,
                &state,
                line_numbers_data,
                Some(*panel_side),
                painted_prefix.clone(),
                config,
            );
            pad_panel_line_to_width(
                &mut panel_line,
                panel_line_is_empty,
                Some(line_index),
                &diff_style_sections,
                &state,
                *panel_side,
                background_color_extends_to_terminal_width,
                config,
            );
            output_buffer.push_str(&panel_line);
        }
        output_buffer.push('\n');
    }
}
#[allow(clippy::too_many_arguments)]
fn paint_left_panel_minus_line<'a>(
line_index: Option<usize>,
syntax_style_sections: &[LineSegments<'a, SyntectStyle>],
diff_style_sections: &[LineSegments<'a, Style>],
state: &'a State,
line_numbers_data: &mut Option<&mut line_numbers::LineNumbersData>,
background_color_extends_to_terminal_width: BgShouldFill,
config: &Config,
) -> String {
let (mut panel_line, panel_line_is_empty) = paint_minus_or_plus_panel_line(
line_index,
syntax_style_sections,
diff_style_sections,
state,
line_numbers_data,
Left,
config,
);
pad_panel_line_to_width(
&mut panel_line,
panel_line_is_empty,
line_index,
diff_style_sections,
state,
Left,
background_color_extends_to_terminal_width,
config,
);
panel_line
}
#[allow(clippy::too_many_arguments)]
fn paint_right_panel_plus_line<'a>(
line_index: Option<usize>,
syntax_style_sections: &[LineSegments<'a, SyntectStyle>],
diff_style_sections: &[LineSegments<'a, Style>],
state: &'a State,
line_numbers_data: &mut Option<&mut line_numbers::LineNumbersData>,
background_color_extends_to_terminal_width: BgShouldFill,
config: &Config,
) -> String {
let (mut panel_line, panel_line_is_empty) = paint_minus_or_plus_panel_line(
line_index,
syntax_style_sections,
diff_style_sections,
state,
line_numbers_data,
Right,
config,
);
pad_panel_line_to_width(
&mut panel_line,
panel_line_is_empty,
line_index,
diff_style_sections,
state,
Right,
background_color_extends_to_terminal_width,
config,
);
panel_line
}
/// Decide how the right edge of a panel line is filled, returning the fill
/// method (spaces vs ANSI sequence, or nothing) and the style to fill with.
fn get_right_fill_style_for_panel<'a>(
    line_is_empty: bool,
    line_index: Option<usize>,
    diff_style_sections: &[LineSegments<'a, Style>],
    state: &State,
    panel_side: PanelSide,
    background_color_extends_to_terminal_width: BgShouldFill,
    config: &Config,
) -> (Option<BgFillMethod>, Style) {
    // If in the left panel then it must be filled with spaces.
    let none_or_override = if panel_side == Left {
        Some(BgFillMethod::Spaces)
    } else {
        None
    };
    match (line_is_empty, line_index) {
        // Empty lines, and empty-by-construction counterpart halves, use the null style.
        (true, _) => (none_or_override, config.null_style),
        (false, None) => (none_or_override, config.null_style),
        (false, Some(index)) => {
            let (bg_fill_mode, fill_style) =
                Painter::get_should_right_fill_background_color_and_fill_style(
                    &diff_style_sections[index],
                    state,
                    background_color_extends_to_terminal_width,
                    config,
                );
            match bg_fill_mode {
                None => (none_or_override, config.null_style),
                // The left panel never gets an ANSI fill: the right panel follows it.
                _ if panel_side == Left => (Some(BgFillMethod::Spaces), fill_style),
                _ => (bg_fill_mode, fill_style),
            }
        }
    }
}
/// Construct half of a minus or plus line under side-by-side mode, i.e. the half line that
/// goes in one or other panel. Return a tuple `(painted_half_line, is_empty)`.
// Suppose the line being displayed is a minus line with a paired plus line. Then both times
// this function is called, `line_index` will be `Some`. This case proceeds as one would
// expect: on the first call, we are constructing the left panel line, and we are passed
// `(Some(index), HunkMinus, Left)`. We pass `(HunkMinus, Left)` to
// `paint_line`. This has two consequences:
// 1. `format_and_paint_line_numbers` will increment the minus line number.
// 2. `format_and_paint_line_numbers` will emit the left line number field, and not the right.
//
// The second call does the analogous thing for the plus line to be displayed in the right panel:
// we are passed `(Some(index), HunkPlus, Right)` and we pass `(HunkPlus, Right)` to `paint_line`,
// causing it to increment the plus line number and emit the right line number field.
//
// Now consider the case where the line being displayed is a minus line with no paired plus line.
// The first call is as before. On the second call, we are passed `(None, HunkPlus, Right)` and we
// wish to display the right panel, with its line number container, but without any line number
// (and without any line contents). We do this by passing (HunkMinus, Right) to `paint_line`, since
// what this will do is set the line number pair in that function to `(Some(minus_number), None)`,
// and then only emit the right field (which has a None number, i.e. blank). However, it will also
// increment the minus line number, so we need to knock that back down.
#[allow(clippy::too_many_arguments)]
fn paint_minus_or_plus_panel_line<'a>(
    line_index: Option<usize>,
    syntax_style_sections: &[LineSegments<'a, SyntectStyle>],
    diff_style_sections: &[LineSegments<'a, Style>],
    state: &State,
    line_numbers_data: &mut Option<&mut line_numbers::LineNumbersData>,
    panel_side: PanelSide,
    config: &Config,
) -> (String, bool) {
    let (empty_line_syntax_sections, empty_line_diff_sections) = (Vec::new(), Vec::new());
    let (line_syntax_sections, line_diff_sections, state_for_line_numbers_field) =
        if let Some(index) = line_index {
            (
                &syntax_style_sections[index],
                &diff_style_sections[index],
                state.clone(),
            )
        } else {
            // No real line on this side: paint an empty half and flip the hunk
            // state so the line-number field on this side stays blank (see the
            // explanatory comment block above this function).
            let opposite_state = match state {
                State::HunkMinus(x) => State::HunkPlus(x.clone()),
                State::HunkPlus(x) => State::HunkMinus(x.clone()),
                _ => unreachable!(),
            };
            (
                &empty_line_syntax_sections,
                &empty_line_diff_sections,
                opposite_state,
            )
        };
    // Kept "-"/"+" marker for this panel, or a styled space for wrapped
    // continuation lines; None when markers are not kept.
    let painted_prefix = match (config.keep_plus_minus_markers, panel_side, state) {
        (true, _, State::HunkPlusWrapped) => Some(config.plus_style.paint(" ")),
        (true, _, State::HunkMinusWrapped) => Some(config.minus_style.paint(" ")),
        (true, Left, _) => Some(config.minus_style.paint("-")),
        (true, Right, _) => Some(config.plus_style.paint("+")),
        _ => None,
    };
    let (line, line_is_empty) = Painter::paint_line(
        line_syntax_sections,
        line_diff_sections,
        &state_for_line_numbers_field,
        line_numbers_data,
        Some(panel_side),
        painted_prefix,
        config,
    );
    (line, line_is_empty)
}
/// Right-fill the background color of a line in a panel. If in the left panel this is always
/// done with spaces. The right panel can be filled with spaces or using ANSI sequences
/// instructing the terminal emulator to fill the background color rightwards.
#[allow(clippy::too_many_arguments, clippy::comparison_chain)]
fn pad_panel_line_to_width<'a>(
    panel_line: &mut String,
    panel_line_is_empty: bool,
    line_index: Option<usize>,
    diff_style_sections: &[LineSegments<'a, Style>],
    state: &State,
    panel_side: PanelSide,
    background_color_extends_to_terminal_width: BgShouldFill,
    config: &Config,
) {
    // Emit empty line marker if the panel line is empty but not empty-by-construction. IOW if the
    // other panel contains a real line, and we are currently emitting an empty counterpart panel
    // to form the other half of the line, then don't emit the empty line marker.
    if panel_line_is_empty && line_index.is_some() {
        match state {
            State::HunkMinus(_) => Painter::mark_empty_line(
                &config.minus_empty_line_marker_style,
                panel_line,
                Some(" "),
            ),
            State::HunkPlus(_) => Painter::mark_empty_line(
                &config.plus_empty_line_marker_style,
                panel_line,
                Some(" "),
            ),
            State::HunkZero => {}
            _ => unreachable!(),
        };
    };
    // Measure visible width (ANSI escapes excluded) and truncate any overflow.
    let text_width = ansi::measure_text_width(panel_line);
    let panel_width = config.side_by_side_data[panel_side].width;
    if text_width > panel_width {
        *panel_line =
            ansi::truncate_str(panel_line, panel_width, &config.truncation_symbol).to_string();
    }
    let (bg_fill_mode, fill_style) = get_right_fill_style_for_panel(
        panel_line_is_empty,
        line_index,
        diff_style_sections,
        state,
        panel_side,
        background_color_extends_to_terminal_width,
        config,
    );
    match bg_fill_mode {
        Some(BgFillMethod::TryAnsiSequence) => {
            Painter::right_fill_background_color(panel_line, fill_style)
        }
        // Exactly full (or just truncated to full): nothing left to pad.
        Some(BgFillMethod::Spaces) if text_width >= panel_width => (),
        Some(BgFillMethod::Spaces) => panel_line.push_str(
            &fill_style
                .paint(" ".repeat(panel_width - text_width))
                .to_string(),
        ),
        None => (),
    }
}
/// Handling of the leftover column that appears when an odd terminal width is
/// split into two equal panels and the background is filled via ANSI sequences.
pub mod ansifill {
    use super::SideBySideData;
    use crate::config::Config;
    use crate::paint::BgFillMethod;
    // The character used to absorb the odd leftover column (see below).
    pub const ODD_PAD_CHAR: char = ' ';
    // Panels in side-by-side mode always sum up to an even number, so when the terminal
    // has an odd width an extra column is left over.
    // If the background color is extended with an ANSI sequence (which only knows "fill
    // this row until the end") instead of spaces (see `BgFillMethod`), then the coloring
    // extends into that column. This becomes noticeable when the displayed content reaches
    // the right side of the right panel to be truncated or wrapped.
    // However using an ANSI sequence instead of spaces is generally preferable because
    // small changes to the terminal width are less noticeable.
    /// The solution in this case is to add `ODD_PAD_CHAR` before the first line number in
    /// the right panel and increasing its width by one, thus using the full terminal width
    /// with the two panels.
    /// This also means line numbers can not be disabled in side-by-side mode, but they may
    /// not actually paint numbers.
    #[derive(Clone, Debug)]
    pub struct UseFullPanelWidth(pub bool);
    impl UseFullPanelWidth {
        /// True only for side-by-side mode with an odd fixed width and ANSI filling.
        pub fn new(config: &Config) -> Self {
            Self(
                config.side_by_side
                    && Self::is_odd_with_ansi(&config.decorations_width, &config.line_fill_method),
            )
        }
        /// Widen the right panel by one column when the odd-width/ANSI case applies.
        pub fn sbs_odd_fix(
            width: &crate::cli::Width,
            method: &BgFillMethod,
            sbs_data: SideBySideData,
        ) -> SideBySideData {
            if Self::is_odd_with_ansi(width, method) {
                Self::adapt_sbs_data(sbs_data)
            } else {
                sbs_data
            }
        }
        /// Whether the extra `ODD_PAD_CHAR` padding should be applied.
        pub fn pad_width(&self) -> bool {
            self.0
        }
        // The problematic combination: ANSI fill together with an odd fixed width.
        fn is_odd_with_ansi(width: &crate::cli::Width, method: &BgFillMethod) -> bool {
            method == &BgFillMethod::TryAnsiSequence
                && matches!(&width, crate::cli::Width::Fixed(width) if width % 2 == 1)
        }
        // Give the leftover column to the right panel.
        fn adapt_sbs_data(mut sbs_data: SideBySideData) -> SideBySideData {
            sbs_data[super::Right].width += 1;
            sbs_data
        }
    }
}
#[cfg(test)]
/// Integration-style tests for side-by-side layout: each runs delta on a small
/// diff at a given width and asserts the ANSI-stripped panel layout.
pub mod tests {
    use crate::ansi::strip_ansi_codes;
    use crate::features::line_numbers::tests::*;
    use crate::tests::integration_test_utils::{make_config_from_args, run_delta};
    #[test]
    fn test_two_minus_lines() {
        let config = make_config_from_args(&["--side-by-side", "--width", "40"]);
        let output = run_delta(TWO_MINUS_LINES_DIFF, &config);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let (line_1, line_2) = (lines.next().unwrap(), lines.next().unwrap());
        assert_eq!("β 1 βa = 1 β β", strip_ansi_codes(line_1));
        assert_eq!("β 2 βb = 23456 β β", strip_ansi_codes(line_2));
    }
    #[test]
    fn test_two_minus_lines_truncated() {
        // Narrow width with wrapping disabled forces truncation (">" symbol).
        let mut config = make_config_from_args(&[
            "--side-by-side",
            "--wrap-max-lines",
            "0",
            "--width",
            "28",
            "--line-fill-method=spaces",
        ]);
        config.truncation_symbol = ">".into();
        let output = run_delta(TWO_MINUS_LINES_DIFF, &config);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let (line_1, line_2) = (lines.next().unwrap(), lines.next().unwrap());
        assert_eq!("β 1 βa = 1 β β", strip_ansi_codes(line_1));
        assert_eq!("β 2 βb = 234>β β", strip_ansi_codes(line_2));
    }
    #[test]
    fn test_two_plus_lines() {
        let config = make_config_from_args(&[
            "--side-by-side",
            "--width",
            "41",
            "--line-fill-method=spaces",
        ]);
        let output = run_delta(TWO_PLUS_LINES_DIFF, &config);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let (line_1, line_2) = (lines.next().unwrap(), lines.next().unwrap());
        let sac = strip_ansi_codes; // alias to help with `cargo fmt`-ing:
        assert_eq!("β β β 1 βa = 1 ", sac(line_1));
        assert_eq!("β β β 2 βb = 234567 ", sac(line_2));
    }
    #[test]
    fn test_two_plus_lines_truncated() {
        let mut config = make_config_from_args(&[
            "--side-by-side",
            "--wrap-max-lines",
            "0",
            "--width",
            "30",
            "--line-fill-method=spaces",
        ]);
        config.truncation_symbol = ">".into();
        let output = run_delta(TWO_PLUS_LINES_DIFF, &config);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let (line_1, line_2) = (lines.next().unwrap(), lines.next().unwrap());
        assert_eq!("β β β 1 βa = 1 ", strip_ansi_codes(line_1));
        assert_eq!("β β β 2 βb = 2345>", strip_ansi_codes(line_2));
    }
    #[test]
    fn test_two_plus_lines_exact_fit() {
        // Content exactly fills the panel; ANSI fill adds no trailing spaces.
        let config =
            make_config_from_args(&["--side-by-side", "--width", "33", "--line-fill-method=ansi"]);
        let output = run_delta(TWO_PLUS_LINES_DIFF, &config);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let (line_1, line_2) = (lines.next().unwrap(), lines.next().unwrap());
        let sac = strip_ansi_codes; // alias to help with `cargo fmt`-ing:
        assert_eq!("β β β 1 βa = 1", sac(line_1));
        assert_eq!("β β β 2 βb = 234567", sac(line_2));
    }
    #[test]
    fn test_one_minus_one_plus_line() {
        let config = make_config_from_args(&[
            "--side-by-side",
            "--width",
            "40",
            "--line-fill-method=spaces",
        ]);
        let output = run_delta(ONE_MINUS_ONE_PLUS_LINE_DIFF, &config);
        let output = strip_ansi_codes(&output);
        let mut lines = output.lines().skip(crate::config::HEADER_LEN);
        let mut lnu = move || lines.next().unwrap(); // for cargo fmt
        assert_eq!("β 1 βa = 1 β 1 βa = 1", lnu());
        assert_eq!("β 2 βb = 2 β 2 βbb = 2 ", lnu());
    }
}
| 36.826955 | 99 | 0.614693 |
fc1987de4e6a63c04785c3c267e35879d25b4364 | 2,873 | use proc_fs::stats::*;
use proc_fs::kernel::*;
use proc_fs::net::*;
use proc_fs::ToPid;
use iron::{Iron, IronResult, Request, Response};
use router::Router;
use std::sync::{Arc, Mutex, Condvar};
use serde_json;
use marid::{MaridError, Runner, Signal, Receiver};
use util::handle_signals_condvar;
// Marid runner that serves /proc statistics over HTTP. The router is wrapped in
// an Option so that `run` (which consumes `Box<Self>`) can `take()` ownership.
pub struct RouterRunner {
    router: Option<Router>,
}
impl RouterRunner {
    /// Build the runner with routes for per-process statm/io/stack and
    /// system-wide TCP statistics.
    pub fn new() -> RouterRunner {
        let mut router = Router::new();
        router.get("/proc/:pid/statm", proc_statm_handler);
        router.get("/proc/:pid/io", proc_io_handler);
        router.get("/proc/:pid/stack", proc_stack_handler);
        router.get("/net/tcpstats", proc_tcp_handler);
        RouterRunner {
            router: Some(router),
        }
    }
}
impl Runner for RouterRunner {
    fn setup(&mut self) -> Result<(), MaridError> {
        // No preparation needed before `run`.
        Ok(())
    }
    /// Serve HTTP until a shutdown signal arrives, then close the listener.
    fn run(mut self: Box<Self>, signals: Receiver<Signal>) -> Result<(), MaridError> {
        debug!("Running RouterRunner");
        // Shared (flag, condvar) pair: the signal handler sets the flag and notifies.
        let shutdown = Arc::new((Mutex::new(false), Condvar::new()));
        debug!("Setting up signal handling");
        handle_signals_condvar(signals, shutdown.clone());
        let router = self.router.take().expect("Could not take router");
        // NOTE(review): bind address and port are hard-coded; unwrap panics if
        // the port is unavailable.
        let mut listener = Iron::new(router).http("localhost:3000").unwrap();
        debug!("Serving requests...");
        let &(ref lock, ref cvar) = &*shutdown;
        // Block this thread until the shutdown flag is set, re-checking after
        // each condvar wakeup to guard against spurious wakeups.
        let mut stop = lock.lock().expect("Lock was poisoned");
        while !*stop {
            stop = cvar.wait(stop).expect("Lock was poisoned");
        }
        debug!("Shutting down router thread...");
        match listener.close() {
            Ok(_) => Ok(()),
            Err(e) => Err(Box::new(e)),
        }
    }
}
fn proc_statm_handler(req: &mut Request) -> IronResult<Response> {
let ref pid = req.extensions.get::<Router>().unwrap().find("pid").unwrap_or("/");
let stats = process_statm((*pid).to_pid()).unwrap();
let serialized = serde_json::to_string(&stats).unwrap();
Ok(Response::with(serialized))
}
fn proc_io_handler(req: &mut Request) -> IronResult<Response> {
let ref pid = req.extensions.get::<Router>().unwrap().find("pid").unwrap_or("/");
let stats = process_io((*pid).to_pid()).unwrap();
let serialized = serde_json::to_string(&stats).unwrap();
Ok(Response::with(serialized))
}
fn proc_stack_handler(req: &mut Request) -> IronResult<Response> {
let ref pid = req.extensions.get::<Router>().unwrap().find("pid").unwrap_or("/");
let stack_trace = process_stack((*pid).to_pid()).unwrap();
let serialized = serde_json::to_string(&stack_trace).unwrap();
Ok(Response::with(serialized))
}
fn proc_tcp_handler(_req: &mut Request) -> IronResult<Response> {
let tcp = process_tcp().unwrap();
let serialized = serde_json::to_string(&tcp).unwrap();
Ok(Response::with(serialized))
}
| 31.922222 | 86 | 0.62339 |
0998da14d4226f31aeaf7fa2021ac92b0208654e | 39,508 | // Copyright (c) 2020 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{collections::{HashMap,
HashSet},
fmt,
fs::File,
io::prelude::*};
use petgraph::{algo::{connected_components,
is_cyclic_directed},
graphmap::DiGraphMap,
Direction};
use habitat_builder_db::models::package::PackageWithVersionArray;
use crate::hab_core::package::{ident::Identifiable,
PackageIdent,
PackageTarget};
use crate::{data_store::Unbuildable,
graph_helpers,
package_build_manifest_graph::PackageBuild,
package_graph_trait::Stats,
package_ident_intern::{display_ordering_cmp,
PackageIdentIntern},
package_info::PackageInfo,
util::*};
// Per-target dependency graph of the latest package versions.
pub struct PackageGraphForTarget {
    // The package target this graph describes.
    target: PackageTarget,
    // Info for every package added; entries are currently kept forever (see the
    // TODO in `extend` about removing packages no longer in the graph).
    packages: HashMap<PackageIdentIntern, PackageInfo>,
    // Map from truncated ident to latest matching; it could be origin/packagename, or
    // origin/packagename/version. Logically this is a map from a partially qualified ident
    // to a fully qualified one. Possibly extract this as separate entity.
    latest_map: HashMap<PackageIdentIntern, PackageIdentIntern>,
    // We build this alongside the full graph
    latest_graph: DiGraphMap<PackageIdentIntern, EdgeType>,
}
impl PackageGraphForTarget {
pub fn new(target: PackageTarget) -> Self {
PackageGraphForTarget { target,
packages: HashMap::<PackageIdentIntern, PackageInfo>::new(),
latest_map:
HashMap::<PackageIdentIntern, PackageIdentIntern>::new(),
latest_graph: DiGraphMap::<PackageIdentIntern, EdgeType>::new() }
}
    // This is currently only used for testing, but it is part of the original API and left for
    // ease of adaptation/backfit.
    /// Add every package yielded by `packages` and return the resulting
    /// (node_count, edge_count) of the latest-version graph.
    pub fn build<T>(&mut self, packages: T, use_build_deps: bool) -> (usize, usize)
        where T: Iterator<Item = PackageInfo>
    {
        for p in packages {
            self.extend(&p, use_build_deps);
        }
        (self.latest_graph.node_count(), self.latest_graph.edge_count())
    }
    /// Register `id` as a "latest" candidate under both its short
    /// (origin/name) and versioned (origin/name/version) keys.
    fn update_latest(&mut self, id: PackageIdentIntern) {
        // We should check if this is fully qualified, as we implicitly assume that to be the case
        let just_package = id.short_ident();
        self.update_if_newer(just_package, id);
        let package_version = id.versioned_ident(); // technically this might fail if we already have a short ident!!!
        self.update_if_newer(package_version, id);
    }
fn update_if_newer(&mut self, id: PackageIdentIntern, fqpi: PackageIdentIntern) {
match self.latest_map.get(&id) {
Some(&old_fqpi) => {
if fqpi > old_fqpi {
self.latest_map.insert(id, fqpi);
}
}
None => {
self.latest_map.insert(id, fqpi);
}
};
}
// Incrementally adds a node to the graph, rejecting it and doing nothing
// if it returns a cycle.
// Returns current node, edge count of graph.
pub fn extend(&mut self, package_info: &PackageInfo, use_build_deps: bool) -> (usize, usize) {
debug!("Extend: {} {} N:{} E:{} P:{}",
package_info.ident,
package_info.target,
self.latest_graph.node_count(),
self.latest_graph.edge_count(),
self.packages.len());
// TODO make plan for removing package_info structs from package table when they are no
// longer part of graph Right now we keep them forever, which is unnecessary.
let package_ident = PackageIdentIntern::from(&package_info.ident);
let short_ident = package_ident.short_ident();
// Next, add to latest graph. We overwrite any prior ident, so that any incoming
// dependencies are preserved
// Are we the newest? Ignore older versions of the package
if !self.latest_map.contains_key(&short_ident)
|| self.latest_map[&short_ident] <= package_ident
{
// we will need to be checking for cycles here...
// List current node in graph outgoing runtime (rt) deps
// Compare to new package rt deps
// if rt deps added, then do cycle check
// if rt deps deleted, then smarter cycle check algos might need to do something, but
// not us now just delete the edge.
// if same, just no-op
// secondary optimization; if no *incoming* rt deps we can skip cycle check as well.
// skip fully qualified idents in the graph; they never will be rebuilt, so they only
// add noise to the dependency graph.
//
// We also *could* skip partially qualified idents here. There are two cases to
// consider: deps on a version that's not latest, which again won't be rebuilt. There's
// a special case with some packages that bump versions in lockstep (gcc, gcc-libs) They
// are version pinned, but always on latest. We should treat those as if they we're
// unqualified. However at this time we don't have the proper information to know if
// they are pointing at latest version or not. For now we are building the graph
// optimisically, and will need to check later if that is sane.
let plan_deps = filter_out_fully_qualified(&package_info.plan_deps);
let (added, deleted) = graph_helpers::changed_edges_for_type(&self.latest_graph,
short_ident,
&plan_deps,
EdgeType::RuntimeDep);
//
// * The graph invariant is that it is cycle free, so if we aren't adding any new edges,
// we can't add a cycle
// * If this node doesn't have anyone depending on it, it can't be part of a cycle
// * Otherwise, we have to search to see if we have created a cycle We only look at the
// added edges, since they are where a cycle might be introduced.
// TODO: track some statistics on how often we advert a cycle check.
let start = std::time::Instant::now();
let has_cycle = graph_helpers::detect_cycles(&self.latest_graph, short_ident, &added);
let cycle_detect_time = (start.elapsed().as_nanos() as f64) / 1_000_000_000.0;
debug!("Detect cycle E ({}) for {} took {} s ({} edges)",
has_cycle,
short_ident,
cycle_detect_time,
plan_deps.len());
if has_cycle {
// Handle cycle case here
debug!("Detect cycle for E {} found a cycle in {}s",
short_ident, cycle_detect_time);
return (self.latest_graph.node_count(), self.latest_graph.edge_count());
} else {
// No cycle created, so
graph_helpers::update_edges_for_type(&mut self.latest_graph,
short_ident,
&added,
&deleted,
EdgeType::RuntimeDep);
}
if use_build_deps {
let plan_bdeps = filter_out_fully_qualified(&package_info.plan_bdeps);
graph_helpers::revise_edges_for_type(&mut self.latest_graph,
short_ident,
&plan_bdeps,
EdgeType::BuildDep);
// Long term, strong build deps should be integrated into our notion of the plan. In
// the intermediate term this probably needs to be stored in the
// database along with the package info However, for now, we're
// hydrating it from a hardcoded set specific to core plans.
let plan_sdeps = filter_out_fully_qualified(&package_info.strong_bdeps);
graph_helpers::revise_edges_for_type(&mut self.latest_graph,
short_ident,
&plan_sdeps,
EdgeType::StrongBuildDep);
}
}
self.update_latest(package_ident);
self.packages.insert(package_ident, package_info.clone());
debug!("Extend: {} {} N:{} E:{} P:{}",
package_ident,
package_info.target,
self.latest_graph.node_count(),
self.latest_graph.edge_count(),
self.packages.len());
(self.latest_graph.node_count(), self.latest_graph.edge_count())
}
// This is basically extend above, but only checks, doesn't update.
// The embarassing levels of parallel construction between the two should be cleaned up and
// unified
// Returns true if we can add this w/o a cycle
pub fn check_extend(&self, package_info: &PackageInfo, _use_build_deps: bool) -> bool {
// TODO make plan for removing package_info structs from package table when they are no
// longer part of graph Right now we keep them forever, which is unnecessary.
let package_ident = PackageIdentIntern::from(&package_info.ident);
let short_ident = package_ident.short_ident();
// Next, add to latest graph. We overwrite any prior ident, so that any incoming
// dependencies are preserved
// Are we the newest? Ignore older versions of the package
if !self.latest_map.contains_key(&short_ident)
|| self.latest_map[&short_ident] <= package_ident
{
let plan_deps = filter_out_fully_qualified(&package_info.plan_deps);
let (added, _deleted) = graph_helpers::changed_edges_for_type(&self.latest_graph,
short_ident,
&plan_deps,
EdgeType::RuntimeDep);
//
// * The graph invariant is that it is cycle free, so if we aren't adding any new edges,
// we can't add a cycle
// * If this node doesn't have anyone depending on it, it can't be part of a cycle
// * Otherwise, we have to search to see if we have created a cycle We only look at the
// added edges, since they are where a cycle might be introduced.
// TODO: track some statistics on how often we advert a cycle check.
// TODO: examine whether the reverse dep scan is faster (most nodes are leaf nodes)
let start = std::time::Instant::now();
let has_cycle = graph_helpers::detect_cycles(&self.latest_graph, short_ident, &added);
let cycle_detect_time = (start.elapsed().as_nanos() as f64) / 1_000_000_000.0;
debug!("Detect cycle CE ({}) for {} took {} s ({} edges)",
has_cycle,
short_ident,
cycle_detect_time,
plan_deps.len());
if has_cycle {
// Handle cycle case here
return false;
}
// NOTE:
// At some point we should be checking the global graph for build cycles across origins
}
true
}
pub fn write_packages_json(&self, filename: &str, filter: Option<&str>) {
let mut output: Vec<PackageWithVersionArray> = Vec::new();
let mut keep = 0;
let mut m = 0;
for package_ref in self.packages.values() {
if filter_match(&package_ref.ident, filter) {
m += 1;
if let Some(p) = &package_ref.package {
keep += 1;
output.push(p.clone())
}
}
}
debug!("Wrote {}/{}/{} K/M/T packages with filter {:?}",
keep,
m,
self.packages.len(),
filter);
write_packages_json(output.into_iter(), filename)
}
pub fn read_packages_json(&mut self, filename: &str, use_build_edges: bool) {
let packages = read_packages_json(filename);
for package in packages {
let package_info = PackageInfo::from(package.clone());
self.extend(&package_info, use_build_edges);
}
}
pub fn rdeps(&self,
name: PackageIdentIntern,
origin: Option<&str>)
-> Vec<(PackageIdentIntern, PackageIdentIntern)> {
let seed = vec![name];
let deps = graph_helpers::flood_deps_in_origin(&self.latest_graph, &seed, origin);
deps.iter()
.map(|&dep| {
let fq_dep: PackageIdentIntern = *(self.latest_map.get(&dep).unwrap_or(&dep));
(dep, fq_dep)
})
.collect()
}
// Mostly for debugging
pub fn rdeps_dump(&self) {
debug!("Reverse dependencies:");
unimplemented!("Rdeps aren't a thing right now, come back later");
}
pub fn search(&self, _phrase: &str) -> Vec<String> {
unimplemented!("Search isn't a thing right now, come back later");
// TODO: Rework this for new PackageTable construct
// let v: Vec<String> = self
// .packages
// .values()
// .map(|package| format!("{}", package.borrow().ident))
// .filter(|s| s.contains(phrase))
// .collect();
// v
}
pub fn latest(&self) -> Vec<String> {
self.latest_map.values().map(|x| format!("{}", x)).collect()
}
// Given an identifier in 'origin/name' format, returns the
// most recent version (fully-qualified package ident string)
pub fn resolve(&self, ident: &PackageIdent) -> Option<PackageIdent> {
let ident = PackageIdentIntern::from(ident);
self.latest_map.get(&ident).map(|x| (*x).into())
}
pub fn stats(&self) -> Stats {
Stats { node_count: self.latest_graph.node_count(),
edge_count: self.latest_graph.edge_count(),
connected_comp: connected_components(&self.latest_graph),
is_cyclic: is_cyclic_directed(&self.latest_graph), }
}
// Who has the most things depending on them?
pub fn top(&self, _max: usize) -> Vec<(String, usize)> {
unimplemented!("Top isn't a thing right now, come back later");
// TODO REIMPLEMENT IN NEW WORLD;
}
// Takes a initial list of package idents and expands their deps; then permutes this
// to produce a map of dep with the list of each item in the initial set that required it.
// Optionally follows build time dep edges as well.
//
pub fn compute_attributed_deps(&self,
idents: &[PackageIdentIntern],
include_build_deps: bool)
-> HashMap<PackageIdentIntern, Vec<PackageIdentIntern>> {
let mut acc: HashMap<PackageIdentIntern, HashSet<PackageIdentIntern>> = HashMap::new();
for ident in idents {
let deps = graph_helpers::transitive_deps(&self.latest_graph,
&[*ident],
None,
include_build_deps);
for dep in deps {
acc.entry(dep)
.and_modify(|e| {
(*e).insert(*ident);
})
.or_insert_with(|| {
let mut s = HashSet::new();
s.insert(*ident);
s
});
}
}
let mut results: HashMap<PackageIdentIntern, Vec<PackageIdentIntern>> = HashMap::new();
for dep in acc.keys() {
let mut r: Vec<PackageIdentIntern> = acc[dep].iter()
.cloned()
.collect::<Vec<PackageIdentIntern>>();
r.sort_by(display_ordering_cmp);
results.insert(*dep, r);
}
results
}
pub fn write_deps(&self, ident: &PackageIdent) {
let ident = PackageIdentIntern::from(ident);
let full_ident = if ident.fully_qualified() {
Some(ident)
} else {
self.latest_map.get(&ident).cloned()
};
let maybe_package = full_ident.and_then(|pi| self.packages.get(&pi));
match maybe_package {
Some(pkg) => pkg.write(),
None => println!("Couldn't find match for {}", ident),
}
}
pub fn dump_graph(&self, _file: &str) {
unimplemented!("dump_graph unimplemented");
}
pub fn dump_latest_graph_raw_h<T>(&self, file: &str, p: &T)
where T: fmt::Display
{
let filename = format!("{}_{}", file, p).replace("/", "_");
self.dump_latest_graph_raw(filename.as_str(), None)
}
// Output a human readable, machine parsable form of the graph; useful for debugging
pub fn dump_latest_graph_raw(&self, file: &str, origin: Option<&str>) {
graph_helpers::dump_graph_raw(&self.latest_graph, file, origin)
}
// The built in Dot utility wasn't flexible for what I wanted, so implemented our own.
pub fn dump_latest_graph_as_dot(&self, file: &str, origin: Option<&str>) {
graph_helpers::emit_graph_as_dot(&self.latest_graph, file, origin)
}
pub fn dump_build_levels(&self, _file: &str, _origin: Option<&str>) {
unimplemented!("Isn't a thing right now, come back later");
// self.latest_graph.dump_build_levels(file, origin)
}
pub fn dump_scc(&self, file: &str, origin: Option<&str>) {
graph_helpers::dump_scc(&self.latest_graph, file, origin)
}
pub fn dump_diagnostics(&self, file: &str, _origin: Option<&str>) {
let mut _file = std::fs::File::create(file).unwrap();
unimplemented!("Isn't a thing right now, come back later");
}
pub fn dump_build_ordering(&mut self,
unbuildable: &dyn Unbuildable,
_filename: &str,
origin: &str,
base_set: &[PackageIdent],
touched: &[PackageIdent])
-> Vec<PackageBuild> {
self.compute_build(unbuildable, origin, base_set, touched, 3)
}
// Compute a build ordering
//
// Inputs:
//
// * Set of base packages to build with (most likely stable channel, but as long as they're
// consisitent it's ok
// * Universe of packages to build (core minus some unbuildables)
// * Kernel of packages 'modified'
// * Graph of package dependencies
//
// Process
// 1) Take kernel of packages, and recursively expand it over reverse build/runtime deps
// Filter that expansion by the universe set
//
// 2) Compute ordering of expanded set using SCC and RT edges inside SCC
// 3) Initialize 'latest' table using base set
// 4) Walk ordering rebuilding packages
// For each package
// a) Resolve deps using latest,
// b) create new package with special name, record it in package table
// c) then update latest with new package
//
// 5) Take new latest table, walk graph to find actually used packages.
pub fn compute_build(&mut self,
unbuildable: &dyn Unbuildable,
origin: &str,
base_set: &[PackageIdent],
touched: &[PackageIdent],
converge_count: usize)
-> Vec<PackageBuild> {
// debug!("Using base: {} {}\n",
// base_set.len(),
// join_idents(", ", &base_set));
debug!("Using touched: {} {}\n",
touched.len(),
join_idents(", ", &touched));
let preconditioned_graph = self.precondition_graph(origin);
let touched: Vec<PackageIdentIntern> = touched.iter().map(|x| x.into()).collect();
let rebuild_set = graph_helpers::compute_rebuild_set(&preconditioned_graph,
unbuildable,
&touched,
origin,
self.target);
// TODO DO check of rebuild set to make sure that it includes the pinned versions that had
// edges added in the precondition_graph phase above.
debug!("Rebuild: {} {}\n",
rebuild_set.len(),
join_idents(", ", &rebuild_set));
let build_order = graph_helpers::compute_build_order(&preconditioned_graph, &rebuild_set);
// Rework this later
debug!("CB: {} components", build_order.len());
for component in &build_order {
debug!("CB: #{} {}", component.len(), join_idents(", ", component));
}
let mut latest = HashMap::<PackageIdent, PackageIdent>::new();
for ident in base_set {
latest.insert(short_ident(&ident, false), ident.clone());
}
let mut file = File::create("latest_from_base.txt").expect("Failed to initialize file");
for (k, v) in &latest {
file.write_all(format!("{}: {}\n", &k, &v).as_bytes())
.unwrap();
}
let mut built: Vec<PackageBuild> = Vec::new();
for component in build_order.iter() {
// If there is only one element in component, don't need to converge, can just run
// once
let component_converge_count = if component.len() > 1 {
converge_count
} else {
1
};
for _i in 1..=component_converge_count {
for &ident in component {
let ident: PackageIdentIntern = ident;
let ident_latest = self.latest_map[&ident];
let package =
self.packages.get(&ident_latest).unwrap_or_else(|| {
panic!("Expected to find package for \
{} {} iter {}",
ident_latest, ident, _i)
});
let build = build_package(package, &mut latest);
latest.insert(short_ident(&build.ident, false), build.ident.clone());
built.push(build);
}
}
}
built
}
// Precondition Graph
//
// The graph is built incrementally, and we may not be able to fixup things because we
// lack full knowledge at the point of insertion.
// In particular, we have to treat edges with version information specially. If we depend
// on a particular version of a package, and the version is the latest, then we will rebuild it
// if the package rebuilds. If we depend on an older version, we will not rebuild it unless
// a new release of that version is uploaded (or if we modify builder to build old versions)
// So we fixup the graph here to represent that
pub fn precondition_graph(&self, origin: &str) -> DiGraphMap<PackageIdentIntern, EdgeType> {
let mut graph: DiGraphMap<PackageIdentIntern, EdgeType> = DiGraphMap::new();
for node in self.latest_graph.nodes() {
if self.node_filter_helper(Some(origin), node) {
graph.add_node(node);
}
}
for (src, dst, edge) in self.latest_graph.all_edges() {
if graph.contains_node(src) && graph.contains_node(dst) {
if dst.version().is_some() {
let short_dst = dst.short_ident();
if let Some(latest) = self.latest_map.get(&short_dst) {
if latest.version() <= dst.version() {
// If we are pointing to the latest version, it's treated just as if
// we were using an short ident. Otherwise we ignore this as it will
// never trigger a rebuild.
// We choose <= just in case we have a race condition w
graph.add_edge(src, short_dst, *edge);
}
} else {
// This is an interesting subcase.
// Here we have a dependency on a package *we've never seen* (because
// it's not in latest_map)
// That's serious breakage, as we can't build anything that depends on
// it.assert_eq! we're going to put it in the graph
// to make it visible but not fail, because it might
// be worth doing a partial build. A open question is how to best
// communicate to users that a package is
// unbuildable because of missing dependencies.
graph.add_edge(src, short_dst, *edge);
}
} else {
graph.add_edge(src, dst, *edge);
}
}
}
graph
}
// We keep the node if it either is in the origin, or if it is directly depended on by a node in
// the origin
//
pub fn node_filter_helper(&self, origin: Option<&str>, node: PackageIdentIntern) -> bool {
if filter_match(&node, origin) {
true
} else {
for pred in self.latest_graph
.neighbors_directed(node, Direction::Incoming)
{
if filter_match(&pred, origin) {
return true;
}
}
false
}
}
}
// While parameterizing over a hasher is a nice thing for a library, this code is very specialized,
// and we won't be using anything beyond the standard PackageIdent hasher here.
// https://rust-lang.github.io/rust-clippy/master/index.html#implicit_hasher
#[allow(clippy::implicit_hasher)]
pub fn build_package(package: &PackageInfo,
latest: &mut HashMap<PackageIdent, PackageIdent>)
-> PackageBuild {
// Create our package name
let ident = make_temp_ident(&package.ident);
// resolve our runtime and build deps
let mut bt_deps = Vec::new();
let mut rt_deps = Vec::new();
for dep in &package.plan_bdeps {
// Horrible hack to get around our own pinning
let sdep = short_ident(dep, false);
bt_deps.push(latest.get(&sdep)
.unwrap_or_else(|| {
panic!("{} Unable to find bt dep {} ({})", &ident, &dep, &sdep)
})
.clone())
}
for dep in &package.plan_deps {
// Horrible hack to get around our own pinning
let sdep = short_ident(dep, false);
rt_deps.push(latest.get(&sdep)
.unwrap_or_else(|| {
panic!("{} Unable to find rt dep {} ({})", &ident, &dep, &sdep)
})
.clone())
}
// update latest
latest.insert(short_ident(&ident, false), ident.clone());
latest.insert(short_ident(&ident, true), ident.clone());
// Make the package
PackageBuild { ident,
bt_deps,
rt_deps }
}
#[cfg(test)]
mod test {
use super::*;
use std::str::FromStr;
const TGT: &str = "x86_64-linux";
const EMPTY: [&str; 0] = [];
#[test]
#[ignore] // This is probably broken by the changes to serialization
fn write_restore_packages() {
let mut packages = Vec::new();
let package1 = PackageInfo::mk("foo/bar/1/2", TGT, &["foo/baz/1/2"], &EMPTY, &EMPTY);
let package2 = PackageInfo::mk("foo/baz/1/2", TGT, &["foo/bat/1/2"], &EMPTY, &EMPTY);
packages.push(package1);
packages.push(package2);
let tmpfile = "/tmp/junk"; // Do this smarter
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
graph.build(packages.into_iter(), true);
let stats = graph.stats();
assert_eq!(stats.node_count, 2);
graph.write_packages_json(tmpfile, None);
let mut graph2 = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
graph2.read_packages_json(tmpfile, true);
let stats = graph2.stats();
assert_eq!(stats.node_count, 2);
}
// we can create a simple graph
#[test]
fn pre_check_with_dep_not_present() {
let mut graph =
PackageGraphForTarget::new(PackageTarget::from_str("x86_64-linux").unwrap());
let empty: [&str; 0] = [];
let package1 = PackageInfo::mk("foo/bar/1/2", "x86_64-linux", &["foo/baz"], &empty, &empty);
let package2 = PackageInfo::mk("foo/baz/1/2", "x86_64-linux", &["foo/xyz"], &empty, &empty);
let pre_check1 = graph.check_extend(&package1, true);
assert_eq!(pre_check1, true);
let (ncount, ecount) = graph.extend(&package1, true);
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
let pre_check2 = graph.check_extend(&package2, true);
assert_eq!(pre_check2, true);
let (ncount, ecount) = graph.extend(&package2, true);
assert_eq!(ncount, 3);
assert_eq!(ecount, 2);
}
// A run time circular dependency is forbidden, and should not change the graph if attempted.
#[test]
fn disallow_circular_dependency() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let mut packages = Vec::new();
let package1 = PackageInfo::mk("foo/bar/1/2", TGT, &["foo/baz"], &EMPTY, &EMPTY);
let package2 = PackageInfo::mk("foo/baz/1/2", TGT, &["foo/bar"], &EMPTY, &EMPTY);
packages.push(package1);
let (ncount, ecount) = graph.build(packages.into_iter(), true);
// Both nodes will be present in the graph, but only the first edge should exist.
// The first package created will create nodes for all of its declared dependencies.
// The second node added will already exist, but the edge back to the first node
// should not be created as this will cause a cycle.
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
let stats = graph.stats();
assert_eq!(stats.is_cyclic, false);
// check extend should reject a cycle
let pre_check = graph.check_extend(&package2, true);
assert_eq!(pre_check, false);
let (ncount, ecount) = graph.extend(&package2, true);
// We shouldn't add any edges for a circular dependency
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
}
// A build time circular dependency is ok
#[test]
fn allow_circular_build_dependency() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let mut packages = Vec::new();
let package1 = PackageInfo::mk("foo/bar/1/2", TGT, &["foo/baz"], &EMPTY, &EMPTY);
let package2 = PackageInfo::mk("foo/baz/1/2", TGT, &EMPTY, &["foo/bar"], &EMPTY);
packages.push(package1);
let (ncount, ecount) = graph.build(packages.into_iter(), true);
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
let stats = graph.stats();
assert_eq!(stats.is_cyclic, false);
// check extend should allow a cycle for a build dep
let pre_check = graph.check_extend(&package2, true);
assert_eq!(pre_check, true);
let (ncount, ecount) = graph.extend(&package2, true);
// We should see the edges including the circular dependency
assert_eq!(ncount, 2);
assert_eq!(ecount, 2);
}
// Test that updated nodes with removed edges do the right thing
#[test]
fn updates_remove_edges_correctly() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let package1v1 = PackageInfo::mk("foo/bar/1/2", TGT, &["foo/baz"], &EMPTY, &EMPTY);
let package1v2 = PackageInfo::mk("foo/bar/2/2", TGT, &EMPTY, &EMPTY, &EMPTY);
let package2 = PackageInfo::mk("foo/baz/1/2", TGT, &["foo/bar"], &EMPTY, &EMPTY);
let (ncount, ecount) = graph.extend(&package1v1, true);
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
// We reject adding a runtime dep
assert_eq!(false, graph.check_extend(&package2, true));
// update the package
let (ncount, ecount) = graph.extend(&package1v2, true);
assert_eq!(ncount, 2);
assert_eq!(ecount, 0);
// We allow adding a runtime dep, once the cycle is removed
assert_eq!(true, graph.check_extend(&package2, true));
let (ncount, ecount) = graph.extend(&package2, true);
assert_eq!(ncount, 2);
assert_eq!(ecount, 1);
}
fn extend_variant_helper(graph: &mut PackageGraphForTarget,
package: &PackageInfo,
success_expected: bool,
node_delta: i64,
edge_delta: i64) {
let Stats { node_count: ncount,
edge_count: ecount,
.. } = graph.stats();
assert_eq!(success_expected, graph.check_extend(&package, true));
// assert_graph_extend!(graph, pkg_info, expected_status, expected_node_count,
// expected_edge_count);
let (new_ncount, new_ecount) = graph.extend(&package, true);
if !success_expected {
assert_eq!(ncount, new_ncount);
assert_eq!(ecount, new_ecount);
}
assert_eq!(node_delta,
(new_ncount as i64) - (ncount as i64),
"Node expected delta not equal to actual");
assert_eq!(edge_delta,
(new_ecount as i64) - (ecount as i64),
"Edge expected delta not equal to actual");
}
// test for long cycles
#[test]
fn longer_cycles_are_spotted() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let packages = vec![PackageInfo::mk("foo/c1/1/2", TGT, &["foo/c2"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c2/1/2", TGT, &["foo/c3"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c3/2/2", TGT, &["foo/c4"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c4/2/2", TGT, &["foo/c5"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c5/2/2", TGT, &["foo/c6"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c6/2/2", TGT, &["foo/c7"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c7/2/2", TGT, &["foo/c1"], &EMPTY, &EMPTY),];
extend_variant_helper(&mut graph, &packages[0], true, 2, 1);
extend_variant_helper(&mut graph, &packages[1], true, 1, 1);
extend_variant_helper(&mut graph, &packages[2], true, 1, 1);
extend_variant_helper(&mut graph, &packages[3], true, 1, 1);
extend_variant_helper(&mut graph, &packages[4], true, 1, 1);
extend_variant_helper(&mut graph, &packages[5], true, 1, 1);
extend_variant_helper(&mut graph, &packages[6], false, 0, 0);
}
// test for pinned deps
#[test]
fn pinned_deps_are_ignored() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let packages = vec![PackageInfo::mk("foo/c1/1/2", TGT, &["foo/c2"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c2/1/2", TGT, &["foo/c1/1/2"], &EMPTY, &EMPTY),];
extend_variant_helper(&mut graph, &packages[0], true, 2, 1);
extend_variant_helper(&mut graph, &packages[1], true, 0, 0);
}
// This test the currently implmented behaviour, but this might need to change.
// While we ignore fully qualified deps, we have to track (for now) partially qualified deps.
// A common idiom is to have a pair of packages (say A & B) with a dependency from A to B where
// B must have a particular version. Most commonly, this happens when two packages bump
// version in lockstep and must have the same version number.
// If A points to the latest version of B, we treat this as an edge to B for build purposes, and
// otherwise ignore it, as currently we don't rebuild older versions.
// It might be nice to track this in the graph, but we might not have full visibility into
// whether it is the latest during an incremental build process. So we track it for now, and
// fix up in the build phase. Note the build phase should explicitly test this!
#[test]
fn version_pinned_deps_are_ignored() {
let mut graph = PackageGraphForTarget::new(PackageTarget::from_str(TGT).unwrap());
let packages = vec![PackageInfo::mk("foo/c1/0/2", TGT, &["foo/c2"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c1/1/2", TGT, &["foo/c2"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c2/1/2", TGT, &["foo/c1/0"], &EMPTY, &EMPTY),
PackageInfo::mk("foo/c2/2/2", TGT, &["foo/c1/1"], &EMPTY, &EMPTY),];
extend_variant_helper(&mut graph, &packages[0], true, 2, 1);
extend_variant_helper(&mut graph, &packages[1], true, 0, 0);
// older version pin not rejected, but changes nothing
extend_variant_helper(&mut graph, &packages[2], true, 1, 1);
// newer version pin allowed as latest.
extend_variant_helper(&mut graph, &packages[3], true, 1, 0);
}
// TODO:
// ghost nodes? (nodes that we've not seen package/plan info for)
}
| 44.044593 | 118 | 0.555255 |
7564338857b6813305201265e45185d80b4e6c97 | 18,348 | //! Lorem ipsum generator.
//!
//! This crate generates pseudo-Latin [lorem ipsum placeholder
//! text][wiki]. The traditional lorem ipsum text start like this:
//!
//! > Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
//! > eiusmod tempor incididunt ut labore et dolore magna aliqua.
//!
//! This text is in the [`LOREM_IPSUM`] constant. Random looking text
//! like the above can be generated using the [`lipsum`] function. The
//! function allows you to generate as much text as desired and each
//! invocation will generate different text.
//!
//! The random looking text is generated using a [Markov chain] of
//! order two, which simply means that the next word is based on the
//! previous two words in the input texts. The Markov chain can be
//! used with other input texts by creating an instance of
//! [`MarkovChain`] and calling its [`learn`] method.
//!
//! [wiki]: https://en.wikipedia.org/wiki/Lorem_ipsum
//! [`lipsum`]: fn.lipsum.html
//! [`MarkovChain`]: struct.MarkovChain.html
//! [`learn`]: struct.MarkovChain.html#method.learn
//! [Markov chain]: https://en.wikipedia.org/wiki/Markov_chain
#![doc(html_root_url = "https://docs.rs/lipsum/0.6.0")]
#![deny(missing_docs)]
use rand::rngs::ThreadRng;
use rand::seq::SliceRandom;
use rand::Rng;
use std::cell::RefCell;
use std::collections::HashMap;
/// A bigram is simply two consecutive words.
pub type Bigram<'a> = (&'a str, &'a str);
/// Simple order two Markov chain implementation.
///
/// The [Markov chain] is a chain of order two, which means that it
/// will use the previous two words (a bigram) when predicting the
/// next word. This is normally enough to generate random text that
/// looks somewhat plausible. The implementation is based on
/// [Generating arbitrary text with Markov chains in Rust][blog post].
///
/// [Markov chain]: https://en.wikipedia.org/wiki/Markov_chain
/// [blog post]: https://blakewilliams.me/posts/generating-arbitrary-text-with-markov-chains-in-rust
pub struct MarkovChain<'a, R: Rng> {
    /// Maps each bigram to the words observed following it in the
    /// learned text.
    map: HashMap<Bigram<'a>, Vec<&'a str>>,
    /// Sorted copy of `map`'s keys, kept in sync by `learn`, so a
    /// random starting bigram can be chosen deterministically given
    /// a seeded RNG.
    keys: Vec<Bigram<'a>>,
    /// Random number generator used to pick starting states and
    /// successor words.
    rng: R,
}
impl<'a> MarkovChain<'a, ThreadRng> {
    /// Construct an empty Markov chain backed by the default
    /// thread-local random number generator.
    ///
    /// # Examples
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let chain = MarkovChain::new();
    /// assert!(chain.is_empty());
    /// ```
    pub fn new() -> MarkovChain<'a, ThreadRng> {
        Self::new_with_rng(rand::thread_rng())
    }
}
impl<'a> Default for MarkovChain<'a, ThreadRng> {
/// Create a new empty Markov chain. It will use a default
/// thread-local random number generator.
fn default() -> Self {
Self::new()
}
}
impl<'a, R: Rng> MarkovChain<'a, R> {
    /// Create a new empty Markov chain that uses the given random
    /// number generator.
    ///
    /// # Examples
    ///
    /// ```
    /// # fn main() {
    /// use rand::SeedableRng;
    /// use rand_xorshift::XorShiftRng;
    /// use lipsum::MarkovChain;
    ///
    /// let rng = XorShiftRng::seed_from_u64(0);
    /// let mut chain = MarkovChain::new_with_rng(rng);
    /// chain.learn("infra-red red orange yellow green blue indigo x-ray");
    ///
    /// // The chain jumps consistently like this:
    /// assert_eq!(chain.generate(1), "Yellow.");
    /// assert_eq!(chain.generate(1), "Blue.");
    /// assert_eq!(chain.generate(1), "Orange.");
    /// # }
    /// ```
    pub fn new_with_rng(rng: R) -> MarkovChain<'a, R> {
        MarkovChain {
            map: HashMap::new(),
            keys: Vec::new(),
            rng,
        }
    }
    /// Add new text to the Markov chain. This can be called several
    /// times to build up the chain.
    ///
    /// # Examples
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let mut chain = MarkovChain::new();
    /// chain.learn("red green blue");
    /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue"]));
    ///
    /// chain.learn("red green yellow");
    /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue", "yellow"]));
    /// ```
    pub fn learn(&mut self, sentence: &'a str) {
        let words = sentence.split_whitespace().collect::<Vec<&str>>();
        for window in words.windows(3) {
            let (a, b, c) = (window[0], window[1], window[2]);
            self.map.entry((a, b)).or_insert_with(Vec::new).push(c);
        }
        // Sync the keys with the current map.
        self.keys = self.map.keys().cloned().collect();
        self.keys.sort();
    }
    /// Returns the number of states in the Markov chain.
    ///
    /// # Examples
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let mut chain = MarkovChain::new();
    /// assert_eq!(chain.len(), 0);
    ///
    /// chain.learn("red orange yellow green blue indigo");
    /// assert_eq!(chain.len(), 4);
    /// ```
    #[inline]
    pub fn len(&self) -> usize {
        self.map.len()
    }
    /// Returns `true` if the Markov chain has no states.
    ///
    /// # Examples
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let mut chain = MarkovChain::new();
    /// assert!(chain.is_empty());
    ///
    /// chain.learn("foo bar baz");
    /// assert!(!chain.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Get the possible words following the given bigram, or `None`
    /// if the state is invalid.
    ///
    /// # Examples
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let mut chain = MarkovChain::new();
    /// chain.learn("red green blue");
    /// assert_eq!(chain.words(("red", "green")), Some(&vec!["blue"]));
    /// assert_eq!(chain.words(("foo", "bar")), None);
    /// ```
    pub fn words(&self, state: Bigram<'a>) -> Option<&Vec<&str>> {
        self.map.get(&state)
    }
    /// Generate a sentence with `n` words of lorem ipsum text. The
    /// sentence will start from a random point in the Markov chain
    /// and a `.` will be added as necessary to form a full sentence.
    ///
    /// See [`generate_from`] if you want to control the starting
    /// point for the generated text and see [`iter`] if you simply
    /// want a sequence of words.
    ///
    /// # Examples
    ///
    /// Generating the sounds of a grandfather clock:
    ///
    /// ```
    /// use lipsum::MarkovChain;
    ///
    /// let mut chain = MarkovChain::new();
    /// chain.learn("Tick, Tock, Tick, Tock, Ding! Tick, Tock, Ding! Ding!");
    /// println!("{}", chain.generate(15));
    /// ```
    ///
    /// The output looks like this:
    ///
    /// > Ding! Tick, Tock, Tick, Tock, Ding! Ding! Tock, Ding! Tick,
    /// > Tock, Tick, Tock, Tick, Tock.
    ///
    /// [`generate_from`]: struct.MarkovChain.html#method.generate_from
    /// [`iter`]: struct.MarkovChain.html#method.iter
    pub fn generate(&mut self, n: usize) -> String {
        join_words(self.iter().take(n))
    }
    /// Generate a sentence with `n` words of lorem ipsum text. The
    /// sentence will start from the given bigram and a `.` will be
    /// added as necessary to form a full sentence.
    ///
    /// Use [`generate`] if the starting point is not important. See
    /// [`iter_from`] if you want a sequence of words that you can
    /// format yourself.
    ///
    /// [`generate`]: struct.MarkovChain.html#method.generate
    /// [`iter_from`]: struct.MarkovChain.html#method.iter_from
    pub fn generate_from(&mut self, n: usize, from: Bigram<'a>) -> String {
        join_words(self.iter_from(from).take(n))
    }
    /// Make a never-ending iterator over the words in the Markov
    /// chain. The iterator starts at a random point in the chain.
    pub fn iter(&mut self) -> Words<'_, R> {
        let state = if self.is_empty() {
            ("", "")
        } else {
            *self.keys.choose(&mut self.rng).unwrap()
        };
        Words {
            map: &self.map,
            rng: &mut self.rng,
            keys: &self.keys,
            state,
        }
    }
    /// Make a never-ending iterator over the words in the Markov
    /// chain. The iterator starts at the given bigram.
    pub fn iter_from(&mut self, from: Bigram<'a>) -> Words<'_, R> {
        Words {
            map: &self.map,
            rng: &mut self.rng,
            keys: &self.keys,
            state: from,
        }
    }
}
/// Never-ending iterator over words in the Markov chain.
///
/// Generated with the [`iter`] or [`iter_from`] methods.
///
/// [`iter`]: struct.MarkovChain.html#method.iter
/// [`iter_from`]: struct.MarkovChain.html#method.iter_from
pub struct Words<'a, R: Rng> {
    // Transition table borrowed from the owning `MarkovChain`.
    map: &'a HashMap<Bigram<'a>, Vec<&'a str>>,
    rng: &'a mut R,
    // A slice rather than `&Vec<_>` (the `&Vec<T>` anti-pattern);
    // `&Vec` coerces to `&[_]` at every construction site.
    keys: &'a [Bigram<'a>],
    // Current position in the chain.
    state: Bigram<'a>,
}
impl<'a, R: Rng> Iterator for Words<'a, R> {
    type Item = &'a str;
    // Infinite iterator: returns `None` only when the chain is empty.
    fn next(&mut self) -> Option<&'a str> {
        // A chain that has learnt nothing can produce nothing.
        if self.map.is_empty() {
            return None;
        }
        // Yield the first word of the current state *before* validating
        // it against the map. If the current state has no successors
        // (e.g. the final bigram of the learnt text), re-seed from a
        // random known bigram -- see the `generate_last_bigram` test
        // below for the resulting behavior.
        let result = Some(self.state.0);
        while !self.map.contains_key(&self.state) {
            self.state = *self.keys.choose(self.rng).unwrap();
        }
        // Advance: pick a random successor and shift the bigram window.
        let next_words = &self.map[&self.state];
        let next = next_words.choose(self.rng).unwrap();
        self.state = (self.state.1, next);
        result
    }
}
/// Check if `c` is an ASCII punctuation character.
///
/// A free-function wrapper so the check can be passed by name where a
/// `fn(char) -> bool` pattern is expected.
fn is_ascii_punctuation(c: char) -> bool {
    char::is_ascii_punctuation(&c)
}
/// Capitalize the first character in a string.
///
/// Uppercasing goes through `str::to_uppercase`, so characters whose
/// uppercase form expands (e.g. 'ß' -> "SS") are handled correctly.
/// An empty input yields an empty `String`.
fn capitalize(word: &str) -> String {
    // The explicit `<'a>` lifetime on the original signature was
    // redundant (clippy: needless_lifetimes); elision covers it.
    match word.chars().next() {
        None => String::new(),
        Some(first) => {
            let head = first.len_utf8();
            let mut result = String::with_capacity(word.len());
            result.push_str(&word[..head].to_uppercase());
            result.push_str(&word[head..]);
            result
        }
    }
}
/// Join words from an iterator. The first word is always capitalized
/// and the generated sentence will end with `'.'` if it doesn't
/// already end with some other sentence-ending punctuation (`.`, `!`
/// or `?`).
fn join_words<'a, I: Iterator<Item = &'a str>>(mut words: I) -> String {
    let first = match words.next() {
        None => return String::new(),
        Some(word) => word,
    };
    let mut sentence = capitalize(first);
    // Add the remaining words, separated by single spaces.
    for word in words {
        sentence.push(' ');
        sentence.push_str(word);
    }
    // Ensure the sentence ends with one of ".!?". A char-slice pattern
    // replaces the original closure (idiomatic `str::ends_with`).
    if !sentence.ends_with(&['.', '!', '?'][..]) {
        // Trim all trailing punctuation characters first, to avoid
        // adding '.' after a ',' or similar.
        let keep = sentence.trim_end_matches(is_ascii_punctuation).len();
        sentence.truncate(keep);
        sentence.push('.');
    }
    sentence
}
/// The traditional lorem ipsum text as given in [Wikipedia]. Using
/// this text alone for a Markov chain of order two doesn't work very
/// well since each bigram (two consecutive words) is followed by just
/// one other word. In other words, the Markov chain will always
/// produce the same output and recreate the lorem ipsum text
/// precisely. However, combining it with the full text in
/// [`LIBER_PRIMUS`] works well.
///
/// [Wikipedia]: https://en.wikipedia.org/wiki/Lorem_ipsum
/// [`LIBER_PRIMUS`]: constant.LIBER_PRIMUS.html
// `&str` rather than `&'static str`: consts are implicitly 'static
// (clippy: redundant_static_lifetimes).
pub const LOREM_IPSUM: &str = include_str!("lorem-ipsum.txt");
/// The first book in Cicero's work De finibus bonorum et malorum ("On
/// the ends of good and evil"). The lorem ipsum text in
/// [`LOREM_IPSUM`] is derived from part of this text.
///
/// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html
// `&str` rather than `&'static str`: consts are implicitly 'static
// (clippy: redundant_static_lifetimes).
pub const LIBER_PRIMUS: &str = include_str!("liber-primus.txt");
thread_local! {
    // Markov chain generating lorem ipsum text.
    //
    // Thread-local rather than a global because the chain embeds a
    // `ThreadRng` and is mutated (`borrow_mut`) on every generation
    // call.
    static LOREM_IPSUM_CHAIN: RefCell<MarkovChain<'static, ThreadRng>> = {
        let mut chain = MarkovChain::new();
        // The cost of learning increases as more and more text is
        // added, so we start with the smallest text.
        chain.learn(LOREM_IPSUM);
        chain.learn(LIBER_PRIMUS);
        RefCell::new(chain)
    }
}
/// Generate `n` words of lorem ipsum text. The output will always
/// start with "Lorem ipsum".
///
/// The text continues with the standard lorem ipsum text from
/// [`LOREM_IPSUM`], turning random once more than 18 words are
/// requested. See [`lipsum_words`] if fully random text is needed.
///
/// # Examples
///
/// ```
/// use lipsum::lipsum;
///
/// assert_eq!(lipsum(7), "Lorem ipsum dolor sit amet, consectetur adipiscing.");
/// ```
///
/// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html
/// [`lipsum_words`]: fn.lipsum_words.html
pub fn lipsum(n: usize) -> String {
    LOREM_IPSUM_CHAIN.with(|cell| cell.borrow_mut().generate_from(n, ("Lorem", "ipsum")))
}
/// Generate `n` words of random lorem ipsum text.
///
/// The text starts with a random word from [`LOREM_IPSUM`]. Depending
/// on the punctuation of the randomly selected words, the output may
/// span multiple sentences.
///
/// # Examples
///
/// ```
/// use lipsum::lipsum_words;
///
/// println!("{}", lipsum_words(6));
/// // -> "Propter soliditatem, censet in infinito inani."
/// ```
///
/// [`LOREM_IPSUM`]: constant.LOREM_IPSUM.html
pub fn lipsum_words(n: usize) -> String {
    LOREM_IPSUM_CHAIN.with(|cell| cell.borrow_mut().generate(n))
}
/// Minimum number of words to include in a title.
const TITLE_MIN_WORDS: usize = 3;
/// Maximum number of words to include in a title.
///
/// NOTE(review): `gen_range(min, max)` in `lipsum_title` is half-open,
/// so titles actually contain at most 7 words -- confirm whether an
/// inclusive bound of 8 was intended.
const TITLE_MAX_WORDS: usize = 8;
/// Words shorter than this size are not capitalized.
const TITLE_SMALL_WORD: usize = 3;
/// Generate a short lorem ipsum text with words in title case.
///
/// The words are capitalized (except short filler words) and stripped
/// of surrounding punctuation characters.
///
/// # Examples
///
/// ```
/// use lipsum::lipsum_title;
///
/// println!("{}", lipsum_title());
/// ```
///
/// This will generate a string like
///
/// > Grate Meminit et Praesentibus
///
/// which should be suitable for use in a document title for section
/// heading.
pub fn lipsum_title() -> String {
    LOREM_IPSUM_CHAIN.with(|cell| {
        let word_count = rand::thread_rng().gen_range(TITLE_MIN_WORDS, TITLE_MAX_WORDS);
        let mut chain = cell.borrow_mut();
        // The average word length in the corpus is about 7.6 bytes, so
        // this capacity avoids most reallocations.
        let mut title = String::with_capacity(8 * word_count);
        let picked = chain
            .iter()
            .map(|raw| raw.trim_matches(is_ascii_punctuation))
            .filter(|stripped| !stripped.is_empty())
            .take(word_count);
        for (i, word) in picked.enumerate() {
            if i > 0 {
                title.push(' ');
            }
            // The first word and all long words are capitalized; short
            // filler words are kept lowercase.
            let capitalize_it = i == 0 || word.len() > TITLE_SMALL_WORD;
            if capitalize_it {
                title.push_str(&capitalize(word));
            } else {
                title.push_str(word);
            }
        }
        title
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand::SeedableRng;
    use rand_xorshift::XorShiftRng;
    #[test]
    fn starts_with_lorem_ipsum() {
        assert_eq!(&lipsum(10)[..11], "Lorem ipsum");
    }
    #[test]
    fn generate_zero_words() {
        assert_eq!(lipsum(0).split_whitespace().count(), 0);
    }
    #[test]
    fn generate_one_word() {
        assert_eq!(lipsum(1).split_whitespace().count(), 1);
    }
    #[test]
    fn generate_two_words() {
        assert_eq!(lipsum(2).split_whitespace().count(), 2);
    }
    #[test]
    fn starts_differently() {
        // Check that calls to lipsum_words don't always start with
        // "Lorem ipsum".
        // NOTE(review): probabilistic -- two random 5-word outputs could
        // in principle start identically, making this test flaky.
        let idx = "Lorem ipsum".len();
        assert_ne!(&lipsum_words(5)[..idx], &lipsum_words(5)[..idx]);
    }
    #[test]
    fn generate_title() {
        // Titles must have punctuation stripped and long words
        // capitalized (see TITLE_SMALL_WORD).
        for word in lipsum_title().split_whitespace() {
            assert!(
                !word.starts_with(is_ascii_punctuation) && !word.ends_with(is_ascii_punctuation),
                "Unexpected punctuation: {:?}",
                word
            );
            if word.len() > TITLE_SMALL_WORD {
                assert!(
                    word.starts_with(char::is_uppercase),
                    "Expected small word to be capitalized: {:?}",
                    word
                );
            }
        }
    }
    #[test]
    fn empty_chain() {
        // An untrained chain produces the empty string.
        let mut chain = MarkovChain::new();
        assert_eq!(chain.generate(10), "");
    }
    #[test]
    fn generate_from() {
        let mut chain = MarkovChain::new();
        chain.learn("red orange yellow green blue indigo violet");
        assert_eq!(
            chain.generate_from(5, ("orange", "yellow")),
            "Orange yellow green blue indigo."
        );
    }
    #[test]
    fn generate_last_bigram() {
        // The bigram "yyy zzz" will not be present in the Markov
        // chain's map, and so we will not generate "xxx yyy zzz" as
        // one would expect. The chain moves from state "xxx yyy" to
        // "yyy zzz", but sees that as invalid state and resets itself
        // back to "xxx yyy".
        let mut chain = MarkovChain::new();
        chain.learn("xxx yyy zzz");
        assert_ne!(chain.generate_from(3, ("xxx", "yyy")), "xxx yyy zzz");
    }
    #[test]
    fn generate_from_no_panic() {
        // No panic when asked to generate a chain from a starting
        // point that doesn't exist in the chain.
        let mut chain = MarkovChain::new();
        chain.learn("foo bar baz");
        chain.generate_from(3, ("xxx", "yyy"));
    }
    #[test]
    fn chain_map() {
        // The transition map records one entry per interior bigram.
        let mut chain = MarkovChain::new();
        chain.learn("foo bar baz quuz");
        let map = &chain.map;
        assert_eq!(map.len(), 2);
        assert_eq!(map[&("foo", "bar")], vec!["baz"]);
        assert_eq!(map[&("bar", "baz")], vec!["quuz"]);
    }
    #[test]
    fn new_with_rng() {
        // The expected string below is pinned by the fixed-seed
        // XorShiftRng; changing the RNG or seed invalidates it.
        let rng = XorShiftRng::seed_from_u64(1234);
        let mut chain = MarkovChain::new_with_rng(rng);
        chain.learn("foo bar x y z");
        chain.learn("foo bar a b c");
        assert_eq!(chain.generate(15), "Bar x y a b x y y b b a b a b bar.");
    }
}
| 31.471698 | 100 | 0.58099 |
use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
#[test]
fn kxnorw_1() {
    // KXNORW k3, k7, k1 in 32-bit (Dword) mode.
    // Expected encoding VEX.L1.0F.W0 46 /r:
    //   C5 C4 46 D9 = [197, 196, 70, 217]
    // (2-byte VEX, vvvv = !0b0111 = k7, L = 1; ModRM 0xD9: reg = k3, rm = k1).
    run_test(
        &Instruction {
            mnemonic: Mnemonic::KXNORW,
            operand1: Some(Direct(K3)),
            operand2: Some(Direct(K7)),
            operand3: Some(Direct(K1)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[197, 196, 70, 217],
        OperandSize::Dword,
    )
}
#[test]
fn kxnorw_2() {
    // KXNORW k4, k3, k1 in 64-bit (Qword) mode.
    // Expected encoding VEX.L1.0F.W0 46 /r:
    //   C5 E4 46 E1 = [197, 228, 70, 225]
    // (2-byte VEX, vvvv = !0b0011 = k3, L = 1; ModRM 0xE1: reg = k4, rm = k1).
    run_test(
        &Instruction {
            mnemonic: Mnemonic::KXNORW,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(K3)),
            operand3: Some(Direct(K1)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[197, 228, 70, 225],
        OperandSize::Qword,
    )
}
| 23.7 | 95 | 0.500422 |
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Display;
use crate::Screen;
use glib::object::IsA;
use glib::object::ObjectType as ObjectType_;
use glib::translate::*;
use glib::StaticType;
use std::fmt;
// Machine-generated (see file header: DO NOT EDIT). The `glib::wrapper!`
// macro produces a reference-counted wrapper around `GdkAppLaunchContext`
// with GType registration, extending `gio::AppLaunchContext`.
glib::wrapper! {
    pub struct AppLaunchContext(Object<ffi::GdkAppLaunchContext>) @extends gio::AppLaunchContext;
    match fn {
        type_ => || ffi::gdk_app_launch_context_get_type(),
    }
}
impl AppLaunchContext {
    /// Thin FFI wrapper over `gdk_app_launch_context_set_desktop`
    /// (presumably selects the workspace for the launched app -- see the
    /// GDK documentation for the exact semantics).
    #[doc(alias = "gdk_app_launch_context_set_desktop")]
    pub fn set_desktop(&self, desktop: i32) {
        unsafe {
            ffi::gdk_app_launch_context_set_desktop(self.to_glib_none().0, desktop);
        }
    }
    /// Thin FFI wrapper over `gdk_app_launch_context_set_icon`; accepts
    /// any `gio::Icon` implementation, or `None` to unset.
    #[doc(alias = "gdk_app_launch_context_set_icon")]
    pub fn set_icon<P: IsA<gio::Icon>>(&self, icon: Option<&P>) {
        unsafe {
            ffi::gdk_app_launch_context_set_icon(
                self.to_glib_none().0,
                icon.map(|p| p.as_ref()).to_glib_none().0,
            );
        }
    }
    /// Thin FFI wrapper over `gdk_app_launch_context_set_icon_name`.
    #[doc(alias = "gdk_app_launch_context_set_icon_name")]
    pub fn set_icon_name(&self, icon_name: Option<&str>) {
        unsafe {
            ffi::gdk_app_launch_context_set_icon_name(
                self.to_glib_none().0,
                icon_name.to_glib_none().0,
            );
        }
    }
    /// Thin FFI wrapper over `gdk_app_launch_context_set_screen`.
    #[doc(alias = "gdk_app_launch_context_set_screen")]
    pub fn set_screen(&self, screen: &Screen) {
        unsafe {
            ffi::gdk_app_launch_context_set_screen(self.to_glib_none().0, screen.to_glib_none().0);
        }
    }
    /// Thin FFI wrapper over `gdk_app_launch_context_set_timestamp`.
    #[doc(alias = "gdk_app_launch_context_set_timestamp")]
    pub fn set_timestamp(&self, timestamp: u32) {
        unsafe {
            ffi::gdk_app_launch_context_set_timestamp(self.to_glib_none().0, timestamp);
        }
    }
    /// Reads the `display` GObject property via
    /// `g_object_get_property`.
    pub fn display(&self) -> Option<Display> {
        unsafe {
            let mut value = glib::Value::from_type(<Display as StaticType>::static_type());
            glib::gobject_ffi::g_object_get_property(
                self.as_ptr() as *mut glib::gobject_ffi::GObject,
                b"display\0".as_ptr() as *const _,
                value.to_glib_none_mut().0,
            );
            value
                .get()
                .expect("Return Value for property `display` getter")
        }
    }
}
impl fmt::Display for AppLaunchContext {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Machine-generated: displays only the static type name.
        f.write_str("AppLaunchContext")
    }
}
| 30.216867 | 99 | 0.59689 |
use crate::{
core::{Core, ObjectRef, OnAdded, Property},
shapes::path::Path,
};
/// A path whose geometry is derived from parameters (width/height plus
/// an origin) rather than explicit vertices.
#[derive(Debug)]
pub struct ParametricPath {
    path: Path,
    // Dimensions of the path's bounds.
    width: Property<f32>,
    height: Property<f32>,
    // Origin within the bounds; defaults to 0.5 (presumably a
    // normalized 0..1 coordinate -- TODO confirm against callers).
    origin_x: Property<f32>,
    origin_y: Property<f32>,
}
impl ObjectRef<'_, ParametricPath> {
    pub fn width(&self) -> f32 {
        self.width.get()
    }
    pub fn set_width(&self, width: f32) {
        // Only an actual change invalidates the cached path geometry.
        if self.width() != width {
            self.width.set(width);
            self.cast::<Path>().mark_path_dirty();
        }
    }
    pub fn height(&self) -> f32 {
        self.height.get()
    }
    pub fn set_height(&self, height: f32) {
        // Only an actual change invalidates the cached path geometry.
        if self.height() != height {
            self.height.set(height);
            self.cast::<Path>().mark_path_dirty();
        }
    }
    pub fn origin_x(&self) -> f32 {
        self.origin_x.get()
    }
    pub fn set_origin_x(&self, origin_x: f32) {
        self.origin_x.set(origin_x);
    }
    pub fn origin_y(&self) -> f32 {
        self.origin_y.get()
    }
    pub fn set_origin_y(&self, origin_y: f32) {
        self.origin_y.set(origin_y);
    }
}
impl Core for ParametricPath {
    parent_types![(path, Path)];
    // Property keys (20, 21, 123, 124) are presumably serialized field
    // ids of the runtime file format -- do not renumber; TODO confirm
    // against the format specification.
    properties![
        (20, width, set_width),
        (21, height, set_height),
        (123, origin_x, set_origin_x),
        (124, origin_y, set_origin_y),
        path,
    ];
}
// Delegates the on-added lifecycle hook to the parent `Path` type.
impl OnAdded for ObjectRef<'_, ParametricPath> {
    on_added!(Path);
}
impl Default for ParametricPath {
    /// A zero-sized path with its origin at the center (0.5, 0.5).
    fn default() -> Self {
        let path = Path::default();
        let width = Property::new(0.0);
        let height = Property::new(0.0);
        let origin_x = Property::new(0.5);
        let origin_y = Property::new(0.5);
        Self {
            path,
            width,
            height,
            origin_x,
            origin_y,
        }
    }
}
| 20.55814 | 48 | 0.539027 |
//! Provides access to a remote `MultiFile` repository over SFTP as if it were a local Multi-File
//! Repository
use super::{BackendError, Result, SegmentDescriptor};
use crate::repository::backend::common::sync_backend::{BackendHandle, SyncBackend, SyncManifest};
use crate::repository::{Chunk, ChunkSettings, EncryptedKey, Key};
use serde_cbor as cbor;
use ssh2::{Session, Sftp};
use std::fmt::Debug;
use std::net::TcpStream;
use std::path::PathBuf;
use std::rc::Rc;
pub mod index;
pub mod manifest;
pub mod segment;
pub mod util;
use self::index::SFTPIndex;
use self::manifest::SFTPManifest;
use self::segment::SFTPSegmentHandler;
use self::util::LockedFile;
/// Lets `?` convert raw `ssh2` errors into the backend's error type by
/// stringifying them into `BackendError::ConnectionError`.
impl From<ssh2::Error> for BackendError {
    fn from(error: ssh2::Error) -> Self {
        Self::ConnectionError(format!("libssh2 Error: {}", error))
    }
}
/// Settings used for connecting to an SFTP server.
#[derive(Clone, Debug)]
pub struct SFTPSettings {
    /// Hostname of the SFTP server to connect to.
    pub hostname: String,
    /// Optional port to connect to, will default to 22
    pub port: Option<u16>,
    /// Username of the user to connect as
    pub username: String,
    /// Password to connect with
    ///
    /// Optional, will attempt to use ssh-agent if not provided.
    ///
    /// Note: even when set, ssh-agent authentication is tried first and
    /// the password only serves as a fallback (see
    /// `SFTPConnection::connect`).
    pub password: Option<String>,
    /// Path of the repository on the server
    pub path: String,
}
/// Connection state of an SFTP backend: either a live SSH/SFTP session
/// or just the settings required to establish one lazily.
#[derive(Clone)]
pub enum SFTPConnection {
    /// A live connection, holding the authenticated SSH session and a
    /// shared handle to its SFTP channel.
    Connected {
        settings: SFTPSettings,
        session: Session,
        sftp: Rc<Sftp>,
    },
    /// Settings only; `connect` upgrades this in place to `Connected`.
    NotConnected {
        settings: SFTPSettings,
    },
}
impl SFTPConnection {
    /// Returns `true` if this `SFTPConnection` is in a connected state
    pub fn connected(&self) -> bool {
        match self {
            SFTPConnection::Connected { .. } => true,
            SFTPConnection::NotConnected { .. } => false,
        }
    }
    /// Connects to the backend if needed
    ///
    /// Authentication tries the running ssh-agent first and falls back
    /// to password authentication only when a password is present in
    /// the settings. On success `self` is replaced in place with the
    /// `Connected` variant.
    pub fn connect(&mut self) -> Result<()> {
        if self.connected() {
            Ok(())
        } else {
            let hostname: &str = &self.settings().hostname;
            // Default to the standard SSH port when none was configured.
            let port = self.settings().port.unwrap_or(22);
            // Connect to the SSH server
            let tcp = TcpStream::connect((hostname, port))?;
            // Open up a session
            let mut session = Session::new()?;
            session.set_tcp_stream(tcp);
            session.handshake()?;
            // Attempt to authenticate with the ssh agent
            let result = session.userauth_agent(&self.settings().username);
            if result.is_err() {
                // Grab the password
                let password = self.settings().password.as_ref().ok_or_else(|| {
                    BackendError::ConnectionError(
                        format!(
                            "SFTP connection using ssh agent to {}@{}:{} failed, and no password was provided.",
                            self.settings().username,
                            hostname,
                            port)
                    )
                })?;
                // Attempt connecting with username/password
                session.userauth_password(&self.settings().username, password)?;
            }
            // If we are here and not authenticated, something is horribly wrong
            assert!(session.authenticated());
            // Open an SFTP connection
            let sftp = session.sftp()?;
            // FIXME: This is not a high performance impact issue, since this method should only
            // really be reached once per repository action, but right now I am sort of relying on
            // rustc/llvm to be smart enough to optimize out this clone.
            let new_settings = self.settings().clone();
            *self = SFTPConnection::Connected {
                settings: new_settings,
                session,
                sftp: Rc::new(sftp),
            };
            Ok(())
        }
    }
    /// Connects to the backend if needed and converts to `SFTPConnection::Connected`, otherwise
    /// returns `self` unaltered
    pub fn with_connection(mut self) -> Result<Self> {
        if self.connected() {
            Ok(self)
        } else {
            self.connect()?;
            Ok(self)
        }
    }
    /// Provides a reference to the internal settings of this connection
    pub fn settings(&self) -> &SFTPSettings {
        match self {
            SFTPConnection::Connected { settings, .. }
            | SFTPConnection::NotConnected { settings } => &settings,
        }
    }
    /// Provides a reference to the ssh session, or None if this connection is not in a connected
    /// state
    pub fn session(&self) -> Option<&Session> {
        match self {
            SFTPConnection::Connected { session, .. } => Some(&session),
            SFTPConnection::NotConnected { .. } => None,
        }
    }
    /// Provides a reference to the sftp session, or None if this connection is not in a connected
    /// state
    pub fn sftp(&self) -> Option<Rc<Sftp>> {
        match self {
            SFTPConnection::Connected { sftp, .. } => Some(Rc::clone(sftp)),
            SFTPConnection::NotConnected { .. } => None,
        }
    }
}
impl From<SFTPSettings> for SFTPConnection {
fn from(settings: SFTPSettings) -> Self {
SFTPConnection::NotConnected { settings }
}
}
impl Debug for SFTPConnection {
    /// Hand-written because `Session`/`Sftp` are not `Debug`; only the
    /// variant name and the settings are rendered.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            SFTPConnection::Connected { .. } => "SFTPConnection::Connected",
            SFTPConnection::NotConnected { .. } => "SFTPConnection::NotConnected",
        };
        f.debug_struct(name)
            .field("settings", self.settings())
            .finish()
    }
}
/// A remote `MultiFile`-style repository accessed over SFTP.
///
/// Bundles the manifest, index and segment handler, which all share
/// clones of the same underlying connection.
#[derive(Debug)]
pub struct SFTP {
    manifest: SFTPManifest,
    index: SFTPIndex,
    segment_handler: SFTPSegmentHandler,
    connection: SFTPConnection, // MUST be dropped last for safety with the C FFI in `ssh2`
}
impl SFTP {
    /// Opens a synchronous SFTP backend: establishes the connection (if
    /// needed) and wires the manifest, index and segment handler to
    /// clones of it.
    pub fn connect_raw(
        settings: impl Into<SFTPConnection>,
        key: &Key,
        chunk_settings: Option<ChunkSettings>,
    ) -> Result<Self> {
        let connection = settings.into().with_connection()?;
        let mut manifest = SFTPManifest::connect(connection.clone(), key, chunk_settings)?;
        let index = SFTPIndex::connect(connection.clone())?;
        let chunk_settings = manifest.chunk_settings();
        // NOTE(review): magic numbers -- 2 GB segment size limit and 100
        // segments per directory; confirm these match the local
        // MultiFile backend's defaults.
        let size_limit = 2_000_000_000;
        let segments_per_directory = 100;
        let segment_handler = SFTPSegmentHandler::connect(
            connection.clone(),
            size_limit,
            segments_per_directory,
            chunk_settings,
            key.clone(),
        )?;
        Ok(SFTP {
            connection,
            manifest,
            index,
            segment_handler,
        })
    }
    /// Spawns the backend on its own thread and returns a
    /// `BackendHandle` wrapping it.
    ///
    /// The spawned thread reports the outcome of `connect_raw` over a
    /// bounded channel: `None` on success (and the backend is handed to
    /// the handle), or `Some(error)` on failure, after which the
    /// backend thread panics.
    pub fn connect(
        settings: SFTPSettings,
        key: Key,
        chunk_settings: Option<ChunkSettings>,
        queue_depth: usize,
    ) -> Result<BackendHandle<SFTP>> {
        use crossbeam_channel::bounded;
        let (s, r) = bounded(1);
        let handle = BackendHandle::new(queue_depth, move || {
            let result = Self::connect_raw(settings, &key, chunk_settings);
            match result {
                Ok(backend) => {
                    s.send(None).unwrap();
                    backend
                }
                Err(e) => {
                    s.send(Some(e)).unwrap();
                    panic!("Opening an SFTP Backend Handle Failed")
                }
            }
        });
        // Block until the backend thread tells us whether construction
        // succeeded.
        let error = r
            .recv()
            .expect("Backend Handle thread died before it could send us its result");
        if let Some(error) = error {
            Err(error)
        } else {
            Ok(handle)
        }
    }
    /// Reads the encrypted key from `<repo>/key` without constructing a
    /// full backend (used e.g. before the key can be decrypted).
    pub fn read_key<S>(settings: S) -> Result<EncryptedKey>
    where
        S: Into<SFTPConnection>,
    {
        let connection = settings.into().with_connection()?;
        let sftp = connection.sftp().unwrap();
        let key_path = PathBuf::from(&connection.settings().path).join("key");
        // realpath doubles as an existence check with a useful error.
        let key_path = sftp.realpath(&key_path).map_err(|e| {
            BackendError::ConnectionError(format!(
                "Failed to resolve path of key file at: {:?}, Error was: {}",
                key_path, e
            ))
        })?;
        let file = sftp.open(&key_path).map_err(|e| {
            BackendError::ConnectionError(format!(
                "Failed to open key file at: {:?} Error was: {}",
                key_path, e
            ))
        })?;
        Ok(cbor::de::from_reader(file)?)
    }
}
impl SyncBackend for SFTP {
    type SyncManifest = SFTPManifest;
    type SyncIndex = SFTPIndex;
    fn get_index(&mut self) -> &mut Self::SyncIndex {
        &mut self.index
    }
    fn get_manifest(&mut self) -> &mut Self::SyncManifest {
        &mut self.manifest
    }
    /// Writes the encrypted key to `<repo>/key`, holding an exclusive
    /// file lock for the duration of the write.
    fn write_key(&mut self, key: EncryptedKey) -> Result<()> {
        let key_path = PathBuf::from(&self.connection.settings().path).join("key");
        let sftp = self.connection.sftp().expect("Somehow not connected");
        let mut file =
            LockedFile::open_read_write(&key_path, sftp)?.ok_or(BackendError::FileLockError)?;
        cbor::ser::to_writer(&mut file, &key)?;
        Ok(())
    }
    /// Reads the encrypted key from `<repo>/key`.
    ///
    /// NOTE(review): unlike `write_key`, this takes no lock -- confirm
    /// concurrent reads during a key write are intended to be safe.
    fn read_key(&mut self) -> Result<EncryptedKey> {
        let key_path = PathBuf::from(&self.connection.settings().path).join("key");
        let sftp = self.connection.sftp().expect("Somehow not connected");
        let file = sftp.open(&key_path)?;
        Ok(cbor::de::from_reader(file)?)
    }
    fn read_chunk(&mut self, location: SegmentDescriptor) -> Result<Chunk> {
        self.segment_handler.read_chunk(location)
    }
    fn write_chunk(&mut self, chunk: Chunk) -> Result<SegmentDescriptor> {
        self.segment_handler.write_chunk(chunk)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::repository::{Compression, HMAC};
    use crate::repository::{Encryption, Key};
    use std::env;
    /// Builds the test-server settings from the environment, with the
    /// CI defaults as fallbacks. Panics if `ASURAN_SFTP_HOSTNAME` is
    /// unset, since there is no sensible default for it.
    ///
    /// (Previously this env parsing was duplicated inline in
    /// `connection_fails` and `get_session`; `unwrap_or` calls were
    /// also allocating their defaults eagerly -- clippy: or_fun_call.)
    fn get_settings(path: String) -> SFTPSettings {
        let hostname = env::var_os("ASURAN_SFTP_HOSTNAME")
            .map(|x| x.into_string().unwrap())
            .expect("Server must be set");
        let username = env::var_os("ASURAN_SFTP_USER")
            .map(|x| x.into_string().unwrap())
            .unwrap_or_else(|| "asuran".to_string());
        let password = env::var_os("ASURAN_SFTP_PASS")
            .map(|x| x.into_string().unwrap())
            .unwrap_or_else(|| "asuran".to_string());
        let port = env::var_os("ASURAN_SFTP_PORT")
            .map(|x| x.into_string().unwrap())
            .unwrap_or_else(|| "22".to_string())
            .parse::<u16>()
            .expect("Unable to parse port");
        SFTPSettings {
            hostname,
            username,
            port: Some(port),
            password: Some(password),
            path,
        }
    }
    #[test]
    fn sftp_connect() {
        let mut connection: SFTPConnection = get_settings("sftp_connect".to_string()).into();
        connection
            .connect()
            .expect("Unable to make SFTP connection");
    }
    #[test]
    fn sftp_handle_connect() {
        let settings = get_settings("asuran/handle_connect".to_string());
        let handle = SFTP::connect(
            settings,
            Key::random(32),
            Some(ChunkSettings::lightweight()),
            2,
        );
        assert!(handle.is_ok())
    }
    /// Opens a raw synchronous backend at `path` on the test server.
    fn get_backend(path: impl AsRef<str>, key: &Key) -> SFTP {
        let path = path.as_ref().to_string();
        SFTP::connect_raw(get_settings(path), key, Some(ChunkSettings::lightweight()))
            .expect("Unable to connect to backend")
    }
    #[test]
    fn key_read_write() {
        // Round-trip an encrypted key through the backend, then read it
        // back both through `SyncBackend::read_key` and the standalone
        // `SFTP::read_key` helper.
        let key = Key::random(32);
        let enc_key = EncryptedKey::encrypt_defaults(
            &key,
            Encryption::new_aes256ctr(),
            "ASecurePassword".as_bytes(),
        );
        let mut backend = get_backend("asuran/key_read_write", &key);
        backend
            .write_key(enc_key.clone())
            .expect("Unable to write key");
        drop(backend);
        let mut backend = get_backend("asuran/key_read_write", &key);
        let result = backend.read_key().expect("Unable to read key");
        let dec_result = result.decrypt("ASecurePassword".as_bytes()).unwrap();
        assert!(key == dec_result);
        let connection = backend.connection;
        let result = SFTP::read_key(connection).expect("Unable to read key");
        let dec_result = result.decrypt("ASecurePassword".as_bytes()).unwrap();
        assert!(key == dec_result);
    }
    #[test]
    fn chunk_read_write() {
        // Round-trip a chunk through the segment handler.
        let key = Key::random(32);
        let chunk = Chunk::pack(
            vec![1_u8; 1024],
            Compression::NoCompression,
            Encryption::NoEncryption,
            HMAC::Blake3,
            &key,
        );
        let mut backend = get_backend("asuran/chunk_read_write", &key);
        let desc = backend
            .write_chunk(chunk.clone())
            .expect("Unable to write chunk");
        drop(backend);
        let mut backend = get_backend("asuran/chunk_read_write", &key);
        let ret_chunk = backend.read_chunk(desc).expect("Unable to read chunk");
        assert!(chunk == ret_chunk);
    }
    // Connecting without a password or valid ssh-agent credentials should fail
    #[test]
    fn connection_fails() {
        let settings = SFTPSettings {
            password: None,
            ..get_settings("OhNo!".to_string())
        };
        let connection: SFTPConnection = settings.into();
        let result = connection.with_connection();
        assert!(matches!(result, Err(BackendError::ConnectionError(_))));
    }
    // A not connected connection should return none, and a connected one should return Some
    #[test]
    fn get_session() {
        let connection: SFTPConnection = get_settings("yes".to_string()).into();
        assert!(connection.session().is_none());
        let connection = connection.with_connection().unwrap();
        assert!(connection.session().is_some());
    }
}
| 33.525424 | 112 | 0.564143 |
use std::collections::HashMap;
use std::iter::FromIterator;
use std::fs::File;
use std::io::{BufReader, BufRead};
/// Advent of Code 2020 day 15 ("Rambunctious Recitation"): reads the
/// comma-separated starting numbers from `day15_input.txt` and prints
/// the 2020th number spoken.
pub fn day15() {
    let file = File::open("day15_input.txt").expect("file not found!");
    let mut line = String::new();
    BufReader::new(file).read_line(&mut line).unwrap();
    // `read_line` keeps the trailing newline (if any), so trim before
    // parsing -- otherwise the final token fails to parse.
    let start_sequence: Vec<usize> = line
        .trim()
        .split(',')
        .map(|e| e.parse().expect("input must be comma-separated integers"))
        .collect();
    println!("start_sequence {:?}", start_sequence);
    println!("{}", nth_spoken(&start_sequence, 2020));
}

/// Returns the `n`-th number spoken (1-indexed) in the memory game.
///
/// After the starting numbers, each turn's number is 0 if the previous
/// number was new, otherwise the gap (in turns) since that number was
/// previously spoken. Only the last occurrence of each number needs to
/// be remembered, so the full sequence is never materialized.
fn nth_spoken(start: &[usize], n: usize) -> usize {
    assert!(!start.is_empty() && n > 0);
    if n <= start.len() {
        return start[n - 1];
    }
    // Map each number to the 0-based turn it was last spoken on,
    // excluding the most recent turn (which is tracked in `last`).
    let mut last_seen: HashMap<usize, usize> = start[..start.len() - 1]
        .iter()
        .enumerate()
        .map(|(turn, &num)| (num, turn))
        .collect();
    let mut last = *start.last().unwrap();
    for turn in start.len()..n {
        let next = match last_seen.insert(last, turn - 1) {
            Some(prev) => turn - 1 - prev,
            None => 0,
        };
        last = next;
    }
    last
}
| 32.611111 | 104 | 0.568995 |
//! Contains the `TransitCurrency` enum and its associated traits. It is used to
//! specify a currency. Included for use with the transit fares returned by
//! Google Maps Directions API.
use crate::directions::error::Error;
use serde::{Deserialize, Serialize};
/// A comprehensive list of currencies. At the moment this is used only for
/// Google Maps Transit Directions. The intent behind having _Serde_ convert
/// the currency code `String` to an `enum` is for efficient currency
/// conversions, information lookups, and manipulation in the future.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)]
pub enum TransitCurrency {
#[serde(alias = "AED")]
UnitedArabEmiratesDirham,
#[serde(alias = "AFN")]
AfghanAfghani,
#[serde(alias = "ALL")]
AlbanianLek,
#[serde(alias = "AMD")]
ArmenianDram,
#[serde(alias = "ANG")]
NetherlandsAntilleanGuilder,
#[serde(alias = "AOA")]
AngolanKwanza,
#[serde(alias = "ARS")]
ArgentinePeso,
#[serde(alias = "AUD")]
AustralianDollar,
#[serde(alias = "AWG")]
ArubanFlorin,
#[serde(alias = "AZN")]
AzerbaijaniManat,
#[serde(alias = "BAM")]
BosniaAndHerzegovinaConvertibleMark,
#[serde(alias = "BBD")]
BarbadosDollar,
#[serde(alias = "BDT")]
BangladeshiTaka,
#[serde(alias = "BGN")]
BulgarianLev,
#[serde(alias = "BHD")]
BahrainiDinar,
#[serde(alias = "BIF")]
BurundianFranc,
#[serde(alias = "BMD")]
BermudianDollar,
#[serde(alias = "BND")]
BruneiDollar,
#[serde(alias = "BOB")]
Boliviano,
#[serde(alias = "BOV")]
BolivianMvdol,
#[serde(alias = "BRL")]
BrazilianReal,
#[serde(alias = "BSD")]
BahamianDollar,
#[serde(alias = "BTN")]
BhutaneseNgultrum,
#[serde(alias = "BWP")]
BotswanaPula,
#[serde(alias = "BYN")]
BelarusianRuble,
#[serde(alias = "BZD")]
BelizeDollar,
#[serde(alias = "CAD")]
CanadianDollar,
#[serde(alias = "CDF")]
CongoleseFranc,
#[serde(alias = "CHE")]
WirEuro,
#[serde(alias = "CHF")]
SwissFranc,
#[serde(alias = "CHW")]
WirFranc,
#[serde(alias = "CLF")]
UnidadDeFomento,
#[serde(alias = "CLP")]
ChileanPeso,
#[serde(alias = "CNY")]
RenminbiYuan,
#[serde(alias = "COP")]
ColombianPeso,
#[serde(alias = "COU")]
UnidadDeValorReal,
#[serde(alias = "CRC")]
CostaRicanColon,
#[serde(alias = "CUC")]
CubanConvertiblePeso,
#[serde(alias = "CUP")]
CubanPeso,
#[serde(alias = "CVE")]
CapeVerdeanEscudo,
#[serde(alias = "CZK")]
CzechKoruna,
#[serde(alias = "DJF")]
DjiboutianFranc,
#[serde(alias = "DKK")]
DanishKrone,
#[serde(alias = "DOP")]
DominicanPeso,
#[serde(alias = "DZD")]
AlgerianDinar,
#[serde(alias = "EGP")]
EgyptianPound,
#[serde(alias = "ERN")]
EritreanNakfa,
#[serde(alias = "ETB")]
EthiopianBirr,
#[serde(alias = "EUR")]
Euro,
#[serde(alias = "FJD")]
FijiDollar,
#[serde(alias = "FKP")]
FalklandIslandsPound,
#[serde(alias = "GBP")]
PoundSterling,
#[serde(alias = "GEL")]
GeorgianLari,
#[serde(alias = "GHS")]
GhanaianCedi,
#[serde(alias = "GIP")]
GibraltarPound,
#[serde(alias = "GMD")]
GambianDalasi,
#[serde(alias = "GNF")]
GuineanFranc,
#[serde(alias = "GTQ")]
GuatemalanQuetzal,
#[serde(alias = "GYD")]
GuyaneseDollar,
#[serde(alias = "HKD")]
HongKongDollar,
#[serde(alias = "HNL")]
HonduranLempira,
#[serde(alias = "HRK")]
CroatianKuna,
#[serde(alias = "HTG")]
HaitianGourde,
#[serde(alias = "HUF")]
HungarianForint,
#[serde(alias = "IDR")]
IndonesianRupiah,
#[serde(alias = "ILS")]
IsraeliNewShekel,
#[serde(alias = "INR")]
IndianRupee,
#[serde(alias = "IQD")]
IraqiDinar,
#[serde(alias = "IRR")]
IranianRial,
#[serde(alias = "ISK")]
IcelandicKrona,
#[serde(alias = "JMD")]
JamaicanDollar,
#[serde(alias = "JOD")]
JordanianDinar,
#[serde(alias = "JPY")]
JapaneseYen,
#[serde(alias = "KES")]
KenyanShilling,
#[serde(alias = "KGS")]
KyrgyzstaniSom,
#[serde(alias = "KHR")]
CambodianRiel,
#[serde(alias = "KMF")]
ComoroFranc,
#[serde(alias = "KPW")]
NorthKoreanWon,
#[serde(alias = "KRW")]
SouthKoreanWon,
#[serde(alias = "KWD")]
KuwaitiDinar,
#[serde(alias = "KYD")]
CaymanIslandsDollar,
#[serde(alias = "KZT")]
KazakhstaniTenge,
#[serde(alias = "LAK")]
LaoKip,
#[serde(alias = "LBP")]
LebanesePound,
#[serde(alias = "LKR")]
SriLankanRupee,
#[serde(alias = "LRD")]
LiberianDollar,
#[serde(alias = "LSL")]
LesothoLoti,
#[serde(alias = "LYD")]
LibyanDinar,
#[serde(alias = "MAD")]
MoroccanDirham,
#[serde(alias = "MDL")]
MoldovanLeu,
#[serde(alias = "MGA")]
MalagasyAriary,
#[serde(alias = "MKD")]
MacedonianDenar,
#[serde(alias = "MMK")]
MyanmarKyat,
#[serde(alias = "MNT")]
MongolianTogrog,
#[serde(alias = "MOP")]
MacanesePataca,
#[serde(alias = "MRU")]
MauritanianOuguiya,
#[serde(alias = "MUR")]
MauritianRupee,
#[serde(alias = "MVR")]
MaldivianRufiyaa,
#[serde(alias = "MWK")]
MalawianKwacha,
#[serde(alias = "MXN")]
MexicanPeso,
#[serde(alias = "MXV")]
MexicanUnidadDeInversion,
#[serde(alias = "MYR")]
MalaysianRinggit,
#[serde(alias = "MZN")]
MozambicanMetical,
#[serde(alias = "NAD")]
NamibianDollar,
#[serde(alias = "NGN")]
NigerianNaira,
#[serde(alias = "NIO")]
NicaraguanCordoba,
#[serde(alias = "NOK")]
NorwegianKrone,
#[serde(alias = "NPR")]
NepaleseRupee,
#[serde(alias = "NZD")]
NewZealandDollar,
#[serde(alias = "OMR")]
OmaniRial,
#[serde(alias = "PAB")]
PanamanianBalboa,
#[serde(alias = "PEN")]
PeruvianSol,
#[serde(alias = "PGK")]
PapuaNewGuineanKina,
#[serde(alias = "PHP")]
PhilippinePeso,
#[serde(alias = "PKR")]
PakistaniRupee,
#[serde(alias = "PLN")]
PolishZloty,
#[serde(alias = "PYG")]
ParaguayanGuarani,
#[serde(alias = "QAR")]
QatariRiyal,
#[serde(alias = "RON")]
RomanianLeu,
#[serde(alias = "RSD")]
SerbianDinar,
#[serde(alias = "RUB")]
RussianRuble,
#[serde(alias = "RWF")]
RwandanFranc,
#[serde(alias = "SAR")]
SaudiRiyal,
#[serde(alias = "SBD")]
SolomonIslandsDollar,
#[serde(alias = "SCR")]
SeychellesRupee,
#[serde(alias = "SDG")]
SudanesePound,
#[serde(alias = "SHP")]
SwedishKrona,
#[serde(alias = "SLL")]
SingaporeDollar,
#[serde(alias = "SHP")]
SaintHelenaPound,
#[serde(alias = "SLL")]
SierraLeoneanLeone,
#[serde(alias = "SOS")]
SomaliShilling,
#[serde(alias = "SRD")]
SurinameseDollar,
#[serde(alias = "SSP")]
SouthSudanesePound,
#[serde(alias = "STN")]
SaoTomeAndPrincipeDobra,
#[serde(alias = "SVC")]
SalvadoranColon,
#[serde(alias = "SYP")]
SyrianPound,
#[serde(alias = "SZL")]
SwaziLilangeni,
#[serde(alias = "THB")]
ThaiBaht,
#[serde(alias = "TJS")]
TajikistaniSomoni,
#[serde(alias = "TMT")]
TurkmenistanManat,
#[serde(alias = "TND")]
TunisianDinar,
#[serde(alias = "TOP")]
TonganPaanga,
#[serde(alias = "TRY")]
TurkishLira,
#[serde(alias = "TTD")]
TrinidadAndTobagoDollar,
#[serde(alias = "TWD")]
NewTaiwanDollar,
#[serde(alias = "TZS")]
TanzanianShilling,
#[serde(alias = "UAH")]
UkrainianHryvnia,
#[serde(alias = "UGX")]
UgandanShilling,
#[serde(alias = "USD")]
UnitedStatesDollar,
#[serde(alias = "USN")]
UnitedStatesDollarNextDay,
#[serde(alias = "UYI")]
UruguayPesoEnUnidadesIndexadas,
#[serde(alias = "UYU")]
UruguayanPeso,
#[serde(alias = "UYW")]
UnidadPrevisional,
#[serde(alias = "UZS")]
UzbekistanSom,
#[serde(alias = "VES")]
VenezuelanBolivarSoberano,
#[serde(alias = "VND")]
VietnameseDong,
#[serde(alias = "VUV")]
VanuatuVatu,
#[serde(alias = "WST")]
SamoanTala,
#[serde(alias = "XAF")]
CfaFrancBeac,
#[serde(alias = "XAG")]
Silver,
#[serde(alias = "XAU")]
Gold,
#[serde(alias = "XBA")]
EuropeanCompositeUnit,
#[serde(alias = "XBB")]
EuropeanMonetaryUnit,
#[serde(alias = "XBC")]
EuropeanUnitOfAccount9,
#[serde(alias = "XBD")]
EuropeanUnitOfAccount17,
#[serde(alias = "XCD")]
EastCaribbeanDollar,
#[serde(alias = "XDR")]
SpecialDrawingRights,
#[serde(alias = "XOF")]
CfaFrancBceao,
#[serde(alias = "XPD")]
Palladium,
#[serde(alias = "XPF")]
CfpFranc,
#[serde(alias = "XPT")]
Platinum,
#[serde(alias = "XSU")]
Sucre,
#[serde(alias = "XTS")]
CodeReservedForTesting,
#[serde(alias = "XUA")]
AdbUnitOfAccount,
#[serde(alias = "XXX")]
NoCurrency,
#[serde(alias = "YER")]
YemeniRial,
#[serde(alias = "ZAR")]
SouthAfricanRand,
#[serde(alias = "ZMW")]
ZambianKwacha,
#[serde(alias = "ZWL")]
ZimbabweanDollar,
} // enum
impl std::convert::From<&TransitCurrency> for String {
    /// Converts a `TransitCurrency` enum to a `String` that contains an [ISO
    /// 4217 currency code](https://en.wikipedia.org/wiki/ISO_4217).
    ///
    /// Every variant maps to its three-letter alphabetic code; this is the
    /// inverse of the `TryFrom<&str>` implementation below.
    fn from(currency: &TransitCurrency) -> String {
        match currency {
            TransitCurrency::UnitedArabEmiratesDirham => String::from("AED"),
            TransitCurrency::AfghanAfghani => String::from("AFN"),
            TransitCurrency::AlbanianLek => String::from("ALL"),
            TransitCurrency::ArmenianDram => String::from("AMD"),
            TransitCurrency::NetherlandsAntilleanGuilder => String::from("ANG"),
            TransitCurrency::AngolanKwanza => String::from("AOA"),
            TransitCurrency::ArgentinePeso => String::from("ARS"),
            TransitCurrency::AustralianDollar => String::from("AUD"),
            TransitCurrency::ArubanFlorin => String::from("AWG"),
            TransitCurrency::AzerbaijaniManat => String::from("AZN"),
            TransitCurrency::BosniaAndHerzegovinaConvertibleMark => String::from("BAM"),
            TransitCurrency::BarbadosDollar => String::from("BBD"),
            TransitCurrency::BangladeshiTaka => String::from("BDT"),
            TransitCurrency::BulgarianLev => String::from("BGN"),
            TransitCurrency::BahrainiDinar => String::from("BHD"),
            TransitCurrency::BurundianFranc => String::from("BIF"),
            TransitCurrency::BermudianDollar => String::from("BMD"),
            TransitCurrency::BruneiDollar => String::from("BND"),
            TransitCurrency::Boliviano => String::from("BOB"),
            TransitCurrency::BolivianMvdol => String::from("BOV"),
            TransitCurrency::BrazilianReal => String::from("BRL"),
            TransitCurrency::BahamianDollar => String::from("BSD"),
            TransitCurrency::BhutaneseNgultrum => String::from("BTN"),
            TransitCurrency::BotswanaPula => String::from("BWP"),
            TransitCurrency::BelarusianRuble => String::from("BYN"),
            TransitCurrency::BelizeDollar => String::from("BZD"),
            TransitCurrency::CanadianDollar => String::from("CAD"),
            TransitCurrency::CongoleseFranc => String::from("CDF"),
            TransitCurrency::WirEuro => String::from("CHE"),
            TransitCurrency::SwissFranc => String::from("CHF"),
            TransitCurrency::WirFranc => String::from("CHW"),
            TransitCurrency::UnidadDeFomento => String::from("CLF"),
            TransitCurrency::ChileanPeso => String::from("CLP"),
            TransitCurrency::RenminbiYuan => String::from("CNY"),
            TransitCurrency::ColombianPeso => String::from("COP"),
            TransitCurrency::UnidadDeValorReal => String::from("COU"),
            TransitCurrency::CostaRicanColon => String::from("CRC"),
            TransitCurrency::CubanConvertiblePeso => String::from("CUC"),
            TransitCurrency::CubanPeso => String::from("CUP"),
            TransitCurrency::CapeVerdeanEscudo => String::from("CVE"),
            TransitCurrency::CzechKoruna => String::from("CZK"),
            TransitCurrency::DjiboutianFranc => String::from("DJF"),
            TransitCurrency::DanishKrone => String::from("DKK"),
            TransitCurrency::DominicanPeso => String::from("DOP"),
            TransitCurrency::AlgerianDinar => String::from("DZD"),
            TransitCurrency::EgyptianPound => String::from("EGP"),
            TransitCurrency::EritreanNakfa => String::from("ERN"),
            TransitCurrency::EthiopianBirr => String::from("ETB"),
            TransitCurrency::Euro => String::from("EUR"),
            TransitCurrency::FijiDollar => String::from("FJD"),
            TransitCurrency::FalklandIslandsPound => String::from("FKP"),
            TransitCurrency::PoundSterling => String::from("GBP"),
            TransitCurrency::GeorgianLari => String::from("GEL"),
            TransitCurrency::GhanaianCedi => String::from("GHS"),
            TransitCurrency::GibraltarPound => String::from("GIP"),
            TransitCurrency::GambianDalasi => String::from("GMD"),
            TransitCurrency::GuineanFranc => String::from("GNF"),
            TransitCurrency::GuatemalanQuetzal => String::from("GTQ"),
            TransitCurrency::GuyaneseDollar => String::from("GYD"),
            TransitCurrency::HongKongDollar => String::from("HKD"),
            TransitCurrency::HonduranLempira => String::from("HNL"),
            TransitCurrency::CroatianKuna => String::from("HRK"),
            TransitCurrency::HaitianGourde => String::from("HTG"),
            TransitCurrency::HungarianForint => String::from("HUF"),
            TransitCurrency::IndonesianRupiah => String::from("IDR"),
            TransitCurrency::IsraeliNewShekel => String::from("ILS"),
            TransitCurrency::IndianRupee => String::from("INR"),
            TransitCurrency::IraqiDinar => String::from("IQD"),
            TransitCurrency::IranianRial => String::from("IRR"),
            TransitCurrency::IcelandicKrona => String::from("ISK"),
            TransitCurrency::JamaicanDollar => String::from("JMD"),
            TransitCurrency::JordanianDinar => String::from("JOD"),
            TransitCurrency::JapaneseYen => String::from("JPY"),
            TransitCurrency::KenyanShilling => String::from("KES"),
            TransitCurrency::KyrgyzstaniSom => String::from("KGS"),
            TransitCurrency::CambodianRiel => String::from("KHR"),
            TransitCurrency::ComoroFranc => String::from("KMF"),
            TransitCurrency::NorthKoreanWon => String::from("KPW"),
            TransitCurrency::SouthKoreanWon => String::from("KRW"),
            TransitCurrency::KuwaitiDinar => String::from("KWD"),
            TransitCurrency::CaymanIslandsDollar => String::from("KYD"),
            TransitCurrency::KazakhstaniTenge => String::from("KZT"),
            TransitCurrency::LaoKip => String::from("LAK"),
            TransitCurrency::LebanesePound => String::from("LBP"),
            TransitCurrency::SriLankanRupee => String::from("LKR"),
            TransitCurrency::LiberianDollar => String::from("LRD"),
            TransitCurrency::LesothoLoti => String::from("LSL"),
            TransitCurrency::LibyanDinar => String::from("LYD"),
            TransitCurrency::MoroccanDirham => String::from("MAD"),
            TransitCurrency::MoldovanLeu => String::from("MDL"),
            TransitCurrency::MalagasyAriary => String::from("MGA"),
            TransitCurrency::MacedonianDenar => String::from("MKD"),
            TransitCurrency::MyanmarKyat => String::from("MMK"),
            TransitCurrency::MongolianTogrog => String::from("MNT"),
            TransitCurrency::MacanesePataca => String::from("MOP"),
            TransitCurrency::MauritanianOuguiya => String::from("MRU"),
            TransitCurrency::MauritianRupee => String::from("MUR"),
            TransitCurrency::MaldivianRufiyaa => String::from("MVR"),
            TransitCurrency::MalawianKwacha => String::from("MWK"),
            TransitCurrency::MexicanPeso => String::from("MXN"),
            TransitCurrency::MexicanUnidadDeInversion => String::from("MXV"),
            TransitCurrency::MalaysianRinggit => String::from("MYR"),
            TransitCurrency::MozambicanMetical => String::from("MZN"),
            TransitCurrency::NamibianDollar => String::from("NAD"),
            TransitCurrency::NigerianNaira => String::from("NGN"),
            TransitCurrency::NicaraguanCordoba => String::from("NIO"),
            TransitCurrency::NorwegianKrone => String::from("NOK"),
            TransitCurrency::NepaleseRupee => String::from("NPR"),
            TransitCurrency::NewZealandDollar => String::from("NZD"),
            TransitCurrency::OmaniRial => String::from("OMR"),
            TransitCurrency::PanamanianBalboa => String::from("PAB"),
            TransitCurrency::PeruvianSol => String::from("PEN"),
            TransitCurrency::PapuaNewGuineanKina => String::from("PGK"),
            TransitCurrency::PhilippinePeso => String::from("PHP"),
            TransitCurrency::PakistaniRupee => String::from("PKR"),
            TransitCurrency::PolishZloty => String::from("PLN"),
            TransitCurrency::ParaguayanGuarani => String::from("PYG"),
            TransitCurrency::QatariRiyal => String::from("QAR"),
            TransitCurrency::RomanianLeu => String::from("RON"),
            TransitCurrency::SerbianDinar => String::from("RSD"),
            TransitCurrency::RussianRuble => String::from("RUB"),
            TransitCurrency::RwandanFranc => String::from("RWF"),
            TransitCurrency::SaudiRiyal => String::from("SAR"),
            TransitCurrency::SolomonIslandsDollar => String::from("SBD"),
            TransitCurrency::SeychellesRupee => String::from("SCR"),
            TransitCurrency::SudanesePound => String::from("SDG"),
            TransitCurrency::SwedishKrona => String::from("SEK"),
            TransitCurrency::SingaporeDollar => String::from("SGD"),
            TransitCurrency::SaintHelenaPound => String::from("SHP"),
            TransitCurrency::SierraLeoneanLeone => String::from("SLL"),
            TransitCurrency::SomaliShilling => String::from("SOS"),
            TransitCurrency::SurinameseDollar => String::from("SRD"),
            TransitCurrency::SouthSudanesePound => String::from("SSP"),
            TransitCurrency::SaoTomeAndPrincipeDobra => String::from("STN"),
            TransitCurrency::SalvadoranColon => String::from("SVC"),
            TransitCurrency::SyrianPound => String::from("SYP"),
            TransitCurrency::SwaziLilangeni => String::from("SZL"),
            TransitCurrency::ThaiBaht => String::from("THB"),
            TransitCurrency::TajikistaniSomoni => String::from("TJS"),
            TransitCurrency::TurkmenistanManat => String::from("TMT"),
            TransitCurrency::TunisianDinar => String::from("TND"),
            TransitCurrency::TonganPaanga => String::from("TOP"),
            TransitCurrency::TurkishLira => String::from("TRY"),
            TransitCurrency::TrinidadAndTobagoDollar => String::from("TTD"),
            TransitCurrency::NewTaiwanDollar => String::from("TWD"),
            TransitCurrency::TanzanianShilling => String::from("TZS"),
            TransitCurrency::UkrainianHryvnia => String::from("UAH"),
            TransitCurrency::UgandanShilling => String::from("UGX"),
            TransitCurrency::UnitedStatesDollar => String::from("USD"),
            TransitCurrency::UnitedStatesDollarNextDay => String::from("USN"),
            TransitCurrency::UruguayPesoEnUnidadesIndexadas => String::from("UYI"),
            TransitCurrency::UruguayanPeso => String::from("UYU"),
            TransitCurrency::UnidadPrevisional => String::from("UYW"),
            TransitCurrency::UzbekistanSom => String::from("UZS"),
            TransitCurrency::VenezuelanBolivarSoberano => String::from("VES"),
            TransitCurrency::VietnameseDong => String::from("VND"),
            TransitCurrency::VanuatuVatu => String::from("VUV"),
            TransitCurrency::SamoanTala => String::from("WST"),
            TransitCurrency::CfaFrancBeac => String::from("XAF"),
            TransitCurrency::Silver => String::from("XAG"),
            TransitCurrency::Gold => String::from("XAU"),
            TransitCurrency::EuropeanCompositeUnit => String::from("XBA"),
            TransitCurrency::EuropeanMonetaryUnit => String::from("XBB"),
            TransitCurrency::EuropeanUnitOfAccount9 => String::from("XBC"),
            TransitCurrency::EuropeanUnitOfAccount17 => String::from("XBD"),
            TransitCurrency::EastCaribbeanDollar => String::from("XCD"),
            TransitCurrency::SpecialDrawingRights => String::from("XDR"),
            TransitCurrency::CfaFrancBceao => String::from("XOF"),
            TransitCurrency::Palladium => String::from("XPD"),
            // Bug fix: this previously returned the display name "CFP franc"
            // instead of the ISO 4217 code, breaking the round trip with
            // `TryFrom<&str>` (which maps "XPF" => CfpFranc).
            TransitCurrency::CfpFranc => String::from("XPF"),
            TransitCurrency::Platinum => String::from("XPT"),
            TransitCurrency::Sucre => String::from("XSU"),
            TransitCurrency::CodeReservedForTesting => String::from("XTS"),
            TransitCurrency::AdbUnitOfAccount => String::from("XUA"),
            TransitCurrency::NoCurrency => String::from("XXX"),
            TransitCurrency::YemeniRial => String::from("YER"),
            TransitCurrency::SouthAfricanRand => String::from("ZAR"),
            TransitCurrency::ZambianKwacha => String::from("ZMW"),
            TransitCurrency::ZimbabweanDollar => String::from("ZWL"),
        } // match
    } // fn
} // impl
impl std::convert::TryFrom<&str> for TransitCurrency {
    // Error definitions are contained in the
    // `google_maps\src\directions\error.rs` module.
    type Error = crate::directions::error::Error;
    /// Gets a `TransitCurrency` enum from a `String` that contains a supported
    /// [ISO 4217 currency code](https://en.wikipedia.org/wiki/ISO_4217).
    ///
    /// Matching is case-sensitive: codes must be uppercase (e.g. `"USD"`, not
    /// `"usd"`). Any unrecognized string yields `Error::InvalidCurrencyCode`.
    fn try_from(currency: &str) -> Result<TransitCurrency, Error> {
        match currency {
            "AED" => Ok(TransitCurrency::UnitedArabEmiratesDirham),
            "AFN" => Ok(TransitCurrency::AfghanAfghani),
            "ALL" => Ok(TransitCurrency::AlbanianLek),
            "AMD" => Ok(TransitCurrency::ArmenianDram),
            "ANG" => Ok(TransitCurrency::NetherlandsAntilleanGuilder),
            "AOA" => Ok(TransitCurrency::AngolanKwanza),
            "ARS" => Ok(TransitCurrency::ArgentinePeso),
            "AUD" => Ok(TransitCurrency::AustralianDollar),
            "AWG" => Ok(TransitCurrency::ArubanFlorin),
            "AZN" => Ok(TransitCurrency::AzerbaijaniManat),
            "BAM" => Ok(TransitCurrency::BosniaAndHerzegovinaConvertibleMark),
            "BBD" => Ok(TransitCurrency::BarbadosDollar),
            "BDT" => Ok(TransitCurrency::BangladeshiTaka),
            "BGN" => Ok(TransitCurrency::BulgarianLev),
            "BHD" => Ok(TransitCurrency::BahrainiDinar),
            "BIF" => Ok(TransitCurrency::BurundianFranc),
            "BMD" => Ok(TransitCurrency::BermudianDollar),
            "BND" => Ok(TransitCurrency::BruneiDollar),
            "BOB" => Ok(TransitCurrency::Boliviano),
            "BOV" => Ok(TransitCurrency::BolivianMvdol),
            "BRL" => Ok(TransitCurrency::BrazilianReal),
            "BSD" => Ok(TransitCurrency::BahamianDollar),
            "BTN" => Ok(TransitCurrency::BhutaneseNgultrum),
            "BWP" => Ok(TransitCurrency::BotswanaPula),
            "BYN" => Ok(TransitCurrency::BelarusianRuble),
            "BZD" => Ok(TransitCurrency::BelizeDollar),
            "CAD" => Ok(TransitCurrency::CanadianDollar),
            "CDF" => Ok(TransitCurrency::CongoleseFranc),
            "CHE" => Ok(TransitCurrency::WirEuro),
            "CHF" => Ok(TransitCurrency::SwissFranc),
            "CHW" => Ok(TransitCurrency::WirFranc),
            "CLF" => Ok(TransitCurrency::UnidadDeFomento),
            "CLP" => Ok(TransitCurrency::ChileanPeso),
            "CNY" => Ok(TransitCurrency::RenminbiYuan),
            "COP" => Ok(TransitCurrency::ColombianPeso),
            "COU" => Ok(TransitCurrency::UnidadDeValorReal),
            "CRC" => Ok(TransitCurrency::CostaRicanColon),
            "CUC" => Ok(TransitCurrency::CubanConvertiblePeso),
            "CUP" => Ok(TransitCurrency::CubanPeso),
            "CVE" => Ok(TransitCurrency::CapeVerdeanEscudo),
            "CZK" => Ok(TransitCurrency::CzechKoruna),
            "DJF" => Ok(TransitCurrency::DjiboutianFranc),
            "DKK" => Ok(TransitCurrency::DanishKrone),
            "DOP" => Ok(TransitCurrency::DominicanPeso),
            "DZD" => Ok(TransitCurrency::AlgerianDinar),
            "EGP" => Ok(TransitCurrency::EgyptianPound),
            "ERN" => Ok(TransitCurrency::EritreanNakfa),
            "ETB" => Ok(TransitCurrency::EthiopianBirr),
            "EUR" => Ok(TransitCurrency::Euro),
            "FJD" => Ok(TransitCurrency::FijiDollar),
            "FKP" => Ok(TransitCurrency::FalklandIslandsPound),
            "GBP" => Ok(TransitCurrency::PoundSterling),
            "GEL" => Ok(TransitCurrency::GeorgianLari),
            "GHS" => Ok(TransitCurrency::GhanaianCedi),
            "GIP" => Ok(TransitCurrency::GibraltarPound),
            "GMD" => Ok(TransitCurrency::GambianDalasi),
            "GNF" => Ok(TransitCurrency::GuineanFranc),
            "GTQ" => Ok(TransitCurrency::GuatemalanQuetzal),
            "GYD" => Ok(TransitCurrency::GuyaneseDollar),
            "HKD" => Ok(TransitCurrency::HongKongDollar),
            "HNL" => Ok(TransitCurrency::HonduranLempira),
            "HRK" => Ok(TransitCurrency::CroatianKuna),
            "HTG" => Ok(TransitCurrency::HaitianGourde),
            "HUF" => Ok(TransitCurrency::HungarianForint),
            "IDR" => Ok(TransitCurrency::IndonesianRupiah),
            "ILS" => Ok(TransitCurrency::IsraeliNewShekel),
            "INR" => Ok(TransitCurrency::IndianRupee),
            "IQD" => Ok(TransitCurrency::IraqiDinar),
            "IRR" => Ok(TransitCurrency::IranianRial),
            "ISK" => Ok(TransitCurrency::IcelandicKrona),
            "JMD" => Ok(TransitCurrency::JamaicanDollar),
            "JOD" => Ok(TransitCurrency::JordanianDinar),
            "JPY" => Ok(TransitCurrency::JapaneseYen),
            "KES" => Ok(TransitCurrency::KenyanShilling),
            "KGS" => Ok(TransitCurrency::KyrgyzstaniSom),
            "KHR" => Ok(TransitCurrency::CambodianRiel),
            "KMF" => Ok(TransitCurrency::ComoroFranc),
            "KPW" => Ok(TransitCurrency::NorthKoreanWon),
            "KRW" => Ok(TransitCurrency::SouthKoreanWon),
            "KWD" => Ok(TransitCurrency::KuwaitiDinar),
            "KYD" => Ok(TransitCurrency::CaymanIslandsDollar),
            "KZT" => Ok(TransitCurrency::KazakhstaniTenge),
            "LAK" => Ok(TransitCurrency::LaoKip),
            "LBP" => Ok(TransitCurrency::LebanesePound),
            "LKR" => Ok(TransitCurrency::SriLankanRupee),
            "LRD" => Ok(TransitCurrency::LiberianDollar),
            "LSL" => Ok(TransitCurrency::LesothoLoti),
            "LYD" => Ok(TransitCurrency::LibyanDinar),
            "MAD" => Ok(TransitCurrency::MoroccanDirham),
            "MDL" => Ok(TransitCurrency::MoldovanLeu),
            "MGA" => Ok(TransitCurrency::MalagasyAriary),
            "MKD" => Ok(TransitCurrency::MacedonianDenar),
            "MMK" => Ok(TransitCurrency::MyanmarKyat),
            "MNT" => Ok(TransitCurrency::MongolianTogrog),
            "MOP" => Ok(TransitCurrency::MacanesePataca),
            "MRU" => Ok(TransitCurrency::MauritanianOuguiya),
            "MUR" => Ok(TransitCurrency::MauritianRupee),
            "MVR" => Ok(TransitCurrency::MaldivianRufiyaa),
            "MWK" => Ok(TransitCurrency::MalawianKwacha),
            "MXN" => Ok(TransitCurrency::MexicanPeso),
            "MXV" => Ok(TransitCurrency::MexicanUnidadDeInversion),
            "MYR" => Ok(TransitCurrency::MalaysianRinggit),
            "MZN" => Ok(TransitCurrency::MozambicanMetical),
            "NAD" => Ok(TransitCurrency::NamibianDollar),
            "NGN" => Ok(TransitCurrency::NigerianNaira),
            "NIO" => Ok(TransitCurrency::NicaraguanCordoba),
            "NOK" => Ok(TransitCurrency::NorwegianKrone),
            "NPR" => Ok(TransitCurrency::NepaleseRupee),
            "NZD" => Ok(TransitCurrency::NewZealandDollar),
            "OMR" => Ok(TransitCurrency::OmaniRial),
            "PAB" => Ok(TransitCurrency::PanamanianBalboa),
            "PEN" => Ok(TransitCurrency::PeruvianSol),
            "PGK" => Ok(TransitCurrency::PapuaNewGuineanKina),
            "PHP" => Ok(TransitCurrency::PhilippinePeso),
            "PKR" => Ok(TransitCurrency::PakistaniRupee),
            "PLN" => Ok(TransitCurrency::PolishZloty),
            "PYG" => Ok(TransitCurrency::ParaguayanGuarani),
            "QAR" => Ok(TransitCurrency::QatariRiyal),
            "RON" => Ok(TransitCurrency::RomanianLeu),
            "RSD" => Ok(TransitCurrency::SerbianDinar),
            "RUB" => Ok(TransitCurrency::RussianRuble),
            "RWF" => Ok(TransitCurrency::RwandanFranc),
            "SAR" => Ok(TransitCurrency::SaudiRiyal),
            "SBD" => Ok(TransitCurrency::SolomonIslandsDollar),
            "SCR" => Ok(TransitCurrency::SeychellesRupee),
            "SDG" => Ok(TransitCurrency::SudanesePound),
            "SEK" => Ok(TransitCurrency::SwedishKrona),
            "SGD" => Ok(TransitCurrency::SingaporeDollar),
            "SHP" => Ok(TransitCurrency::SaintHelenaPound),
            "SLL" => Ok(TransitCurrency::SierraLeoneanLeone),
            "SOS" => Ok(TransitCurrency::SomaliShilling),
            "SRD" => Ok(TransitCurrency::SurinameseDollar),
            "SSP" => Ok(TransitCurrency::SouthSudanesePound),
            "STN" => Ok(TransitCurrency::SaoTomeAndPrincipeDobra),
            "SVC" => Ok(TransitCurrency::SalvadoranColon),
            "SYP" => Ok(TransitCurrency::SyrianPound),
            "SZL" => Ok(TransitCurrency::SwaziLilangeni),
            "THB" => Ok(TransitCurrency::ThaiBaht),
            "TJS" => Ok(TransitCurrency::TajikistaniSomoni),
            "TMT" => Ok(TransitCurrency::TurkmenistanManat),
            "TND" => Ok(TransitCurrency::TunisianDinar),
            "TOP" => Ok(TransitCurrency::TonganPaanga),
            "TRY" => Ok(TransitCurrency::TurkishLira),
            "TTD" => Ok(TransitCurrency::TrinidadAndTobagoDollar),
            "TWD" => Ok(TransitCurrency::NewTaiwanDollar),
            "TZS" => Ok(TransitCurrency::TanzanianShilling),
            "UAH" => Ok(TransitCurrency::UkrainianHryvnia),
            "UGX" => Ok(TransitCurrency::UgandanShilling),
            "USD" => Ok(TransitCurrency::UnitedStatesDollar),
            "USN" => Ok(TransitCurrency::UnitedStatesDollarNextDay),
            "UYI" => Ok(TransitCurrency::UruguayPesoEnUnidadesIndexadas),
            "UYU" => Ok(TransitCurrency::UruguayanPeso),
            "UYW" => Ok(TransitCurrency::UnidadPrevisional),
            "UZS" => Ok(TransitCurrency::UzbekistanSom),
            "VES" => Ok(TransitCurrency::VenezuelanBolivarSoberano),
            "VND" => Ok(TransitCurrency::VietnameseDong),
            "VUV" => Ok(TransitCurrency::VanuatuVatu),
            "WST" => Ok(TransitCurrency::SamoanTala),
            "XAF" => Ok(TransitCurrency::CfaFrancBeac),
            "XAG" => Ok(TransitCurrency::Silver),
            "XAU" => Ok(TransitCurrency::Gold),
            "XBA" => Ok(TransitCurrency::EuropeanCompositeUnit),
            "XBB" => Ok(TransitCurrency::EuropeanMonetaryUnit),
            "XBC" => Ok(TransitCurrency::EuropeanUnitOfAccount9),
            "XBD" => Ok(TransitCurrency::EuropeanUnitOfAccount17),
            "XCD" => Ok(TransitCurrency::EastCaribbeanDollar),
            "XDR" => Ok(TransitCurrency::SpecialDrawingRights),
            "XOF" => Ok(TransitCurrency::CfaFrancBceao),
            "XPD" => Ok(TransitCurrency::Palladium),
            "XPF" => Ok(TransitCurrency::CfpFranc),
            "XPT" => Ok(TransitCurrency::Platinum),
            "XSU" => Ok(TransitCurrency::Sucre),
            "XTS" => Ok(TransitCurrency::CodeReservedForTesting),
            "XUA" => Ok(TransitCurrency::AdbUnitOfAccount),
            "XXX" => Ok(TransitCurrency::NoCurrency),
            "YER" => Ok(TransitCurrency::YemeniRial),
            "ZAR" => Ok(TransitCurrency::SouthAfricanRand),
            "ZMW" => Ok(TransitCurrency::ZambianKwacha),
            "ZWL" => Ok(TransitCurrency::ZimbabweanDollar),
            // Anything else (unknown code, wrong case, empty string) is
            // reported back to the caller as an invalid-currency-code error.
            _ => Err(Error::InvalidCurrencyCode(currency.to_string())),
        } // match
    } // fn
} // impl
impl std::default::Default for TransitCurrency {
/// Returns a reasonable default variant for the `TransitCurrency` enum
/// type:
fn default() -> Self {
TransitCurrency::UnitedStatesDollar
} // fn
} // impl
impl std::fmt::Display for TransitCurrency {
    /// Formats a `TransitCurrency` enum into a string that is presentable to
    /// the end user.
    ///
    /// These are full English currency names (with native diacritics where
    /// customary), not ISO 4217 codes; use `String::from(&currency)` for the
    /// three-letter code instead.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            TransitCurrency::UnitedArabEmiratesDirham => write!(f, "United Arab Emirates dirham"),
            TransitCurrency::AfghanAfghani => write!(f, "Afghan afghani"),
            TransitCurrency::AlbanianLek => write!(f, "Albanian lek"),
            TransitCurrency::ArmenianDram => write!(f, "Armenian dram"),
            TransitCurrency::NetherlandsAntilleanGuilder => write!(f, "Netherlands Antillean guilder"),
            TransitCurrency::AngolanKwanza => write!(f, "Angolan kwanza"),
            TransitCurrency::ArgentinePeso => write!(f, "Argentine peso"),
            TransitCurrency::AustralianDollar => write!(f, "Australian dollar"),
            TransitCurrency::ArubanFlorin => write!(f, "Aruban florin"),
            TransitCurrency::AzerbaijaniManat => write!(f, "Azerbaijani manat"),
            TransitCurrency::BosniaAndHerzegovinaConvertibleMark => write!(f, "Bosnia and Herzegovina convertible mark"),
            TransitCurrency::BarbadosDollar => write!(f, "Barbados dollar"),
            TransitCurrency::BangladeshiTaka => write!(f, "Bangladeshi taka"),
            TransitCurrency::BulgarianLev => write!(f, "Bulgarian lev"),
            TransitCurrency::BahrainiDinar => write!(f, "Bahraini dinar"),
            TransitCurrency::BurundianFranc => write!(f, "Burundian franc"),
            TransitCurrency::BermudianDollar => write!(f, "Bermudian dollar"),
            TransitCurrency::BruneiDollar => write!(f, "Brunei dollar"),
            TransitCurrency::Boliviano => write!(f, "Boliviano"),
            TransitCurrency::BolivianMvdol => write!(f, "Bolivian Mvdol"),
            TransitCurrency::BrazilianReal => write!(f, "Brazilian real"),
            TransitCurrency::BahamianDollar => write!(f, "Bahamian dollar"),
            TransitCurrency::BhutaneseNgultrum => write!(f, "Bhutanese ngultrum"),
            TransitCurrency::BotswanaPula => write!(f, "Botswana pula"),
            TransitCurrency::BelarusianRuble => write!(f, "Belarusian ruble"),
            TransitCurrency::BelizeDollar => write!(f, "Belize dollar"),
            TransitCurrency::CanadianDollar => write!(f, "Canadian dollar"),
            TransitCurrency::CongoleseFranc => write!(f, "Congolese franc"),
            TransitCurrency::WirEuro => write!(f, "WIR Euro"),
            TransitCurrency::SwissFranc => write!(f, "Swiss franc"),
            TransitCurrency::WirFranc => write!(f, "WIR Franc"),
            TransitCurrency::UnidadDeFomento => write!(f, "Unidad de Fomento"),
            TransitCurrency::ChileanPeso => write!(f, "Chilean peso"),
            TransitCurrency::RenminbiYuan => write!(f, "Renminbi yuan"),
            TransitCurrency::ColombianPeso => write!(f, "Colombian peso"),
            TransitCurrency::UnidadDeValorReal => write!(f, "Unidad de Valor Real"),
            TransitCurrency::CostaRicanColon => write!(f, "Costa Rican colon"),
            TransitCurrency::CubanConvertiblePeso => write!(f, "Cuban convertible peso"),
            TransitCurrency::CubanPeso => write!(f, "Cuban peso"),
            TransitCurrency::CapeVerdeanEscudo => write!(f, "Cape Verdean escudo"),
            TransitCurrency::CzechKoruna => write!(f, "Czech koruna"),
            TransitCurrency::DjiboutianFranc => write!(f, "Djiboutian franc"),
            TransitCurrency::DanishKrone => write!(f, "Danish krone"),
            TransitCurrency::DominicanPeso => write!(f, "Dominican peso"),
            TransitCurrency::AlgerianDinar => write!(f, "Algerian dinar"),
            TransitCurrency::EgyptianPound => write!(f, "Egyptian pound"),
            TransitCurrency::EritreanNakfa => write!(f, "Eritrean nakfa"),
            TransitCurrency::EthiopianBirr => write!(f, "Ethiopian birr"),
            TransitCurrency::Euro => write!(f, "Euro"),
            TransitCurrency::FijiDollar => write!(f, "Fiji dollar"),
            TransitCurrency::FalklandIslandsPound => write!(f, "Falkland Islands pound"),
            TransitCurrency::PoundSterling => write!(f, "Pound sterling"),
            TransitCurrency::GeorgianLari => write!(f, "Georgian lari"),
            TransitCurrency::GhanaianCedi => write!(f, "Ghanaian cedi"),
            TransitCurrency::GibraltarPound => write!(f, "Gibraltar pound"),
            TransitCurrency::GambianDalasi => write!(f, "Gambian dalasi"),
            TransitCurrency::GuineanFranc => write!(f, "Guinean franc"),
            TransitCurrency::GuatemalanQuetzal => write!(f, "Guatemalan quetzal"),
            TransitCurrency::GuyaneseDollar => write!(f, "Guyanese dollar"),
            TransitCurrency::HongKongDollar => write!(f, "Hong Kong dollar"),
            TransitCurrency::HonduranLempira => write!(f, "Honduran lempira"),
            TransitCurrency::CroatianKuna => write!(f, "Croatian kuna"),
            TransitCurrency::HaitianGourde => write!(f, "Haitian gourde"),
            TransitCurrency::HungarianForint => write!(f, "Hungarian forint"),
            TransitCurrency::IndonesianRupiah => write!(f, "Indonesian rupiah"),
            TransitCurrency::IsraeliNewShekel => write!(f, "Israeli new shekel"),
            TransitCurrency::IndianRupee => write!(f, "Indian rupee"),
            TransitCurrency::IraqiDinar => write!(f, "Iraqi dinar"),
            TransitCurrency::IranianRial => write!(f, "Iranian rial"),
            TransitCurrency::IcelandicKrona => write!(f, "Icelandic krΓ³na"),
            TransitCurrency::JamaicanDollar => write!(f, "Jamaican dollar"),
            TransitCurrency::JordanianDinar => write!(f, "Jordanian dinar"),
            TransitCurrency::JapaneseYen => write!(f, "Japanese yen"),
            TransitCurrency::KenyanShilling => write!(f, "Kenyan shilling"),
            TransitCurrency::KyrgyzstaniSom => write!(f, "Kyrgyzstani som"),
            TransitCurrency::CambodianRiel => write!(f, "Cambodian riel"),
            TransitCurrency::ComoroFranc => write!(f, "Comoro franc"),
            TransitCurrency::NorthKoreanWon => write!(f, "North Korean won"),
            TransitCurrency::SouthKoreanWon => write!(f, "South Korean won"),
            TransitCurrency::KuwaitiDinar => write!(f, "Kuwaiti dinar"),
            TransitCurrency::CaymanIslandsDollar => write!(f, "Cayman Islands dollar"),
            TransitCurrency::KazakhstaniTenge => write!(f, "Kazakhstani tenge"),
            TransitCurrency::LaoKip => write!(f, "Lao kip"),
            TransitCurrency::LebanesePound => write!(f, "Lebanese pound"),
            TransitCurrency::SriLankanRupee => write!(f, "Sri Lankan rupee"),
            TransitCurrency::LiberianDollar => write!(f, "Liberian dollar"),
            TransitCurrency::LesothoLoti => write!(f, "Lesotho loti"),
            TransitCurrency::LibyanDinar => write!(f, "Libyan dinar"),
            TransitCurrency::MoroccanDirham => write!(f, "Moroccan dirham"),
            TransitCurrency::MoldovanLeu => write!(f, "Moldovan leu"),
            TransitCurrency::MalagasyAriary => write!(f, "Malagasy ariary"),
            TransitCurrency::MacedonianDenar => write!(f, "Macedonian denar"),
            TransitCurrency::MyanmarKyat => write!(f, "Myanmar kyat"),
            TransitCurrency::MongolianTogrog => write!(f, "Mongolian tΓΆgrΓΆg"),
            TransitCurrency::MacanesePataca => write!(f, "Macanese pataca"),
            TransitCurrency::MauritanianOuguiya => write!(f, "Mauritanian ouguiya"),
            TransitCurrency::MauritianRupee => write!(f, "Mauritian rupee"),
            TransitCurrency::MaldivianRufiyaa => write!(f, "Maldivian rufiyaa"),
            TransitCurrency::MalawianKwacha => write!(f, "Malawian kwacha"),
            TransitCurrency::MexicanPeso => write!(f, "Mexican peso"),
            TransitCurrency::MexicanUnidadDeInversion => write!(f, "Mexican Unidad de Inversion"),
            TransitCurrency::MalaysianRinggit => write!(f, "Malaysian ringgit"),
            TransitCurrency::MozambicanMetical => write!(f, "Mozambican metical"),
            TransitCurrency::NamibianDollar => write!(f, "Namibian dollar"),
            TransitCurrency::NigerianNaira => write!(f, "Nigerian naira"),
            TransitCurrency::NicaraguanCordoba => write!(f, "Nicaraguan cΓ³rdoba"),
            TransitCurrency::NorwegianKrone => write!(f, "Norwegian krone"),
            TransitCurrency::NepaleseRupee => write!(f, "Nepalese rupee"),
            TransitCurrency::NewZealandDollar => write!(f, "New Zealand dollar"),
            TransitCurrency::OmaniRial => write!(f, "Omani rial"),
            TransitCurrency::PanamanianBalboa => write!(f, "Panamanian balboa"),
            TransitCurrency::PeruvianSol => write!(f, "Peruvian sol"),
            TransitCurrency::PapuaNewGuineanKina => write!(f, "Papua New Guinean kina"),
            TransitCurrency::PhilippinePeso => write!(f, "Philippine peso"),
            TransitCurrency::PakistaniRupee => write!(f, "Pakistani rupee"),
            TransitCurrency::PolishZloty => write!(f, "Polish zΕoty"),
            TransitCurrency::ParaguayanGuarani => write!(f, "Paraguayan guaranΓ"),
            TransitCurrency::QatariRiyal => write!(f, "Qatari riyal"),
            TransitCurrency::RomanianLeu => write!(f, "Romanian leu"),
            TransitCurrency::SerbianDinar => write!(f, "Serbian dinar"),
            TransitCurrency::RussianRuble => write!(f, "Russian ruble"),
            TransitCurrency::RwandanFranc => write!(f, "Rwandan franc"),
            TransitCurrency::SaudiRiyal => write!(f, "Saudi riyal"),
            TransitCurrency::SolomonIslandsDollar => write!(f, "Solomon Islands dollar"),
            TransitCurrency::SeychellesRupee => write!(f, "Seychelles rupee"),
            TransitCurrency::SudanesePound => write!(f, "Sudanese pound"),
            TransitCurrency::SwedishKrona => write!(f, "Swedish krona"),
            TransitCurrency::SingaporeDollar => write!(f, "Singapore dollar"),
            TransitCurrency::SaintHelenaPound => write!(f, "Saint Helena pound"),
            TransitCurrency::SierraLeoneanLeone => write!(f, "Sierra Leonean leone"),
            TransitCurrency::SomaliShilling => write!(f, "Somali shilling"),
            TransitCurrency::SurinameseDollar => write!(f, "Surinamese dollar"),
            TransitCurrency::SouthSudanesePound => write!(f, "South Sudanese pound"),
            TransitCurrency::SaoTomeAndPrincipeDobra => write!(f, "SΓ£o TomΓ© and PrΓncipe dobra"),
            TransitCurrency::SalvadoranColon => write!(f, "Salvadoran colΓ³n"),
            TransitCurrency::SyrianPound => write!(f, "Syrian pound"),
            TransitCurrency::SwaziLilangeni => write!(f, "Swazi lilangeni"),
            TransitCurrency::ThaiBaht => write!(f, "Thai baht"),
            TransitCurrency::TajikistaniSomoni => write!(f, "Tajikistani somoni"),
            TransitCurrency::TurkmenistanManat => write!(f, "Turkmenistan manat"),
            TransitCurrency::TunisianDinar => write!(f, "Tunisian dinar"),
            TransitCurrency::TonganPaanga => write!(f, "Tongan paΚ»anga"),
            TransitCurrency::TurkishLira => write!(f, "Turkish lira"),
            TransitCurrency::TrinidadAndTobagoDollar => write!(f, "Trinidad and Tobago dollar"),
            TransitCurrency::NewTaiwanDollar => write!(f, "New Taiwan dollar"),
            TransitCurrency::TanzanianShilling => write!(f, "Tanzanian shilling"),
            TransitCurrency::UkrainianHryvnia => write!(f, "Ukrainian hryvnia"),
            TransitCurrency::UgandanShilling => write!(f, "Ugandan shilling"),
            TransitCurrency::UnitedStatesDollar => write!(f, "United States dollar"),
            TransitCurrency::UnitedStatesDollarNextDay => write!(f, "United States dollar next day"),
            TransitCurrency::UruguayPesoEnUnidadesIndexadas => write!(f, "Uruguay Peso en Unidades Indexadas"),
            TransitCurrency::UruguayanPeso => write!(f, "Uruguayan peso"),
            TransitCurrency::UnidadPrevisional => write!(f, "Unidad previsional"),
            TransitCurrency::UzbekistanSom => write!(f, "Uzbekistan som"),
            TransitCurrency::VenezuelanBolivarSoberano => write!(f, "Venezuelan bolΓvar soberano"),
            TransitCurrency::VietnameseDong => write!(f, "Vietnamese Δα»ng"),
            TransitCurrency::VanuatuVatu => write!(f, "Vanuatu vatu"),
            TransitCurrency::SamoanTala => write!(f, "Samoan tala"),
            TransitCurrency::CfaFrancBeac => write!(f, "CFA franc BEAC"),
            TransitCurrency::Silver => write!(f, "Silver"),
            TransitCurrency::Gold => write!(f, "Gold"),
            TransitCurrency::EuropeanCompositeUnit => write!(f, "European Composite Unit"),
            TransitCurrency::EuropeanMonetaryUnit => write!(f, "European Monetary Unit"),
            TransitCurrency::EuropeanUnitOfAccount9 => write!(f, "European Unit of Account 9"),
            TransitCurrency::EuropeanUnitOfAccount17 => write!(f, "European Unit of Account 17"),
            TransitCurrency::EastCaribbeanDollar => write!(f, "East Caribbean dollar"),
            TransitCurrency::SpecialDrawingRights => write!(f, "Special drawing rights"),
            TransitCurrency::CfaFrancBceao => write!(f, "CFA franc BCEAO"),
            TransitCurrency::Palladium => write!(f, "Palladium"),
            TransitCurrency::CfpFranc => write!(f, "CFP franc"),
            TransitCurrency::Platinum => write!(f, "Platinum"),
            TransitCurrency::Sucre => write!(f, "SUCRE"),
            TransitCurrency::CodeReservedForTesting => write!(f, "Code reserved for testing"),
            TransitCurrency::AdbUnitOfAccount => write!(f, "ADB Unit of Account"),
            TransitCurrency::NoCurrency => write!(f, "No currency"),
            TransitCurrency::YemeniRial => write!(f, "Yemeni rial"),
            TransitCurrency::SouthAfricanRand => write!(f, "South African rand"),
            TransitCurrency::ZambianKwacha => write!(f, "Zambian kwacha"),
            TransitCurrency::ZimbabweanDollar => write!(f, "Zimbabwean dollar"),
        } // match
    } // fn
} // impl
56c4418bf5e55004a4bb26adab0dd244b9aedf53 | 11,926 | use cmp::{Cmp, MemtableKeyCmp};
use key_types::{build_memtable_key, parse_internal_key, parse_memtable_key, ValueType};
use key_types::{LookupKey, UserKey};
use skipmap::{SkipMap, SkipMapIter};
use types::{current_key_val, LdbIterator, SequenceNumber};
use std::rc::Rc;
use integer_encoding::FixedInt;
/// Provides Insert/Get/Iterate, based on the SkipMap implementation.
/// MemTable uses MemtableKeys internally, that is, it stores key and value in the [Skipmap] key.
pub struct MemTable {
    // Ordered map of encoded MemtableKeys; the skipmap's value slot is unused
    // (add() always inserts an empty Vec, since the value lives inside the key).
    map: SkipMap,
}
impl MemTable {
    /// Returns a new MemTable.
    /// This wraps opt.cmp inside a MemtableKey-specific comparator.
    pub fn new(cmp: Rc<Box<dyn Cmp>>) -> MemTable {
        MemTable::new_raw(Rc::new(Box::new(MemtableKeyCmp(cmp))))
    }
    /// Doesn't wrap the comparator in a MemtableKeyCmp.
    fn new_raw(cmp: Rc<Box<dyn Cmp>>) -> MemTable {
        MemTable {
            map: SkipMap::new(cmp),
        }
    }
    /// Returns the number of entries in the memtable (deletion markers count
    /// as entries, too).
    pub fn len(&self) -> usize {
        self.map.len()
    }
    /// Returns true if the memtable contains no entries.
    // Added to satisfy Clippy's `len_without_is_empty` and give callers a
    // clearer way to test for emptiness.
    pub fn is_empty(&self) -> bool {
        self.map.len() == 0
    }
    /// Returns the approximate memory used by the underlying skipmap.
    pub fn approx_mem_usage(&self) -> usize {
        self.map.approx_memory()
    }
    /// Adds an entry (or a deletion marker, depending on `t`) to the memtable.
    /// Key, value, tag and sequence number are all encoded into a single
    /// MemtableKey that is stored as the skipmap key; the skipmap value slot
    /// is left empty.
    pub fn add<'a>(&mut self, seq: SequenceNumber, t: ValueType, key: UserKey<'a>, value: &[u8]) {
        self.map
            .insert(build_memtable_key(key, value, t, seq), Vec::new())
    }
    /// get returns the value for the given entry and whether the entry is marked as deleted. This
    /// is to distinguish between not-found and found-deleted.
    // NOTE: the stale `#[allow(unused_variables)]` attribute was removed; all
    // five variables produced by parse_memtable_key are used below.
    pub fn get(&self, key: &LookupKey) -> (Option<Vec<u8>>, bool) {
        let mut iter = self.map.iter();
        iter.seek(key.memtable_key());
        if let Some((foundkey, _)) = current_key_val(&iter) {
            let (fkeylen, fkeyoff, tag, vallen, valoff) = parse_memtable_key(&foundkey);
            // Compare user key -- if equal, proceed
            // We only care about user key equality here
            if key.user_key() == &foundkey[fkeyoff..fkeyoff + fkeylen] {
                // The low byte of the tag holds the value type.
                if tag & 0xff == ValueType::TypeValue as u64 {
                    return (Some(foundkey[valoff..valoff + vallen].to_vec()), false);
                } else {
                    // Found, but marked as deleted.
                    return (None, true);
                }
            }
        }
        (None, false)
    }
    /// Returns an iterator over all entries in the memtable, including
    /// deletion markers.
    pub fn iter(&self) -> MemtableIterator {
        MemtableIterator {
            skipmapiter: self.map.iter(),
        }
    }
}
/// MemtableIterator is an iterator over a MemTable. It is mostly concerned with converting to and
/// from the MemtableKey format used in the inner map; all key-taking or -returning methods deal
/// with InternalKeys.
///
/// This iterator does not skip deleted entries.
pub struct MemtableIterator {
skipmapiter: SkipMapIter,
}
impl LdbIterator for MemtableIterator {
    /// Advances to the next entry; returns whether the iterator is still valid.
    fn advance(&mut self) -> bool {
        if !self.skipmapiter.advance() {
            return false;
        }
        self.skipmapiter.valid()
    }
    /// Resets the iterator to its initial (invalid, before-first) position.
    fn reset(&mut self) {
        self.skipmapiter.reset();
    }
    /// Steps back to the previous non-deleted entry; returns whether one was
    /// found. Note the asymmetry with `advance`: `prev` skips deletion
    /// tombstones, `advance` does not.
    fn prev(&mut self) -> bool {
        // Make sure this is actually needed (skipping deleted values?).
        let (mut key, mut val) = (vec![], vec![]);
        loop {
            if !self.skipmapiter.prev() {
                return false;
            }
            if self.skipmapiter.current(&mut key, &mut val) {
                let (_, _, tag, _, _) = parse_memtable_key(&key);
                // Low byte of the tag is the value type; skip tombstones and
                // keep stepping back.
                if tag & 0xff == ValueType::TypeValue as u64 {
                    return true;
                } else {
                    continue;
                }
            } else {
                return false;
            }
        }
    }
    fn valid(&self) -> bool {
        self.skipmapiter.valid()
    }
    /// current places the current key (in InternalKey format) and value into the supplied vectors.
    fn current(&self, key: &mut Vec<u8>, val: &mut Vec<u8>) -> bool {
        if !self.valid() {
            return false;
        }
        if self.skipmapiter.current(key, val) {
            // The skip map stores the value inside the MemtableKey, so `key`
            // currently holds keylen|userkey|tag|vallen|value. Extract the
            // value, then strip the length prefixes to leave an InternalKey.
            let (keylen, keyoff, _, vallen, valoff) = parse_memtable_key(&key);
            val.clear();
            val.extend_from_slice(&key[valoff..valoff + vallen]);
            // zero-allocation truncation.
            shift_left(key, keyoff);
            // Truncate key to key+tag.
            key.truncate(keylen + u64::required_space());
            true
        } else {
            // valid() returned true above, so the skip map must have a
            // current entry; reaching this branch indicates a broken invariant.
            panic!("should not happen");
        }
    }
    /// seek takes an InternalKey.
    fn seek(&mut self, to: &[u8]) {
        // Assemble the correct memtable key from the supplied InternalKey.
        let (_, seq, ukey) = parse_internal_key(to);
        self.skipmapiter
            .seek(LookupKey::new(ukey, seq).memtable_key());
    }
}
/// shift_left moves s[mid..] to s[0..s.len()-mid]. The new size is s.len()-mid.
///
/// Panics if `mid > s.len()` (same as the previous swap-loop implementation,
/// which underflowed when computing the new length).
fn shift_left(s: &mut Vec<u8>, mid: usize) {
    // `Vec::drain` removes the prefix and shifts the tail left in one pass,
    // replacing the manual swap loop + truncate with the stdlib idiom.
    s.drain(..mid);
}
#[cfg(test)]
#[allow(unused_variables)]
mod tests {
use super::*;
use key_types::*;
use options;
use test_util::{test_iterator_properties, LdbIteratorIter};
#[test]
fn test_shift_left() {
let mut v = vec![1, 2, 3, 4, 5];
shift_left(&mut v, 1);
assert_eq!(v, vec![2, 3, 4, 5]);
let mut v = vec![1, 2, 3, 4, 5];
shift_left(&mut v, 4);
assert_eq!(v, vec![5]);
}
fn get_memtable() -> MemTable {
let mut mt = MemTable::new(options::for_test().cmp);
let entries = vec![
(ValueType::TypeValue, 115, "abc", "122"),
(ValueType::TypeValue, 120, "abc", "123"),
(ValueType::TypeValue, 121, "abd", "124"),
(ValueType::TypeDeletion, 122, "abe", "125"),
(ValueType::TypeValue, 123, "abf", "126"),
];
for e in entries.iter() {
mt.add(e.1, e.0, e.2.as_bytes(), e.3.as_bytes());
}
mt
}
#[test]
fn test_memtable_parse_tag() {
let tag = (12345 << 8) | 1;
assert_eq!(parse_tag(tag), (ValueType::TypeValue, 12345));
}
#[test]
fn test_memtable_add() {
let mut mt = MemTable::new(options::for_test().cmp);
mt.add(
123,
ValueType::TypeValue,
"abc".as_bytes(),
"123".as_bytes(),
);
assert_eq!(
mt.map.iter().next().unwrap().0,
&[11, 97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0, 3, 49, 50, 51]
);
assert_eq!(
mt.iter().next().unwrap().0,
&[97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0]
);
}
#[test]
fn test_memtable_add_get() {
let mt = get_memtable();
// Smaller sequence number doesn't find entry
if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 110)).0 {
eprintln!("{:?}", v);
panic!("found");
}
if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 110)).0 {
eprintln!("{:?}", v);
panic!("found");
}
// Bigger sequence number falls back to next smaller
if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 116)).0 {
assert_eq!(v, "122".as_bytes());
} else {
panic!("not found");
}
// Exact match works
if let (Some(v), deleted) = mt.get(&LookupKey::new("abc".as_bytes(), 120)) {
assert_eq!(v, "123".as_bytes());
assert!(!deleted);
} else {
panic!("not found");
}
if let (None, deleted) = mt.get(&LookupKey::new("abe".as_bytes(), 122)) {
assert!(deleted);
} else {
panic!("found deleted");
}
if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 129)).0 {
assert_eq!(v, "126".as_bytes());
} else {
panic!("not found");
}
}
#[test]
fn test_memtable_iterator_init() {
let mt = get_memtable();
let mut iter = mt.iter();
assert!(!iter.valid());
iter.next();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.reset();
assert!(!iter.valid());
}
#[test]
fn test_memtable_iterator_seek() {
let mt = get_memtable();
let mut iter = mt.iter();
assert!(!iter.valid());
iter.seek(LookupKey::new("abc".as_bytes(), 400).internal_key());
let (mut gotkey, gotval) = current_key_val(&iter).unwrap();
truncate_to_userkey(&mut gotkey);
assert_eq!(
("abc".as_bytes(), "123".as_bytes()),
(gotkey.as_slice(), gotval.as_slice())
);
iter.seek(LookupKey::new("xxx".as_bytes(), 400).internal_key());
assert!(!iter.valid());
iter.seek(LookupKey::new("abd".as_bytes(), 400).internal_key());
let (mut gotkey, gotval) = current_key_val(&iter).unwrap();
truncate_to_userkey(&mut gotkey);
assert_eq!(
("abd".as_bytes(), "124".as_bytes()),
(gotkey.as_slice(), gotval.as_slice())
);
}
#[test]
fn test_memtable_iterator_fwd() {
let mt = get_memtable();
let mut iter = mt.iter();
let expected = vec![
"123".as_bytes(), /* i.e., the abc entry with
* higher sequence number comes first */
"122".as_bytes(),
"124".as_bytes(),
// deleted entry:
"125".as_bytes(),
"126".as_bytes(),
];
let mut i = 0;
for (k, v) in LdbIteratorIter::wrap(&mut iter) {
assert_eq!(v, expected[i]);
i += 1;
}
}
#[test]
fn test_memtable_iterator_reverse() {
let mt = get_memtable();
let mut iter = mt.iter();
// Bigger sequence number comes first
iter.next();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.next();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.next();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 100, 1, 121, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.prev();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.prev();
assert!(iter.valid());
assert_eq!(
current_key_val(&iter).unwrap().0,
vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice()
);
iter.prev();
assert!(!iter.valid());
}
#[test]
fn test_memtable_parse_key() {
let key = vec![11, 1, 2, 3, 1, 123, 0, 0, 0, 0, 0, 0, 3, 4, 5, 6];
let (keylen, keyoff, tag, vallen, valoff) = parse_memtable_key(&key);
assert_eq!(keylen, 3);
assert_eq!(&key[keyoff..keyoff + keylen], vec![1, 2, 3].as_slice());
assert_eq!(tag, 123 << 8 | 1);
assert_eq!(vallen, 3);
assert_eq!(&key[valoff..valoff + vallen], vec![4, 5, 6].as_slice());
}
#[test]
fn test_memtable_iterator_behavior() {
let mut mt = MemTable::new(options::for_test().cmp);
let entries = vec![
(115, "abc", "122"),
(120, "abd", "123"),
(121, "abe", "124"),
(123, "abf", "126"),
];
for e in entries.iter() {
mt.add(e.0, ValueType::TypeValue, e.1.as_bytes(), e.2.as_bytes());
}
test_iterator_properties(mt.iter());
}
}
| 30.192405 | 99 | 0.514422 |
5de4ca36e230d37383dbdbfcc986e12773e163f1 | 9,506 | //! Type-safe hunks
use core::{
fmt,
marker::PhantomData,
mem,
ops::Deref,
ptr::{slice_from_raw_parts, slice_from_raw_parts_mut},
};
use crate::{
kernel::{self, cfg::CfgBuilder, Kernel, Port, StartupHook},
utils::{Init, ZeroInit},
};
/// The priority of the [startup hooks] used to initialize [typed hunks]. It has
/// a negative value so that startup hooks with non-negative priorities (which
/// can be created without `unsafe` blocks) will never see an uninitialized
/// value in a typed hunk.
///
/// [startup hooks]: crate::kernel::StartupHook
/// [typed hunks]: Hunk
pub const INIT_HOOK_PRIORITY: i32 = -0x7000_0000;
/// Represents a single typed hunk in a system.
///
/// Hunks are nothing more than static variables defined in a kernel
/// configuration. They come in handy when you are designing a component that
/// can be instantiated by a kernel configuration and wanting each instance to
/// have its own separate state data.
///
/// This type is implemented on top of [`r3::kernel::Hunk`], the untyped
/// hunk type.
///
/// [`r3::kernel::Hunk`]: crate::kernel::Hunk
#[doc(include = "./common.md")]
pub struct Hunk<System, T: ?Sized> {
/// The offset of the hunk. `System::HUNK_ATTR.hunk_pool_ptr()` must be
/// added before dereferencing.
offset: *const T,
_phantom: PhantomData<System>,
}
unsafe impl<System, T: ?Sized + Send> Send for Hunk<System, T> {}
unsafe impl<System, T: ?Sized + Sync> Sync for Hunk<System, T> {}
impl<System: Kernel, T: ?Sized> Hunk<System, T> {
/// Construct a `CfgTaskBuilder` to define a hunk in [a configuration
/// function](crate#static-configuration).
pub const fn build() -> CfgHunkBuilder<System, T, DefaultInitTag> {
CfgHunkBuilder {
_phantom: PhantomData,
len: 1,
align: 1,
}
}
}
/// As a generic parameter of [`CfgHunkBuilder`], indicates that the [hunk]
/// should be initialized with [`Init`].
///
/// [`Init`]: crate::utils::Init
/// [hunk]: crate::kernel::Hunk
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct DefaultInitTag;
/// As a generic parameter of [`CfgHunkBuilder`], indicates that the [hunk]
/// should be zero-initialized.
///
/// [hunk]: crate::kernel::Hunk
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct ZeroInitTag;
/// Implemented on [`DefaultInitTag`] and [`ZeroInitTag`] when `T` can be
/// initialized in this way.
pub trait HunkIniter<T> {
/// A flag indicating whether [`Self::init`] should be called for
/// initialization.
const NEEDS_INIT: bool;
/// Initialize the specified memory region.
fn init(dest: &mut mem::MaybeUninit<T>);
}
impl<T: Init> HunkIniter<T> for DefaultInitTag {
const NEEDS_INIT: bool = true;
fn init(dest: &mut mem::MaybeUninit<T>) {
*dest = mem::MaybeUninit::new(T::INIT);
}
}
impl<T> HunkIniter<T> for ZeroInitTag {
const NEEDS_INIT: bool = false;
fn init(_: &mut mem::MaybeUninit<T>) {
// Do nothing - a hunk pool is zero-initialized by default
}
}
/// Configuration builder type for [`Hunk`].
///
/// `InitTag` is either [`DefaultInitTag`] or [`ZeroInitTag`].
///
/// [`Hunk`]: crate::kernel::Hunk
#[must_use = "must call `finish()` to complete registration"]
pub struct CfgHunkBuilder<System, T: ?Sized, InitTag> {
_phantom: PhantomData<(System, InitTag, T)>,
len: usize,
align: usize,
}
impl<System: Kernel, T: ?Sized, InitTag> CfgHunkBuilder<System, T, InitTag> {
/// Specify the element count. Defaults to `1`. Must be `1` for a non-array
/// hunk.
pub const fn len(self, len: usize) -> Self {
Self { len, ..self }
}
/// Specify the minimum alignment. Defaults to `1`.
pub const fn align(self, align: usize) -> Self {
Self { align, ..self }
}
/// Zero-initialize the hunk.
pub const fn zeroed(self) -> CfgHunkBuilder<System, T, ZeroInitTag>
where
T: ZeroInit,
{
// Safety: `T: ZeroInit`, so it's zero-initializable
unsafe { self.zeroed_unchecked() }
}
/// Zero-initialize the hunk even if it might be unsafe.
///
/// # Safety
///
/// If zero initialization is not a valid bit pattern for `T`, accessing the
/// hunk's contents may result in an undefined behavior.
pub const unsafe fn zeroed_unchecked(self) -> CfgHunkBuilder<System, T, ZeroInitTag> {
CfgHunkBuilder {
_phantom: PhantomData,
len: self.len,
align: self.align,
}
}
}
impl<System: Kernel, T, InitTag: HunkIniter<T>> CfgHunkBuilder<System, T, InitTag> {
/// Complete the definition of a hunk, returning a reference to the hunk.
pub const fn finish(self, cfg: &mut CfgBuilder<System>) -> Hunk<System, T> {
let untyped_hunk = kernel::Hunk::<System>::build()
.len(mem::size_of::<T>())
.align(max(mem::align_of::<T>(), self.align))
.finish(cfg);
assert!(self.len == 1, "Non-array hunk must have `len` of `1`");
let start = untyped_hunk.offset();
// Insert an initializer
if InitTag::NEEDS_INIT {
unsafe {
StartupHook::build()
.priority(INIT_HOOK_PRIORITY)
.start(|start| {
let untyped_hunk = kernel::Hunk::<System>::from_offset(start).as_ptr();
// Safety: The destination is large enough to contain `T`
InitTag::init(&mut *(untyped_hunk as *mut mem::MaybeUninit<T>));
})
.unchecked()
.param(start)
.finish(cfg);
}
}
Hunk {
offset: start as _,
_phantom: PhantomData,
}
}
}
impl<System: Port, T, InitTag: HunkIniter<T>> CfgHunkBuilder<System, [T], InitTag> {
/// Complete the definition of a hunk, returning a reference to the hunk.
pub const fn finish(self, cfg: &mut CfgBuilder<System>) -> Hunk<System, [T]> {
assert!(self.align.is_power_of_two(), "`align` is not power of two");
let untyped_hunk = kernel::Hunk::<System>::build()
.len(mem::size_of::<T>() * self.len)
.align(max(mem::align_of::<T>(), self.align))
.finish(cfg);
let start = untyped_hunk.offset();
// Insert an initializer
if InitTag::NEEDS_INIT {
// TODO: There is no way to pass a length into the initializer
todo!();
}
Hunk {
offset: slice_from_raw_parts_mut(start as _, self.len),
_phantom: PhantomData,
}
}
}
impl<System, T> Init for Hunk<System, [T]> {
// Safety: This is safe because it points to nothing
const INIT: Self = Self {
offset: slice_from_raw_parts_mut(core::ptr::null_mut(), 0),
_phantom: PhantomData,
};
}
impl<System: Kernel, T: fmt::Debug + ?Sized> fmt::Debug for Hunk<System, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Hunk")
.field(&Self::as_ptr(*self))
.field(&&**self)
.finish()
}
}
impl<System, T: ?Sized> Clone for Hunk<System, T> {
fn clone(&self) -> Self {
*self
}
}
impl<System, T: ?Sized> Copy for Hunk<System, T> {}
impl<System, T: ?Sized> Hunk<System, T> {
/// Reinterpret the hunk as another type.
///
/// # Safety
///
/// - Similarly to [`core::mem::transmute`], this is **incredibly** unsafe.
/// - The byte offset must be valid for the destination type.
///
pub const unsafe fn transmute<U>(self) -> Hunk<System, U> {
Hunk {
offset: self.offset.cast(),
_phantom: PhantomData,
}
}
}
impl<System: Kernel, T: ?Sized> Hunk<System, T> {
/// Get the untyped hunk.
#[inline]
pub fn untyped_hunk(this: Self) -> kernel::Hunk<System> {
kernel::Hunk::from_offset(this.offset as *const u8 as usize)
}
// FIXME: The following methods are not `const fn` on account of
// <https://github.com/rust-lang/const-eval/issues/11> being
// unresolved
/// Get a raw pointer to the hunk's contents.
#[inline]
pub fn as_ptr(this: Self) -> *const T {
this.offset.set_ptr_value(Self::untyped_hunk(this).as_ptr())
}
/// Get a raw pointer to the raw bytes of the hunk.
#[inline]
pub fn as_bytes_ptr(this: Self) -> *const [u8] {
slice_from_raw_parts(Self::untyped_hunk(this).as_ptr(), mem::size_of_val(&*this))
}
/// Get a reference to the raw bytes of the hunk.
///
/// # Safety
///
/// The result might include uninitialized bytes and/or interior mutability,
/// so it might be unsafe to access.
#[inline]
pub unsafe fn as_bytes(this: Self) -> &'static [u8] {
// Safety: The caller is responsible for making sure interpreting the
// contents as `[u8]` is safe
unsafe { &*Self::as_bytes_ptr(this) }
}
}
impl<System: Kernel, T: ?Sized> AsRef<T> for Hunk<System, T> {
fn as_ref(&self) -> &T {
unsafe { &*Self::as_ptr(*self) }
}
}
impl<System: Kernel, T: ?Sized> Deref for Hunk<System, T> {
type Target = T;
#[inline]
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
/// FIXME: `Ord::max` is not available in `const fn`
///
/// Returns the larger of the two operands (either one on a tie, where the
/// values are identical anyway).
const fn max(x: usize, y: usize) -> usize {
    if y > x {
        y
    } else {
        x
    }
}
| 30.964169 | 95 | 0.594677 |
28579fd4f3a89116720537687389116047213fd8 | 435 | pub use self::simple::Simple;
pub mod simple;
use Error;
use std::prelude::v1::*;
use std::io::prelude::*;
/// A packet transport: frames raw packets onto a byte stream and
/// reassembles them from incoming bytes.
pub trait Transport
{
    /// Consumes available bytes from `read`, buffering them until complete
    /// packets can be produced by `receive_raw_packet`.
    fn process_data(&mut self,
                    read: &mut Read) -> Result<(), Error>;
    /// Returns the next fully received packet, or `Ok(None)` if no complete
    /// packet has been assembled yet.
    fn receive_raw_packet(&mut self) -> Result<Option<Vec<u8>>, Error>;
    /// Frames `packet` and writes it to `write`.
    fn send_raw_packet(&mut self,
                       write: &mut Write,
                       packet: &[u8]) -> Result<(), Error>;
}
39340a6a4243eafa674f5a60064e81202cd0eda5 | 13,302 | #![allow(unused_imports, non_camel_case_types)]
use crate::models::r4::Annotation::Annotation;
use crate::models::r4::CodeableConcept::CodeableConcept;
use crate::models::r4::Element::Element;
use crate::models::r4::Extension::Extension;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// Risk of harmful or undesirable, physiological response which is unique to an
/// individual and associated with exposure to a substance.
#[derive(Debug)]
pub struct AllergyIntolerance_Reaction<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl AllergyIntolerance_Reaction<'_> {
pub fn new(value: &Value) -> AllergyIntolerance_Reaction {
AllergyIntolerance_Reaction {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// Extensions for description
pub fn _description(&self) -> Option<Element> {
if let Some(val) = self.value.get("_description") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for onset
pub fn _onset(&self) -> Option<Element> {
if let Some(val) = self.value.get("_onset") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for severity
pub fn _severity(&self) -> Option<Element> {
if let Some(val) = self.value.get("_severity") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Text description about the reaction as a whole, including details of the
/// manifestation if required.
pub fn description(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("description") {
return Some(string);
}
return None;
}
/// Identification of the route by which the subject was exposed to the substance.
pub fn exposure_route(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("exposureRoute") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Unique id for the element within a resource (for internal references). This may be
/// any string value that does not contain spaces.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// Clinical symptoms and/or signs that are observed or associated with the adverse
/// reaction event.
pub fn manifestation(&self) -> Vec<CodeableConcept> {
self.value
.get("manifestation")
.unwrap()
.as_array()
.unwrap()
.into_iter()
.map(|e| CodeableConcept {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>()
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element and that modifies the understanding of the element
/// in which it is contained and/or the understanding of the containing element's
/// descendants. Usually modifier elements provide negation or qualification. To make
/// the use of extensions safe and manageable, there is a strict set of governance
/// applied to the definition and use of extensions. Though any implementer can define
/// an extension, there is a set of requirements that SHALL be met as part of the
/// definition of the extension. Applications processing a resource are required to
/// check for modifier extensions. Modifier extensions SHALL NOT change the meaning
/// of any elements on Resource or DomainResource (including cannot change the meaning
/// of modifierExtension itself).
pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Additional text about the adverse reaction event not captured in other fields.
pub fn note(&self) -> Option<Vec<Annotation>> {
if let Some(Value::Array(val)) = self.value.get("note") {
return Some(
val.into_iter()
.map(|e| Annotation {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Record of the date and/or time of the onset of the Reaction.
pub fn onset(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("onset") {
return Some(string);
}
return None;
}
/// Clinical assessment of the severity of the reaction event as a whole, potentially
/// considering multiple different manifestations.
pub fn severity(&self) -> Option<AllergyIntolerance_ReactionSeverity> {
if let Some(Value::String(val)) = self.value.get("severity") {
return Some(AllergyIntolerance_ReactionSeverity::from_string(&val).unwrap());
}
return None;
}
/// Identification of the specific substance (or pharmaceutical product) considered
/// to be responsible for the Adverse Reaction event. Note: the substance for a
/// specific reaction may be different from the substance identified as the cause
/// of the risk, but it must be consistent with it. For instance, it may be a
/// more specific substance (e.g. a brand medication) or a composite product that
/// includes the identified substance. It must be clinically safe to only process
/// the 'code' and ignore the 'reaction.substance'. If a receiving system is unable
/// to confirm that AllergyIntolerance.reaction.substance falls within the semantic
/// scope of AllergyIntolerance.code, then the receiving system should ignore
/// AllergyIntolerance.reaction.substance.
pub fn substance(&self) -> Option<CodeableConcept> {
if let Some(val) = self.value.get("substance") {
return Some(CodeableConcept {
value: Cow::Borrowed(val),
});
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self._description() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._onset() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._severity() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.description() {}
if let Some(_val) = self.exposure_route() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.id() {}
if !self
.manifestation()
.into_iter()
.map(|e| e.validate())
.all(|x| x == true)
{
return false;
}
if let Some(_val) = self.modifier_extension() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.note() {
if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) {
return false;
}
}
if let Some(_val) = self.onset() {}
if let Some(_val) = self.severity() {}
if let Some(_val) = self.substance() {
if !_val.validate() {
return false;
}
}
return true;
}
}
#[derive(Debug)]
pub struct AllergyIntolerance_ReactionBuilder {
pub(crate) value: Value,
}
impl AllergyIntolerance_ReactionBuilder {
pub fn build(&self) -> AllergyIntolerance_Reaction {
AllergyIntolerance_Reaction {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(existing: AllergyIntolerance_Reaction) -> AllergyIntolerance_ReactionBuilder {
AllergyIntolerance_ReactionBuilder {
value: (*existing.value).clone(),
}
}
pub fn new(manifestation: Vec<CodeableConcept>) -> AllergyIntolerance_ReactionBuilder {
let mut __value: Value = json!({});
__value["manifestation"] = json!(manifestation
.into_iter()
.map(|e| e.value)
.collect::<Vec<_>>());
return AllergyIntolerance_ReactionBuilder { value: __value };
}
pub fn _description<'a>(
&'a mut self,
val: Element,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["_description"] = json!(val.value);
return self;
}
pub fn _onset<'a>(&'a mut self, val: Element) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["_onset"] = json!(val.value);
return self;
}
pub fn _severity<'a>(&'a mut self, val: Element) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["_severity"] = json!(val.value);
return self;
}
pub fn description<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["description"] = json!(val);
return self;
}
pub fn exposure_route<'a>(
&'a mut self,
val: CodeableConcept,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["exposureRoute"] = json!(val.value);
return self;
}
pub fn extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["id"] = json!(val);
return self;
}
pub fn modifier_extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["modifierExtension"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn note<'a>(
&'a mut self,
val: Vec<Annotation>,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["note"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn onset<'a>(&'a mut self, val: &str) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["onset"] = json!(val);
return self;
}
pub fn severity<'a>(
&'a mut self,
val: AllergyIntolerance_ReactionSeverity,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["severity"] = json!(val.to_string());
return self;
}
pub fn substance<'a>(
&'a mut self,
val: CodeableConcept,
) -> &'a mut AllergyIntolerance_ReactionBuilder {
self.value["substance"] = json!(val.value);
return self;
}
}
/// Clinical severity grades for a reaction event, corresponding to the
/// wire-format codes `mild` | `moderate` | `severe`.
#[derive(Debug)]
pub enum AllergyIntolerance_ReactionSeverity {
    Mild,
    Moderate,
    Severe,
}
impl AllergyIntolerance_ReactionSeverity {
    /// Parses a wire-format severity code; returns `None` for any
    /// unrecognized code.
    pub fn from_string(string: &str) -> Option<AllergyIntolerance_ReactionSeverity> {
        use AllergyIntolerance_ReactionSeverity::*;
        match string {
            "mild" => Some(Mild),
            "moderate" => Some(Moderate),
            "severe" => Some(Severe),
            _ => None,
        }
    }
    /// Renders the variant back to its wire-format code.
    pub fn to_string(&self) -> String {
        let code = match self {
            AllergyIntolerance_ReactionSeverity::Mild => "mild",
            AllergyIntolerance_ReactionSeverity::Moderate => "moderate",
            AllergyIntolerance_ReactionSeverity::Severe => "severe",
        };
        code.to_string()
    }
}
| 34.372093 | 100 | 0.576755 |
fe4e2758982ff81b2e56f16bba1114dffb129500 | 3,324 | use docchi_json5::jval::JVal;
use super::names::Names;
use super::json_name::{json_name, NameType, SystemNames};
use super::json_item_to_rust::json_item_to_rust;
use crate::error::CoreResult;
use crate::imp::json_to_rust::tmp::tmp_obj::TmpObj;
use crate::imp::json_to_rust::get_old::get_old;
use crate::imp::json_to_rust::get_id::get_id;
use crate::imp::json_to_rust::get_refs::get_ref;
use docchi_json5::jval::Span;
use crate::imp::json_to_rust::json_item_to_rust::json_item_to_rust_ref;
use linked_hash_map::LinkedHashMap;
//use linked_hash_map::LinkedHashMap;
/// Converts a parsed JSON5 object into an intermediate `TmpObj`.
///
/// Regular keys become members (preserving insertion order via a running
/// `counter` id); keys recognized as system names (`ID`, `Ref`, `Enum`,
/// `Old`) populate the corresponding `TmpObj` fields instead. Each system
/// name may appear at most once; violations and malformed values produce
/// errors annotated with the source line and the current name path.
pub(crate) fn json_obj_to_rust(v : &LinkedHashMap<String, JVal>, is_ref_obj : bool, span : &Span, names : &Names) -> CoreResult<TmpObj>{
    let mut r = TmpObj::new(v.len(),span.clone());
    // Running member id; only incremented for regular (non-system) names.
    let mut counter = 0;
    for (k,v) in v{
        let id = counter;
        let name = json_name(k).ok_or_else(|| format!("{} {} is not a valid name {}",v.line_str(), k, names))?;
        match name{
            NameType::Name(name, vt) =>{
                let v = if is_ref_obj {
                    // This is duplicated code, but it seemed the simplest and
                    // easiest-to-follow way to split the ref/non-ref paths...
                    json_item_to_rust_ref(&name, vt, v, names)?
                } else{
                    json_item_to_rust(&name, vt,v, names)?
                };
                r.insert_default(name.to_string(), id, v);
                counter += 1;
            },
            NameType::SystemName(sn) =>{
                match sn{
                    SystemNames::ID =>{
                        // ID may only be set once per object.
                        if r.id.is_none() {
                            if let Some(id) = get_id(v){
                                r.id = Some(id);
                            } else {
                                Err(format!("{} ID must be a string or a num : {} {}", v.line_str(), v.slice(), names))?
                            }
                        } else{
                            Err(format!("{} ID is defined multiple times {}", v.line_str(), names))?;
                        }
                    },
                    SystemNames::Ref | SystemNames::Enum =>{
                        // Ref/Enum share storage; an empty refs map means
                        // neither has been set yet.
                        if r.refs.map.len() == 0{
                            match &v {
                                JVal::Map(map, span) =>{
                                    let mut refs = get_ref(map, span,names)?;
                                    // Enum is a Ref flagged as is_enum.
                                    match sn{
                                        SystemNames::Enum =>{ refs.is_enum = true; }
                                        _=>{},
                                    }
                                    r.refs = refs;
                                },
                                _ =>{ Err(format!("{} Ref must be an object {}", v.line_str(), names))?;}
                            }
                        } else {
                            Err(format!("{} (Ref|Enum) is defined multiple times {}", v.line_str(), names))?;
                        }
                    },
                    SystemNames::Old => {
                        // Old must be an array of legacy names.
                        match &v {
                            JVal::Array(a, _span) => {
                                r.old = get_old(a, names)?;
                            },
                            _ => { Err(format!("{} {}", v.line_str(), names))?; }
                        }
                    }
                }
            }
        }
    }
    Ok(r)
}
3a7ede6f4a6dc63b3550d2b5504c06c6e5501055 | 3,614 | /*
Copyright 2016 Martin Buck
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall
be included all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! FilterCircle, a circle filter within 2D space
use crate::*;
//------------------------------------------------------------------------------
#[derive(Debug, PartialEq, PartialOrd, Default, Clone, Eq, Ord, Hash)]
/// FilterCircle, a circle filter within 2D space
pub struct FilterCircle {
    /// The circle (center + radius) that defines the accepted region.
    circle: Circle,
}
impl FilterCircle {
    /// Creates a new FilterCircle with the given parameters
    pub fn new(circle: Circle) -> Self {
        FilterCircle { circle }
    }
}
//------------------------------------------------------------------------------
impl IsND for FilterCircle {
fn n_dimensions() -> usize {
Circle::n_dimensions()
}
fn position_nd(&self, dimension: usize) -> Option<f64> {
self.circle.position_nd(dimension)
}
}
impl Is2D for FilterCircle {
#[inline(always)]
fn x(&self) -> f64 {
self.circle.x()
}
#[inline(always)]
fn y(&self) -> f64 {
self.circle.y()
}
}
impl IsBuildableND for FilterCircle {
#[inline(always)]
fn new_nd(coords: &[f64]) -> Result<Self> {
Ok(FilterCircle::new(Circle::new_nd(coords)?))
}
#[inline(always)]
fn from_nd<P>(&mut self, other: P) -> Result<()>
where
P: IsBuildableND,
{
self.circle.from_nd(other)
}
}
impl IsBuildable2D for FilterCircle {
#[inline(always)]
fn new(x: f64, y: f64) -> Self {
FilterCircle::new(Circle::new(x, y))
}
#[inline(always)]
fn from<P>(&mut self, other: &P)
where
P: Is2D,
{
self.circle.from(other)
}
}
impl IsEditableND for FilterCircle {
fn set_position(&mut self, dimension: usize, val: f64) -> Result<()> {
self.circle.set_position(dimension, val)
}
}
impl IsEditable2D for FilterCircle {
#[inline(always)]
fn set_x(&mut self, val: f64) {
self.circle.set_x(val);
}
#[inline(always)]
fn set_y(&mut self, val: f64) {
self.circle.set_y(val);
}
}
impl HasBoundingBox2D for FilterCircle {
fn bounding_box(&self) -> BoundingBox2D {
self.circle.bounding_box()
}
}
impl HasBoundingBox2DMaybe for FilterCircle {
fn bounding_box_maybe(&self) -> Option<BoundingBox2D> {
self.circle.bounding_box_maybe()
}
}
impl<T> IsFilter<T> for FilterCircle
where
T: Is2D,
{
fn is_allowed(&self, p: &T) -> bool {
dist_2d(p, &self.circle.center) <= *self.circle.radius
}
}
impl IsScalable for FilterCircle {
fn scale(&mut self, factor: Positive) {
self.circle.scale(factor);
}
}
| 26 | 80 | 0.634754 |
39df456732c325ab8d84729fd60d70270d92687f | 2,986 | //! # Service framework
//!
//! This module contains the building blocks for async services.
//!
//! It consists of the following modules:
//!
//! ## `initializer`
//!
//! This module contains the [ServiceInitializer] trait. Service modules should implement this trait and pass
//! that implementation to the [StackBuilder].
//!
//! ## `stack`
//!
//! Contains the [StackBuilder] that is responsible for collecting and 'executing' the implementations of
//! [ServiceInitializer].
//!
//! ## `handles`
//!
//! A set of utilities used to collect and share handles between services. The [StackBuilder] is responsible for
//! initializing a [ServiceHandlesFuture] and making it available to [ServiceInitializer] implementations.
//!
//! Handles are simply a way to communicate with their corresponding service. Typically, a [SenderService] would
//! be used for this purpose but a handle can be implemented in any way the implementor sees fit.
//!
//! ## `reply_channel`
//!
//! This provides for query messages to be sent to services along with a "reply channel" for the service to send back
//! results. The `reply_channel::unbounded` function is used to create a sender/receiver pair. The sender
//! implements `tower_service::Service` and can be used to make requests of a applicable type. The receiver
//! implements `futures::Stream` and will provide a `RequestContext` object that contains a `oneshot` reply channel
//! that the service can use to reply back to the caller.
//!
//! ## Examples
//!
//! ### `reply_channel`
//!
//! ```edition2018
//! # use futures::executor::block_on;
//! # use futures::StreamExt;
//! # use futures::join;
//! use tari_service_framework::{reply_channel, tower::ServiceExt};
//!
//! block_on(async {
//! let (mut sender, mut receiver) = reply_channel::unbounded();
//!
//! let (result, _) = futures::join!(
//! // Make the request and make progress on the resulting future
//! sender.call_ready("upper"),
//! // At the same time receive the request and reply
//! async move {
//! let req_context = receiver.next().await.unwrap();
//! let msg = req_context.request().unwrap().clone();
//! req_context.reply(msg.to_uppercase());
//! }
//! );
//!
//! assert_eq!(result.unwrap(), "UPPER");
//! });
//! ```
//!
//! [ServiceInitializer]: ./initializer/trait.ServiceInitializer.html
//! [StackBuilder]: ./stack/struct.StackBuilder.html
//! [ServiceHandlesFuture]: ./handles/future/struct.ServiceHandlesFuture.html
//! [SenderService]: ./reply_channel/struct.SenderService.html
// Used to eliminate the need for boxing futures in many cases.
// Tracking issue: https://github.com/rust-lang/rust/issues/63063
#![feature(type_alias_impl_trait)]
mod initializer;
mod stack;
pub mod handles;
pub mod reply_channel;
pub mod tower;
pub use self::{
initializer::{ServiceInitializationError, ServiceInitializer},
reply_channel::RequestContext,
stack::StackBuilder,
};
| 36.414634 | 117 | 0.6929 |
0a02b5b65beaa9b2e8ef89f0734ff00bf9addcf6 | 4,745 | use std::env;
use actix_web::HttpServer;
use main_error::MainError;
use meilisearch_http::{create_app, Data, Opt};
use structopt::StructOpt;
#[cfg(all(not(debug_assertions), feature = "analytics"))]
use meilisearch_http::analytics;
#[cfg(target_os = "linux")]
#[global_allocator]
static ALLOC: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
#[actix_web::main]
async fn main() -> Result<(), MainError> {
let opt = Opt::from_args();
let mut log_builder = env_logger::Builder::new();
log_builder.parse_filters(&opt.log_level);
if opt.log_level == "info" {
// if we are in info we only allow the warn log_level for milli
log_builder.filter_module("milli", log::LevelFilter::Warn);
}
log_builder.init();
match opt.env.as_ref() {
"production" => {
if opt.master_key.is_none() {
return Err(
"In production mode, the environment variable MEILI_MASTER_KEY is mandatory"
.into(),
);
}
}
"development" => (),
_ => unreachable!(),
}
let data = Data::new(opt.clone())?;
#[cfg(all(not(debug_assertions), feature = "analytics"))]
if !opt.no_analytics {
let analytics_data = data.clone();
let analytics_opt = opt.clone();
tokio::task::spawn(analytics::analytics_sender(analytics_data, analytics_opt));
}
print_launch_resume(&opt, &data);
run_http(data, opt).await?;
Ok(())
}
async fn run_http(data: Data, opt: Opt) -> Result<(), Box<dyn std::error::Error>> {
let _enable_dashboard = &opt.env == "development";
let http_server = HttpServer::new(move || create_app!(data, _enable_dashboard))
// Disable signals allows the server to terminate immediately when a user enter CTRL-C
.disable_signals();
if let Some(config) = opt.get_ssl_config()? {
http_server
.bind_rustls(opt.http_addr, config)?
.run()
.await?;
} else {
http_server.bind(opt.http_addr)?.run().await?;
}
Ok(())
}
pub fn print_launch_resume(opt: &Opt, data: &Data) {
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
let ascii_name = r#"
888b d888 d8b 888 d8b .d8888b. 888
8888b d8888 Y8P 888 Y8P d88P Y88b 888
88888b.d88888 888 Y88b. 888
888Y88888P888 .d88b. 888 888 888 "Y888b. .d88b. 8888b. 888d888 .d8888b 88888b.
888 Y888P 888 d8P Y8b 888 888 888 "Y88b. d8P Y8b "88b 888P" d88P" 888 "88b
888 Y8P 888 88888888 888 888 888 "888 88888888 .d888888 888 888 888 888
888 " 888 Y8b. 888 888 888 Y88b d88P Y8b. 888 888 888 Y88b. 888 888
888 888 "Y8888 888 888 888 "Y8888P" "Y8888 "Y888888 888 "Y8888P 888 888
"#;
eprintln!("{}", ascii_name);
eprintln!("Database path:\t\t{:?}", opt.db_path);
eprintln!("Server listening on:\t\"http://{}\"", opt.http_addr);
eprintln!("Environment:\t\t{:?}", opt.env);
eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
eprintln!(
"Package version:\t{:?}",
env!("CARGO_PKG_VERSION").to_string()
);
#[cfg(all(not(debug_assertions), feature = "analytics"))]
{
if opt.no_analytics {
eprintln!("Anonymous telemetry:\t\"Disabled\"");
} else {
eprintln!(
"
Thank you for using MeiliSearch!
We collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html
Anonymous telemetry: \"Enabled\""
);
}
}
eprintln!();
if data.api_keys().master.is_some() {
eprintln!("A Master Key has been set. Requests to MeiliSearch won't be authorized unless you provide an authentication key.");
} else {
eprintln!("No master key found; The server will accept unidentified requests. \
If you need some protection in development mode, please export a key: export MEILI_MASTER_KEY=xxx");
}
eprintln!();
eprintln!("Documentation:\t\thttps://docs.meilisearch.com");
eprintln!("Source code:\t\thttps://github.com/meilisearch/meilisearch");
eprintln!("Contact:\t\thttps://docs.meilisearch.com/resources/contact.html or bonjour@meilisearch.com");
eprintln!();
}
| 35.94697 | 237 | 0.609484 |
ac5fa66852d0f686ba477762dcf5f249610e1598 | 3,500 | use crate::convert::FruityTryFrom;
use crate::introspect::IntrospectError;
use crate::serialize::serialized::Serialized;
use std::any::Any;
use std::iter::Enumerate;
use std::vec::IntoIter as VecIntoIter;
/// Cast an any introspect object
///
/// # Arguments
/// * `any` - The introspect object as an any reference
///
pub fn cast_introspect_ref<T: Any>(any: &dyn Any) -> &T {
any.downcast_ref::<T>().unwrap()
}
/// Cast an any introspect object with mutability
///
/// # Arguments
/// * `any` - The introspect object as an any mutable reference
///
pub fn cast_introspect_mut<T: Any>(any: &mut dyn Any) -> &mut T {
any.downcast_mut::<T>().unwrap()
}
/// A tool that is used to cast serialized arguments, intended to be used into IntrospectMethod implementations
pub struct ArgumentCaster<'s> {
method: &'s str,
args_count: usize,
iter: Enumerate<VecIntoIter<Serialized>>,
last_index: usize,
}
impl<'s> ArgumentCaster<'s> {
/// Return an ArgumentCaster
pub fn new<'a>(method: &'a str, args: Vec<Serialized>) -> ArgumentCaster<'a> {
ArgumentCaster::<'a> {
method,
args_count: args.len(),
iter: args.into_iter().enumerate(),
last_index: 1,
}
}
/// Get a serialized argument from an argument list
pub fn next(&mut self) -> Result<Serialized, IntrospectError> {
match self.iter.next() {
Some((index, arg)) => {
self.last_index = index + 1;
Ok(arg)
}
None => Err(IntrospectError::WrongNumberArguments {
method: self.method.to_string(),
have: self.last_index,
expected: self.args_count,
}),
}
}
/// Get all the remaining serialized arguments from an argument list
pub fn rest(&mut self) -> Vec<Serialized> {
let mut result = Vec::new();
while let Some(elem) = self.iter.next() {
result.push(elem.1);
}
result
}
/// Cast a serialized argument from an argument list
///
/// # Generic Arguments
/// * `T` - The type to cast
///
pub fn cast_next<T: FruityTryFrom<Serialized> + ?Sized>(
&mut self,
) -> Result<T, IntrospectError> {
match self.iter.next() {
Some((index, arg)) => {
self.last_index = index + 1;
T::fruity_try_from(arg).map_err(|_| IntrospectError::IncorrectArgument {
method: self.method.to_string(),
arg_index: index,
})
}
None => Err(IntrospectError::WrongNumberArguments {
method: self.method.to_string(),
have: self.last_index,
expected: self.args_count,
}),
}
}
/// Cast a serialized optional argument from an argument list
///
/// # Generic Arguments
/// * `T` - The type to cast
///
pub fn cast_next_optional<T: FruityTryFrom<Serialized> + ?Sized>(&mut self) -> Option<T> {
match self.iter.next() {
Some((index, arg)) => {
self.last_index = index + 1;
T::fruity_try_from(arg)
.map_err(|_| IntrospectError::IncorrectArgument {
method: self.method.to_string(),
arg_index: index,
})
.ok()
}
None => None,
}
}
}
| 30.701754 | 111 | 0.544286 |
de859788cbbd80eefef7892a6c39cbde24d2ed40 | 1,408 | //! Utilities for story content.
use crate::story::types::LineBuffer;
/// Read all text from lines in a buffer into a single string and return it.
///
/// # Examples
/// ```
/// # use inkling::{copy_lines_into_string, read_story_from_string};
/// let content = "\
/// Gamle gode VΓ€inΓ€mΓΆinen
/// rustade sig nu att resa
/// bort till kyligare trakter
/// till de dunkla Nordanlanden.
/// ";
///
/// let mut story = read_story_from_string(content).unwrap();
/// let mut line_buffer = Vec::new();
///
/// story.resume(&mut line_buffer);
///
/// let text = copy_lines_into_string(&line_buffer);
/// assert_eq!(&text, content);
/// ```
pub fn copy_lines_into_string(line_buffer: &LineBuffer) -> String {
line_buffer
.iter()
.map(|line| line.text.clone())
.collect::<Vec<_>>()
.join("")
}
#[cfg(test)]
mod tests {
use super::*;
use crate::story::Line;
#[test]
fn string_from_line_buffer_joins_without_extra_newlines() {
let lines = vec![
Line {
text: "Start of line, ".to_string(),
tags: Vec::new(),
},
Line {
text: "end of line without new lines".to_string(),
tags: Vec::new(),
},
];
assert_eq!(
©_lines_into_string(&lines),
"Start of line, end of line without new lines"
);
}
}
| 24.275862 | 76 | 0.56392 |
38253cb7653e2a6a4f418573427dd485ffd7ae8a | 6,912 | // Copyright 2018-2019 Parity Technologies (UK) Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use core::marker::PhantomData;
use scale::{
Decode,
Encode,
};
use crate::{
env::{
srml::sys,
CallError,
CreateError,
Env,
EnvStorage,
EnvTypes,
},
storage::Key,
};
use ink_prelude::vec::Vec;
/// Load the contents of the scratch buffer
fn read_scratch_buffer() -> Vec<u8> {
let size = unsafe { sys::ext_scratch_size() };
let mut value = Vec::new();
if size > 0 {
value.resize(size as usize, 0);
unsafe {
sys::ext_scratch_read(value.as_mut_ptr() as u32, 0, size);
}
}
value
}
/// Writes the contents of `data` into the scratch buffer.
fn write_scratch_buffer(data: &[u8]) {
unsafe {
sys::ext_scratch_write(data.as_ptr() as u32, data.len() as u32);
}
}
/// The SRML contract environment storage
pub enum SrmlEnvStorage {}
impl EnvStorage for SrmlEnvStorage {
/// Stores the given bytes under the given key.
unsafe fn store(key: Key, value: &[u8]) {
sys::ext_set_storage(
key.as_bytes().as_ptr() as u32,
1,
value.as_ptr() as u32,
value.len() as u32,
);
}
/// Clears the value stored under the given key.
unsafe fn clear(key: Key) {
sys::ext_set_storage(key.as_bytes().as_ptr() as u32, 0, 0, 0)
}
/// Loads the value stored at the given key if any.
unsafe fn load(key: Key) -> Option<Vec<u8>> {
const SUCCESS: u32 = 0;
if sys::ext_get_storage(key.as_bytes().as_ptr() as u32) == SUCCESS {
return Some(read_scratch_buffer())
}
None
}
}
/// The SRML contracts environment.
pub struct SrmlEnv<T>
where
T: EnvTypes,
{
marker: PhantomData<fn() -> T>,
}
impl<T> EnvTypes for SrmlEnv<T>
where
T: EnvTypes,
{
type AccountId = <T as EnvTypes>::AccountId;
type Balance = <T as EnvTypes>::Balance;
type Hash = <T as EnvTypes>::Hash;
type Moment = <T as EnvTypes>::Moment;
type BlockNumber = <T as EnvTypes>::BlockNumber;
type Call = <T as EnvTypes>::Call;
}
macro_rules! impl_getters_for_srml_env {
( $( ($name:ident, $ext_name:ident, $ret_type:ty) ),* ) => {
$(
fn $name() -> $ret_type {
unsafe { sys::$ext_name() };
Decode::decode(&mut &read_scratch_buffer()[..])
.expect(concat!(
stringify!($name), " expects to receive a correctly sized buffer"
))
}
)*
}
}
impl<T> SrmlEnv<T>
where
T: EnvTypes,
{
fn call(
callee: <Self as EnvTypes>::AccountId,
gas: u64,
value: <Self as EnvTypes>::Balance,
input_data: &[u8],
) -> u32 {
let callee = callee.encode();
let value = value.encode();
unsafe {
sys::ext_call(
callee.as_ptr() as u32,
callee.len() as u32,
gas,
value.as_ptr() as u32,
value.len() as u32,
input_data.as_ptr() as u32,
input_data.len() as u32,
)
}
}
}
impl<T> Env for SrmlEnv<T>
where
T: EnvTypes,
{
fn input() -> Vec<u8> {
read_scratch_buffer()
}
impl_getters_for_srml_env!(
(address, ext_address, <Self as EnvTypes>::AccountId),
(balance, ext_balance, <Self as EnvTypes>::Balance),
(caller, ext_caller, <Self as EnvTypes>::AccountId),
(random_seed, ext_random_seed, <Self as EnvTypes>::Hash),
(now, ext_now, <Self as EnvTypes>::Moment),
(
block_number,
ext_block_number,
<Self as EnvTypes>::BlockNumber
),
(gas_price, ext_gas_price, <Self as EnvTypes>::Balance),
(gas_left, ext_gas_left, <Self as EnvTypes>::Balance),
(
value_transferred,
ext_value_transferred,
<Self as EnvTypes>::Balance
)
);
fn return_data(data: &[u8]) {
write_scratch_buffer(data)
}
fn println(content: &str) {
unsafe { sys::ext_println(content.as_ptr() as u32, content.len() as u32) }
}
fn deposit_raw_event(topics: &[<Self as EnvTypes>::Hash], data: &[u8]) {
unsafe {
sys::ext_deposit_event(
topics.as_ptr() as u32,
topics.len() as u32,
data.as_ptr() as u32,
data.len() as u32,
)
}
}
fn dispatch_raw_call(data: &[u8]) {
unsafe { sys::ext_dispatch_call(data.as_ptr() as u32, data.len() as u32) }
}
fn call_invoke(
callee: <Self as EnvTypes>::AccountId,
gas: u64,
value: <Self as EnvTypes>::Balance,
input_data: &[u8],
) -> Result<(), CallError> {
let result = Self::call(callee, gas, value, input_data);
if result != 0 {
return Err(CallError)
}
Ok(())
}
fn call_evaluate<U: Decode>(
callee: <Self as EnvTypes>::AccountId,
gas: u64,
value: <Self as EnvTypes>::Balance,
input_data: &[u8],
) -> Result<U, CallError> {
let result = Self::call(callee, gas, value, input_data);
if result != 0 {
return Err(CallError)
}
U::decode(&mut &read_scratch_buffer()[..]).map_err(|_| CallError)
}
fn create(
code_hash: <Self as EnvTypes>::Hash,
gas_limit: u64,
value: <Self as EnvTypes>::Balance,
input_data: &[u8],
) -> Result<<Self as EnvTypes>::AccountId, CreateError> {
let result = {
let code_hash = code_hash.encode();
let value = value.encode();
unsafe {
sys::ext_create(
code_hash.as_ptr() as u32,
code_hash.len() as u32,
gas_limit,
value.as_ptr() as u32,
value.len() as u32,
input_data.as_ptr() as u32,
input_data.len() as u32,
)
}
};
if result != 0 {
return Err(CreateError)
}
<Self as EnvTypes>::AccountId::decode(&mut &read_scratch_buffer()[..])
.map_err(|_| CreateError)
}
}
| 27.759036 | 89 | 0.542101 |
4b15323e1edacec2a047af734e6e13d55ec60e72 | 13,106 | // Copyright (c) 2019 Rafael Alcaraz Mercado. All rights reserved.
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
// THE SOURCE CODE IS AVAILABLE UNDER THE ABOVE CHOSEN LICENSE "AS IS", WITH NO WARRANTIES.
//! This file contains the interface definitions for the VmSavedState Dump Provider APIs.
use crate::vmsavedstatedumpdefs::*;
use crate::windefs::*;
#[link(name = "vmsavedstatedumpprovider")]
extern "C" {
/// Locates the saved state file(s) for a given VM and/or snapshot. This function uses WMI and the V1 or V2
/// virtualization namespace. So this is expected to fail if ran on a machine without Hyper-V installed.
/// * If the given VM has a VMRS file, parameters BinPath and VsvPath will be a single null terminator character.
/// * If the given VM has BIN and VSV files, parameter VmrsPath will be a single null terminator character.
/// * If no saved state files are found, all three returned string parameters will be single null terminator characters.
///
/// # Arguments
///
/// * `VmName` - Supplies the VM name for which the saved state file will be located.
/// * `SnapshotName` - Supplies an optional snapshot name to locate its saved state file
/// on relation to the given VM name.
/// * `BinPath` - Returns a pointer to a NULL-terminated string containing the full path name to the BIN file.
/// The caller must call LocalFree on the returned pointer in order to release the memory occupied by the string.
/// * `VsvPath` - Returns a pointer to a NULL-terminated string containing the full path name to the VSV file.
/// The caller must call LocalFree on the returned pointer in order to release the memory occupied by the string.
/// * `VmrsPath` - Returns a pointer to a NULL-terminated string containing the full path name to the VMRS file.
/// The caller must call LocalFree on the returned pointer in order to release the memory occupied by the string.
///
/// # Returns
///
/// * `S_OK` - The full path(s) to the saved state file were returned successfully.
/// * `E_OUTOFMEMORY` - There was insufficient memory to return the full path(s).
/// * `HResult` - Other HResult failure codes might be returned.
///
pub fn LocateSavedStateFiles(
VmName: LPCWStr,
SnapshotName: LPCWStr,
BinPath: *mut LPWStr,
VsvPath: *mut LPWStr,
VmrsPath: *mut LPWStr,
) -> HResult;
/// Loads the given saved state file and creates an instance of VmSavedStateDump.
/// This instance can be referenced on the other methods with the returned UINT64 Id.
///
/// # Arguments
///
/// * `VmrsFile` - Supplies the path to the VMRS file to load.
/// * `VmSavedStateDumpHandle` - Returns a Handle to the dump provider instance created.
///
/// # Returns
///
/// * `HResult`.
///
pub fn LoadSavedStateFile(
VmrsFile: LPCWStr,
VmSavedStateDumpHandle: *mut VmSavedStateDumpHandle,
) -> HResult;
/// Opens the given saved state file in read-write exclusive mode so that it applies any pending
/// replay logs to the contents. This method doesn't loads the saved state file into the library
/// and can't be used to get content data; function LoadSavedStateFile must be used instead.
///
/// # Arguments
///
/// * `VmrsFile` - Supplies the path to the VMRS file whose any pending replay log will be applied.
///
/// # Returns
///
/// * `HResult`.
///
pub fn ApplyPendingSavedStateFileReplayLog(VmrsFile: LPCWStr) -> HResult;
/// Loads the given saved state files and creates an instance of VmSavedStateDump.
/// This instance can be referenced on the other methods with the returned UINT64 Id.
///
/// # Arguments
///
/// * `BinFile` - Supplies the path to the BIN file to load.
/// * `VsvFile` - Supplies the path to the VSV file to load.
/// * `VmSavedStateDumpHandle` - Returns the ID for the dump provider instance created.
///
/// # Returns
///
/// * `HResult`.
///
pub fn LoadSavedStateFiles(
BinFile: LPCWStr,
VsvFile: LPCWStr,
VmSavedStateDumpHandle: *mut VmSavedStateDumpHandle,
) -> HResult;
/// Releases the given VmSavedStateDump provider that matches the supplied ID.
/// Releasing the provider releases the locks to the saved state files.
/// This means that it won't be available for use on other methods.
///
/// # Arguments
///
/// * `mSavedStateDumpHandle` - Supplies the ID of the dump provider instance to release.
///
/// # Returns
///
/// * `HResult`.
///
pub fn ReleaseSavedStateFiles(VmSavedStateDumpHandle: VmSavedStateDumpHandle) -> HResult;
/// Queries for the Virtual Processor count for a given VmSavedStateDump.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `VpCount` - Returns the Virtual Processor count.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetVpCount(VmSavedStateDumpHandle: VmSavedStateDumpHandle, VpCount: *mut u32)
-> HResult;
/// Queries for the current Architecture/ISA the virtual processor was running at the time the
/// saved state file was generated.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `VpId` - Supplies the VP to query.
/// * `Architecture` - Returns the architecture of the supplied vp.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetArchitecture(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
VpId: u32,
Architecture: *mut VirtualProcessorArch,
) -> HResult;
/// Queries for a specific register value for a given VP in a VmSavedStateDump.
/// Callers must specify architecture and register ID in parameter Register, and this function
/// returns the register value through it.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `VpId` - Supplies the Virtual Processor Id.
/// * `Register` - Supplies the register architecture and ID, and returns the value.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetRegisterValue(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
VpId: u32,
Register: *mut VirtualProcessorRegister,
) -> HResult;
/// Queries for the current Paging Mode in use by the virtual processor at the time the
/// saved state file was generated.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `VpId` - Supplies the Virtual Processor Id.
/// * `PagingMode` - Returns the paging mode.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetPagingMode(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
VpId: u32,
PagingMode: *mut PagingMode,
) -> HResult;
/// Reads from the saved state file the given guest physical address range and then
/// it is written into the supplied buffer.
/// If BytesRead returns something lower than BufferSize, then the end of memory has been reached.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `PhysicalAddress` - Supplies the physical address to read.
/// * `Buffer` - Returns the read memory range on the given address.
/// * `BufferSize` - Supplies the requested byte count to read.
/// * `BytesRead` - Optionally returns the bytes actually read.
///
/// # Returns
///
/// * `HResult`.
///
pub fn ReadGuestPhysicalAddress(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
PhysicalAddress: GuestPhysicalAddress,
Buffer: PVoid,
BufferSize: u32,
BytesRead: *mut u32,
) -> HResult;
/// Translates a virtual address to a physical address using information found in the
/// guest's memory and processor's state.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `VpId` - Supplies the VP from where the virtual address is read.
/// * `VirtualAddress` - Supplies the virtual address to translate.
/// * `PhysicalAddress` - Returns the physical address assigned to the supplied virtual address.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GuestVirtualAddressToPhysicalAddress(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
VpId: u32,
VirtualAddress: GuestVirtualAddress,
PhysicalAddress: *mut GuestPhysicalAddress,
) -> HResult;
/// Returns the layout of the physical memory of the guest. This information contains the chunks of memory
/// with consecutive pages and from where each one starts. If the supplied count is less than the amount
/// of chunks for this guest, then this function returns the expected chunk count.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `MemoryChunkPageSize` - Returns the size of a page in the memory chunk layout.
/// * `MemoryChunks` - Supplies a buffer of memory chunk structures that are filled up with the
/// requested information if the buffer size is the same or bigger than the
/// memory chunks count for this guest.
/// * `MemoryChunkCount` - Supplies the size of the MemoryChunks buffer. If this count is lower than
/// what the guest really has, then it returns the expected count. If it was
/// higher than what the guest has, then it returns the exact count.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetGuestPhysicalMemoryChunks(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
MemoryChunkPageSize: *mut u64,
MemoryChunks: *mut GpaMemoryChunk,
MemoryChunkCount: *mut u64,
) -> HResult;
/// Translates the given guest physical address to a raw saved memory offset. This is specially useful
/// if callers need to read a memory range directly from all of the guest's saved memory starting
/// in the saved memory address equivalent to the supplied guest physical address.
/// Translation from raw saved memory offset to physical address is not supported.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `PhysicalAddress` - Supplies the guest physical address to translate.
/// * `RawSavedMemoryOffset` - Returns the raw saved memory offset for a given physical address.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GuestPhysicalAddressToRawSavedMemoryOffset(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
PhysicalAddress: GuestPhysicalAddress,
RawSavedMemoryOffset: *mut u64,
) -> HResult;
/// Reads raw memory from the saved state file. This function reads raw memory from the saved state file
/// as if it were a flat memory layout, regardless of the guest memory layout.
/// If BytesRead returns something lower than BufferSize, then the end of memory has been reached.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `RawSavedMemoryOffset` - Byte offset on the raw saved memory from where to start reading.
/// * `Buffer` - Returns the raw memory read on the current raw memory offset.
/// * `BufferSize` - Supplies the requested byte count to read.
/// * `BytesRead` - Optionally returns the bytes actually read.
///
/// # Returns
///
/// * `HResult`.
///
pub fn ReadGuestRawSavedMemory(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
RawSavedMemoryOffset: u64,
Buffer: PVoid,
BufferSize: u32,
BytesRead: *mut u32,
) -> HResult;
/// Returns the size in bytes of the saved memory for a given VM saved state file.
///
/// # Arguments
///
/// * `VmSavedStateDumpHandle` - Supplies a handle to a dump provider instance.
/// * `GuestRawSavedMemorySize` - Returns the size of the saved memory of a given guest in bytes.
///
/// # Returns
///
/// * `HResult`.
///
pub fn GetGuestRawSavedMemorySize(
VmSavedStateDumpHandle: VmSavedStateDumpHandle,
GuestRawSavedMemorySize: *mut u64,
) -> HResult;
}
| 41.213836 | 132 | 0.648405 |
ac2bc3270ebecccf48824f6c4eb72da3caca32ce | 531 | use mimalloc_rust::*;
use std::env;
use std::fs::read_to_string;
use std::time::Instant;
use tokenizer::Tokenizer;
#[global_allocator]
static GLOBAL_MIMALLOC: GlobalMiMalloc = GlobalMiMalloc;
fn main() {
let file = env::args().nth(1).unwrap();
let css: &str = &read_to_string(format!("assets/{}", file)).unwrap();
let start = Instant::now();
let processor = Tokenizer::new(css, false);
while !processor.end_of_file() {
processor.next_token(false);
}
let end = start.elapsed();
print!("{}", end.as_nanos());
}
| 25.285714 | 71 | 0.6742 |
fcf1559ae3c5c14d7b4e96401b284e7e12bea13b | 10,122 | #![allow(clippy::too_many_arguments)]
mod napi1 {
use super::super::types::*;
use std::os::raw::{c_char, c_void};
generate!(
extern "C" {
fn get_undefined(env: Env, result: *mut Value) -> Status;
fn get_null(env: Env, result: *mut Value) -> Status;
fn get_global(env: Env, result: *mut Value) -> Status;
fn get_boolean(env: Env, value: bool, result: *mut Value) -> Status;
fn create_double(env: Env, value: f64, result: *mut Value) -> Status;
fn create_object(env: Env, result: *mut Value) -> Status;
fn get_value_bool(env: Env, value: Value, result: *mut bool) -> Status;
fn get_value_double(env: Env, value: Value, result: *mut f64) -> Status;
fn create_array_with_length(env: Env, length: usize, result: *mut Value) -> Status;
fn get_array_length(env: Env, value: Value, result: *mut u32) -> Status;
fn get_new_target(env: Env, cbinfo: CallbackInfo, result: *mut Value) -> Status;
fn coerce_to_object(env: Env, value: Value, result: *mut Value) -> Status;
fn coerce_to_string(env: Env, value: Value, result: *mut Value) -> Status;
fn throw(env: Env, error: Value) -> Status;
fn create_error(env: Env, code: Value, msg: Value, result: *mut Value) -> Status;
fn get_and_clear_last_exception(env: Env, result: *mut Value) -> Status;
fn is_exception_pending(env: Env, result: *mut bool) -> Status;
fn get_value_external(env: Env, value: Value, result: *mut *mut c_void) -> Status;
fn typeof_value(env: Env, value: Value, result: *mut ValueType) -> Status;
fn close_escapable_handle_scope(env: Env, scope: EscapableHandleScope) -> Status;
fn open_escapable_handle_scope(env: Env, result: *mut EscapableHandleScope) -> Status;
fn open_handle_scope(env: Env, result: *mut HandleScope) -> Status;
fn close_handle_scope(env: Env, scope: HandleScope) -> Status;
fn is_arraybuffer(env: Env, value: Value, result: *mut bool) -> Status;
fn is_buffer(env: Env, value: Value, result: *mut bool) -> Status;
fn is_error(env: Env, value: Value, result: *mut bool) -> Status;
fn is_array(env: Env, value: Value, result: *mut bool) -> Status;
fn get_value_string_utf8(
env: Env,
value: Value,
buf: *mut c_char,
bufsize: usize,
result: *mut usize,
) -> Status;
fn create_type_error(env: Env, code: Value, msg: Value, result: *mut Value) -> Status;
fn create_range_error(env: Env, code: Value, msg: Value, result: *mut Value) -> Status;
fn create_string_utf8(
env: Env,
str: *const c_char,
length: usize,
result: *mut Value,
) -> Status;
fn create_arraybuffer(
env: Env,
byte_length: usize,
data: *mut *mut c_void,
result: *mut Value,
) -> Status;
fn get_arraybuffer_info(
env: Env,
arraybuffer: Value,
data: *mut *mut c_void,
byte_length: *mut usize,
) -> Status;
fn create_buffer(
env: Env,
length: usize,
data: *mut *mut c_void,
result: *mut Value,
) -> Status;
fn get_buffer_info(
env: Env,
value: Value,
data: *mut *mut c_void,
length: *mut usize,
) -> Status;
fn get_cb_info(
env: Env,
cbinfo: CallbackInfo,
argc: *mut usize,
argv: *mut Value,
this_arg: *mut Value,
data: *mut *mut c_void,
) -> Status;
fn create_external(
env: Env,
data: *mut c_void,
finalize_cb: Finalize,
finalize_hint: *mut c_void,
result: *mut Value,
) -> Status;
fn new_instance(
env: Env,
constructor: Value,
argc: usize,
argv: *const Value,
result: *mut Value,
) -> Status;
fn call_function(
env: Env,
recv: Value,
func: Value,
argc: usize,
argv: *const Value,
result: *mut Value,
) -> Status;
fn create_function(
env: Env,
utf8name: *const c_char,
length: usize,
cb: Callback,
data: *mut c_void,
result: *mut Value,
) -> Status;
fn set_property(env: Env, object: Value, key: Value, value: Value) -> Status;
fn get_property(env: Env, object: Value, key: Value, result: *mut Value) -> Status;
fn set_element(env: Env, object: Value, index: u32, value: Value) -> Status;
fn get_element(env: Env, object: Value, index: u32, result: *mut Value) -> Status;
fn escape_handle(
env: Env,
scope: EscapableHandleScope,
escapee: Value,
result: *mut Value,
) -> Status;
fn create_reference(
env: Env,
value: Value,
initial_ref_count: u32,
result: *mut Ref,
) -> Status;
fn reference_ref(env: Env, reference: Ref, result: *mut u32) -> Status;
fn reference_unref(env: Env, reference: Ref, result: *mut u32) -> Status;
fn get_reference_value(env: Env, reference: Ref, result: *mut Value) -> Status;
fn strict_equals(env: Env, lhs: Value, rhs: Value, result: *mut bool) -> Status;
fn create_external_arraybuffer(
env: Env,
data: *mut c_void,
length: usize,
finalize_cb: Finalize,
finalize_hint: *mut c_void,
result: *mut Value,
) -> Status;
fn create_external_buffer(
env: Env,
length: usize,
data: *mut c_void,
finalize_cb: Finalize,
finalize_hint: *mut c_void,
result: *mut Value,
) -> Status;
fn run_script(env: Env, script: Value, result: *mut Value) -> Status;
}
);
}
#[cfg(feature = "napi-4")]
mod napi4 {
use super::super::types::*;
use std::os::raw::c_void;
generate!(
extern "C" {
fn create_threadsafe_function(
env: Env,
func: Value,
async_resource: Value,
async_resource_name: Value,
max_queue_size: usize,
initial_thread_count: usize,
thread_finalize_data: *mut c_void,
thread_finalize_cb: Finalize,
context: *mut c_void,
call_js_cb: ThreadsafeFunctionCallJs,
result: *mut ThreadsafeFunction,
) -> Status;
fn call_threadsafe_function(
func: ThreadsafeFunction,
data: *mut c_void,
is_blocking: ThreadsafeFunctionCallMode,
) -> Status;
fn release_threadsafe_function(
func: ThreadsafeFunction,
mode: ThreadsafeFunctionReleaseMode,
) -> Status;
fn ref_threadsafe_function(env: Env, func: ThreadsafeFunction) -> Status;
fn unref_threadsafe_function(env: Env, func: ThreadsafeFunction) -> Status;
}
);
}
#[cfg(feature = "napi-5")]
mod napi5 {
use super::super::types::*;
generate!(
extern "C" {
fn create_date(env: Env, value: f64, result: *mut Value) -> Status;
fn get_date_value(env: Env, value: Value, result: *mut f64) -> Status;
fn is_date(env: Env, value: Value, result: *mut bool) -> Status;
}
);
}
#[cfg(feature = "napi-6")]
mod napi6 {
use super::super::types::*;
generate!(
extern "C" {
fn get_all_property_names(
env: Env,
object: Value,
key_mode: KeyCollectionMode,
key_filter: KeyFilter,
key_conversion: KeyConversion,
result: *mut Value,
) -> Status;
}
);
}
pub(crate) use napi1::*;
#[cfg(feature = "napi-4")]
pub(crate) use napi4::*;
#[cfg(feature = "napi-5")]
pub(crate) use napi5::*;
#[cfg(feature = "napi-6")]
pub(crate) use napi6::*;
use super::{Env, Status};
// Loaded ahead of the generated loaders: the reported N-API version decides
// which of the other symbol tables may be resolved at all.
unsafe fn get_version(host: &libloading::Library, env: Env) -> Result<u32, libloading::Error> {
    let symbol = host.get::<fn(Env, *mut u32) -> Status>(b"napi_get_version")?;
    let mut version: u32 = 0;
    let status = symbol(env, &mut version as *mut u32);
    assert_eq!(status, Status::Ok);
    Ok(version)
}
/// Resolves every N-API symbol table supported by the host process.
///
/// Version 1 is loaded unconditionally; each feature-gated table is loaded
/// only when compiled in, with the runtime version passed through so the
/// generated loaders can skip symbols the host does not provide.
pub(crate) unsafe fn load(env: Env) -> Result<(), libloading::Error> {
    // N-API symbols live in the host executable itself (Node), not a separate
    // shared library, so open a handle to the current process.
    #[cfg(not(windows))]
    let host = libloading::os::unix::Library::this().into();
    #[cfg(windows)]
    let host = libloading::os::windows::Library::this()?.into();
    // This never fails since `get_version` is in N-API Version 1 and the module will fail
    // with `Error: Module did not self-register` if N-API does not exist.
    let version = get_version(&host, env).expect("Failed to find N-API version");
    napi1::load(&host, version, 1)?;
    #[cfg(feature = "napi-4")]
    napi4::load(&host, version, 4)?;
    #[cfg(feature = "napi-5")]
    napi5::load(&host, version, 5)?;
    #[cfg(feature = "napi-6")]
    napi6::load(&host, version, 6)?;
    Ok(())
}
| 31.830189 | 99 | 0.51719 |
211f63416b0a2ea1020876dd757c1c413a6dc70d | 10,588 | use crate::glottis::Glottis;
use crate::math::{interpolate, sqr};
use crate::noise::{self, NoiseSource};
use crate::transient::Transient;
use crate::turbulence::TurbulencePoint;
/// Waveguide model of the vocal tract: a main oral cavity of `Tract::N`
/// segments coupled to a nasal cavity of `NOSE_LEN` segments, driven by a
/// glottal source and excited by transients and turbulence noise.
pub struct Tract {
    pub glottis: Glottis,
    // Output sample rate in Hz; must be non-zero (checked in `new`).
    sample_rate: u32,
    // Band-limited noise generator used for fricative excitation.
    frication_noise_source: Box<dyn FnMut() -> f64 + Send + 'static>,
    // Number of samples produced so far; `time` is derived from it in `step`.
    sample_count: usize,
    pub time: f32,
    // Left- and right-travelling pressure waves, one value per oral segment.
    left: [f64; Tract::N],
    right: [f64; Tract::N],
    // Junction reflection coefficients: current block and next block
    // (interpolated between in `step`).
    reflection: [f64; Tract::N],
    new_reflection: [f64; Tract::N],
    junction_output_right: [f64; Tract::N],
    // NOTE(review): field name carries a typo ("justion"); kept as-is since
    // it is referenced throughout this file.
    justion_output_left: [f64; Tract::N + 1],
    // Slowly decaying per-segment amplitude envelope.
    max_amplitude: [f64; Tract::N],
    /// vocal tract cell diameters
    pub diameter: [f64; Tract::N],
    pub transients: Vec<Transient>,
    pub turbulence_points: Vec<TurbulencePoint>,
    // Nasal-cavity counterparts of the oral-cavity state above.
    nose_right: [f64; NOSE_LEN],
    nose_left: [f64; NOSE_LEN],
    nose_junction_output_right: [f64; NOSE_LEN],
    nose_junction_output_left: [f64; NOSE_LEN + 1],
    nose_reflection: [f64; NOSE_LEN],
    pub nose_diameter: [f64; NOSE_LEN],
    nose_max_amplitude: [f64; NOSE_LEN],
    // Reflection coefficients at the three-way oral/nasal (velum) junction,
    // again kept as current plus next-block values for interpolation.
    reflection_left: f64,
    reflection_right: f64,
    new_reflection_left: f64,
    new_reflection_right: f64,
    reflection_nose: f64,
    new_reflection_nose: f64,
}
// Local alias for the number of oral-tract segments.
const N: usize = Tract::N;
// Fraction of the wave reflected back at the glottal end.
const GLOTTAL_REFLECTION: f64 = 0.75;
// Reflection at the open lip end; the negative sign inverts the wave.
const LIP_REFLECTION: f64 = -0.85;
// Number of nasal-cavity segments.
const NOSE_LEN: usize = 28;
// Index of the oral segment where the nasal cavity branches off.
const NOSE_START: usize = N - NOSE_LEN + 1;
impl Tract {
    // Number of oral-tract segments and anatomical landmark indices.
    pub const N: usize = 44;
    pub const BLADE_START: usize = 10;
    pub const TIP_START: usize = 32;
    pub const LIP_START: usize = 39;
    pub const NOSE_START: usize = NOSE_START;
    pub const NOSE_LEN: usize = NOSE_LEN;
    /// Creates a tract with all waveguide state zeroed.
    ///
    /// # Panics
    /// Panics if `sample_rate` is zero.
    pub fn new(glottis: Glottis, sample_rate: u32, rng: &mut dyn NoiseSource<f64>) -> Tract {
        if sample_rate == 0 {
            panic!("sample_rate must be > 0")
        };
        Tract {
            glottis,
            sample_rate,
            // Fricative noise: filtered noise source seeded from `rng`.
            frication_noise_source: noise::new_filtered_noise_source(
                1000.0,
                0.5,
                sample_rate,
                0x8000,
                rng,
            ),
            transients: Vec::new(),
            turbulence_points: Vec::new(),
            sample_count: 0,
            time: 0.0,
            left: [0.0; Tract::N],
            right: [0.0; Tract::N],
            reflection: [0.0; Tract::N],
            new_reflection: [0.0; Tract::N],
            junction_output_right: [0.0; Tract::N],
            justion_output_left: [0.0; Tract::N + 1],
            max_amplitude: [0.0; Tract::N],
            diameter: [0.0; Tract::N],
            nose_right: [0.0; NOSE_LEN],
            nose_left: [0.0; NOSE_LEN],
            nose_junction_output_right: [0.0; NOSE_LEN],
            nose_junction_output_left: [0.0; NOSE_LEN + 1],
            nose_reflection: [0.0; NOSE_LEN],
            nose_diameter: [0.0; NOSE_LEN],
            nose_max_amplitude: [0.0; NOSE_LEN],
            reflection_left: 0.0,
            reflection_right: 0.0,
            new_reflection_left: 0.0,
            new_reflection_right: 0.0,
            reflection_nose: 0.0,
            new_reflection_nose: 0.0,
        }
    }
    /// Recomputes nasal junction reflection coefficients from `nose_diameter`.
    pub fn calculate_nose_reflections(&mut self) {
        let mut a = [0.0; NOSE_LEN];
        for i in 0..NOSE_LEN {
            // Clamp the area (diameter squared) away from zero so the
            // division below cannot blow up.
            a[i] = 1e-6_f64.max(sqr(self.nose_diameter[i]));
        }
        for i in 1..NOSE_LEN {
            self.nose_reflection[i] = assert_volume((a[i - 1] - a[i]) / (a[i - 1] + a[i]));
        }
    }
    /// Refreshes all reflection coefficients at the start of an audio block.
    pub fn calculate_new_block_parameters(&mut self) {
        self.calculate_main_tract_reflections();
        self.calculate_nose_junction_reflections();
    }
    // Derives oral junction reflections from per-segment areas.
    fn calculate_main_tract_reflections(&mut self) {
        let mut a = [0.0; Tract::N];
        for i in 0..Tract::N {
            a[i] = sqr(self.diameter[i]);
        }
        for i in 1..Tract::N {
            // Keep the previous coefficient so `step` can interpolate.
            self.reflection[i] = self.new_reflection[i];
            let sum = a[i - 1] + a[i];
            // A (near-)zero total area means total reflection.
            self.new_reflection[i] = if sum.abs() > 1e-6 {
                (a[i - 1] - a[i]) / sum
            } else {
                1.0
            };
        }
    }
    // Updates the three-way reflection coefficients at the velum, where the
    // nasal cavity joins the oral tract.
    fn calculate_nose_junction_reflections(&mut self) {
        self.reflection_left = self.new_reflection_left;
        self.reflection_right = self.new_reflection_right;
        self.reflection_nose = self.new_reflection_nose;
        let velum_a = sqr(self.nose_diameter[0]);
        let an0 = sqr(self.diameter[NOSE_START]);
        let an1 = sqr(self.diameter[NOSE_START + 1]);
        let sum = an0 + an1 + velum_a;
        if sum.abs() > 1e-6 {
            self.new_reflection_left = (2.0 * an0 - sum) / sum;
            self.new_reflection_right = (2.0 * an1 - sum) / sum;
            self.new_reflection_nose = (2.0 * velum_a - sum) / sum;
        } else {
            self.new_reflection_left = 1.0;
            self.new_reflection_right = 1.0;
            self.new_reflection_nose = 1.0;
        }
    }
    /// Advances the simulation by one sample and returns the audio output
    /// (lip output plus nose output). `lambda` interpolates between the
    /// previous and the newly computed block reflection coefficients.
    pub fn step(&mut self, glottal_output: f64, lambda: f64) -> f32 {
        // mouth
        self.process_transients();
        self.add_turbulence_noise();
        // self.glottalReflection = -0.8 + 1.6 * self.glottis.newTenseness;
        self.junction_output_right[0] = self.left[0] * GLOTTAL_REFLECTION + glottal_output;
        self.justion_output_left[N] = self.right[N - 1] * LIP_REFLECTION;
        // Scatter the travelling waves at every interior junction.
        for i in 1..N {
            let r = interpolate(self.reflection[i], self.new_reflection[i], lambda);
            let w = r * (self.right[i - 1] + self.left[i]);
            self.junction_output_right[i] = assert_volume(self.right[i - 1] - w);
            self.justion_output_left[i] = assert_volume(self.left[i] + w);
        }
        // now at junction with nose
        let i = NOSE_START;
        let r = interpolate(self.reflection_left, self.new_reflection_left, lambda);
        self.justion_output_left[i] =
            assert_volume(r * self.right[i - 1] + (1.0 + r) * (self.nose_left[0] + self.left[i]));
        let r = interpolate(self.reflection_right, self.new_reflection_right, lambda);
        self.junction_output_right[i] =
            assert_volume(r * self.left[i] + (1.0 + r) * (self.right[i - 1] + self.nose_left[0]));
        let r = interpolate(self.reflection_nose, self.new_reflection_nose, lambda);
        self.nose_junction_output_right[0] =
            assert_volume(r * self.nose_left[0] + (1.0 + r) * (self.left[i] + self.right[i - 1]));
        for i in 0..N {
            // The 0.999 factor adds slight damping so energy decays over time.
            let right = self.junction_output_right[i] * 0.999;
            let left = self.justion_output_left[i + 1] * 0.999;
            self.right[i] = right;
            self.left[i] = left;
            let amplitude = (right + left).abs();
            // Track a slowly decaying amplitude envelope per segment.
            self.max_amplitude[i] *= 0.9999;
            self.max_amplitude[i] = self.max_amplitude[i].max(amplitude);
        }
        let lip_output = self.right[N - 1];
        // nose
        self.nose_junction_output_left[NOSE_LEN] = self.nose_right[NOSE_LEN - 1] * LIP_REFLECTION;
        for i in 1..NOSE_LEN {
            let w = self.nose_reflection[i] * (self.nose_right[i - 1] + self.nose_left[i]);
            self.nose_junction_output_right[i] = assert_volume(self.nose_right[i - 1] - w);
            self.nose_junction_output_left[i] = assert_volume(self.nose_left[i] + w);
        }
        for i in 0..NOSE_LEN {
            let right = self.nose_junction_output_right[i];
            let left = self.nose_junction_output_left[i + 1];
            self.nose_right[i] = right;
            self.nose_left[i] = left;
            let amplitude = (right + left).abs();
            self.nose_max_amplitude[i] *= 0.9999;
            self.nose_max_amplitude[i] = self.nose_max_amplitude[i].max(amplitude);
        }
        let nose_output = self.nose_right[NOSE_LEN - 1];
        self.sample_count += 1;
        self.time = self.sample_count as f32 / self.sample_rate as f32;
        (lip_output + nose_output) as f32
    }
    // Applies active transients (impulse excitations) and drops expired ones.
    fn process_transients(&mut self) {
        // Iterate in reverse so `remove` does not shift unvisited entries.
        for i in (0..self.transients.len()).rev() {
            let trans = &self.transients[i];
            let time_alive = self.time - trans.start_time;
            if time_alive > trans.life_time {
                self.transients.remove(i);
                continue;
            }
            // Exponentially decaying impulse, split evenly between both
            // travelling-wave directions.
            let amplitude = trans.strength * 2.0_f64.powf(-trans.exponent * time_alive as f64);
            self.right[trans.position] += amplitude * 0.5;
            self.left[trans.position] += amplitude * 0.5;
        }
    }
    // Injects fricative noise at every active turbulence point.
    fn add_turbulence_noise(&mut self) {
        const FRICATIVE_ATTACK_TIME: f32 = 0.1; // seconds
        // Collect first to avoid borrowing `self` immutably while the
        // injection below needs `&mut self`.
        let mut turbulence_noises = Vec::<(f64, f64, f64)>::new();
        for p in &self.turbulence_points {
            // Skip points outside the tract or with a closed constriction.
            if p.position < 2.0 || p.position > N as f32 {
                continue;
            }
            if p.diameter <= 0.0 {
                continue;
            }
            // Fade in while active (end_time is NaN), fade out after it ends.
            let intensity = if f32::is_nan(p.end_time) {
                (self.time - p.start_time) / FRICATIVE_ATTACK_TIME
            } else {
                1.0 - (self.time - p.end_time) / FRICATIVE_ATTACK_TIME
            }
            .clamp(0.0, 1.0);
            if intensity <= 0.0 {
                continue;
            }
            let turbulence_noise = 0.66
                * (self.frication_noise_source)()
                * intensity as f64
                * self.glottis.get_noise_modulator() as f64;
            turbulence_noises.push((turbulence_noise, p.position as f64, p.diameter as f64));
        }
        for (noise, pos, diameter) in turbulence_noises.into_iter() {
            self.add_turbulence_noise_at_position(noise, pos, diameter);
        }
    }
    // Distributes one noise sample across the two segments straddling the
    // fractional `position`, weighted by how constricted/open the tract is.
    fn add_turbulence_noise_at_position(
        &mut self,
        turbulence_noise: f64,
        position: f64,
        diameter: f64,
    ) {
        let i = position.floor() as i64;
        let delta = position - i as f64;
        let thinnes0 = (8.0 * (0.7 - diameter)).clamp(0.0, 1.0);
        let openness = (30.0 * (diameter - 0.3)).clamp(0.0, 1.0);
        // Linear split of the noise between the two neighbouring segments.
        let noise0 = turbulence_noise * (1.0 - delta) * thinnes0 * openness;
        let noise1 = turbulence_noise * delta * thinnes0 * openness;
        if i + 1 < N as i64 {
            let idx = (i + 1) as usize;
            self.right[idx] += noise0 * 0.5;
            self.left[idx] += noise0 * 0.5;
        }
        if i + 2 < N as i64 {
            let idx = (i + 2) as usize;
            self.right[idx] += noise1 * 0.5;
            self.left[idx] += noise1 * 0.5;
        }
    }
}
/// Pass-through guard for signal samples. The magnitude check that once
/// lived here is disabled, so the value is returned untouched.
fn assert_volume(val: f64) -> f64 {
    val
}
| 34.154839 | 98 | 0.55714 |
7aae2231577e21586c1c26799792b67ff330023e | 108 | use anyhow::Result;
#[tokio::main(flavor = "current_thread")]
async fn main() -> Result<()> {
Ok(())
}
| 15.428571 | 41 | 0.583333 |
d5c8bada1451f78bdc0d673c15bf9d6000b591ba | 5,194 | //! The compiler code necessary to implement the `#[derive]` extensions.
use syntax::ast::{self, ItemKind, MetaItem};
use syntax::ptr::P;
use syntax::symbol::{sym, Symbol};
use syntax_expand::base::{Annotatable, ExtCtxt, MultiItemModifier};
use syntax_pos::Span;
/// Shorthand for a `generic::ty::Path` naming a single local identifier.
macro path_local($x:ident) {
    generic::ty::Path::new_local(stringify!($x))
}
/// Stringifies a `::`-separated path into a vector of its segments.
/// The `$cx` argument is accepted for call-site symmetry but unused here.
macro pathvec_std($cx:expr, $($rest:ident)::+) {{
    vec![ $( stringify!($rest) ),+ ]
}}
/// Builds a `generic::ty::Path` from a `::`-separated standard-library path.
macro path_std($($x:tt)*) {
    generic::ty::Path::new( pathvec_std!( $($x)* ) )
}
pub mod bounds;
pub mod clone;
pub mod debug;
pub mod decodable;
pub mod default;
pub mod encodable;
pub mod hash;
#[path = "cmp/eq.rs"]
pub mod eq;
#[path = "cmp/ord.rs"]
pub mod ord;
#[path = "cmp/partial_eq.rs"]
pub mod partial_eq;
#[path = "cmp/partial_ord.rs"]
pub mod partial_ord;
pub mod generic;
/// A built-in `#[derive]` implementation: wraps the expansion function,
/// which emits each generated item through the provided callback.
crate struct BuiltinDerive(
    crate fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)),
);
impl MultiItemModifier for BuiltinDerive {
    /// Expands the wrapped derive over `item`, collecting every item the
    /// derive generates into the returned vector.
    fn expand(
        &self,
        ecx: &mut ExtCtxt<'_>,
        span: Span,
        meta_item: &MetaItem,
        item: Annotatable,
    ) -> Vec<Annotatable> {
        // FIXME: Built-in derives often forget to give spans contexts,
        // so we are doing it here in a centralized way.
        let span = ecx.with_def_site_ctxt(span);
        let mut items = Vec::new();
        (self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a));
        items
    }
}
/// Constructs an expression that calls an intrinsic
///
/// The call is wrapped in an `unsafe` block flagged `CompilerGenerated`,
/// since intrinsics are unsafe to call but this expansion is trusted.
fn call_intrinsic(
    cx: &ExtCtxt<'_>,
    span: Span,
    intrinsic: &str,
    args: Vec<P<ast::Expr>>,
) -> P<ast::Expr> {
    // Use a def-site context so the generated call is hygienic.
    let span = cx.with_def_site_ctxt(span);
    let path = cx.std_path(&[sym::intrinsics, Symbol::intern(intrinsic)]);
    let call = cx.expr_call_global(span, path, args);
    cx.expr_block(P(ast::Block {
        stmts: vec![cx.stmt_expr(call)],
        id: ast::DUMMY_NODE_ID,
        rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),
        span,
    }))
}
// Injects `impl<...> Structural for ItemType<...> { }`. In particular,
// does *not* add `where T: Structural` for parameters `T` in `...`.
// (That's the main reason we cannot use TraitDef here.)
fn inject_impl_of_structural_trait(
    cx: &mut ExtCtxt<'_>,
    span: Span,
    item: &Annotatable,
    structural_path: generic::ty::Path<'_>,
    push: &mut dyn FnMut(Annotatable),
) {
    let item = match *item {
        Annotatable::Item(ref item) => item,
        _ => {
            // Non-Item derive is an error, but it should have been
            // set earlier; see
            // libsyntax/ext/expand.rs:MacroExpander::expand()
            return;
        }
    };
    let generics = match item.kind {
        ItemKind::Struct(_, ref generics) | ItemKind::Enum(_, ref generics) => generics,
        // Do not inject `impl Structural for Union`. (`PartialEq` does not
        // support unions, so we will see error downstream.)
        ItemKind::Union(..) => return,
        _ => unreachable!(),
    };
    // Create generics param list for where clauses and impl headers
    let mut generics = generics.clone();
    // Create the type of `self`.
    //
    // in addition, remove defaults from type params (impls cannot have them).
    let self_params: Vec<_> = generics
        .params
        .iter_mut()
        .map(|param| match &mut param.kind {
            ast::GenericParamKind::Lifetime => {
                ast::GenericArg::Lifetime(cx.lifetime(span, param.ident))
            }
            ast::GenericParamKind::Type { default } => {
                *default = None;
                ast::GenericArg::Type(cx.ty_ident(span, param.ident))
            }
            ast::GenericParamKind::Const { ty: _ } => {
                ast::GenericArg::Const(cx.const_ident(span, param.ident))
            }
        })
        .collect();
    let type_ident = item.ident;
    let trait_ref = cx.trait_ref(structural_path.to_path(cx, span, type_ident, &generics));
    let self_type = cx.ty_path(cx.path_all(span, false, vec![type_ident], self_params));
    // It would be nice to also encode constraint `where Self: Eq` (by adding it
    // onto `generics` cloned above). Unfortunately, that strategy runs afoul of
    // rust-lang/rust#48214. So we perform that additional check in the compiler
    // itself, instead of encoding it here.
    // Keep the lint and stability attributes of the original item, to control
    // how the generated implementation is linted.
    let mut attrs = Vec::new();
    attrs.extend(
        item.attrs
            .iter()
            .filter(|a| {
                [sym::allow, sym::warn, sym::deny, sym::forbid, sym::stable, sym::unstable]
                    .contains(&a.name_or_empty())
            })
            .cloned(),
    );
    // Assemble the anonymous `impl` item and hand it to the caller.
    let newitem = cx.item(
        span,
        ast::Ident::invalid(),
        attrs,
        ItemKind::Impl(
            ast::Unsafety::Normal,
            ast::ImplPolarity::Positive,
            ast::Defaultness::Final,
            generics,
            Some(trait_ref),
            self_type,
            Vec::new(),
        ),
    );
    push(Annotatable::Item(newitem));
}
| 30.374269 | 91 | 0.592607 |
33d3c696eed5ee98d4c9bf014674e9bf942a3b18 | 41 | pub use archive::Archiver;
mod archive;
| 10.25 | 26 | 0.756098 |
bb35caa8442075c05f9e207461815ecff09d2db5 | 2,046 | //! Mocks for the gradually-update module.
#![cfg(test)]
use frame_support::{impl_outer_event, impl_outer_origin, parameter_types};
use frame_system as system;
use sp_core::H256;
use sp_runtime::{testing::Header, traits::IdentityLookup, Perbill};
use super::*;
impl_outer_origin! {
pub enum Origin for Runtime {}
}
mod gradually_update {
pub use crate::Event;
}
impl_outer_event! {
pub enum TestEvent for Runtime {
frame_system<T>,
gradually_update<T>,
}
}
// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct Runtime;
parameter_types! {
pub const BlockHashCount: u64 = 250;
pub const MaximumBlockWeight: u32 = 1024;
pub const MaximumBlockLength: u32 = 2 * 1024;
pub const AvailableBlockRatio: Perbill = Perbill::one();
}
pub type AccountId = u64;
pub type BlockNumber = u64;
/// Minimal `frame_system` configuration for the mock runtime; most associated
/// types are unit placeholders since these tests do not exercise them.
impl frame_system::Trait for Runtime {
    type Origin = Origin;
    type Index = u64;
    type BlockNumber = BlockNumber;
    type Call = ();
    type Hash = H256;
    type Hashing = ::sp_runtime::traits::BlakeTwo256;
    type AccountId = AccountId;
    type Lookup = IdentityLookup<Self::AccountId>;
    type Header = Header;
    type Event = TestEvent;
    type BlockHashCount = BlockHashCount;
    type MaximumBlockWeight = MaximumBlockWeight;
    type MaximumBlockLength = MaximumBlockLength;
    type AvailableBlockRatio = AvailableBlockRatio;
    type Version = ();
    type ModuleToIndex = ();
    type AccountData = ();
    type OnNewAccount = ();
    type OnKilledAccount = ();
}
pub type System = system::Module<Runtime>;
parameter_types! {
pub const UpdateFrequency: BlockNumber = 10;
}
/// Wires the gradually-update module into the mock runtime.
impl Trait for Runtime {
    type Event = TestEvent;
    type UpdateFrequency = UpdateFrequency;
}
pub type GraduallyUpdateModule = Module<Runtime>;
pub struct ExtBuilder;
impl Default for ExtBuilder {
fn default() -> Self {
ExtBuilder
}
}
impl ExtBuilder {
    /// Builds test externalities from the default genesis configuration.
    pub fn build(self) -> sp_io::TestExternalities {
        let t = frame_system::GenesisConfig::default()
            .build_storage::<Runtime>()
            .unwrap();
        t.into()
    }
}
| 22.733333 | 86 | 0.731672 |
fe748e01037fd7b48d5eec348197e5888a0cf1b9 | 35,105 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{test_utils, test_utils::make_timeout_cert, Error, SafetyRules, TSafetyRules};
use consensus_types::{
block::block_test_utils::random_payload,
common::Round,
quorum_cert::QuorumCert,
timeout::Timeout,
timeout_2chain::{TwoChainTimeout, TwoChainTimeoutCertificate},
vote_proposal::MaybeSignedVoteProposal,
};
use diem_crypto::{
ed25519::Ed25519PrivateKey,
hash::{CryptoHash, HashValue},
};
use diem_global_constants::CONSENSUS_KEY;
use diem_secure_storage::CryptoStorage;
use diem_types::{
epoch_state::EpochState, validator_signer::ValidatorSigner,
validator_verifier::ValidatorVerifier,
};
type Proof = test_utils::Proof;
/// Convenience wrapper around `test_utils::make_proposal_with_qc_and_proof`
/// that builds the proposal with an empty payload.
fn make_proposal_with_qc_and_proof(
    round: Round,
    proof: Proof,
    qc: QuorumCert,
    signer: &ValidatorSigner,
    exec_key: Option<&Ed25519PrivateKey>,
) -> MaybeSignedVoteProposal {
    test_utils::make_proposal_with_qc_and_proof(vec![], round, proof, qc, signer, exec_key)
}
/// Convenience wrapper around `test_utils::make_proposal_with_parent` that
/// builds a proposal with an empty payload extending `parent`, optionally
/// carrying `committed` as the block to commit.
fn make_proposal_with_parent(
    round: Round,
    parent: &MaybeSignedVoteProposal,
    committed: Option<&MaybeSignedVoteProposal>,
    signer: &ValidatorSigner,
    exec_key: Option<&Ed25519PrivateKey>,
) -> MaybeSignedVoteProposal {
    test_utils::make_proposal_with_parent(vec![], round, parent, committed, signer, exec_key)
}
pub type Callback = Box<
dyn Fn() -> (
Box<dyn TSafetyRules + Send + Sync>,
ValidatorSigner,
Option<Ed25519PrivateKey>,
),
>;
/// Runs the full safety-rules test suite against the given factory.
///
/// `safety_rules` builds a fresh (rules, signer, optional execution key)
/// triple for each test, so the tests remain independent of one another.
pub fn run_test_suite(safety_rules: &Callback) {
    test_bad_execution_output(safety_rules);
    test_commit_rule_consecutive_rounds(safety_rules);
    test_end_to_end(safety_rules);
    test_initialize(safety_rules);
    test_preferred_block_rule(safety_rules);
    test_sign_timeout(safety_rules);
    test_voting(safety_rules);
    test_voting_potential_commit_id(safety_rules);
    test_voting_bad_epoch(safety_rules);
    test_sign_old_proposal(safety_rules);
    test_sign_proposal_with_bad_signer(safety_rules);
    test_sign_proposal_with_invalid_qc(safety_rules);
    test_sign_proposal_with_early_preferred_round(safety_rules);
    test_uninitialized_signer(safety_rules);
    test_reconcile_key(safety_rules);
    test_validator_not_in_set(safety_rules);
    test_key_not_in_store(safety_rules);
    test_2chain_rules(safety_rules);
    test_2chain_timeout(safety_rules);
}
fn test_bad_execution_output(safety_rules: &Callback) {
// build a tree of the following form:
// _____
// / \
// genesis---a1--a2--a3 evil_a3
//
// evil_a3 attempts to append to a1 but fails append only check
// a3 works as it properly extends a2
let (mut safety_rules, signer, key) = safety_rules();
let (proof, genesis_qc) = test_utils::make_genesis(&signer);
let round = genesis_qc.certified_block().round();
let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
let a2 = make_proposal_with_parent(round + 2, &a1, None, &signer, key.as_ref());
let a3 = make_proposal_with_parent(round + 3, &a2, None, &signer, key.as_ref());
safety_rules.initialize(&proof).unwrap();
let a1_output = a1
.accumulator_extension_proof()
.verify(
a1.block()
.quorum_cert()
.certified_block()
.executed_state_id(),
)
.unwrap();
let evil_proof = Proof::new(
a1_output.frozen_subtree_roots().clone(),
a1_output.num_leaves(),
vec![Timeout::new(0, a3.block().round()).hash()],
);
let evil_a3 = make_proposal_with_qc_and_proof(
round,
evil_proof,
a3.block().quorum_cert().clone(),
&signer,
key.as_ref(),
);
let evil_a3_block = safety_rules.construct_and_sign_vote(&evil_a3);
assert!(evil_a3_block.is_err());
let a3_block = safety_rules.construct_and_sign_vote(&a3);
a3_block.unwrap();
}
fn test_commit_rule_consecutive_rounds(safety_rules: &Callback) {
// build a tree of the following form:
// ___________
// / \
// genesis---a1 b1---b2 a2---a3---a4
// \_____/
//
// a1 cannot be committed after a3 gathers QC because a1 and a2 are not consecutive
// a2 can be committed after a4 gathers QC
let (mut safety_rules, signer, key) = safety_rules();
let (proof, genesis_qc) = test_utils::make_genesis(&signer);
let round = genesis_qc.certified_block().round();
let a1 =
test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
let b1 = test_utils::make_proposal_with_qc(round + 2, genesis_qc, &signer, key.as_ref());
let b2 = make_proposal_with_parent(round + 3, &b1, None, &signer, key.as_ref());
let a2 = make_proposal_with_parent(round + 4, &a1, None, &signer, key.as_ref());
let a3 = make_proposal_with_parent(round + 5, &a2, None, &signer, key.as_ref());
let a4 = make_proposal_with_parent(round + 6, &a3, Some(&a2), &signer, key.as_ref());
safety_rules.initialize(&proof).unwrap();
safety_rules.construct_and_sign_vote(&a1).unwrap();
safety_rules.construct_and_sign_vote(&b1).unwrap();
safety_rules.construct_and_sign_vote(&b2).unwrap();
safety_rules.construct_and_sign_vote(&a2).unwrap();
safety_rules.construct_and_sign_vote(&a3).unwrap();
safety_rules.construct_and_sign_vote(&a4).unwrap();
}
fn test_end_to_end(safety_rules: &Callback) {
let (mut safety_rules, signer, key) = safety_rules();
let (proof, genesis_qc) = test_utils::make_genesis(&signer);
let round = genesis_qc.certified_block().round();
let data = random_payload(2048);
let p0 =
test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
let p1 = test_utils::make_proposal_with_parent(
data.clone(),
round + 2,
&p0,
None,
&signer,
key.as_ref(),
);
let p2 = test_utils::make_proposal_with_parent(
data.clone(),
round + 3,
&p1,
None,
&signer,
key.as_ref(),
);
let p3 = test_utils::make_proposal_with_parent(
data,
round + 4,
&p2,
Some(&p0),
&signer,
key.as_ref(),
);
let state = safety_rules.consensus_state().unwrap();
assert_eq!(
state.last_voted_round(),
genesis_qc.certified_block().round()
);
assert_eq!(
state.preferred_round(),
genesis_qc.certified_block().round()
);
safety_rules.initialize(&proof).unwrap();
safety_rules.construct_and_sign_vote(&p0).unwrap();
safety_rules.construct_and_sign_vote(&p1).unwrap();
safety_rules.construct_and_sign_vote(&p2).unwrap();
safety_rules.construct_and_sign_vote(&p3).unwrap();
let state = safety_rules.consensus_state().unwrap();
assert_eq!(state.last_voted_round(), round + 4);
assert_eq!(state.preferred_round(), round + 2);
}
/// Initialize from scratch, ensure that SafetyRules can properly initialize from a Waypoint and
/// that it rejects invalid LedgerInfos or those that do not match.
fn test_initialize(safety_rules: &Callback) {
    let (mut safety_rules, signer, _key) = safety_rules();
    // A fresh instance starts at epoch 1 with no votes recorded.
    let state = safety_rules.consensus_state().unwrap();
    assert_eq!(state.last_voted_round(), 0);
    assert_eq!(state.preferred_round(), 0);
    assert_eq!(state.epoch(), 1);
    let (proof, _genesis_qc) = test_utils::make_genesis(&signer);
    safety_rules.initialize(&proof).unwrap();
    // A genesis proof produced by a different signer must be rejected.
    let signer1 = ValidatorSigner::from_int(1);
    let (bad_proof, _bad_genesis_qc) = test_utils::make_genesis(&signer1);
    match safety_rules.initialize(&bad_proof) {
        Err(Error::InvalidEpochChangeProof(_)) => (),
        _ => panic!("Unexpected output"),
    };
}
fn test_preferred_block_rule(safety_rules: &Callback) {
// Preferred block is the highest 2-chain head.
//
// build a tree of the following form:
// _____ _____
// / \ / \
// genesis---a1 b1 b2 a2 b3 a3---a4
// \_____/ \_____/ \_____/
//
// PB should change from genesis to b1 and then a2.
let (mut safety_rules, signer, key) = safety_rules();
let (proof, genesis_qc) = test_utils::make_genesis(&signer);
let genesis_round = genesis_qc.certified_block().round();
let round = genesis_round;
let a1 =
test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
let b1 = test_utils::make_proposal_with_qc(round + 2, genesis_qc, &signer, key.as_ref());
let b2 = make_proposal_with_parent(round + 3, &a1, None, &signer, key.as_ref());
let a2 = make_proposal_with_parent(round + 4, &b1, None, &signer, key.as_ref());
let b3 = make_proposal_with_parent(round + 5, &b2, None, &signer, key.as_ref());
let a3 = make_proposal_with_parent(round + 6, &a2, None, &signer, key.as_ref());
let a4 = make_proposal_with_parent(round + 7, &a3, None, &signer, key.as_ref());
safety_rules.initialize(&proof).unwrap();
safety_rules.construct_and_sign_vote(&a1).unwrap();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
genesis_round
);
safety_rules.construct_and_sign_vote(&b1).unwrap();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
genesis_round
);
safety_rules.construct_and_sign_vote(&a2).unwrap();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
genesis_round
);
safety_rules.construct_and_sign_vote(&b2).unwrap_err();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
genesis_round
);
safety_rules.construct_and_sign_vote(&a3).unwrap();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
b1.block().round()
);
safety_rules.construct_and_sign_vote(&b3).unwrap_err();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
b1.block().round()
);
safety_rules.construct_and_sign_vote(&a4).unwrap();
assert_eq!(
safety_rules.consensus_state().unwrap().preferred_round(),
a2.block().round()
);
}
/// Verify first that we can successfully sign a timeout under the correct conditions, then
/// ensure that poorly set last_voted_rounds, both historical and in the future, fail, as well
/// as that synchronization issues on the preferred round are handled correctly. Effectively
/// this ensures that equivocation is impossible when signing timeouts.
fn test_sign_timeout(safety_rules: &Callback) {
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    let epoch = genesis_qc.certified_block().epoch();
    // Build a simple chain p0 <- p1 <- p2 <- p3 <- p4 on top of genesis.
    let p0 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    let p1 = make_proposal_with_parent(round + 2, &p0, None, &signer, key.as_ref());
    let p2 = make_proposal_with_parent(round + 3, &p1, None, &signer, key.as_ref());
    let p3 = make_proposal_with_parent(round + 4, &p2, None, &signer, key.as_ref());
    let p4 = make_proposal_with_parent(round + 5, &p3, None, &signer, key.as_ref());
    safety_rules.initialize(&proof).unwrap();
    safety_rules.construct_and_sign_vote(&p0).unwrap();
    // Verify multiple signings of the same timeout yield the same signature (idempotent).
    let timeout = Timeout::new(epoch, p0.block().round());
    let sign1 = safety_rules.sign_timeout(&timeout).unwrap();
    let sign2 = safety_rules.sign_timeout(&timeout).unwrap();
    assert_eq!(sign1, sign2);
    // Verify we can sign last_voted_round + 1
    let timeout_plus_1 = Timeout::new(timeout.epoch(), timeout.round() + 1);
    safety_rules.sign_timeout(&timeout_plus_1).unwrap();
    // Verify we can no longer sign older rounds now that last_voted_round has advanced.
    let actual_err = safety_rules.sign_timeout(&timeout).unwrap_err();
    let expected_err = Error::IncorrectLastVotedRound(timeout.round(), timeout.round() + 1);
    assert_eq!(actual_err, expected_err);
    // Verify we cannot sign when last_voted_round < timeout round < preferred_round.
    safety_rules.construct_and_sign_vote(&p4).unwrap();
    let preferred_round = p4.block().quorum_cert().parent_block().round();
    let ptimeout = Timeout::new(timeout.epoch(), preferred_round - 1);
    let actual_err = safety_rules.sign_timeout(&ptimeout).unwrap_err();
    let expected_err = Error::IncorrectPreferredRound(ptimeout.round(), preferred_round);
    assert_eq!(actual_err, expected_err);
    // Verify we cannot sign a timeout for a different epoch.
    let etimeout = Timeout::new(timeout.epoch() + 1, round + 1);
    let actual_err = safety_rules.sign_timeout(&etimeout).unwrap_err();
    let expected_err = Error::IncorrectEpoch(etimeout.epoch(), timeout.epoch());
    assert_eq!(actual_err, expected_err);
}
/// Walk a forked block tree through safety rules and verify that votes are produced only
/// when voting rules allow it, that re-proposals for already-voted rounds are rejected,
/// and that re-voting the same round returns the identical vote.
fn test_voting(safety_rules: &Callback) {
    // build a tree of the following form:
    //             _____    __________
    //            /     \  /          \
    // genesis---a1  b1  b2  a2---a3  b3  a4  b4
    //         \_____/ \_____/ \______/    /
    //          \__________________/
    //
    //
    // We'll introduce the votes in the following order:
    // a1 (ok), potential_commit is None
    // b1 (ok), potential commit is None
    // a2 (ok), potential_commit is None
    // b2 (old proposal)
    // a3 (ok), potential commit is None
    // b3 (ok), potential commit is None
    // a4 (ok), potential commit is None
    // a4 (old proposal)
    // b4 (round lower than round of pb. PB: a2, parent(b4)=b2)
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    let a1 =
        test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
    let b1 = test_utils::make_proposal_with_qc(round + 2, genesis_qc, &signer, key.as_ref());
    let b2 = make_proposal_with_parent(round + 3, &a1, None, &signer, key.as_ref());
    let a2 = make_proposal_with_parent(round + 4, &b1, None, &signer, key.as_ref());
    let a3 = make_proposal_with_parent(round + 5, &a2, None, &signer, key.as_ref());
    let b3 = make_proposal_with_parent(round + 6, &b2, None, &signer, key.as_ref());
    let a4 = make_proposal_with_parent(round + 7, &a3, None, &signer, key.as_ref());
    let a4_prime = make_proposal_with_parent(round + 7, &a2, None, &signer, key.as_ref());
    let b4 = make_proposal_with_parent(round + 8, &b2, None, &signer, key.as_ref());
    safety_rules.initialize(&proof).unwrap();
    let mut vote = safety_rules.construct_and_sign_vote(&a1).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    vote = safety_rules.construct_and_sign_vote(&b1).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    vote = safety_rules.construct_and_sign_vote(&a2).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    // b2 is at round 3 but round 4 has already been voted on.
    assert_eq!(
        safety_rules.construct_and_sign_vote(&b2),
        Err(Error::IncorrectLastVotedRound(3, 4))
    );
    vote = safety_rules.construct_and_sign_vote(&a3).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    vote = safety_rules.construct_and_sign_vote(&b3).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    vote = safety_rules.construct_and_sign_vote(&a4).unwrap();
    assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    assert_eq!(
        safety_rules.construct_and_sign_vote(&a3),
        Err(Error::IncorrectLastVotedRound(5, 7))
    );
    // return the last vote for the same round
    assert_eq!(
        safety_rules.construct_and_sign_vote(&a4_prime),
        Ok(vote.clone())
    );
    assert_eq!(safety_rules.construct_and_sign_vote(&a4), Ok(vote));
    assert_eq!(
        safety_rules.construct_and_sign_vote(&b4),
        Err(Error::IncorrectPreferredRound(3, 4))
    );
}
/// Voting on a proposal whose epoch differs from its parent's must fail with `IncorrectEpoch`.
fn test_voting_bad_epoch(safety_rules: &Callback) {
    // Test to verify epoch is the same between parent and proposed in a vote proposal
    // genesis--a1 -> a2 fails due to jumping to a different epoch
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    // `Some(21)` overrides the epoch of a2 while its parent a1 stays in epoch 1.
    let a2 = test_utils::make_proposal_with_parent_and_overrides(
        vec![],
        round + 3,
        &a1,
        None,
        &signer,
        Some(21),
        None,
        key.as_ref(),
    );
    safety_rules.initialize(&proof).unwrap();
    safety_rules.construct_and_sign_vote(&a1).unwrap();
    assert_eq!(
        safety_rules.construct_and_sign_vote(&a2),
        Err(Error::IncorrectEpoch(21, 1))
    );
}
/// Verify which block id a vote carries as its potential commit.
fn test_voting_potential_commit_id(safety_rules: &Callback) {
    // Test the potential ledger info that we're going to use in case of voting
    // build a tree of the following form:
    //            _____
    //           /     \
    // genesis--a1  b1  a2--a3--a4--a5
    //        \_____/
    //
    // All the votes before a4 cannot produce any potential commits.
    // A potential commit for proposal a4 is a2, a potential commit for proposal a5 is a3.
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    let a1 =
        test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
    let b1 = test_utils::make_proposal_with_qc(round + 2, genesis_qc, &signer, key.as_ref());
    let a2 = make_proposal_with_parent(round + 3, &a1, None, &signer, key.as_ref());
    let a3 = make_proposal_with_parent(round + 4, &a2, None, &signer, key.as_ref());
    let a4 = make_proposal_with_parent(round + 5, &a3, Some(&a2), &signer, key.as_ref());
    let a5 = make_proposal_with_parent(round + 6, &a4, Some(&a3), &signer, key.as_ref());
    safety_rules.initialize(&proof).unwrap();
    // No potential commit yet: the vote carries the zero hash.
    for b in &[&a1, &b1, &a2, &a3] {
        let vote = safety_rules.construct_and_sign_vote(b).unwrap();
        assert_eq!(vote.ledger_info().consensus_block_id(), HashValue::zero());
    }
    assert_eq!(
        safety_rules
            .construct_and_sign_vote(&a4)
            .unwrap()
            .ledger_info()
            .consensus_block_id(),
        a2.block().id(),
    );
    assert_eq!(
        safety_rules
            .construct_and_sign_vote(&a5)
            .unwrap()
            .ledger_info()
            .consensus_block_id(),
        a3.block().id(),
    );
}
/// Signing a proposal that makes no progress relative to the last voted round must fail.
fn test_sign_old_proposal(safety_rules: &Callback) {
    // Test to sign a proposal which makes no progress, compared with last voted round
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    // Proposal at the genesis round itself, i.e. no progress past the certified round.
    let a1 = test_utils::make_proposal_with_qc(round, genesis_qc, &signer, key.as_ref())
    let err = safety_rules
        .sign_proposal(a1.block().block_data().clone())
        .unwrap_err();
    assert!(matches!(err, Error::InvalidProposal(_)));
}
/// Proposals authored by a signer other than the configured validator signer are rejected.
fn test_sign_proposal_with_bad_signer(safety_rules: &Callback) {
    // Test to sign a proposal signed by an unrecognizable signer
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    safety_rules
        .sign_proposal(a1.block().block_data().clone())
        .unwrap();
    // a2 is authored by a signer the safety rules do not recognize.
    let bad_signer = ValidatorSigner::from_int(0xef);
    let a2 = make_proposal_with_parent(round + 2, &a1, None, &bad_signer, key.as_ref());
    let err = safety_rules
        .sign_proposal(a2.block().block_data().clone())
        .unwrap_err();
    assert_eq!(
        err,
        Error::InvalidProposal("Proposal author is not validator signer!".into())
    );
}
/// A proposal carrying a quorum cert produced by an unknown signer must fail QC verification.
fn test_sign_proposal_with_invalid_qc(safety_rules: &Callback) {
    // Test to sign a proposal with an invalid qc inherited from proposal a2, which
    // is signed by a bad_signer.
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    safety_rules
        .sign_proposal(a1.block().block_data().clone())
        .unwrap();
    let bad_signer = ValidatorSigner::from_int(0xef);
    let a2 = make_proposal_with_parent(round + 2, &a1, Some(&a1), &bad_signer, key.as_ref());
    // a3 inherits a2's QC, which was produced by the unrecognized signer.
    let a3 = test_utils::make_proposal_with_qc(
        round + 3,
        a2.block().quorum_cert().clone(),
        &signer,
        key.as_ref(),
    );
    let err = safety_rules
        .sign_proposal(a3.block().block_data().clone())
        .unwrap_err();
    assert_eq!(
        err,
        Error::InvalidQuorumCertificate("Fail to verify QuorumCert".into())
    );
}
/// A proposal whose QC is behind the already-advanced preferred round must be rejected.
fn test_sign_proposal_with_early_preferred_round(safety_rules: &Callback) {
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    safety_rules
        .sign_proposal(a1.block().block_data().clone())
        .unwrap();
    // Update preferred round with a few legal proposals
    let a2 = make_proposal_with_parent(round + 2, &a1, None, &signer, key.as_ref());
    let a3 = make_proposal_with_parent(round + 3, &a2, None, &signer, key.as_ref());
    let a4 = make_proposal_with_parent(round + 4, &a3, Some(&a2), &signer, key.as_ref());
    safety_rules.construct_and_sign_vote(&a2).unwrap();
    safety_rules.construct_and_sign_vote(&a3).unwrap();
    safety_rules.construct_and_sign_vote(&a4).unwrap();
    // a5 reuses a1's (now stale) QC, which is behind the advanced preferred round.
    let a5 = make_proposal_with_qc_and_proof(
        round + 5,
        test_utils::empty_proof(),
        a1.block().quorum_cert().clone(),
        &signer,
        key.as_ref(),
    );
    let err = safety_rules
        .sign_proposal(a5.block().block_data().clone())
        .unwrap_err();
    assert_eq!(err, Error::IncorrectPreferredRound(0, 2));
}
/// Signing operations must fail with `NotInitialized` before `initialize` and succeed after.
fn test_uninitialized_signer(safety_rules: &Callback) {
    // Testing for an uninitialized Option<ValidatorSigner>
    let (mut safety_rules, signer, key) = safety_rules();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    let err = safety_rules.construct_and_sign_vote(&a1).unwrap_err();
    assert_eq!(err, Error::NotInitialized("validator_signer".into()));
    let err = safety_rules
        .sign_proposal(a1.block().block_data().clone())
        .unwrap_err();
    assert_eq!(err, Error::NotInitialized("validator_signer".into()));
    safety_rules.initialize(&proof).unwrap();
    safety_rules.construct_and_sign_vote(&a1).unwrap();
}
/// After an epoch change to a validator set that excludes this validator, `initialize`
/// must fail with `ValidatorNotInSet` and the consensus state must reflect exclusion.
fn test_validator_not_in_set(safety_rules: &Callback) {
    // Testing for a validator missing from the validator set
    // It does so by updating the safety rules to an epoch state which does not contain the
    // current validator, and checking the consensus state
    let (mut safety_rules, signer, key) = safety_rules();
    let (mut proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    // validator_signer is set during initialization
    let state = safety_rules.consensus_state().unwrap();
    assert_eq!(state.in_validator_set(), true);
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    // remove the validator_signer in next epoch
    let mut next_epoch_state = EpochState::empty();
    next_epoch_state.epoch = 1;
    let rand_signer = ValidatorSigner::random([0xfu8; 32]);
    next_epoch_state.verifier =
        ValidatorVerifier::new_single(rand_signer.author(), rand_signer.public_key());
    let a2 = test_utils::make_proposal_with_parent_and_overrides(
        vec![],
        round + 2,
        &a1,
        Some(&a1),
        &signer,
        Some(1),
        Some(next_epoch_state),
        key.as_ref(),
    );
    proof
        .ledger_info_with_sigs
        .push(a2.block().quorum_cert().ledger_info().clone());
    assert!(matches!(
        safety_rules.initialize(&proof),
        Err(Error::ValidatorNotInSet(_))
    ));
    let state = safety_rules.consensus_state().unwrap();
    assert_eq!(state.in_validator_set(), false);
}
/// After a key rotation is reconciled via an epoch change, QCs signed with the old key must
/// fail verification.
fn test_reconcile_key(_safety_rules: &Callback) {
    // Test to verify desired consensus key can be retrieved according to validator set.
    // It does so by updating the safety rules to a desired epoch state, reconciling old signer
    // key with the new one. Later when it tries to verify the QC signed by the old signer key,
    // safety rules fails the check.
    // Initialize the storage with two versions of signer keys
    let signer = ValidatorSigner::from_int(0);
    let mut storage = test_utils::test_storage(&signer);
    let new_pub_key = storage.internal_store().rotate_key(CONSENSUS_KEY).unwrap();
    let mut safety_rules = Box::new(SafetyRules::new(storage, false, false));
    let (mut proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, None);
    safety_rules.construct_and_sign_vote(&a1).unwrap();
    // Update validator epoch state, reconciling the old key with the new pub key
    let mut next_epoch_state = EpochState::empty();
    next_epoch_state.epoch = 2;
    next_epoch_state.verifier = ValidatorVerifier::new_single(signer.author(), new_pub_key);
    let a2 = test_utils::make_proposal_with_parent_and_overrides(
        vec![],
        round + 2,
        &a1,
        Some(&a1),
        &signer,
        Some(1),
        Some(next_epoch_state),
        None,
    );
    proof
        .ledger_info_with_sigs
        .push(a2.block().quorum_cert().ledger_info().clone());
    safety_rules.initialize(&proof).unwrap();
    // Verification fails for proposal signed by the outdated key
    let outdated_signer = &signer;
    let a3 = test_utils::make_proposal_with_parent_and_overrides(
        vec![],
        round + 3,
        &a2,
        Some(&a2),
        outdated_signer,
        Some(2),
        None,
        None,
    );
    let err = safety_rules.construct_and_sign_vote(&a3).unwrap_err();
    assert_eq!(
        err,
        Error::InvalidQuorumCertificate("Fail to verify QuorumCert".into())
    );
}
/// Tests for fetching a missing validator key from persistent storage: when the next epoch's
/// verifier expects a key the local store cannot provide, `initialize` must fail and the
/// validator must report itself as outside the validator set.
fn test_key_not_in_store(safety_rules: &Callback) {
    let (mut safety_rules, signer, key) = safety_rules();
    let (mut proof, genesis_qc) = test_utils::make_genesis(&signer);
    let round = genesis_qc.certified_block().round();
    safety_rules.initialize(&proof).unwrap();
    let a1 = test_utils::make_proposal_with_qc(round + 1, genesis_qc, &signer, key.as_ref());
    // Update to an epoch where the validator fails to retrieve the respective key
    // from persistent storage
    let mut next_epoch_state = EpochState::empty();
    next_epoch_state.epoch = 1;
    let rand_signer = ValidatorSigner::random([0xfu8; 32]);
    next_epoch_state.verifier =
        ValidatorVerifier::new_single(signer.author(), rand_signer.public_key());
    let a2 = test_utils::make_proposal_with_parent_and_overrides(
        vec![],
        round + 2,
        &a1,
        Some(&a1),
        &signer,
        Some(1),
        Some(next_epoch_state),
        key.as_ref(),
    );
    proof
        .ledger_info_with_sigs
        .push(a2.block().quorum_cert().ledger_info().clone());
    // Expected failure due to validator key not being found.
    safety_rules.initialize(&proof).unwrap_err();
    let state = safety_rules.consensus_state().unwrap();
    assert_eq!(state.in_validator_set(), false);
}
/// Exercise the 2-chain voting rule: a vote is allowed when block round == qc round + 1, or
/// when block round == tc round + 1 and the block's QC is at least the TC's highest QC.
fn test_2chain_rules(constructor: &Callback) {
    // One chain round is the highest quorum cert round.
    //
    // build a tree of the following form:
    //             _____    _____    _________
    //            /     \  /     \  /         \
    // genesis---a1  b1  b2  a2  b3  a3---a4  b4  a5---a6
    //         \_____/ \_____/ \_____/ \_________/
    //
    let (mut safety_rules, signer, key) = constructor();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let genesis_round = genesis_qc.certified_block().round();
    let round = genesis_round;
    safety_rules.initialize(&proof).unwrap();
    let a1 =
        test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
    let b1 = test_utils::make_proposal_with_qc(round + 2, genesis_qc, &signer, key.as_ref());
    let b2 = make_proposal_with_parent(round + 3, &a1, None, &signer, key.as_ref());
    let a2 = make_proposal_with_parent(round + 4, &b1, None, &signer, key.as_ref());
    let b3 = make_proposal_with_parent(round + 5, &b2, None, &signer, key.as_ref());
    let b4 = make_proposal_with_parent(round + 6, &b3, None, &signer, key.as_ref());
    let a3 = make_proposal_with_parent(round + 6, &a2, None, &signer, key.as_ref());
    let a4 = make_proposal_with_parent(round + 7, &a3, None, &signer, key.as_ref());
    let a5 = make_proposal_with_parent(round + 8, &a3, None, &signer, key.as_ref());
    let a6 = make_proposal_with_parent(round + 9, &a5, None, &signer, key.as_ref());
    // NOTE(review): `initialize` was already called above; this second call looks redundant
    // but is preserved as-is — confirm whether re-initialization is intentional.
    safety_rules.initialize(&proof).unwrap();
    // `expect` drives one proposal (with an optional timeout certificate) through safety
    // rules and checks whether a vote is produced and whether it carries a commit id.
    let mut expect = |p, maybe_tc: Option<TwoChainTimeoutCertificate>, vote, commit| {
        let result = safety_rules.construct_and_sign_vote_two_chain(p, maybe_tc.as_ref());
        let qc = p.vote_proposal.block().quorum_cert();
        if vote {
            let vote = result.unwrap();
            let id = if commit {
                qc.certified_block().id()
            } else {
                HashValue::zero()
            };
            assert_eq!(vote.ledger_info().consensus_block_id(), id);
            assert!(
                safety_rules.consensus_state().unwrap().one_chain_round()
                    >= qc.certified_block().round()
            );
        } else {
            result.unwrap_err();
        }
    };
    // block == qc + 1, commit
    expect(&a1, None, true, true);
    // block != qc + 1 && block != tc + 1
    expect(
        &b1,
        Some(make_timeout_cert(
            3,
            b1.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        false,
        false,
    );
    // block != qc + 1, no TC
    expect(&b2, None, false, false);
    // block = tc + 1, qc == tc.hqc
    expect(
        &a2,
        Some(make_timeout_cert(
            3,
            a2.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        true,
        false,
    );
    // block = tc + 1, qc < tc.hqc
    expect(
        &b3,
        Some(make_timeout_cert(
            4,
            a3.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        false,
        false,
    );
    // block != qc + 1, no TC
    expect(&a3, None, false, false);
    // block = qc + 1, with TC, commit
    expect(
        &a4,
        Some(make_timeout_cert(
            7,
            a3.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        true,
        true,
    );
    // block = tc + 1, qc > tc.hqc
    expect(
        &a5,
        Some(make_timeout_cert(
            7,
            b4.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        true,
        false,
    );
    // block = qc + 1, block != tc + 1 (tc is ignored)
    expect(
        &a6,
        Some(make_timeout_cert(
            7,
            b4.vote_proposal.block().quorum_cert(),
            &signer,
        )),
        true,
        true,
    );
}
/// Exercise 2-chain timeout signing: same-round re-signing, epoch checks, old-round checks,
/// and the NotSafeToTimeout conditions relative to the local one-chain round and the TC.
fn test_2chain_timeout(constructor: &Callback) {
    let (mut safety_rules, signer, key) = constructor();
    let (proof, genesis_qc) = test_utils::make_genesis(&signer);
    let genesis_round = genesis_qc.certified_block().round();
    let round = genesis_round;
    safety_rules.initialize(&proof).unwrap();
    let a1 =
        test_utils::make_proposal_with_qc(round + 1, genesis_qc.clone(), &signer, key.as_ref());
    let a2 = make_proposal_with_parent(round + 2, &a1, None, &signer, key.as_ref());
    let a3 = make_proposal_with_parent(round + 3, &a2, None, &signer, key.as_ref());
    safety_rules
        .sign_timeout_with_qc(&TwoChainTimeout::new(1, 1, genesis_qc.clone()), None)
        .unwrap();
    // Round 2 without a TC is not qc round + 1, hence not safe to timeout.
    assert_eq!(
        safety_rules
            .sign_timeout_with_qc(&TwoChainTimeout::new(1, 2, genesis_qc.clone()), None)
            .unwrap_err(),
        Error::NotSafeToTimeout(2, 0, 0, 0),
    );
    assert_eq!(
        safety_rules
            .sign_timeout_with_qc(&TwoChainTimeout::new(2, 2, genesis_qc.clone()), None)
            .unwrap_err(),
        Error::IncorrectEpoch(2, 1)
    );
    safety_rules
        .sign_timeout_with_qc(
            &TwoChainTimeout::new(1, 2, genesis_qc.clone()),
            Some(make_timeout_cert(1, &genesis_qc, &signer)).as_ref(),
        )
        .unwrap();
    // Cannot re-sign a timeout for a round older than the last voted round.
    assert_eq!(
        safety_rules
            .sign_timeout_with_qc(&TwoChainTimeout::new(1, 1, genesis_qc.clone()), None)
            .unwrap_err(),
        Error::IncorrectLastVotedRound(1, 2)
    );
    // update one-chain to 2
    safety_rules
        .construct_and_sign_vote_two_chain(&a3, None)
        .unwrap();
    assert_eq!(
        safety_rules
            .sign_timeout_with_qc(
                &TwoChainTimeout::new(1, 4, a3.vote_proposal.block().quorum_cert().clone(),),
                Some(make_timeout_cert(2, &genesis_qc, &signer)).as_ref()
            )
            .unwrap_err(),
        Error::NotSafeToTimeout(4, 2, 2, 2)
    );
    assert_eq!(
        safety_rules
            .sign_timeout_with_qc(
                &TwoChainTimeout::new(1, 4, a2.vote_proposal.block().quorum_cert().clone(),),
                Some(make_timeout_cert(3, &genesis_qc, &signer)).as_ref()
            )
            .unwrap_err(),
        Error::NotSafeToTimeout(4, 1, 3, 2)
    );
}
| 36.529657 | 99 | 0.647572 |
1451988dea3566854cf72618e2ce0b252662664b | 1,766 | use solana_program::{
account_info::AccountInfo, entrypoint::ProgramResult, msg, program_error::ProgramError,
pubkey::Pubkey,
};
use crate::state::PoolHeader;
/// Verify that `key` is the program-derived address obtained from `pool_seed` under
/// `program_id`, returning `InvalidArgument` when it is not.
pub fn check_pool_key(program_id: &Pubkey, key: &Pubkey, pool_seed: &[u8; 32]) -> ProgramResult {
    let derived = Pubkey::create_program_address(&[pool_seed], program_id)?;
    if key == &derived {
        Ok(())
    } else {
        msg!("Provided pool account does not match the provided pool seed");
        Err(ProgramError::InvalidArgument)
    }
}
/// Verify that `signal_provider_account` matches the signal provider recorded in the pool
/// header, and — when `is_signer` is true — that the account actually signed the transaction.
/// Returns `MissingRequiredSignature` on either failure.
pub fn check_signal_provider(
    pool_header: &PoolHeader,
    signal_provider_account: &AccountInfo,
    is_signer: bool,
) -> ProgramResult {
    if &pool_header.signal_provider != signal_provider_account.key {
        msg!("A wrong signal provider account was provided.");
        return Err(ProgramError::MissingRequiredSignature);
    }
    // Use short-circuiting `&&`; the original bitwise `&` evaluated both operands and is
    // non-idiomatic for boolean conditions.
    if is_signer && !signal_provider_account.is_signer {
        msg!("The signal provider's signature is required.");
        return Err(ProgramError::MissingRequiredSignature);
    }
    Ok(())
}
/// Overwrite every byte of `target` with `val`.
pub fn fill_slice(target: &mut [u8], val: u8) {
    // `slice::fill` replaces the manual index loop: it compiles down to a memset and
    // avoids per-element bounds checks.
    target.fill(val);
}
/// Compute `x` raised to the power `n`, where `x` is an unsigned 16.16 fixed-point number
/// (i.e. `1.0` is represented as `1 << 16`).
///
/// Uses exponentiation by squaring, so it performs O(log n) multiplications. Intermediate
/// products are widened to `u64`: the previous `u32` multiply could overflow (panic in
/// debug, wrap in release) for bases at or above `1.0`.
pub fn pow_fixedpoint_u16(x: u32, n: u64) -> u32 {
    // x^0 == 1.0 in fixed point. (The previous version incorrectly returned `x` here.)
    if n == 0 {
        return 1 << 16;
    }
    if n == 1 {
        return x;
    }
    // Square the result for n / 2, then multiply by x once more when n is odd.
    let p = pow_fixedpoint_u16(x, n >> 1) as u64;
    let sq = ((p * p) >> 16) as u32;
    if n & 1 == 1 {
        ((sq as u64 * x as u64) >> 16) as u32
    } else {
        sq
    }
}
#[cfg(test)]
mod tests {
    use super::pow_fixedpoint_u16;

    /// 0.5^i in 16.16 fixed point must equal 2^(16 - i).
    #[test]
    fn test_exp() {
        let one_half = 1u32 << 15;
        (1..16).for_each(|exp| {
            let expected = 1 << (16 - exp);
            assert_eq!(pow_fixedpoint_u16(one_half, exp), expected);
        });
    }
}
| 24.873239 | 97 | 0.582106 |
33217f50b25191b87c24e71af3d2c2aafca343c8 | 4,191 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use common_base::tokio;
use common_exception::Result;
use futures::TryStreamExt;
use pretty_assertions::assert_eq;
use crate::catalogs::Table;
use crate::configs::Config;
use crate::datasources::database::system::ConfigsTable;
use crate::tests::try_create_context_with_config;
/// End-to-end check of the `system.configs` table: reading it under a default `Config`
/// must yield exactly the expected (name, value, group, description) rows.
#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
async fn test_configs_table() -> Result<()> {
    let config = Config::default();
    let ctx = try_create_context_with_config(config)?;
    ctx.get_settings().set_max_threads(8)?;
    let table = ConfigsTable::create(1);
    let source_plan = table.read_plan(
        ctx.clone(),
        None,
        Some(ctx.get_settings().get_max_threads()? as usize),
    )?;
    let stream = table.read(ctx, &source_plan).await?;
    let result = stream.try_collect::<Vec<_>>().await?;
    let block = &result[0];
    // One row per config entry; four columns: name, value, group, description.
    assert_eq!(block.num_columns(), 4);
    assert_eq!(block.num_rows(), 25);
    let expected = vec![
        "+-----------------------------------+----------------+-------+-------------+",
        "| name                              | value          | group | description |",
        "+-----------------------------------+----------------+-------+-------------+",
        "| api_tls_server_cert               |                | query |             |",
        "| api_tls_server_key                |                | query |             |",
        "| api_tls_server_root_ca_cert       |                | query |             |",
        "| clickhouse_handler_host           | 127.0.0.1      | query |             |",
        "| clickhouse_handler_port           | 9000           | query |             |",
        "| flight_api_address                | 127.0.0.1:9090 | query |             |",
        "| http_api_address                  | 127.0.0.1:8080 | query |             |",
        "| log_dir                           | ./_logs        | log   |             |",
        "| log_level                         | INFO           | log   |             |",
        "| max_active_sessions               | 256            | query |             |",
        "| meta_address                      |                | meta  |             |",
        "| meta_password                     |                | meta  |             |",
        "| meta_username                     | root           | meta  |             |",
        "| metric_api_address                | 127.0.0.1:7070 | query |             |",
        "| mysql_handler_host                | 127.0.0.1      | query |             |",
        "| mysql_handler_port                | 3307           | query |             |",
        "| namespace                         |                | query |             |",
        "| num_cpus                          | 8              | query |             |",
        "| rpc_tls_meta_server_root_ca_cert  |                | meta  |             |",
        "| rpc_tls_meta_service_domain_name  | localhost      | meta  |             |",
        "| rpc_tls_query_server_root_ca_cert |                | query |             |",
        "| rpc_tls_query_service_domain_name | localhost      | query |             |",
        "| rpc_tls_server_cert               |                | query |             |",
        "| rpc_tls_server_key                |                | query |             |",
        "| tenant                            |                | query |             |",
        "+-----------------------------------+----------------+-------+-------------+",
    ];
    common_datablocks::assert_blocks_sorted_eq(expected, result.as_slice());
    Ok(())
}
| 53.730769 | 87 | 0.43999 |
ff8e325a93d01328b5cf5a5c93d3d2432d97776a | 11,349 | use super::*;
use crate::{access::AccessControl, dictionary::*, error::Error};
use core_foundation::base::TCFType;
use std::ptr;
/// Public key pairs (i.e. public and private key) stored in the keychain.
///
/// Obtained from [`KeyPair::create`] or [`KeyPair::generate`].
#[derive(Debug)]
pub struct KeyPair {
    /// Public key
    pub public_key: Key,
    /// Private key
    pub private_key: Key,
}
impl KeyPair {
    /// An asymmetric cryptographic key pair is composed of a public and a private key that are
    /// generated together. The public key can be distributed freely, but keep the private key
    /// secret. One or both may be stored in a keychain for safekeeping.
    ///
    /// Wrapper for the `SecKeyCreateRandomKey` function. See:
    /// <https://developer.apple.com/documentation/security/1823694-seckeycreaterandomkey>
    pub fn create(params: KeyPairGenerateParams) -> Result<KeyPair, Error> {
        let mut error: CFErrorRef = ptr::null_mut();
        // SecKeyCreateRandomKey returns a NULL key and populates `error` on failure.
        let private_key_ref: KeyRef = unsafe {
            SecKeyCreateRandomKey(Dictionary::from(params).as_concrete_TypeRef(), &mut error)
        };
        if private_key_ref.is_null() {
            Err(error.into())
        } else {
            // Derive the public half from the freshly created private key.
            let public_key_ref = unsafe { SecKeyCopyPublicKey(private_key_ref) };
            assert!(!public_key_ref.is_null());
            assert!(!private_key_ref.is_null());
            // Both refs were returned to us under the Create Rule (we own them),
            // hence `wrap_under_create_rule`.
            Ok(unsafe {
                KeyPair {
                    public_key: Key::wrap_under_create_rule(public_key_ref),
                    private_key: Key::wrap_under_create_rule(private_key_ref),
                }
            })
        }
    }
    /// Generate a public/private `KeyPair` using the given
    /// `KeyPairGenerateParams`.
    ///
    /// Wrapper for the `SecKeyGeneratePair` function. See:
    /// <https://developer.apple.com/documentation/security/1395339-seckeygeneratepair>
    pub fn generate(params: KeyPairGenerateParams) -> Result<KeyPair, Error> {
        let mut public_key_ref: KeyRef = ptr::null_mut();
        let mut private_key_ref: KeyRef = ptr::null_mut();
        let status = unsafe {
            SecKeyGeneratePair(
                Dictionary::from(params).as_concrete_TypeRef(),
                &mut public_key_ref,
                &mut private_key_ref,
            )
        };
        // Return an error if the status was unsuccessful
        if let Some(e) = Error::maybe_from_OSStatus(status) {
            return Err(e);
        }
        assert!(!public_key_ref.is_null());
        assert!(!private_key_ref.is_null());
        Ok(unsafe {
            KeyPair {
                public_key: Key::wrap_under_create_rule(public_key_ref),
                private_key: Key::wrap_under_create_rule(private_key_ref),
            }
        })
    }
}
/// Builder for key generation parameters (passed to the underlying
/// `SecKeyGeneratePair` function)
///
/// For more information on generating cryptographic keys in a keychain, see:
/// <https://developer.apple.com/documentation/security/certificate_key_and_trust_services/keys/generating_new_cryptographic_keys>
#[derive(Clone, Debug)]
pub struct KeyPairGenerateParams {
    /// Algorithm of the key pair to generate (e.g. RSA, EC).
    key_type: AttrKeyType,
    /// Key size in bits (mapped to `kSecAttrKeySizeInBits`).
    key_size: usize,
    /// Accumulated private-key attributes; nested under `kSecPrivateKeyAttrs`
    /// when this builder is converted into the parameter `Dictionary`.
    attrs: DictionaryBuilder,
}
impl KeyPairGenerateParams {
    /// Create a new `KeyPairGenerateParams` builder for the given key type and size (in bits).
    pub fn new(key_type: AttrKeyType, key_size: usize) -> Self {
        Self {
            key_type,
            key_size,
            attrs: <_>::default(),
        }
    }
    /// Set the access control policy (a.k.a. ACL) for the `Key`.
    ///
    /// Wrapper for the `kSecAttrAccessControl` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattraccesscontrol>
    pub fn access_control(mut self, access_control: &AccessControl) -> Self {
        self.attrs.add(AttrKind::AccessControl, access_control);
        self
    }
    /// Set a tag (private, application-specific identifier) on this key.
    /// Tags are useful as the "primary key" for looking up keychain items.
    ///
    /// Wrapper for `kSecAttrApplicationTag` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrapplicationtag>
    pub fn application_tag<T>(mut self, tag: T) -> Self
    where
        T: Into<AttrApplicationTag>,
    {
        self.attrs.add_attr(&tag.into());
        self
    }
    /// Set whether this key can be used in a key derivation operation
    ///
    /// Wrapper for the `kSecKeyDerive` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeyderive>
    pub fn can_derive(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Derive, value);
        self
    }
    /// Set whether this key can be used in a decrypt operation.
    ///
    /// Wrapper for the `kSecKeyDecrypt` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeydecrypt>
    pub fn can_decrypt(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Decrypt, value);
        self
    }
    /// Set whether this key can be used in a encrypt operation.
    ///
    /// Wrapper for the `kSecKeyEncrypt` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeyencrypt>
    pub fn can_encrypt(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Encrypt, value);
        self
    }
    /// Set whether this key can be used in a signing operation.
    ///
    /// Wrapper for the `kSecKeySign` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeysign>
    pub fn can_sign(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Sign, value);
        self
    }
    /// Set whether this key can be used to verify a signatures.
    ///
    /// Wrapper for the `kSecKeyVerify` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeyverify>
    pub fn can_verify(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Verify, value);
        self
    }
    /// Set whether this key can be used to wrap another key.
    ///
    /// Wrapper for the `kSecKeyWrap` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeywrap>
    pub fn can_wrap(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Wrap, value);
        self
    }
    /// Set whether this key can be used to unwrap another key.
    ///
    /// Wrapper for the `kSecKeyUnwrap` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeyunwrap>
    pub fn can_unwrap(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Unwrap, value);
        self
    }
    /// Set a key's cryptographic class.
    ///
    /// Wrapper for the `kSecAttrKeyClass` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrkeyclass>
    pub fn key_class(mut self, value: AttrKeyClass) -> Self {
        self.attrs.add(AttrKind::KeyClass, &value.as_CFString());
        self
    }
    /// Set whether this key can be extractable when wrapped
    ///
    /// Wrapper for the `kSecKeyExtractable` attribute key. See:
    /// <https://developer.apple.com/documentation/security/kseckeyextractable>
    pub fn extractable(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Extractable, value);
        self
    }
    /// Set whether this key is stored permanently in the keychain (default: false).
    ///
    /// Wrapper for the `kSecAttrIsPermanent` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrispermanent>
    pub fn permanent(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Permanent, value);
        self
    }
    /// Set whether this key can be wrapped with NONE algorithm. True
    /// means it cannot be wrapped with NONE, false means it can.
    ///
    /// Wrapper for `kSecKeySensitive` attribute key. See
    /// <https://developer.apple.com/documentation/security/kseckeysensitive>
    pub fn sensitive(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Sensitive, value);
        self
    }
    /// Set a string label on this key. SecAttrLabels are useful for providing
    /// additional descriptions or context on keys.
    ///
    /// Wrapper for the `kSecAttrLabel` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrlabel>
    pub fn label<L: Into<AttrLabel>>(mut self, label: L) -> Self {
        self.attrs.add_attr(&label.into());
        self
    }
    /// Set whether this key can be synchronized with other devices owned by
    /// the same account (default: false).
    ///
    /// Wrapper for the `kSecAttrSynchronizable` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrsynchronizable>
    pub fn synchronizable(mut self, value: bool) -> Self {
        self.attrs.add_boolean(AttrKind::Synchronizable, value);
        self
    }
    /// Store this key in an external token i.e. Secure Enclave Processor (SEP).
    ///
    /// Wrapper for the `kSecAttrTokenID` attribute key. See:
    /// <https://developer.apple.com/documentation/security/ksecattrtokenid>
    pub fn token_id(mut self, value: AttrTokenId) -> Self {
        self.attrs.add_attr(&value);
        self
    }
}
impl From<KeyPairGenerateParams> for Dictionary {
    /// Lower the generation parameters into the attribute dictionary
    /// expected by the Security framework.
    fn from(params: KeyPairGenerateParams) -> Dictionary {
        let mut builder = DictionaryBuilder::new();
        builder.add_attr(&params.key_type);
        builder.add_number(AttrKind::KeySizeInBits, params.key_size as i64);
        // Private-key specific attributes are nested under kSecPrivateKeyAttrs.
        let private_attrs = Dictionary::from(params.attrs);
        builder.add(unsafe { kSecPrivateKeyAttrs }, &private_attrs);
        builder.into()
    }
}
/// Builder for restoring a key from an external representation of that key parameters
/// (passed to the underlying `SecKeyCreateWithData` function).
///
/// The key must have already been imported or generated.
///
/// For more information on restoring cryptographic keys in keychain, see
/// <https://developer.apple.com/documentation/security/1643701-seckeycreatewithdata>
#[derive(Clone, Debug)]
pub struct RestoreKeyParams {
    /// The category the key fits (public, private, or symmetric)
    pub key_class: AttrKeyClass,
    /// Data representing the key. The format of the data depends on the type of key
    /// being created.
    ///
    /// - RSA: PKCS#1 format
    /// - EC: ANSI X9.63 bytestring:
    ///   - Public key: `04 || X || Y`
    ///   - Private key: Concatenation of public key with big endian encoding
    ///     of the secret scalar, i.e. `04 || X || Y || K`
    ///
    /// All representations use fixed-size integers with leading zeroes.
    ///
    /// NOTE(review): `attributes()` derives the reported key size directly
    /// from `key_data.len() * 8`, so the byte length of this field must match
    /// the key's nominal size.
    pub key_data: Vec<u8>,
    /// The type of key algorithm
    pub key_type: AttrKeyType,
}
impl RestoreKeyParams {
    /// Build the attribute dictionary used when restoring the key.
    pub fn attributes(&self) -> Dictionary {
        let mut builder = DictionaryBuilder::new();
        builder.add_attr(&self.key_type);
        builder.add(AttrKind::KeyClass, &self.key_class.as_CFString());
        // The key size (in bits) is derived from the raw key material length.
        let bits = (self.key_data.len() * 8) as i64;
        builder.add_number(AttrKind::KeySizeInBits, bits);
        builder.into()
    }
    /// Borrow the raw key material as a byte slice.
    pub fn as_bytes(&self) -> &[u8] {
        &self.key_data
    }
}
| 37.088235 | 130 | 0.646048 |
efdd4fd38f5205547d2230cce25c53fd8de015b6 | 153 | use day_22::*;
// Entry point: load the puzzle input once and print both answers.
fn main() {
    let data = read_data("./data");
    let answer_1 = part_1(&data);
    let answer_2 = part_2(&data);
    println!("Part 1: {}", answer_1);
    println!("Part 2: {}", answer_2);
}
| 17 | 42 | 0.51634 |
9b3b04bc054ebd51bcde48fa7db4ea131e055320 | 3,295 | #[doc = "Register `irrx_data_word1` reader"]
// svd2rust-generated newtype specializing the generic register reader for this register.
pub struct R(crate::R<IRRX_DATA_WORD1_SPEC>);
// Expose the generic reader's API directly on `R`.
impl core::ops::Deref for R {
    type Target = crate::R<IRRX_DATA_WORD1_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Conversion used by the register-access machinery to produce this reader.
impl core::convert::From<crate::R<IRRX_DATA_WORD1_SPEC>> for R {
    fn from(reader: crate::R<IRRX_DATA_WORD1_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `irrx_data_word1` writer"]
// svd2rust-generated newtype specializing the generic register writer for this register.
pub struct W(crate::W<IRRX_DATA_WORD1_SPEC>);
// Expose the generic writer's API directly on `W`.
impl core::ops::Deref for W {
    type Target = crate::W<IRRX_DATA_WORD1_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Allow mutable access to the underlying generic writer.
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
// Conversion used by the register-access machinery to produce this writer.
impl core::convert::From<crate::W<IRRX_DATA_WORD1_SPEC>> for W {
    fn from(writer: crate::W<IRRX_DATA_WORD1_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Field `sts_irrx_data_word1` reader - "]
// Reader for the 32-bit `sts_irrx_data_word1` field.
pub struct STS_IRRX_DATA_WORD1_R(crate::FieldReader<u32, u32>);
impl STS_IRRX_DATA_WORD1_R {
pub(crate) fn new(bits: u32) -> Self {
STS_IRRX_DATA_WORD1_R(crate::FieldReader::new(bits))
}
}
// Expose the generic field reader's API on the field-specific reader.
impl core::ops::Deref for STS_IRRX_DATA_WORD1_R {
    type Target = crate::FieldReader<u32, u32>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `sts_irrx_data_word1` writer - "]
// Writer proxy holding a mutable borrow of the register writer.
pub struct STS_IRRX_DATA_WORD1_W<'a> {
    w: &'a mut W,
}
impl<'a> STS_IRRX_DATA_WORD1_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // The field spans the full word, so `!0xffff_ffff` is 0 and the
        // write effectively replaces the whole register value.
        self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn sts_irrx_data_word1(&self) -> STS_IRRX_DATA_WORD1_R {
        // Mask is the identity for this full-width field.
        STS_IRRX_DATA_WORD1_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn sts_irrx_data_word1(&mut self) -> STS_IRRX_DATA_WORD1_W {
        // Hand out a field-writer proxy borrowing this register writer.
        STS_IRRX_DATA_WORD1_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    // Unsafe because arbitrary bit patterns may violate hardware invariants.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "irrx_data_word1.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [irrx_data_word1](index.html) module"]
// Zero-sized marker type describing this register for the generic access traits below.
pub struct IRRX_DATA_WORD1_SPEC;
// The register's underlying storage type is a 32-bit word.
impl crate::RegisterSpec for IRRX_DATA_WORD1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [irrx_data_word1::R](R) reader structure"]
// Marks the register as readable and ties it to the `R` proxy above.
impl crate::Readable for IRRX_DATA_WORD1_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [irrx_data_word1::W](W) writer structure"]
// Marks the register as writable and ties it to the `W` proxy above.
impl crate::Writable for IRRX_DATA_WORD1_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets irrx_data_word1 to value 0"]
// The hardware reset value of this register is all-zero.
impl crate::Resettable for IRRX_DATA_WORD1_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 32.95 | 412 | 0.641578 |
ac4a24e86bfc9f637ae1841b2ee53a0ac563b45a | 30,810 | //! Instruction types
use crate::check_program_account;
use serde_derive::{Deserialize, Serialize};
use mundis_sdk::instruction::{AccountMeta, Instruction, InstructionError};
use mundis_sdk::pubkey::Pubkey;
// Both bounds are enforced by `is_valid_signer_index` when building
// multisig instructions.
/// Minimum number of multisignature signers (min N)
pub const MIN_SIGNERS: usize = 1;
/// Maximum number of multisignature signers (max N)
pub const MAX_SIGNERS: usize = 11;
/// Instructions supported by the token program.
///
/// NOTE(review): instruction data is encoded with bincode (see the
/// `Instruction::new_with_bincode` calls below), which identifies each
/// variant by its declaration index — do not reorder variants or insert
/// new ones mid-enum without a wire-format migration.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum TokenInstruction {
    /// Initializes a new mint and optionally deposits all the newly minted
    /// tokens in an account.
    ///
    /// The `InitializeMint` instruction requires no signers and MUST be
    /// included within the same Transaction as the system program's
    /// `CreateAccount` instruction that creates the account being initialized.
    /// Otherwise another party can acquire ownership of the uninitialized
    /// account.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   0. `[writable]` The mint to initialize.
    ///
    InitializeMint {
        /// Human-readable token name.
        name: String,
        /// Short ticker symbol for the token.
        symbol: String,
        /// Number of base 10 digits to the right of the decimal place.
        decimals: u8,
        /// The authority/multisignature to mint tokens.
        mint_authority: Pubkey,
        /// The freeze authority/multisignature of the mint.
        freeze_authority: Option<Pubkey>,
    },
    /// Initializes a new account to hold tokens.  If this account is associated
    /// with the native mint then the token balance of the initialized account
    /// will be equal to the amount of MUNDIS in the account. If this account is
    /// associated with another mint, that mint must be initialized before this
    /// command can succeed.
    ///
    /// The `InitializeAccount` instruction requires no signers and MUST be
    /// included within the same Transaction as the system program's
    /// `CreateAccount` instruction that creates the account being initialized.
    /// Otherwise another party can acquire ownership of the uninitialized
    /// account.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   0. `[writable]`  The account to initialize.
    ///   1. `[]` The mint this account will be associated with.
    ///   2. `[]` The new account's owner/multisignature.
    InitializeAccount,
    /// Initializes a multisignature account with N provided signers.
    ///
    /// Multisignature accounts can used in place of any single owner/delegate
    /// accounts in any token instruction that require an owner/delegate to be
    /// present.  The variant field represents the number of signers (M)
    /// required to validate this multisignature account.
    ///
    /// The `InitializeMultisig` instruction requires no signers and MUST be
    /// included within the same Transaction as the system program's
    /// `CreateAccount` instruction that creates the account being initialized.
    /// Otherwise another party can acquire ownership of the uninitialized
    /// account.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   0. `[writable]` The multisignature account to initialize.
    ///   2. ..2+N. `[]` The signer accounts, must equal to N where 1 <= N <=
    ///      11.
    InitializeMultisig {
        /// The number of signers (M) required to validate this multisignature
        /// account.
        m: u8,
    },
    /// Transfers tokens from one account to another either directly or via a
    /// delegate.  If this account is associated with the native mint then equal
    /// amounts of MUNDIS and Tokens will be transferred to the destination
    /// account.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner/delegate
    ///   0. `[writable]` The source account.
    ///   1. `[writable]` The destination account.
    ///   2. `[signer]` The source account's owner/delegate.
    ///
    ///   * Multisignature owner/delegate
    ///   0. `[writable]` The source account.
    ///   1. `[writable]` The destination account.
    ///   2. `[]` The source account's multisignature owner/delegate.
    ///   3. ..3+M `[signer]` M signer accounts.
    Transfer {
        /// The amount of tokens to transfer.
        amount: u64,
    },
    /// Approves a delegate.  A delegate is given the authority over tokens on
    /// behalf of the source account's owner.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The source account.
    ///   1. `[]` The delegate.
    ///   2. `[signer]` The source account owner.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The source account.
    ///   1. `[]` The delegate.
    ///   2. `[]` The source account's multisignature owner.
    ///   3. ..3+M `[signer]` M signer accounts
    Approve {
        /// The amount of tokens the delegate is approved for.
        amount: u64,
    },
    /// Revokes the delegate's authority.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The source account.
    ///   1. `[signer]` The source account owner.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The source account.
    ///   1. `[]` The source account's multisignature owner.
    ///   2. ..2+M `[signer]` M signer accounts
    Revoke,
    /// Sets a new authority of a mint or account.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single authority
    ///   0. `[writable]` The mint or account to change the authority of.
    ///   1. `[signer]` The current authority of the mint or account.
    ///
    ///   * Multisignature authority
    ///   0. `[writable]` The mint or account to change the authority of.
    ///   1. `[]` The mint's or account's current multisignature authority.
    ///   2. ..2+M `[signer]` M signer accounts
    SetAuthority {
        /// The type of authority to update.
        authority_type: AuthorityType,
        /// The new authority
        new_authority: Option<Pubkey>,
    },
    /// Mints new tokens to an account.  The native mint does not support
    /// minting.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single authority
    ///   0. `[writable]` The mint.
    ///   1. `[writable]` The account to mint tokens to.
    ///   2. `[signer]` The mint's minting authority.
    ///
    ///   * Multisignature authority
    ///   0. `[writable]` The mint.
    ///   1. `[writable]` The account to mint tokens to.
    ///   2. `[]` The mint's multisignature mint-tokens authority.
    ///   3. ..3+M `[signer]` M signer accounts.
    MintTo {
        /// The amount of new tokens to mint.
        amount: u64,
    },
    /// Burns tokens by removing them from an account.  `Burn` does not support
    /// accounts associated with the native mint, use `CloseAccount` instead.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner/delegate
    ///   0. `[writable]` The account to burn from.
    ///   1. `[writable]` The token mint.
    ///   2. `[signer]` The account's owner/delegate.
    ///
    ///   * Multisignature owner/delegate
    ///   0. `[writable]` The account to burn from.
    ///   1. `[writable]` The token mint.
    ///   2. `[]` The account's multisignature owner/delegate.
    ///   3. ..3+M `[signer]` M signer accounts.
    Burn {
        /// The amount of tokens to burn.
        amount: u64,
    },
    /// Close an account by transferring all its MUNDIS to the destination account.
    /// Non-native accounts may only be closed if its token amount is zero.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The account to close.
    ///   1. `[writable]` The destination account.
    ///   2. `[signer]` The account's owner.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The account to close.
    ///   1. `[writable]` The destination account.
    ///   2. `[]` The account's multisignature owner.
    ///   3. ..3+M `[signer]` M signer accounts.
    CloseAccount,
    /// Freeze an Initialized account using the Mint's freeze_authority (if
    /// set).
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The account to freeze.
    ///   1. `[]` The token mint.
    ///   2. `[signer]` The mint freeze authority.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The account to freeze.
    ///   1. `[]` The token mint.
    ///   2. `[]` The mint's multisignature freeze authority.
    ///   3. ..3+M `[signer]` M signer accounts.
    FreezeAccount,
    /// Thaw a Frozen account using the Mint's freeze_authority (if set).
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The account to freeze.
    ///   1. `[]` The token mint.
    ///   2. `[signer]` The mint freeze authority.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The account to freeze.
    ///   1. `[]` The token mint.
    ///   2. `[]` The mint's multisignature freeze authority.
    ///   3. ..3+M `[signer]` M signer accounts.
    ThawAccount,
    /// Transfers tokens from one account to another either directly or via a
    /// delegate.  If this account is associated with the native mint then equal
    /// amounts of MUNDIS and Tokens will be transferred to the destination
    /// account.
    ///
    /// This instruction differs from Transfer in that the token mint and
    /// decimals value is checked by the caller.  This may be useful when
    /// creating transactions offline or within a hardware wallet.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner/delegate
    ///   0. `[writable]` The source account.
    ///   1. `[]` The token mint.
    ///   2. `[writable]` The destination account.
    ///   3. `[signer]` The source account's owner/delegate.
    ///
    ///   * Multisignature owner/delegate
    ///   0. `[writable]` The source account.
    ///   1. `[]` The token mint.
    ///   2. `[writable]` The destination account.
    ///   3. `[]` The source account's multisignature owner/delegate.
    ///   4. ..4+M `[signer]` M signer accounts.
    TransferChecked {
        /// The amount of tokens to transfer.
        amount: u64,
        /// Expected number of base 10 digits to the right of the decimal place.
        decimals: u8,
    },
    /// Approves a delegate.  A delegate is given the authority over tokens on
    /// behalf of the source account's owner.
    ///
    /// This instruction differs from Approve in that the token mint and
    /// decimals value is checked by the caller.  This may be useful when
    /// creating transactions offline or within a hardware wallet.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner
    ///   0. `[writable]` The source account.
    ///   1. `[]` The token mint.
    ///   2. `[]` The delegate.
    ///   3. `[signer]` The source account owner.
    ///
    ///   * Multisignature owner
    ///   0. `[writable]` The source account.
    ///   1. `[]` The token mint.
    ///   2. `[]` The delegate.
    ///   3. `[]` The source account's multisignature owner.
    ///   4. ..4+M `[signer]` M signer accounts
    ApproveChecked {
        /// The amount of tokens the delegate is approved for.
        amount: u64,
        /// Expected number of base 10 digits to the right of the decimal place.
        decimals: u8,
    },
    /// Mints new tokens to an account.  The native mint does not support
    /// minting.
    ///
    /// This instruction differs from MintTo in that the decimals value is
    /// checked by the caller.  This may be useful when creating transactions
    /// offline or within a hardware wallet.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single authority
    ///   0. `[writable]` The mint.
    ///   1. `[writable]` The account to mint tokens to.
    ///   2. `[signer]` The mint's minting authority.
    ///
    ///   * Multisignature authority
    ///   0. `[writable]` The mint.
    ///   1. `[writable]` The account to mint tokens to.
    ///   2. `[]` The mint's multisignature mint-tokens authority.
    ///   3. ..3+M `[signer]` M signer accounts.
    MintToChecked {
        /// The amount of new tokens to mint.
        amount: u64,
        /// Expected number of base 10 digits to the right of the decimal place.
        decimals: u8,
    },
    /// Burns tokens by removing them from an account.  `BurnChecked` does not
    /// support accounts associated with the native mint, use `CloseAccount`
    /// instead.
    ///
    /// This instruction differs from Burn in that the decimals value is checked
    /// by the caller. This may be useful when creating transactions offline or
    /// within a hardware wallet.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   * Single owner/delegate
    ///   0. `[writable]` The account to burn from.
    ///   1. `[writable]` The token mint.
    ///   2. `[signer]` The account's owner/delegate.
    ///
    ///   * Multisignature owner/delegate
    ///   0. `[writable]` The account to burn from.
    ///   1. `[writable]` The token mint.
    ///   2. `[]` The account's multisignature owner/delegate.
    ///   3. ..3+M `[signer]` M signer accounts.
    BurnChecked {
        /// The amount of tokens to burn.
        amount: u64,
        /// Expected number of base 10 digits to the right of the decimal place.
        decimals: u8,
    },
    /// Like InitializeAccount, but the owner pubkey is passed via instruction data
    /// rather than the accounts list. This variant may be preferable when using
    /// Cross Program Invocation from an instruction that does not need the owner's
    /// `AccountInfo` otherwise.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   0. `[writable]`  The account to initialize.
    ///   1. `[]` The mint this account will be associated with.
    InitializeAccount2 {
        /// The new account's owner/multisignature.
        owner: Pubkey,
    },
    /// Given a wrapped / native token account (a token account containing MUNDIS)
    /// updates its amount field based on the account's underlying `lamports`.
    /// This is useful if a non-wrapped MUNDIS account uses `system_instruction::transfer`
    /// to move lamports to a wrapped token account, and needs to have its token
    /// `amount` field updated.
    ///
    /// Accounts expected by this instruction:
    ///
    ///   0. `[writable]`  The native token account to sync with its underlying lamports.
    SyncNative,
}
/// Specifies the authority type for SetAuthority instructions
///
/// NOTE(review): serialized inside `TokenInstruction` via bincode; variant
/// order is part of the wire format — do not reorder.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Copy)]
pub enum AuthorityType {
    /// Authority to mint new tokens
    MintTokens,
    /// Authority to freeze any account associated with the Mint
    FreezeAccount,
    /// Owner of a given token account
    AccountOwner,
    /// Authority to close a token account
    CloseAccount,
}
/// Creates a `InitializeMint` instruction.
///
/// `name` and `symbol` are copied into the instruction data. Taking `&str`
/// instead of `&String` follows Rust API convention (clippy `ptr_arg`);
/// existing call sites passing `&String` continue to work via deref coercion.
///
/// # Errors
/// Returns an error if `token_program_id` is not the token program account.
pub fn initialize_mint(
    token_program_id: &Pubkey,
    mint_pubkey: &Pubkey,
    mint_authority_pubkey: &Pubkey,
    freeze_authority_pubkey: Option<&Pubkey>,
    name: &str,
    symbol: &str,
    decimals: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let freeze_authority = freeze_authority_pubkey.cloned().into();
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::InitializeMint {
            mint_authority: *mint_authority_pubkey,
            freeze_authority,
            name: name.to_owned(),
            symbol: symbol.to_owned(),
            decimals,
        },
        vec![
            AccountMeta::new(*mint_pubkey, false),
        ]
    ))
}
/// Creates a `InitializeAccount` instruction.
///
/// Expects the account, its mint, and the new owner, none of which need to
/// sign this instruction.
pub fn initialize_account(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, false),
    ];
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::InitializeAccount,
        accounts,
    ))
}
/// Creates a `InitializeAccount2` instruction.
///
/// Like `initialize_account`, but the owner travels in the instruction data
/// instead of the accounts list.
pub fn initialize_account2(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let data = TokenInstruction::InitializeAccount2 {
        owner: *owner_pubkey,
    };
    let accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
    ];
    Ok(Instruction::new_with_bincode(*token_program_id, &data, accounts))
}
/// Creates a `InitializeMultisig` instruction.
///
/// `m` is the number of required signers; it must lie in
/// `MIN_SIGNERS..=MAX_SIGNERS` and must not exceed the number of provided
/// signer pubkeys.
///
/// # Errors
/// Returns `MissingRequiredSignature` if the signer counts are out of range,
/// or an error if `token_program_id` is not the token program account.
pub fn initialize_multisig(
    token_program_id: &Pubkey,
    multisig_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    m: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    if !is_valid_signer_index(m as usize)
        || !is_valid_signer_index(signer_pubkeys.len())
        || m as usize > signer_pubkeys.len()
    {
        return Err(InstructionError::MissingRequiredSignature);
    }
    // One slot for the multisig account itself plus one per signer.
    // (The previous `1 + 1 + len` capacity over-allocated by one element.)
    let mut accounts = Vec::with_capacity(1 + signer_pubkeys.len());
    accounts.push(AccountMeta::new(*multisig_pubkey, false));
    for signer_pubkey in signer_pubkeys.iter() {
        accounts.push(AccountMeta::new_readonly(**signer_pubkey, false));
    }
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::InitializeMultisig {
            m,
        },
        accounts
    ))
}
/// Creates a `Transfer` instruction.
///
/// When `signer_pubkeys` is empty the authority itself signs; otherwise the
/// authority is a multisig account and the listed signers sign instead.
pub fn transfer(
    token_program_id: &Pubkey,
    source_pubkey: &Pubkey,
    destination_pubkey: &Pubkey,
    authority_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*source_pubkey, false),
        AccountMeta::new(*destination_pubkey, false),
        AccountMeta::new_readonly(*authority_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::Transfer { amount },
        accounts,
    ))
}
/// Creates an `Approve` instruction.
///
/// Grants `delegate_pubkey` authority over up to `amount` tokens of the
/// source account on behalf of its owner.
pub fn approve(
    token_program_id: &Pubkey,
    source_pubkey: &Pubkey,
    delegate_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*source_pubkey, false),
        AccountMeta::new_readonly(*delegate_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::Approve { amount },
        accounts,
    ))
}
/// Creates a `Revoke` instruction.
///
/// Removes any previously-approved delegate from the source account.
pub fn revoke(
    token_program_id: &Pubkey,
    source_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*source_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::Revoke,
        accounts,
    ))
}
/// Creates a `SetAuthority` instruction.
///
/// Passing `None` for `new_authority_pubkey` clears the authority of the
/// given type on the mint or account.
///
/// # Errors
/// Returns an error if `token_program_id` is not the token program account.
pub fn set_authority(
    token_program_id: &Pubkey,
    owned_pubkey: &Pubkey,
    new_authority_pubkey: Option<&Pubkey>,
    authority_type: AuthorityType,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let new_authority = new_authority_pubkey.cloned().into();
    // Only the owned account and the current authority are pushed before the
    // signers, so reserve `2 + len` (the previous `3 + len` over-allocated).
    let mut accounts = Vec::with_capacity(2 + signer_pubkeys.len());
    accounts.push(AccountMeta::new(*owned_pubkey, false));
    accounts.push(AccountMeta::new_readonly(
        *owner_pubkey,
        signer_pubkeys.is_empty(),
    ));
    for signer_pubkey in signer_pubkeys.iter() {
        accounts.push(AccountMeta::new_readonly(**signer_pubkey, true));
    }
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::SetAuthority {
            authority_type,
            new_authority,
        },
        accounts
    ))
}
/// Creates a `MintTo` instruction.
///
/// Mints `amount` new tokens into `account_pubkey`, authorized by the mint's
/// minting authority (single signer or multisig).
pub fn mint_to(
    token_program_id: &Pubkey,
    mint_pubkey: &Pubkey,
    account_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*mint_pubkey, false),
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::MintTo { amount },
        accounts,
    ))
}
/// Creates a `Burn` instruction.
///
/// Removes `amount` tokens from `account_pubkey`, authorized by the
/// account's owner or delegate (single signer or multisig).
pub fn burn(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    authority_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new(*mint_pubkey, false),
        AccountMeta::new_readonly(*authority_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::Burn { amount },
        accounts,
    ))
}
/// Creates a `CloseAccount` instruction.
///
/// Closes `account_pubkey` and sends its remaining MUNDIS to
/// `destination_pubkey`.
pub fn close_account(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    destination_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new(*destination_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::CloseAccount,
        accounts,
    ))
}
/// Creates a `FreezeAccount` instruction.
///
/// Freezes an initialized token account using the mint's freeze authority.
pub fn freeze_account(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::FreezeAccount,
        accounts,
    ))
}
/// Creates a `ThawAccount` instruction.
///
/// Thaws a frozen token account using the mint's freeze authority.
pub fn thaw_account(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::ThawAccount,
        accounts,
    ))
}
/// Creates a `TransferChecked` instruction.
///
/// Like `transfer`, but the mint and expected `decimals` are validated
/// on-chain against the caller-supplied values.
#[allow(clippy::too_many_arguments)]
pub fn transfer_checked(
    token_program_id: &Pubkey,
    source_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    destination_pubkey: &Pubkey,
    authority_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
    decimals: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*source_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
        AccountMeta::new(*destination_pubkey, false),
        AccountMeta::new_readonly(*authority_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::TransferChecked { amount, decimals },
        accounts,
    ))
}
/// Creates an `ApproveChecked` instruction.
///
/// Like `approve`, but the mint and expected `decimals` are validated
/// on-chain against the caller-supplied values.
#[allow(clippy::too_many_arguments)]
pub fn approve_checked(
    token_program_id: &Pubkey,
    source_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    delegate_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
    decimals: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*source_pubkey, false),
        AccountMeta::new_readonly(*mint_pubkey, false),
        AccountMeta::new_readonly(*delegate_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::ApproveChecked { amount, decimals },
        accounts,
    ))
}
/// Creates a `MintToChecked` instruction.
///
/// Like `mint_to`, but the expected `decimals` value is validated on-chain.
pub fn mint_to_checked(
    token_program_id: &Pubkey,
    mint_pubkey: &Pubkey,
    account_pubkey: &Pubkey,
    owner_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
    decimals: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*mint_pubkey, false),
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new_readonly(*owner_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::MintToChecked { amount, decimals },
        accounts,
    ))
}
/// Creates a `BurnChecked` instruction.
///
/// Like `burn`, but the expected `decimals` value is validated on-chain.
pub fn burn_checked(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
    mint_pubkey: &Pubkey,
    authority_pubkey: &Pubkey,
    signer_pubkeys: &[&Pubkey],
    amount: u64,
    decimals: u8,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    let mut accounts = vec![
        AccountMeta::new(*account_pubkey, false),
        AccountMeta::new(*mint_pubkey, false),
        AccountMeta::new_readonly(*authority_pubkey, signer_pubkeys.is_empty()),
    ];
    accounts.extend(
        signer_pubkeys
            .iter()
            .map(|signer| AccountMeta::new_readonly(**signer, true)),
    );
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::BurnChecked { amount, decimals },
        accounts,
    ))
}
/// Creates a `SyncNative` instruction
///
/// Updates a wrapped/native token account's `amount` from its lamports.
pub fn sync_native(
    token_program_id: &Pubkey,
    account_pubkey: &Pubkey,
) -> Result<Instruction, InstructionError> {
    check_program_account(token_program_id)?;
    Ok(Instruction::new_with_bincode(
        *token_program_id,
        &TokenInstruction::SyncNative,
        vec![AccountMeta::new(*account_pubkey, false)],
    ))
}
/// Utility function that checks index is between MIN_SIGNERS and MAX_SIGNERS
pub fn is_valid_signer_index(index: usize) -> bool {
    matches!(index, MIN_SIGNERS..=MAX_SIGNERS)
}
| 34.006623 | 90 | 0.640149 |
e5cbbdb74a7a7402267df9a554d4a1f677d1122a | 6,624 | // Copyright (c) The XPeer Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
chained_bft::QuorumCert,
counters,
state_replication::{StateComputeResult, StateComputer},
state_synchronizer::{StateSynchronizer, SyncStatus},
};
use crypto::HashValue;
use execution_proto::proto::{
execution::{CommitBlockRequest, CommitBlockStatus, ExecuteBlockRequest, ExecuteBlockResponse},
execution_grpc::ExecutionClient,
};
use failure::Result;
use futures::{compat::Future01CompatExt, Future, FutureExt};
use proto_conv::{FromProto, IntoProto};
use std::{pin::Pin, sync::Arc, time::Instant};
use types::{
ledger_info::LedgerInfoWithSignatures,
transaction::{SignedTransaction, TransactionListWithProof, TransactionStatus},
};
/// Basic communication with the Execution module;
/// implements StateComputer traits.
pub struct ExecutionProxy {
    // gRPC client used for execute-block / commit-block calls.
    execution: Arc<ExecutionClient>,
    // Used for state sync (`sync_to`) and transaction chunk retrieval.
    synchronizer: Arc<StateSynchronizer>,
}
impl ExecutionProxy {
    /// Builds a proxy from an execution-service client and a state synchronizer.
    pub fn new(execution: Arc<ExecutionClient>, synchronizer: Arc<StateSynchronizer>) -> Self {
        // `execution` is received by value, so it can be moved in directly.
        // (The previous `Arc::clone(&execution)` cloned the Arc and then
        // dropped the original — a pointless refcount round-trip.)
        Self {
            execution,
            synchronizer,
        }
    }
    /// Decodes a raw protobuf `ExecuteBlockResponse` into a
    /// `StateComputeResult`, recording execution-latency metrics.
    ///
    /// Panics if the protobuf payload cannot be decoded (a bug upstream).
    fn process_exec_response(
        response: ExecuteBlockResponse,
        pre_execution_instant: Instant,
    ) -> StateComputeResult {
        let execution_block_response = execution_proto::ExecuteBlockResponse::from_proto(response)
            .expect("Couldn't decode ExecutionBlockResponse from protobuf");
        let execution_duration_ms = pre_execution_instant.elapsed().as_millis();
        let num_txns = execution_block_response.status().len();
        if num_txns == 0 {
            // No txns in that block: use the dedicated empty-block histogram
            // so empty blocks don't skew the per-txn latency numbers.
            counters::EMPTY_BLOCK_EXECUTION_DURATION_MS.observe(execution_duration_ms as f64);
        } else {
            counters::BLOCK_EXECUTION_DURATION_MS.observe(execution_duration_ms as f64);
            let per_txn_duration = (execution_duration_ms as f64) / (num_txns as f64);
            counters::TXN_EXECUTION_DURATION_MS.observe(per_txn_duration);
        }
        // One bool per transaction: true iff the VM decided to keep it.
        let mut compute_status = Vec::with_capacity(num_txns);
        let mut num_successful_txns = 0;
        for vm_status in execution_block_response.status() {
            let status = match vm_status {
                TransactionStatus::Keep(_) => {
                    num_successful_txns += 1;
                    true
                }
                TransactionStatus::Discard(_) => false,
            };
            compute_status.push(status);
        }
        StateComputeResult {
            new_state_id: execution_block_response.root_hash(),
            compute_status,
            num_successful_txns,
            validators: execution_block_response.validators().clone(),
        }
    }
}
impl StateComputer for ExecutionProxy {
    type Payload = Vec<SignedTransaction>;
    // Executes `transactions` on top of `parent_block_id` through the
    // Execution gRPC service, resolving to the decoded compute result.
    fn compute(
        &self,
        // The id of a parent block, on top of which the given transactions should be executed.
        parent_block_id: HashValue,
        // The id of a current block.
        block_id: HashValue,
        // Transactions to execute.
        transactions: &Self::Payload,
    ) -> Pin<Box<dyn Future<Output = Result<StateComputeResult>> + Send>> {
        let mut exec_req = ExecuteBlockRequest::new();
        exec_req.set_parent_block_id(parent_block_id.to_vec());
        exec_req.set_block_id(block_id.to_vec());
        exec_req.set_transactions(::protobuf::RepeatedField::from_vec(
            transactions
                .clone()
                .into_iter()
                .map(IntoProto::into_proto)
                .collect(),
        ));
        // Timestamp taken before issuing the RPC so the metric in
        // `process_exec_response` covers the full round trip.
        let pre_execution_instant = Instant::now();
        match self.execution.execute_block_async(&exec_req) {
            Ok(receiver) => {
                // convert from grpcio enum to failure::Error
                async move {
                    match receiver.compat().await {
                        Ok(response) => {
                            Ok(Self::process_exec_response(response, pre_execution_instant))
                        }
                        Err(e) => Err(e.into()),
                    }
                }
                .boxed()
            }
            // RPC could not even be issued: surface the error as a ready future.
            Err(e) => async move { Err(e.into()) }.boxed(),
        }
    }
    /// Send a successful commit. A future is fulfilled when the state is finalized.
    fn commit(
        &self,
        commit: LedgerInfoWithSignatures,
    ) -> Pin<Box<dyn Future<Output = Result<()>> + Send>> {
        counters::LAST_COMMITTED_VERSION.set(commit.ledger_info().version() as i64);
        let mut commit_req = CommitBlockRequest::new();
        commit_req.set_ledger_info_with_sigs(commit.into_proto());
        let pre_commit_instant = Instant::now();
        match self.execution.commit_block_async(&commit_req) {
            Ok(receiver) => {
                // convert from grpcio enum to failure::Error
                async move {
                    match receiver.compat().await {
                        Ok(response) => {
                            if response.get_status() == CommitBlockStatus::SUCCEEDED {
                                let commit_duration_ms = pre_commit_instant.elapsed().as_millis();
                                counters::BLOCK_COMMIT_DURATION_MS
                                    .observe(commit_duration_ms as f64);
                                Ok(())
                            } else {
                                // Commit reached the service but was rejected:
                                // wrap it in a synthetic gRPC error.
                                Err(grpcio::Error::RpcFailure(grpcio::RpcStatus::new(
                                    grpcio::RpcStatusCode::Unknown,
                                    Some("Commit failure!".to_string()),
                                ))
                                .into())
                            }
                        }
                        Err(e) => Err(e.into()),
                    }
                }
                .boxed()
            }
            Err(e) => async move { Err(e.into()) }.boxed(),
        }
    }
    /// Synchronize to a commit that not present locally.
    fn sync_to(
        &self,
        commit: QuorumCert,
    ) -> Pin<Box<dyn Future<Output = Result<SyncStatus>> + Send>> {
        counters::STATE_SYNC_COUNT.inc();
        self.synchronizer.sync_to(commit).boxed()
    }
    // Fetches a batch of committed transactions (with proof) from the
    // synchronizer, delegating entirely to it.
    fn get_chunk(
        &self,
        start_version: u64,
        target_version: u64,
        batch_size: u64,
    ) -> Pin<Box<dyn Future<Output = Result<TransactionListWithProof>> + Send>> {
        self.synchronizer
            .get_chunk(start_version, target_version, batch_size)
            .boxed()
    }
}
| 37.636364 | 98 | 0.566878 |
ebad822171943275e570f8ec7a6b455bbe76a41c | 29,355 | //! Tests for `#[graphql_union]` macro.
use juniper::{
execute, graphql_object, graphql_union, graphql_value, DefaultScalarValue, EmptyMutation,
EmptySubscription, GraphQLObject, GraphQLType, RootNode, ScalarValue, Variables,
};
/// Union variant used by tests running with the default (unit) context.
#[derive(GraphQLObject)]
struct Human {
    id: String,
    home_planet: String,
}
/// Union variant used by tests running with the default (unit) context.
#[derive(GraphQLObject)]
struct Droid {
    id: String,
    primary_function: String,
}
/// Extra variant used to exercise ignored methods and external resolvers.
#[derive(GraphQLObject)]
struct Ewok {
    id: String,
    funny: bool,
}
/// Test context that doubles as a selector for which variant a query root
/// should produce.
pub enum CustomContext {
    Human,
    Droid,
    Ewok,
}
impl juniper::Context for CustomContext {}
/// `Human` twin bound to `CustomContext` for custom-context test modules.
#[derive(GraphQLObject)]
#[graphql(context = CustomContext)]
pub struct HumanCustomContext {
    id: String,
    home_planet: String,
}
/// `Droid` twin bound to `CustomContext` for custom-context test modules.
#[derive(GraphQLObject)]
#[graphql(context = CustomContext)]
pub struct DroidCustomContext {
    id: String,
    primary_function: String,
}
/// `Ewok` twin bound to `CustomContext` for custom-context test modules.
#[derive(GraphQLObject)]
#[graphql(context = CustomContext)]
struct EwokCustomContext {
    id: String,
    funny: bool,
}
/// Builds a query-only `RootNode` around `query_root`, with empty mutation
/// and subscription roots sharing the same context type `C`.
fn schema<'q, C, S, Q>(query_root: Q) -> RootNode<'q, Q, EmptyMutation<C>, EmptySubscription<C>, S>
where
    Q: GraphQLType<S, Context = C, TypeInfo = ()> + 'q,
    S: ScalarValue + 'q,
{
    let mutation = EmptyMutation::<C>::new();
    let subscription = EmptySubscription::<C>::new();
    RootNode::new(query_root, mutation, subscription)
}
mod trivial {
use super::*;
#[graphql_union]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
fn as_droid(&self) -> Option<&Droid> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
impl Character for Droid {
fn as_droid(&self) -> Option<&Droid> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
enum QueryRoot {
Human,
Droid,
}
#[graphql_object]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match self {
Self::Human => Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
Self::Droid => Box::new(Droid {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
... on Droid {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot::Droid);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
#[tokio::test]
async fn is_graphql_union() {
const DOC: &str = r#"{
__type(name: "Character") {
kind
}
}"#;
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((graphql_value!({"__type": {"kind": "UNION"}}), vec![])),
);
}
#[tokio::test]
async fn uses_type_name() {
const DOC: &str = r#"{
__type(name: "Character") {
name
}
}"#;
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((graphql_value!({"__type": {"name": "Character"}}), vec![])),
);
}
#[tokio::test]
async fn has_no_description() {
const DOC: &str = r#"{
__type(name: "Character") {
description
}
}"#;
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((graphql_value!({"__type": {"description": None}}), vec![])),
);
}
}
mod generic {
use super::*;
#[graphql_union]
trait Character<A, B> {
fn as_human(&self) -> Option<&Human> {
None
}
fn as_droid(&self) -> Option<&Droid> {
None
}
}
impl<A, B> Character<A, B> for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
impl<A, B> Character<A, B> for Droid {
fn as_droid(&self) -> Option<&Droid> {
Some(&self)
}
}
type DynCharacter<'a, A, B> = dyn Character<A, B> + Send + Sync + 'a;
enum QueryRoot {
Human,
Droid,
}
#[graphql_object]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_, u8, ()>> {
let ch: Box<DynCharacter<'_, u8, ()>> = match self {
Self::Human => Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
Self::Droid => Box::new(Droid {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
... on Droid {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot::Droid);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
#[tokio::test]
async fn uses_type_name_without_type_params() {
const DOC: &str = r#"{
__type(name: "Character") {
name
}
}"#;
let schema = schema(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((graphql_value!({"__type": {"name": "Character"}}), vec![])),
);
}
}
mod description_from_doc_comment {
use super::*;
/// Rust docs.
#[graphql_union]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
struct QueryRoot;
#[graphql_object]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
})
}
}
#[tokio::test]
async fn resolves_human() {
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn uses_doc_comment_as_description() {
const DOC: &str = r#"{
__type(name: "Character") {
description
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"__type": {"description": "Rust docs."}}),
vec![],
)),
);
}
}
mod explicit_name_and_description {
use super::*;
/// Rust docs.
#[graphql_union(name = "MyChar", desc = "My character.")]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
struct QueryRoot;
#[graphql_object]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
})
}
}
#[tokio::test]
async fn resolves_human() {
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn uses_custom_name() {
const DOC: &str = r#"{
__type(name: "MyChar") {
name
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((graphql_value!({"__type": {"name": "MyChar"}}), vec![])),
);
}
#[tokio::test]
async fn uses_custom_description() {
const DOC: &str = r#"{
__type(name: "MyChar") {
description
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"__type": {"description": "My character."}}),
vec![],
)),
);
}
}
mod explicit_scalar {
use super::*;
#[graphql_union(scalar = DefaultScalarValue)]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
fn as_droid(&self) -> Option<&Droid> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
impl Character for Droid {
fn as_droid(&self) -> Option<&Droid> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
enum QueryRoot {
Human,
Droid,
}
#[graphql_object(scalar = DefaultScalarValue)]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match self {
Self::Human => Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
Self::Droid => Box::new(Droid {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
... on Droid {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema::<_, DefaultScalarValue, _>(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema::<_, DefaultScalarValue, _>(QueryRoot::Droid);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
}
mod custom_scalar {
use crate::custom_scalar::MyScalarValue;
use super::*;
#[graphql_union(scalar = MyScalarValue)]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
fn as_droid(&self) -> Option<&Droid> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
impl Character for Droid {
fn as_droid(&self) -> Option<&Droid> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
enum QueryRoot {
Human,
Droid,
}
#[graphql_object(scalar = MyScalarValue)]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match self {
Self::Human => Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
Self::Droid => Box::new(Droid {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
... on Droid {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema::<_, MyScalarValue, _>(QueryRoot::Human);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema::<_, MyScalarValue, _>(QueryRoot::Droid);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
}
mod explicit_custom_context {
use super::*;
#[graphql_union(context = CustomContext)]
trait Character {
fn as_human(&self) -> Option<&HumanCustomContext> {
None
}
fn as_droid(&self) -> Option<&DroidCustomContext> {
None
}
}
impl Character for HumanCustomContext {
fn as_human(&self) -> Option<&HumanCustomContext> {
Some(&self)
}
}
impl Character for DroidCustomContext {
fn as_droid(&self) -> Option<&DroidCustomContext> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
struct QueryRoot;
#[graphql_object(context = CustomContext)]
impl QueryRoot {
fn character(&self, ctx: &CustomContext) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match ctx {
CustomContext::Human => Box::new(HumanCustomContext {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
CustomContext::Droid => Box::new(DroidCustomContext {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
_ => unimplemented!(),
};
ch
}
}
const DOC: &str = r#"{
character {
... on HumanCustomContext {
humanId: id
homePlanet
}
... on DroidCustomContext {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Human).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Droid).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
}
mod inferred_custom_context {
use super::*;
#[graphql_union]
trait Character {
fn as_human(&self, _: &CustomContext) -> Option<&HumanCustomContext> {
None
}
fn as_droid(&self, _: &()) -> Option<&DroidCustomContext> {
None
}
}
impl Character for HumanCustomContext {
fn as_human(&self, _: &CustomContext) -> Option<&HumanCustomContext> {
Some(&self)
}
}
impl Character for DroidCustomContext {
fn as_droid(&self, _: &()) -> Option<&DroidCustomContext> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
struct QueryRoot;
#[graphql_object(context = CustomContext)]
impl QueryRoot {
fn character(&self, ctx: &CustomContext) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match ctx {
CustomContext::Human => Box::new(HumanCustomContext {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
CustomContext::Droid => Box::new(DroidCustomContext {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
_ => unimplemented!(),
};
ch
}
}
const DOC: &str = r#"{
character {
... on HumanCustomContext {
humanId: id
homePlanet
}
... on DroidCustomContext {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Human).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Droid).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
}
mod ignored_method {
use super::*;
#[graphql_union]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
#[graphql_union(ignore)]
fn ignored(&self) -> Option<&Ewok> {
None
}
#[graphql_union(skip)]
fn skipped(&self) {}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
struct QueryRoot;
#[graphql_object]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
})
}
}
#[tokio::test]
async fn resolves_human() {
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn ignores_ewok() {
const DOC: &str = r#"{
__type(name: "Character") {
possibleTypes {
name
}
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &()).await,
Ok((
graphql_value!({"__type": {"possibleTypes": [{"name": "Human"}]}}),
vec![],
)),
);
}
}
mod external_resolver {
use super::*;
#[graphql_union(context = Database)]
#[graphql_union(on Droid = DynCharacter::as_droid)]
trait Character {
fn as_human(&self) -> Option<&Human> {
None
}
}
impl Character for Human {
fn as_human(&self) -> Option<&Human> {
Some(&self)
}
}
impl Character for Droid {}
type DynCharacter<'a> = dyn Character + Send + Sync + 'a;
impl<'a> DynCharacter<'a> {
fn as_droid<'db>(&self, db: &'db Database) -> Option<&'db Droid> {
db.droid.as_ref()
}
}
struct Database {
droid: Option<Droid>,
}
impl juniper::Context for Database {}
enum QueryRoot {
Human,
Droid,
}
#[graphql_object(context = Database)]
impl QueryRoot {
fn character(&self) -> Box<DynCharacter<'_>> {
let ch: Box<DynCharacter<'_>> = match self {
Self::Human => Box::new(Human {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
Self::Droid => Box::new(Droid {
id: "?????".to_string(),
primary_function: "???".to_string(),
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on Human {
humanId: id
homePlanet
}
... on Droid {
droidId: id
primaryFunction
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot::Human);
let db = Database { droid: None };
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &db).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot::Droid);
let db = Database {
droid: Some(Droid {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
};
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &db).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
}
mod full_featured {
use super::*;
/// Rust doc.
#[graphql_union(name = "MyChar")]
#[graphql_union(description = "My character.")]
#[graphql_union(context = CustomContext, scalar = DefaultScalarValue)]
#[graphql_union(on EwokCustomContext = resolve_ewok)]
trait Character<T> {
fn as_human(&self, _: &()) -> Option<&HumanCustomContext> {
None
}
fn as_droid(&self) -> Option<&DroidCustomContext> {
None
}
#[graphql_union(ignore)]
fn as_ewok(&self) -> Option<&EwokCustomContext> {
None
}
#[graphql_union(ignore)]
fn ignored(&self) {}
}
impl<T> Character<T> for HumanCustomContext {
fn as_human(&self, _: &()) -> Option<&HumanCustomContext> {
Some(&self)
}
}
impl<T> Character<T> for DroidCustomContext {
fn as_droid(&self) -> Option<&DroidCustomContext> {
Some(&self)
}
}
impl<T> Character<T> for EwokCustomContext {
fn as_ewok(&self) -> Option<&EwokCustomContext> {
Some(&self)
}
}
type DynCharacter<'a, T> = dyn Character<T> + Send + Sync + 'a;
fn resolve_ewok<'a, T>(
ewok: &'a DynCharacter<'a, T>,
_: &CustomContext,
) -> Option<&'a EwokCustomContext> {
ewok.as_ewok()
}
struct QueryRoot;
#[graphql_object(context = CustomContext)]
impl QueryRoot {
fn character(&self, ctx: &CustomContext) -> Box<DynCharacter<'_, ()>> {
let ch: Box<DynCharacter<'_, ()>> = match ctx {
CustomContext::Human => Box::new(HumanCustomContext {
id: "human-32".to_string(),
home_planet: "earth".to_string(),
}),
CustomContext::Droid => Box::new(DroidCustomContext {
id: "droid-99".to_string(),
primary_function: "run".to_string(),
}),
CustomContext::Ewok => Box::new(EwokCustomContext {
id: "ewok-1".to_string(),
funny: true,
}),
};
ch
}
}
const DOC: &str = r#"{
character {
... on HumanCustomContext {
humanId: id
homePlanet
}
... on DroidCustomContext {
droidId: id
primaryFunction
}
... on EwokCustomContext {
ewokId: id
funny
}
}
}"#;
#[tokio::test]
async fn resolves_human() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Human).await,
Ok((
graphql_value!({"character": {"humanId": "human-32", "homePlanet": "earth"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_droid() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Droid).await,
Ok((
graphql_value!({"character": {"droidId": "droid-99", "primaryFunction": "run"}}),
vec![],
)),
);
}
#[tokio::test]
async fn resolves_ewok() {
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Ewok).await,
Ok((
graphql_value!({"character": {"ewokId": "ewok-1", "funny": true}}),
vec![],
)),
);
}
#[tokio::test]
async fn uses_custom_name() {
const DOC: &str = r#"{
__type(name: "MyChar") {
name
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Ewok).await,
Ok((graphql_value!({"__type": {"name": "MyChar"}}), vec![])),
);
}
#[tokio::test]
async fn uses_custom_description() {
const DOC: &str = r#"{
__type(name: "MyChar") {
description
}
}"#;
let schema = schema(QueryRoot);
assert_eq!(
execute(DOC, None, &schema, &Variables::new(), &CustomContext::Ewok).await,
Ok((
graphql_value!({"__type": {"description": "My character."}}),
vec![],
)),
);
}
}
| 25.111206 | 99 | 0.457333 |
f435bffbf27078f68a85c7ebc876d4b4fed163bd | 169 | // variables4.rs
// Make me compile! Execute the command `rustlings hint variables4` if you want a hint :)
fn main() {
    // Bug fix: `x` was declared (`let x: u8;`) but never initialized, and the
    // `x = 10` inside `println!` was a *named format argument*, not an
    // assignment to the binding. Initialize the binding and pass it normally.
    let x: u8 = 10;
    println!("Number {}", x);
}
| 21.125 | 89 | 0.633136 |
e503b82c9a21b07d4492d1c15d8196cec99add95 | 5,026 | use rlua::{Integer, Lua, Result, String, Table, ToLua, Value};
/// Asserts that `verify` holds a Lua number equal to `expected`.
fn valid_float(verify: Result<Value>, expected: f64) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "number");
    if let Value::Number(n) = value {
        assert_eq!(n, expected);
    } else {
        panic!("unexpected type");
    }
}
/// Lua 5.1 has no integer subtype, so integers arrive as `Value::Number`.
#[cfg(rlua_lua51)]
fn valid_int(verify: Result<Value>, expected: Integer) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "number");
    if let Value::Number(n) = value {
        assert_eq!(n as Integer, expected);
    } else {
        panic!("unexpected type");
    }
}
/// Lua >= 5.3 carries integers natively, so expect `Value::Integer`.
#[cfg(not(rlua_lua51))]
fn valid_int(verify: Result<Value>, expected: Integer) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "integer");
    if let Value::Integer(n) = value {
        assert_eq!(n, expected);
    } else {
        panic!("unexpected type");
    }
}
/// Asserts that `verify` holds a Lua table, then hands it to `handler`
/// for element-level checks.
fn valid_table(verify: Result<Value>, handler: fn(tbl: Table)) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "table");
    if let Value::Table(tbl) = value {
        handler(tbl);
    } else {
        panic!("unexpected type");
    }
}
/// Asserts that `verify` holds a Lua string equal to `val`.
fn valid_string(verify: Result<Value>, val: String) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "string");
    if let Value::String(s) = value {
        assert_eq!(s, val);
    } else {
        panic!("unexpected type");
    }
}
/// Asserts that `verify` holds a Lua boolean equal to `val`.
fn valid_boolean(verify: Result<Value>, val: bool) {
    let value = verify.unwrap();
    assert_eq!(value.type_name(), "boolean");
    if let Value::Boolean(b) = value {
        assert_eq!(b, val);
    } else {
        panic!("unexpected type");
    }
}
#[test]
fn test_conversion_int_primitives() {
let lua = Lua::new();
let v: i8 = 10;
let v2: u8 = 10;
let v3: i16 = 10;
let v4: u16 = 10;
let v5: i32 = 10;
let v6: u32 = 10;
let v7: i64 = 10;
let v8: u64 = 10;
let v9: i128 = 10;
let v10: u128 = 10;
let v11: isize = 10;
let v12: usize = 10;
lua.context(|ctx| {
valid_int(v.to_lua(ctx), 10);
valid_int(v2.to_lua(ctx), 10);
valid_int(v3.to_lua(ctx), 10);
valid_int(v4.to_lua(ctx), 10);
valid_int(v5.to_lua(ctx), 10);
valid_int(v6.to_lua(ctx), 10);
valid_int(v7.to_lua(ctx), 10);
valid_int(v8.to_lua(ctx), 10);
valid_int(v9.to_lua(ctx), 10);
valid_int(v10.to_lua(ctx), 10);
valid_int(v11.to_lua(ctx), 10);
valid_int(v12.to_lua(ctx), 10);
});
}
#[test]
// NOTE(review): "primatives" is a typo for "primitives"; renaming would
// change the test name reported by the harness, so it is left as-is.
fn test_conversion_float_primatives() {
    let lua = Lua::new();
    // Both float widths should convert to Lua numbers with the same value.
    let v: f32 = 10.0;
    let v2: f64 = 10.0;
    lua.context(|ctx| {
        valid_float(v.to_lua(ctx), 10.0);
        valid_float(v2.to_lua(ctx), 10.0);
    });
}
#[test]
fn test_conversion_int_array_table() {
let v1: [u32; 3] = [10, 15, 4];
let v2: [u8; 3] = [10, 15, 4];
let v3: [i16; 3] = [10, 15, 4];
let v4: [u16; 3] = [10, 15, 4];
let v5: [i32; 3] = [10, 15, 4];
let v6: [u32; 3] = [10, 15, 4];
let v7: [i64; 3] = [10, 15, 4];
let v8: [u64; 3] = [10, 15, 4];
let v9: [i128; 3] = [10, 15, 4];
let v10: [u128; 3] = [10, 15, 4];
let v11: [isize; 3] = [10, 15, 4];
let v12: [usize; 3] = [10, 15, 4];
let v1f: [f32; 3] = [10.0, 15.0, 4.0];
let v2f: [f64; 3] = [10.0, 15.0, 4.0];
let lua = Lua::new();
lua.context(|ctx| {
let validate_arr_int = |tbl: Table| {
valid_int(tbl.get(1), 10);
valid_int(tbl.get(2), 15);
valid_int(tbl.get(3), 4);
};
valid_table(v1.to_lua(ctx), validate_arr_int);
valid_table(v2.to_lua(ctx), validate_arr_int);
valid_table(v3.to_lua(ctx), validate_arr_int);
valid_table(v4.to_lua(ctx), validate_arr_int);
valid_table(v5.to_lua(ctx), validate_arr_int);
valid_table(v6.to_lua(ctx), validate_arr_int);
valid_table(v7.to_lua(ctx), validate_arr_int);
valid_table(v8.to_lua(ctx), validate_arr_int);
valid_table(v9.to_lua(ctx), validate_arr_int);
valid_table(v10.to_lua(ctx), validate_arr_int);
valid_table(v11.to_lua(ctx), validate_arr_int);
valid_table(v12.to_lua(ctx), validate_arr_int);
let validate_arr_float = |tbl: Table| {
valid_float(tbl.get(1), 10.0);
valid_float(tbl.get(2), 15.0);
valid_float(tbl.get(3), 4.0);
};
valid_table(v1f.to_lua(ctx), validate_arr_float);
valid_table(v2f.to_lua(ctx), validate_arr_float);
});
}
#[test]
fn test_conversion_string() {
    // A Rust &str should convert to a Lua string equal to one created
    // directly through the context.
    Lua::new().context(|ctx| {
        let expected = ctx.create_string("hello world").unwrap();
        valid_string("hello world".to_lua(ctx), expected);
    });
}
#[test]
fn test_conversion_boolean() {
    // Rust `true` should round into the Lua boolean `true`.
    Lua::new().context(|ctx| valid_boolean(true.to_lua(ctx), true));
}
| 29.739645 | 71 | 0.580382 |
3ae312d128cf4068f2305f03007c07308f7ab8d0 | 7,116 | use couchbase_lite::{
fallible_streaming_iterator::FallibleStreamingIterator, use_web_sockets, Database,
DatabaseConfig, Document, ReplicatorState,
};
use log::{error, trace};
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, env, path::Path, sync::mpsc};
use tokio::prelude::*;
/// Replicated document payload. Serialized with an internal `"type"` tag
/// ("Message"), which the query in `print_all_messages` filters on.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "type")]
struct Message {
    msg: String,
}
/// Entry point: opens the database on a dedicated thread, starts the
/// replicator (restarting it whenever it stops or goes offline), registers
/// a change observer, and restarts the replicator on every stdin line.
///
/// Args: `<db path> [sync url]`.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    env_logger::init();
    let mut runtime = tokio::runtime::Runtime::new()?;
    let db_path = env::args().nth(1).expect("No path to db file");
    let db_path = Path::new(&db_path);
    let sync_url = env::args()
        .nth(2)
        .unwrap_or_else(|| "ws://vps820494.ovh.net:4984/billeo-db".to_string());
    let token: Option<String> = Some("1b91ed5c6a58efd479c74011b592c86fc43f1c82".into());
    use_web_sockets(runtime.handle().clone());
    let (db_thread, db_exec) = run_db_thread(db_path);
    // Start the replicator; when it reports Stopped/Offline, queue a delayed
    // restart back onto the database thread.
    let db_exec_repl = db_exec.clone();
    db_exec.spawn(move |db| {
        if let Some(db) = db.as_mut() {
            db.start_replicator(
                &sync_url,
                token.as_ref().map(String::as_str),
                move |repl_state| {
                    println!("replicator state changed: {:?}", repl_state);
                    match repl_state {
                        ReplicatorState::Stopped(_) | ReplicatorState::Offline => {
                            db_exec_repl.spawn(|db| {
                                if let Some(db) = db.as_mut() {
                                    println!("restarting replicator");
                                    // Back off before retrying; this sleeps on
                                    // the db thread, not the tokio runtime.
                                    std::thread::sleep(std::time::Duration::from_secs(5));
                                    db.restart_replicator().expect("restart_replicator failed");
                                } else {
                                    eprintln!("db is NOT open");
                                }
                            });
                        }
                        _ => {}
                    }
                },
            )
            .expect("replicator start failed");
        } else {
            eprintln!("db is NOT open");
        }
    });
    // Print externally-replicated changes whenever the database observes one.
    let db_exec_repl = db_exec.clone();
    db_exec.spawn(move |db| {
        if let Some(db) = db.as_mut() {
            db.register_observer(move || {
                db_exec_repl
                    .spawn(|db| print_external_changes(db).expect("read external changes failed"));
            })
            .expect("register observer failed");
        } else {
            eprintln!("db is NOT open");
        }
    });
    let mut stdin = tokio::io::BufReader::new(tokio::io::stdin());
    let db_exec_repl = db_exec.clone();
    runtime.block_on(async move {
        let mut buf = String::new();
        loop {
            stdin
                .read_line(&mut buf)
                .await
                .expect("reading from stdin fail");
            // Fix: `read_line` *appends* to `buf`; without clearing it the
            // buffer grew without bound across iterations.
            buf.clear();
            db_exec_repl.spawn(move |db| {
                if let Some(db) = db.as_mut() {
                    db.restart_replicator().expect("restart_replicator failed");
                }
            });
        }
    });
    // NOTE(review): the loop above never breaks, so this shutdown path is
    // unreachable as written — confirm whether a quit command was intended
    // (the removed, unused `EDIT_PREFIX` constant suggests so).
    db_exec.spawn(|db| {
        if let Some(db) = db.as_mut() {
            db.clear_observers();
            db.stop_replicator();
        } else {
            eprintln!("db is NOT open");
        }
    });
    drop(db_exec);
    db_thread.join().unwrap();
    println!("exiting");
    Ok(())
}
/// A unit of work executed on the dedicated database thread. Each job gets
/// mutable access to the (possibly unopened) database.
type Job<T> = Box<dyn FnOnce(&mut Option<T>) + Send>;
/// Cloneable handle that submits closures to the database thread created by
/// `run_db_thread`. When every clone is dropped the channel disconnects and
/// the thread exits.
#[derive(Clone)]
struct DbQueryExecutor {
    inner: mpsc::Sender<Job<Database>>,
}
impl DbQueryExecutor {
    /// Queues `job` for execution on the database thread.
    ///
    /// Panics if the database thread has already shut down (receiver gone).
    pub fn spawn<F: FnOnce(&mut Option<Database>) + Send + 'static>(&self, job: F) {
        self.inner
            .send(Box::new(job))
            .expect("thread_pool::Executor::spawn failed");
    }
}
fn run_db_thread(db_path: &Path) -> (std::thread::JoinHandle<()>, DbQueryExecutor) {
let (sender, receiver) = std::sync::mpsc::channel::<Job<Database>>();
let db_path: std::path::PathBuf = db_path.into();
let join_handle = std::thread::spawn(move || {
let mut db = match Database::open(&db_path, DatabaseConfig::default()) {
Ok(db) => {
println!("We read all messages after open:");
print_all_messages(&db).expect("read from db failed");
println!("read all messages after open done");
Some(db)
}
Err(err) => {
error!("Initialiazion cause error: {}", err);
None
}
};
loop {
match receiver.recv() {
Ok(x) => x(&mut db),
Err(err) => {
trace!("db_thread: recv error: {}", err);
break;
}
}
}
});
(join_handle, DbQueryExecutor { inner: sender })
}
/// Persists `data` as a `Message` document inside a single transaction.
/// With `doc_id` present the existing document is edited; otherwise a new
/// document with a generated id is created.
fn save_msg(
    db: &mut Database,
    data: &str,
    doc_id: Option<&str>,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut trans = db.transaction()?;
    let msg = Message { msg: data.into() };
    let mut doc = match doc_id {
        Some(doc_id) => {
            println!("save_msg: edit message");
            Document::new_with_id(doc_id, &msg)?
        }
        None => Document::new(&msg)?,
    };
    println!("save_msg: doc id {}", doc.id());
    trans.save(&mut doc)?;
    trans.commit()?;
    Ok(())
}
/// Runs a JSON query for all documents of type "Message" and prints each
/// document's id and decoded payload.
fn print_all_messages(db: &Database) -> Result<(), Box<dyn std::error::Error>> {
    let query = db.query(r#"{"WHAT": ["._id"], "WHERE": ["=", [".type"], "Message"]}"#)?;
    let mut iter = query.run()?;
    // The query iterator is fallible: `next()?` surfaces query errors.
    while let Some(item) = iter.next()? {
        // work with item
        let id = item.get_raw_checked(0)?;
        let id = id.as_str()?;
        println!("iteration id {}", id);
        let doc = db.get_existing(id)?;
        println!("doc id {}", doc.id());
        let db_msg: Message = doc.decode_data()?;
        println!("db_msg: {:?}", db_msg);
    }
    Ok(())
}
/// Drains the database's observed-changes queue, printing every change and
/// then fetching and printing the payload of each externally-changed
/// document. Per-document errors are logged and skipped rather than aborting
/// the whole pass.
fn print_external_changes(db: &mut Option<Database>) -> Result<(), Box<dyn std::error::Error>> {
    let db = db
        .as_mut()
        // `ok_or` with a &'static str: the original `format!` of a constant
        // string allocated needlessly (clippy::useless_format); the error's
        // Display output is unchanged.
        .ok_or("print_external_changes: db not OPEN")?;
    // De-duplicate: a document may appear in several change notifications.
    let mut doc_ids = HashSet::<String>::new();
    for change in db.observed_changes() {
        println!(
            "observed change: doc id {} was changed, external {}",
            change.doc_id(),
            change.external()
        );
        if change.external() {
            doc_ids.insert(change.doc_id().into());
        }
    }
    for doc_id in &doc_ids {
        let doc = match db.get_existing(doc_id.as_str()) {
            Ok(x) => x,
            Err(err) => {
                eprintln!("Can not get {}: {}", doc_id, err);
                continue;
            }
        };
        let db_msg: Message = match doc.decode_data() {
            Ok(x) => x,
            Err(err) => {
                eprintln!("Can not decode data: {}", err);
                continue;
            }
        };
        println!("external: {}", db_msg.msg);
    }
    Ok(())
}
| 32.199095 | 99 | 0.496768 |
7a13744f1537558cc17755884b4bdfc456d48b29 | 364 | //! A base library for iterfacing with streams of vectors and matrices.
//!
//! This library extends the abstraction layer provided by [`ark_std::iterable::Iterable`]
//! with streams that repeat the same element over and over, and that iterate in reversed order.
pub mod dummy;
pub(crate) mod slice;
pub use ark_std::iterable::Iterable;
pub use slice::Reverse;
| 33.090909 | 96 | 0.755495 |
082e9b65fb213cf84bc50c7cc0271838bacae1d9 | 1,593 | // Copyright 2018-2022 Cargill Incorporated
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Builder for the BiomeSubsystem
use splinter::biome::UserProfileStore;
use splinter::error::InternalError;
use crate::node::runnable::biome::RunnableBiomeSubsystem;
/// Builder that assembles a `RunnableBiomeSubsystem` from its required
/// stores.
pub struct BiomeSubsystemBuilder {
    // Required; `build` fails if it was never supplied.
    profile_store: Option<Box<dyn UserProfileStore>>,
}
impl BiomeSubsystemBuilder {
    /// Creates a builder with no profile store configured.
    pub fn new() -> Self {
        Self {
            profile_store: None,
        }
    }
    /// Specifies the user profile store to use with the node. This is
    /// required; `build` returns an error if it is not provided.
    ///
    /// (The previous comment claimed a "store factory" with a
    /// `MemoryStoreFactory` default, but no default exists and the field is
    /// the profile store.)
    pub fn with_profile_store(mut self, profile_store: Box<dyn UserProfileStore>) -> Self {
        self.profile_store = Some(profile_store);
        self
    }
    /// Consumes the builder and produces a `RunnableBiomeSubsystem`.
    ///
    /// # Errors
    ///
    /// Returns an `InternalError` if no profile store was supplied.
    pub fn build(self) -> Result<RunnableBiomeSubsystem, InternalError> {
        let profile_store = self.profile_store.ok_or_else(|| {
            InternalError::with_message(
                // Corrected message: the missing item is the profile store,
                // not a "store factory".
                "Cannot build BiomeSubsystem without a profile store".to_string(),
            )
        })?;
        Ok(RunnableBiomeSubsystem { profile_store })
    }
}
| 32.510204 | 93 | 0.695543 |
9c6ffd49c21d9d9f17a1028a8e816a8625a27983 | 2,835 | use crate::core::error::ContractError;
use crate::core::state::PayableScopeAttribute;
use crate::testutil::test_utilities::{
mock_default_scope_attribute, mock_scope_attribute, MockOwnedDeps,
};
use crate::util::provenance_util::{ProvenanceUtil, ProvenanceUtilImpl, WriteAttributeMessages};
use cosmwasm_std::{CosmosMsg, Deps, QuerierWrapper, StdResult};
use provwasm_std::{ProvenanceMsg, ProvenanceQuery, Scope};
use std::cell::RefCell;
/// Test double for `ProvenanceUtil` that records every scope attribute it is
/// asked to write, so tests can later assert on or re-mock them.
pub struct MockProvenanceUtil {
    // Interior mutability lets the capture happen through `&self`.
    captured_attributes: RefCell<Vec<PayableScopeAttribute>>,
}
impl MockProvenanceUtil {
    /// Creates a mock with an empty capture list.
    pub fn new() -> MockProvenanceUtil {
        MockProvenanceUtil {
            captured_attributes: RefCell::new(vec![]),
        }
    }
    // Appends an attribute to the capture log.
    fn add_attribute(&self, attribute: PayableScopeAttribute) {
        self.captured_attributes.borrow_mut().push(attribute);
    }
}
// Delegates all real work to `ProvenanceUtilImpl`, capturing a copy of each
// written attribute first so tests can inspect it.
impl ProvenanceUtil for MockProvenanceUtil {
    fn get_scope_by_id(
        &self,
        querier: &QuerierWrapper<ProvenanceQuery>,
        scope_id: impl Into<String>,
    ) -> StdResult<Scope> {
        // Pure read: no capture needed, straight pass-through.
        ProvenanceUtilImpl.get_scope_by_id(querier, scope_id)
    }
    fn get_add_initial_attribute_to_scope_msg(
        &self,
        deps: &Deps<ProvenanceQuery>,
        attribute: &PayableScopeAttribute,
        contract_name: impl Into<String>,
    ) -> Result<CosmosMsg<ProvenanceMsg>, ContractError> {
        // Record the attribute, then delegate to the real implementation.
        self.add_attribute(attribute.clone());
        ProvenanceUtilImpl.get_add_initial_attribute_to_scope_msg(deps, attribute, contract_name)
    }
    fn upsert_attribute_to_scope(
        &self,
        attribute: &PayableScopeAttribute,
        contract_name: impl Into<String>,
    ) -> Result<WriteAttributeMessages, ContractError> {
        // Record the attribute, then delegate to the real implementation.
        self.add_attribute(attribute.clone());
        ProvenanceUtilImpl.upsert_attribute_to_scope(attribute, contract_name)
    }
}
impl MockProvenanceUtil {
    /// Re-mocks the most recently captured attribute onto `deps` under the
    /// default contract name; no-op if nothing was captured yet.
    pub fn bind_captured_attribute(&self, deps: &mut MockOwnedDeps) {
        if let Some(attr) = self.captured_attributes.borrow().last() {
            mock_default_scope_attribute(deps, attr);
        }
    }
    /// Like `bind_captured_attribute` but mocks under an explicit contract
    /// name.
    pub fn bind_captured_attribute_named(
        &self,
        deps: &mut MockOwnedDeps,
        contract_name: impl Into<String>,
    ) {
        if let Some(attr) = self.captured_attributes.borrow().last() {
            mock_scope_attribute(deps, contract_name, attr);
        }
    }
    /// Asserts that the most recently captured attribute equals `attribute`.
    ///
    /// Panics if nothing was ever captured or the values differ.
    pub fn assert_attribute_matches_latest(&self, attribute: &PayableScopeAttribute) {
        if let Some(attr) = self.captured_attributes.borrow().last() {
            assert_eq!(
                attribute,
                attr,
                "the latest attribute captured via MockProvenanceUtil is not equivalent to the checked value",
            );
        } else {
            panic!("no attributes have ever been captured by MockProvenanceUtil");
        }
    }
}
| 34.573171 | 110 | 0.675485 |
d5721c24003a7083a80a9c2e3fb4b0f0b23c12b2 | 207 | #![feature(async_await)]
use heim_common::prelude::*;
use heim_host as host;
#[runtime::main]
async fn main() -> Result<()> {
    // Query host platform information and dump it via dbg! for inspection.
    let platform = host::platform().await?;
    dbg!(platform);
    Ok(())
}
| 14.785714 | 43 | 0.608696 |
6285822570f12fe3d3353bac478210697ae887c8 | 1,619 | // Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
mod check_leader;
mod cleanup;
mod cleanup_sst;
mod compact;
mod consistency_check;
mod metrics;
mod pd;
mod query_stats;
mod raftlog_fetch;
mod raftlog_gc;
mod read;
mod refresh_config;
mod region;
mod split_check;
mod split_config;
mod split_controller;
pub use self::check_leader::{Runner as CheckLeaderRunner, Task as CheckLeaderTask};
pub use self::cleanup::{Runner as CleanupRunner, Task as CleanupTask};
pub use self::cleanup_sst::{Runner as CleanupSSTRunner, Task as CleanupSSTTask};
pub use self::compact::{Runner as CompactRunner, Task as CompactTask};
pub use self::consistency_check::{Runner as ConsistencyCheckRunner, Task as ConsistencyCheckTask};
pub use self::pd::{
FlowStatistics, FlowStatsReporter, HeartbeatTask, Runner as PdRunner, Task as PdTask,
};
pub use self::query_stats::QueryStats;
pub use self::raftlog_fetch::{Runner as RaftlogFetchRunner, Task as RaftlogFetchTask};
pub use self::raftlog_gc::{Runner as RaftlogGcRunner, Task as RaftlogGcTask};
pub use self::read::{LocalReader, Progress as ReadProgress, ReadDelegate, ReadExecutor, TrackVer};
pub use self::refresh_config::{
BatchComponent as RaftStoreBatchComponent, Runner as RefreshConfigRunner,
Task as RefreshConfigTask,
};
pub use self::region::{Runner as RegionRunner, Task as RegionTask};
pub use self::split_check::{
Bucket, BucketRange, KeyEntry, Runner as SplitCheckRunner, Task as SplitCheckTask,
};
pub use self::split_config::{SplitConfig, SplitConfigManager};
pub use self::split_controller::{AutoSplitController, ReadStats, WriteStats};
| 38.547619 | 98 | 0.792464 |
e95a978faad218a1bba254d03f33f4d91c528a64 | 1,802 | use bumpalo::Bump;
use rhizome::Node;
use tap::Pipe;
// Minimal component taking only an eager (constructor) parameter.
asteracea::component! {
	Bound(
		priv _early: usize,
	)() []
}
// Component whose constructor always panics; used behind `if {false}` below
// to prove lazily-bound children are not constructed eagerly.
asteracea::component! {
	Never
	#[allow(unreachable_code)]
	(
		priv _early: usize,
	)()
	new with { unreachable!(); }
	[]
}
// Exercises anonymous `bind` (plus boxed and conditional variants).
asteracea::component! {
	Binder()(
		late: usize = 1,
	) -> Sync
	[
		bind <*Bound *_early = {late}>
		spread if {false} bind <*Never *_early = {late}>
		spread if {false} bind box <*Binder .late = {late}>
	]
}
// Same as `Binder` but with `bind move` semantics.
asteracea::component! {
	BinderMover()(
		late: usize = 1,
	) -> Sync
	[
		bind move <*Bound *_early = {late}>
		spread if {false} bind move <*Never *_early = {late}>
		spread if {false} bind move box <*BinderMover .late = {late}>
	]
}
// Exercises a *named* bound child (generates the `NamedBound` storage type).
asteracea::component! {
	Named()(
		late: usize = 1,
	) -> Sync
	bind priv bound: struct NamedBound <*Bound *_early = {late}>
}
// Named variant with `move` semantics.
asteracea::component! {
	NamedMoved()(
		late: usize = 1,
	) -> Sync
	bind priv bound: struct NamedMovedBound move <*Bound *_early = {late}>
}
// Smoke test: each binding variant can be constructed and rendered without
// panicking (the `Never` children must not be constructed, or `unreachable!`
// would fire).
#[test]
fn bind() {
	let root = std::sync::Arc::new(Node::new_for::<()>());
	let component = Binder::new(&root, Binder::new_args_builder().build()).unwrap();
	let bump = Bump::new();
	let _vdom = Box::pin(component)
		.as_ref()
		.render(&bump, Binder::render_args_builder().build())
		.unwrap();
	BinderMover::new(&root, BinderMover::new_args_builder().build())
		.unwrap()
		.pipe(Box::pin)
		.as_ref()
		.render(&bump, BinderMover::render_args_builder().build())
		.unwrap();
	Named::new(&root, Named::new_args_builder().build())
		.unwrap()
		.pipe(Box::pin)
		.as_ref()
		.render(&bump, Named::render_args_builder().build())
		.unwrap();
	NamedMoved::new(&root, NamedMoved::new_args_builder().build())
		.unwrap()
		.pipe(Box::pin)
		.as_ref()
		.render(&bump, NamedMoved::render_args_builder().build())
		.unwrap();
}
| 18.770833 | 81 | 0.628746 |
fe1b4a4428e0273779a7092d9e65269669982972 | 8,780 | use crate::mutators::either::Either;
use crate::traits::{CompatibleWithSensor, CorpusDelta, Pool, Sensor, TestCase};
use owo_colors::OwoColorize;
use std::fmt::Display;
use std::path::PathBuf;
/// Maximum number of artifacts stored per (error, complexity) bucket.
const NBR_ARTIFACTS_PER_ERROR_AND_CPLX: usize = 8;
/// Set by the instrumented test when a failure occurs and read back in
/// `TestFailureSensor::stop_recording`.
/// NOTE(review): `static mut` access is only sound if recording never runs
/// concurrently — TODO confirm the fuzz loop is single-threaded here.
pub(crate) static mut TEST_FAILURE: Option<TestFailure> = None;
/// One observed test failure: a human-readable description plus a numeric id
/// used to group identical failures.
#[derive(Debug, Clone)]
pub struct TestFailure {
    pub display: String,
    pub id: u64,
}
/// Sensor that snapshots the `TEST_FAILURE` global around each test run.
#[derive(Default)]
pub struct TestFailureSensor {
    error: Option<TestFailure>,
}
impl Sensor for TestFailureSensor {
    type ObservationHandler<'a> = &'a mut Option<TestFailure>;
    /// Clears both the local copy and the global before the test runs.
    #[no_coverage]
    fn start_recording(&mut self) {
        self.error = None;
        unsafe {
            // NOTE(review): assumes no other thread touches TEST_FAILURE
            // while recording — confirm before adding parallelism.
            TEST_FAILURE = None;
        }
    }
    /// Snapshots whatever failure (if any) the test stored in the global.
    #[no_coverage]
    fn stop_recording(&mut self) {
        unsafe {
            self.error = TEST_FAILURE.clone();
        }
    }
    /// Hands the captured failure to `handler`, leaving `None` behind.
    #[no_coverage]
    fn iterate_over_observations(&mut self, handler: Self::ObservationHandler<'_>) {
        *handler = std::mem::take(&mut self.error);
    }
}
/// Pool statistics shown in the fuzzer's status line: number of distinct
/// failures collected so far.
#[derive(Clone, Copy, Default)]
pub(crate) struct Stats {
    count: usize,
}
/// Renders as `artifacts(N)`, colored red once at least one artifact exists.
impl Display for Stats {
    #[no_coverage]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.count == 0 {
            // Plain output; writing the arguments directly avoids the
            // pointless intermediate String the original allocated
            // (clippy::useless_format).
            write!(f, "artifacts({})", self.count)
        } else {
            // The intermediate String is required here so that
            // `OwoColorize::red` can wrap the fully-formatted text.
            write!(f, "{}", format!("artifacts({})", self.count).red())
        }
    }
}
/// Wrapper around one stored test case.
#[derive(Clone)]
pub(crate) struct Input<T> {
    data: T,
}
// All inputs collected for one distinct error, grouped by complexity.
// NOTE(review): the type name has a typo ("Arftifact"); renaming it would
// touch every use site, so it is kept as-is here.
struct ArftifactList<T> {
    error: TestFailure,
    inputs: Vec<ArtifactListForError<T>>,
}
// One complexity bucket within an error's artifact list.
struct ArtifactListForError<T> {
    cplx: f64,
    inputs: Vec<Input<T>>,
}
/// Corpus pool that keeps one artifact list per distinct test failure,
/// bucketed by input complexity.
pub(crate) struct ArtifactsPool<T> {
    // First path component for artifacts written to disk.
    name: String,
    inputs: Vec<ArftifactList<T>>,
    rng: fastrand::Rng,
}
impl<T> ArtifactsPool<T> {
    /// Creates an empty pool; `name` becomes the leading component of each
    /// saved artifact's path.
    #[no_coverage]
    pub(crate) fn new(name: &str) -> Self {
        Self {
            name: name.to_string(),
            inputs: vec![],
            rng: fastrand::Rng::new(),
        }
    }
}
impl<T: TestCase> Pool for ArtifactsPool<T> {
    type TestCase = T;
    /// (error index, complexity-bucket index, input index) into the nested
    /// artifact lists.
    type Index = (usize, usize, usize);
    type Stats = Stats;
    #[no_coverage]
    fn stats(&self) -> Self::Stats {
        Stats {
            count: self.inputs.len(),
        }
    }
    /// Number of distinct errors (not total inputs) held by the pool.
    #[no_coverage]
    fn len(&self) -> usize {
        self.inputs.len()
    }
    /// Picks a random artifact: a random error, then that error's
    /// lowest-complexity bucket, then a random input inside it.
    #[no_coverage]
    fn get_random_index(&mut self) -> Option<Self::Index> {
        if self.inputs.is_empty() {
            return None;
        }
        let error_choice = self.rng.usize(0..self.inputs.len());
        let list_for_error = &self.inputs[error_choice];
        // NOTE(review): assumes every error list has at least one complexity
        // bucket (true for buckets created in `process`); an empty list
        // would underflow here — TODO confirm no path empties it.
        let complexity_choice = list_for_error.inputs.len() - 1;
        let least_complexity = &list_for_error.inputs[complexity_choice];
        if least_complexity.inputs.is_empty() {
            return None;
        }
        let input_choice = self.rng.usize(0..least_complexity.inputs.len());
        Some((error_choice, complexity_choice, input_choice))
    }
    #[no_coverage]
    fn get(&self, idx: Self::Index) -> &Self::TestCase {
        &self.inputs[idx.0].inputs[idx.1].inputs[idx.2].data
    }
    #[no_coverage]
    fn get_mut(&mut self, idx: Self::Index) -> &mut Self::TestCase {
        &mut self.inputs[idx.0].inputs[idx.1].inputs[idx.2].data
    }
    /// Returns the test case at `idx` only if it still belongs to the given
    /// `generation`; bounds are checked because the pool may have changed.
    #[no_coverage]
    fn retrieve_after_processing(&mut self, idx: Self::Index, generation: usize) -> Option<&mut Self::TestCase> {
        // `and_then` replaces the original `map(..).flatten()`
        // (clippy::map_flatten); behavior is identical.
        if let Some(input) = self.inputs[idx.0]
            .inputs
            .get_mut(idx.1)
            .and_then(|inputs| inputs.inputs.get_mut(idx.2))
        {
            if input.data.generation() == generation {
                Some(&mut input.data)
            } else {
                None
            }
        } else {
            None
        }
    }
    #[no_coverage]
    fn mark_test_case_as_dead_end(&mut self, idx: Self::Index) {
        self.inputs[idx.0].inputs[idx.1].inputs.remove(idx.2);
    }
    #[no_coverage]
    fn minify(
        &mut self,
        _target_len: usize,
        _event_handler: impl FnMut(CorpusDelta<&Self::TestCase, Self::Index>, Self::Stats) -> Result<(), std::io::Error>,
    ) -> Result<(), std::io::Error> {
        // TODO
        Ok(())
    }
}
// Decides whether a just-observed test failure is worth keeping and, if so,
// stores the input and reports a corpus delta.
impl<T> CompatibleWithSensor<TestFailureSensor> for ArtifactsPool<T>
where
    T: TestCase,
{
    #[no_coverage]
    fn process(
        &mut self,
        sensor: &mut TestFailureSensor,
        get_input_ref: crate::mutators::either::Either<Self::Index, &Self::TestCase>,
        clone_input: &impl Fn(&Self::TestCase) -> Self::TestCase,
        complexity: f64,
        mut event_handler: impl FnMut(CorpusDelta<&Self::TestCase, Self::Index>, Self::Stats) -> Result<(), std::io::Error>,
    ) -> Result<(), std::io::Error> {
        // Pull the (at most one) failure captured by the sensor.
        let mut error = None;
        sensor.iterate_over_observations(&mut error);
        // Where the new input would be stored, if it is interesting at all.
        enum PositionOfNewInput {
            NewError,
            ExistingErrorNewCplx(usize),
            ExistingErrorAndCplx(usize),
        }
        let mut is_interesting = None;
        if let Some(error) = error {
            if let Some(list_index) = self.inputs.iter().position(|xs| xs.error.id == error.id) {
                let list = &self.inputs[list_index];
                if let Some(least_complex) = list.inputs.last() {
                    if least_complex.cplx > complexity {
                        // Strictly simpler reproduction of a known error.
                        is_interesting = Some(PositionOfNewInput::ExistingErrorNewCplx(list_index));
                    } else if least_complex.cplx == complexity {
                        // Same complexity: keep only up to the per-bucket cap.
                        // NOTE(review): the extra `display` check appears to
                        // reject duplicates whose message matches an existing
                        // list — confirm intent, since the id already matched.
                        if least_complex.inputs.len() < NBR_ARTIFACTS_PER_ERROR_AND_CPLX
                            && self
                                .inputs
                                .iter()
                                .position(|xs| xs.error.display == error.display)
                                .is_none()
                        {
                            is_interesting = Some(PositionOfNewInput::ExistingErrorAndCplx(list_index));
                        }
                    }
                } else {
                    is_interesting = Some(PositionOfNewInput::ExistingErrorNewCplx(list_index));
                }
            } else {
                // a new error we haven't seen before
                is_interesting = Some(PositionOfNewInput::NewError);
            }
            if let Some(position) = is_interesting {
                // Obtain an owned copy of the offending input, whether it
                // came from inside the pool or from the caller.
                let data = match get_input_ref {
                    Either::Left(x) => {
                        let input = &self.inputs[x.0].inputs[x.1].inputs[x.2].data;
                        clone_input(input)
                    }
                    Either::Right(x) => clone_input(x),
                };
                let input = Input { data: data };
                // Artifact path: <pool name>/<error id>/<complexity>.
                let mut path = PathBuf::new();
                path.push(&self.name);
                path.push(format!("{}", error.id));
                path.push(format!("{:.4}", complexity));
                let new_index = match position {
                    PositionOfNewInput::NewError => {
                        self.inputs.push(ArftifactList {
                            error,
                            inputs: vec![ArtifactListForError {
                                cplx: complexity,
                                inputs: vec![input],
                            }],
                        });
                        (self.inputs.len() - 1, 0, 0)
                    }
                    PositionOfNewInput::ExistingErrorNewCplx(error_idx) => {
                        // TODO: handle event
                        self.inputs[error_idx].inputs.push(ArtifactListForError {
                            cplx: complexity,
                            inputs: vec![input],
                        });
                        (error_idx, self.inputs[error_idx].inputs.len() - 1, 0)
                    }
                    PositionOfNewInput::ExistingErrorAndCplx(error_idx) => {
                        // NOTE: the complexity must be the last one
                        // TODO: handle event
                        self.inputs[error_idx].inputs.last_mut().unwrap().inputs.push(input);
                        (
                            error_idx,
                            self.inputs[error_idx].inputs.len() - 1,
                            self.inputs[error_idx].inputs.last().unwrap().inputs.len() - 1,
                        )
                    }
                };
                // Report the addition to the caller (e.g. to write it out).
                let data = self.get(new_index);
                let delta = CorpusDelta {
                    path,
                    add: Some((data, new_index)),
                    remove: vec![],
                };
                event_handler(delta, self.stats())?;
            }
        }
        Ok(())
    }
}
| 32.279412 | 124 | 0.50672 |
e61058c2749083b616ae5676375bef2e2f9aff25 | 7,088 | use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::{snippet, snippet_with_macro_callsite};
use clippy_utils::ty::is_type_diagnostic_item;
use clippy_utils::{differing_macro_contexts, in_macro, match_def_path, match_qpath, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
declare_clippy_lint! {
/// **What it does:** Checks for usages of `Err(x)?`.
///
/// **Why is this bad?** The `?` operator is designed to allow calls that
/// can fail to be easily chained. For example, `foo()?.bar()` or
/// `foo(bar()?)`. Because `Err(x)?` can't be used that way (it will
/// always return), it is more clear to write `return Err(x)`.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn foo(fail: bool) -> Result<i32, String> {
/// if fail {
/// Err("failed")?;
/// }
/// Ok(0)
/// }
/// ```
/// Could be written:
///
/// ```rust
/// fn foo(fail: bool) -> Result<i32, String> {
/// if fail {
/// return Err("failed".into());
/// }
/// Ok(0)
/// }
/// ```
pub TRY_ERR,
style,
"return errors explicitly rather than hiding them behind a `?`"
}
declare_lint_pass!(TryErr => [TRY_ERR]);
impl<'tcx> LateLintPass<'tcx> for TryErr {
    /// Detects the desugared form of `Err(x)?` and suggests an explicit
    /// `return Err(...)` (wrapped in `Poll::Ready(..)` / `Poll::Ready(Some(..))`
    /// when the function returns a `Poll` type), adding `.into()` when the
    /// error needs a conversion.
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        // Looks for a structure like this:
        // match ::std::ops::Try::into_result(Err(5)) {
        //     ::std::result::Result::Err(err) =>
        //         #[allow(unreachable_code)]
        //         return ::std::ops::Try::from_error(::std::convert::From::from(err)),
        //     ::std::result::Result::Ok(val) =>
        //         #[allow(unreachable_code)]
        //         val,
        // };
        if_chain! {
            if !in_external_macro(cx.tcx.sess, expr.span);
            if let ExprKind::Match(ref match_arg, _, MatchSource::TryDesugar) = expr.kind;
            if let ExprKind::Call(ref match_fun, ref try_args) = match_arg.kind;
            if let ExprKind::Path(ref match_fun_path) = match_fun.kind;
            if matches!(match_fun_path, QPath::LangItem(LangItem::TryIntoResult, _));
            if let Some(ref try_arg) = try_args.get(0);
            if let ExprKind::Call(ref err_fun, ref err_args) = try_arg.kind;
            if let Some(ref err_arg) = err_args.get(0);
            if let ExprKind::Path(ref err_fun_path) = err_fun.kind;
            if match_qpath(err_fun_path, &paths::RESULT_ERR);
            if let Some(return_ty) = find_return_type(cx, &expr.kind);
            then {
                // Choose the wrapping for the suggestion based on whether the
                // function returns Result, Poll<Result>, or Poll<Option<Result>>.
                let prefix;
                let suffix;
                let err_ty;
                if let Some(ty) = result_error_type(cx, return_ty) {
                    prefix = "Err(";
                    suffix = ")";
                    err_ty = ty;
                } else if let Some(ty) = poll_result_error_type(cx, return_ty) {
                    prefix = "Poll::Ready(Err(";
                    suffix = "))";
                    err_ty = ty;
                } else if let Some(ty) = poll_option_result_error_type(cx, return_ty) {
                    prefix = "Poll::Ready(Some(Err(";
                    suffix = ")))";
                    err_ty = ty;
                } else {
                    return;
                };
                let expr_err_ty = cx.typeck_results().expr_ty(err_arg);
                // Pick the snippet source carefully when macros are involved so
                // the suggestion stays valid at the macro call site.
                let differing_contexts = differing_macro_contexts(expr.span, err_arg.span);
                let origin_snippet = if in_macro(expr.span) && in_macro(err_arg.span) && differing_contexts {
                    snippet(cx, err_arg.span.ctxt().outer_expn_data().call_site, "_")
                } else if err_arg.span.from_expansion() && !in_macro(expr.span) {
                    snippet_with_macro_callsite(cx, err_arg.span, "_")
                } else {
                    snippet(cx, err_arg.span, "_")
                };
                // `.into()` is only needed when the thrown error type differs
                // from the function's declared error type.
                let suggestion = if err_ty == expr_err_ty {
                    format!("return {}{}{}", prefix, origin_snippet, suffix)
                } else {
                    format!("return {}{}.into(){}", prefix, origin_snippet, suffix)
                };
                span_lint_and_sugg(
                    cx,
                    TRY_ERR,
                    expr.span,
                    "returning an `Err(_)` with the `?` operator",
                    "try this",
                    suggestion,
                    Applicability::MachineApplicable
                );
            }
        }
    }
}
/// Finds function return type by examining return expressions in match arms.
fn find_return_type<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx ExprKind<'_>) -> Option<Ty<'tcx>> {
    if let ExprKind::Match(_, ref arms, MatchSource::TryDesugar) = expr {
        // The first arm whose body is `return <expr>` gives the return type.
        return arms.iter().find_map(|arm| match arm.body.kind {
            ExprKind::Ret(Some(ref ret)) => Some(cx.typeck_results().expr_ty(ret)),
            _ => None,
        });
    }
    None
}
/// Extracts the error type from Result<T, E>.
fn result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
if_chain! {
if let ty::Adt(_, subst) = ty.kind();
if is_type_diagnostic_item(cx, ty, sym::result_type);
let err_ty = subst.type_at(1);
then {
Some(err_ty)
} else {
None
}
}
}
/// Extracts the error type from Poll<Result<T, E>>.
fn poll_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
if_chain! {
if let ty::Adt(def, subst) = ty.kind();
if match_def_path(cx, def.did, &paths::POLL);
let ready_ty = subst.type_at(0);
if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
if cx.tcx.is_diagnostic_item(sym::result_type, ready_def.did);
let err_ty = ready_subst.type_at(1);
then {
Some(err_ty)
} else {
None
}
}
}
/// Extracts the error type from Poll<Option<Result<T, E>>>.
fn poll_option_result_error_type<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
if_chain! {
if let ty::Adt(def, subst) = ty.kind();
if match_def_path(cx, def.did, &paths::POLL);
let ready_ty = subst.type_at(0);
if let ty::Adt(ready_def, ready_subst) = ready_ty.kind();
if cx.tcx.is_diagnostic_item(sym::option_type, ready_def.did);
let some_ty = ready_subst.type_at(0);
if let ty::Adt(some_def, some_subst) = some_ty.kind();
if cx.tcx.is_diagnostic_item(sym::result_type, some_def.did);
let err_ty = some_subst.type_at(1);
then {
Some(err_ty)
} else {
None
}
}
}
| 37.109948 | 109 | 0.537387 |
29622b4da80db6c738e4487af4b3fdd8c3749525 | 30,708 | // Copyright (C) 2021 Scott Lamb <slamb@slamb.org>
// SPDX-License-Identifier: MIT OR Apache-2.0
//! Proof-of-concept `.mp4` writer.
//!
//! This writes media data (`mdat`) to a stream, buffering parameters for a
//! `moov` atom at the end. This avoids the need to buffer the media data
//! (`mdat`) first or reserved a fixed size for the `moov`, but it will slow
//! playback, particularly when serving `.mp4` files remotely.
//!
//! For a more high-quality implementation, see [Moonfire NVR](https://github.com/scottlamb/moonfire-nvr).
//! It's better tested, places the `moov` atom at the start, can do HTTP range
//! serving for arbitrary time ranges, and supports standard and fragmented
//! `.mp4` files.
//!
//! See the BMFF spec, ISO/IEC 14496-12:2015:
//! https://github.com/scottlamb/moonfire-nvr/wiki/Standards-and-specifications
//! https://standards.iso.org/ittf/PubliclyAvailableStandards/c068960_ISO_IEC_14496-12_2015.zip
use anyhow::{anyhow, bail, Context, Error};
use bytes::{Buf, BufMut, BytesMut};
use futures::{Future, StreamExt};
use log::{debug, info, warn};
use retina::{
client::Transport,
codec::{AudioParameters, CodecItem, Parameters, VideoParameters},
};
use std::num::NonZeroU32;
use std::path::PathBuf;
use std::{convert::TryFrom, pin::Pin};
use std::{io::SeekFrom, sync::Arc};
use tokio::{
fs::File,
io::{AsyncSeek, AsyncSeekExt, AsyncWrite, AsyncWriteExt},
};
/// Command-line options for the `.mp4` recording subcommand.
#[derive(structopt::StructOpt)]
pub struct Opts {
    /// RTSP source (URL and credentials), shared with the other subcommands.
    #[structopt(flatten)]
    src: super::Source,
    /// Policy for handling the `rtptime` parameter normally seem in the `RTP-Info` header.
    /// One of `default`, `require`, `ignore`, `permissive`.
    #[structopt(default_value, long)]
    initial_timestamp: retina::client::InitialTimestampPolicy,
    /// Don't attempt to include video streams.
    #[structopt(long)]
    no_video: bool,
    /// Don't attempt to include audio streams.
    #[structopt(long)]
    no_audio: bool,
    /// Allow lost packets mid-stream without aborting.
    #[structopt(long)]
    allow_loss: bool,
    /// When to issue a `TEARDOWN` request: `auto`, `always`, or `never`.
    #[structopt(default_value, long)]
    teardown: retina::client::TeardownPolicy,
    /// Duration after which to exit automatically, in seconds.
    #[structopt(long, name = "secs")]
    duration: Option<u64>,
    /// The transport to use: `tcp` or `udp` (experimental).
    ///
    /// Note: `--allow-loss` is strongly recommended with `udp`.
    #[structopt(default_value, long)]
    transport: retina::client::Transport,
    /// Path to `.mp4` file to write.
    #[structopt(parse(try_from_str))]
    out: PathBuf,
}
/// Writes a box length for everything appended in the supplied scope.
macro_rules! write_box {
($buf:expr, $fourcc:expr, $b:block) => {{
let _: &mut BytesMut = $buf; // type-check.
let pos_start = $buf.len();
let fourcc: &[u8; 4] = $fourcc;
$buf.extend_from_slice(&[0, 0, 0, 0, fourcc[0], fourcc[1], fourcc[2], fourcc[3]]);
let r = {
$b;
};
let pos_end = $buf.len();
let len = pos_end.checked_sub(pos_start).unwrap();
$buf[pos_start..pos_start + 4].copy_from_slice(&u32::try_from(len)?.to_be_bytes()[..]);
r
}};
}
/// Writes all remaining bytes of `buf` to `writer`, looping until empty.
async fn write_all_buf<W: AsyncWrite + Unpin, B: Buf>(
    writer: &mut W,
    buf: &mut B,
) -> Result<(), Error> {
    // TODO: this doesn't use vectored I/O. Annoying.
    while buf.has_remaining() {
        writer.write_buf(buf).await?;
    }
    Ok(())
}
/// Writes `.mp4` data to a sink.
/// See module-level documentation for details.
/// Writes `.mp4` data to a sink.
/// See module-level documentation for details.
pub struct Mp4Writer<W: AsyncWrite + AsyncSeek + Send + Unpin> {
    // Byte offset where `mdat` payload begins (just past its header).
    mdat_start: u32,
    // Current write position inside `mdat`; backpatched into the box length
    // in `finish`.
    mdat_pos: u32,
    video_params: Option<Box<VideoParameters>>,
    audio_params: Option<Box<AudioParameters>>,
    allow_loss: bool,
    /// The (1-indexed) video sample (frame) number of each sync sample (random access point).
    video_sync_sample_nums: Vec<u32>,
    video_trak: TrakTracker,
    audio_trak: TrakTracker,
    inner: W,
}
/// Tracks the parts of a `trak` atom which are common between video and audio samples.
#[derive(Default)]
struct TrakTracker {
samples: u32,
next_pos: Option<u32>,
chunks: Vec<(u32, u32)>, // (1-based sample_number, byte_pos)
sizes: Vec<u32>,
/// The durations of samples in a run-length encoding form: (number of samples, duration).
/// This lags one sample behind calls to `add_sample` because each sample's duration
/// is calculated using the PTS of the following sample.
durations: Vec<(u32, u32)>,
last_pts: Option<i64>,
tot_duration: u64,
}
impl TrakTracker {
    /// Records one sample at byte position `pos` with the given size and PTS.
    ///
    /// Errors if RTP packets were lost mid-stream and `allow_loss` is false,
    /// or if a duration doesn't fit in `u32`. Panics if timestamps go
    /// backwards (`checked_sub(..).unwrap()`).
    fn add_sample(
        &mut self,
        pos: u32,
        size: u32,
        timestamp: retina::Timestamp,
        loss: u16,
        allow_loss: bool,
    ) -> Result<(), Error> {
        if self.samples > 0 && loss > 0 && !allow_loss {
            bail!("Lost {} RTP packets mid-stream", loss);
        }
        self.samples += 1;
        // Non-contiguous sample: start a new chunk at this position.
        if self.next_pos != Some(pos) {
            self.chunks.push((self.samples, pos));
        }
        self.sizes.push(size);
        self.next_pos = Some(pos + size);
        // The duration of the *previous* sample is now known (current PTS
        // minus previous PTS); fold it into the run-length encoding.
        if let Some(last_pts) = self.last_pts.replace(timestamp.timestamp()) {
            let duration = timestamp.timestamp().checked_sub(last_pts).unwrap();
            self.tot_duration += u64::try_from(duration).unwrap();
            let duration = u32::try_from(duration)?;
            match self.durations.last_mut() {
                Some((s, d)) if *d == duration => *s += 1,
                _ => self.durations.push((1, duration)),
            }
        }
        Ok(())
    }
    /// Flushes the final (unknown-duration) sample with duration 0.
    fn finish(&mut self) {
        if self.last_pts.is_some() {
            self.durations.push((1, 0));
        }
    }
    /// Estimates the sum of the variable-sized portions of the data.
    fn size_estimate(&self) -> usize {
        (self.durations.len() * 8) + // stts
        (self.chunks.len() * 12) + // stsc
        (self.sizes.len() * 4) + // stsz
        (self.chunks.len() * 4) // stco
    }
    /// Serializes the `stts`/`stsc`/`stsz`/`stco` boxes shared by the video
    /// and audio `stbl` atoms.
    fn write_common_stbl_parts(&self, buf: &mut BytesMut) -> Result<(), Error> {
        // TODO: add an edit list so the video and audio tracks are in sync.
        write_box!(buf, b"stts", {
            buf.put_u32(0);
            buf.put_u32(u32::try_from(self.durations.len())?);
            for (samples, duration) in &self.durations {
                buf.put_u32(*samples);
                buf.put_u32(*duration);
            }
        });
        write_box!(buf, b"stsc", {
            buf.put_u32(0); // version
            buf.put_u32(u32::try_from(self.chunks.len())?);
            let mut prev_sample_number = 1;
            let mut chunk_number = 1;
            if !self.chunks.is_empty() {
                // Each entry stores the samples-per-chunk for the *previous*
                // chunk, hence the one-entry lag and the trailing entry below.
                for &(sample_number, _pos) in &self.chunks[1..] {
                    buf.put_u32(chunk_number);
                    buf.put_u32(sample_number - prev_sample_number);
                    buf.put_u32(1); // sample_description_index
                    prev_sample_number = sample_number;
                    chunk_number += 1;
                }
                buf.put_u32(chunk_number);
                buf.put_u32(self.samples + 1 - prev_sample_number);
                buf.put_u32(1); // sample_description_index
            }
        });
        write_box!(buf, b"stsz", {
            buf.put_u32(0); // version
            buf.put_u32(0); // sample_size
            buf.put_u32(u32::try_from(self.sizes.len())?);
            for s in &self.sizes {
                buf.put_u32(*s);
            }
        });
        write_box!(buf, b"stco", {
            buf.put_u32(0); // version
            buf.put_u32(u32::try_from(self.chunks.len())?); // entry_count
            for &(_sample_number, pos) in &self.chunks {
                buf.put_u32(pos);
            }
        });
        Ok(())
    }
}
impl<W: AsyncWrite + AsyncSeek + Send + Unpin> Mp4Writer<W> {
    /// Creates a writer, immediately emitting the `ftyp` box and the start
    /// of an (initially zero-length) `mdat` box to `inner`. The `mdat`
    /// length is backpatched in `finish`.
    pub async fn new(
        video_params: Option<Box<VideoParameters>>,
        audio_params: Option<Box<AudioParameters>>,
        allow_loss: bool,
        mut inner: W,
    ) -> Result<Self, Error> {
        let mut buf = BytesMut::new();
        write_box!(&mut buf, b"ftyp", {
            buf.extend_from_slice(&[
                b'i', b's', b'o', b'm', // major_brand
                0, 0, 0, 0, // minor_version
                b'i', b's', b'o', b'm', // compatible_brands[0]
            ]);
        });
        // Placeholder `mdat` header; its size is patched later via a seek.
        buf.extend_from_slice(&b"\0\0\0\0mdat"[..]);
        let mdat_start = u32::try_from(buf.len())?;
        write_all_buf(&mut inner, &mut buf).await?;
        Ok(Mp4Writer {
            inner,
            video_params,
            audio_params,
            allow_loss,
            video_trak: TrakTracker::default(),
            audio_trak: TrakTracker::default(),
            video_sync_sample_nums: Vec::new(),
            mdat_start,
            mdat_pos: mdat_start,
        })
    }
    /// Finalizes the file: appends the `moov` atom (built from the tracked
    /// sample tables) after the media data, then seeks back to backpatch the
    /// `mdat` box length.
    pub async fn finish(mut self) -> Result<(), Error> {
        self.video_trak.finish();
        self.audio_trak.finish();
        // Rough capacity hint to avoid repeated reallocation while building
        // the (variable-sized) moov atom.
        let mut buf = BytesMut::with_capacity(
            1024 + self.video_trak.size_estimate()
                + self.audio_trak.size_estimate()
                + 4 * self.video_sync_sample_nums.len(),
        );
        write_box!(&mut buf, b"moov", {
            write_box!(&mut buf, b"mvhd", {
                buf.put_u32(1 << 24); // version
                buf.put_u64(0); // creation_time
                buf.put_u64(0); // modification_time
                buf.put_u32(90000); // timescale
                buf.put_u64(self.video_trak.tot_duration);
                buf.put_u32(0x00010000); // rate
                buf.put_u16(0x0100); // volume
                buf.put_u16(0); // reserved
                buf.put_u64(0); // reserved
                for v in &[0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000] {
                    buf.put_u32(*v); // matrix
                }
                for _ in 0..6 {
                    buf.put_u32(0); // pre_defined
                }
                buf.put_u32(2); // next_track_id
            });
            if let Some(p) = self.video_params.as_ref() {
                self.write_video_trak(&mut buf, p)?;
            }
            if let Some(p) = self.audio_params.as_ref() {
                self.write_audio_trak(&mut buf, p)?;
            }
        });
        write_all_buf(&mut self.inner, &mut buf.freeze()).await?;
        // Backpatch the mdat box size now that all media data is written.
        self.inner
            .seek(SeekFrom::Start(u64::from(self.mdat_start - 8)))
            .await?;
        self.inner
            .write_all(&u32::try_from(self.mdat_pos + 8 - self.mdat_start)?.to_be_bytes()[..])
            .await?;
        Ok(())
    }
    /// Serializes the video `trak` atom (track header, media header, handler,
    /// and sample tables including sync-sample list) into `buf`.
    fn write_video_trak(
        &self,
        buf: &mut BytesMut,
        parameters: &VideoParameters,
    ) -> Result<(), Error> {
        write_box!(buf, b"trak", {
            write_box!(buf, b"tkhd", {
                buf.put_u32((1 << 24) | 7); // version, flags
                buf.put_u64(0); // creation_time
                buf.put_u64(0); // modification_time
                buf.put_u32(1); // track_id
                buf.put_u32(0); // reserved
                buf.put_u64(self.video_trak.tot_duration);
                buf.put_u64(0); // reserved
                buf.put_u16(0); // layer
                buf.put_u16(0); // alternate_group
                buf.put_u16(0); // volume
                buf.put_u16(0); // reserved
                for v in &[0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000] {
                    buf.put_u32(*v); // matrix
                }
                // Track width/height are 16.16 fixed-point values.
                let dims = self
                    .video_params
                    .as_ref()
                    .map(|p| p.pixel_dimensions())
                    .unwrap_or((0, 0));
                let width = u32::from(u16::try_from(dims.0)?) << 16;
                let height = u32::from(u16::try_from(dims.1)?) << 16;
                buf.put_u32(width);
                buf.put_u32(height);
            });
            write_box!(buf, b"mdia", {
                write_box!(buf, b"mdhd", {
                    buf.put_u32(1 << 24); // version
                    buf.put_u64(0); // creation_time
                    buf.put_u64(0); // modification_time
                    buf.put_u32(90000); // timebase
                    buf.put_u64(self.video_trak.tot_duration);
                    buf.put_u32(0x55c40000); // language=und + pre-defined
                });
                write_box!(buf, b"hdlr", {
                    buf.extend_from_slice(&[
                        0x00, 0x00, 0x00, 0x00, // version + flags
                        0x00, 0x00, 0x00, 0x00, // pre_defined
                        b'v', b'i', b'd', b'e', // handler = vide
                        0x00, 0x00, 0x00, 0x00, // reserved[0]
                        0x00, 0x00, 0x00, 0x00, // reserved[1]
                        0x00, 0x00, 0x00, 0x00, // reserved[2]
                        0x00, // name, zero-terminated (empty)
                    ]);
                });
                write_box!(buf, b"minf", {
                    write_box!(buf, b"vmhd", {
                        buf.put_u32(1);
                        buf.put_u64(0);
                    });
                    write_box!(buf, b"dinf", {
                        write_box!(buf, b"dref", {
                            buf.put_u32(0);
                            buf.put_u32(1); // entry_count
                            write_box!(buf, b"url ", {
                                buf.put_u32(1); // version, flags=self-contained
                            });
                        });
                    });
                    write_box!(buf, b"stbl", {
                        write_box!(buf, b"stsd", {
                            buf.put_u32(0); // version
                            buf.put_u32(1); // entry_count
                            self.write_video_sample_entry(buf, parameters)?;
                        });
                        self.video_trak.write_common_stbl_parts(buf)?;
                        // Sync-sample (random access point) table.
                        write_box!(buf, b"stss", {
                            buf.put_u32(0); // version
                            buf.put_u32(u32::try_from(self.video_sync_sample_nums.len())?);
                            for n in &self.video_sync_sample_nums {
                                buf.put_u32(*n);
                            }
                        });
                    });
                });
            });
        });
        Ok(())
    }
/// Appends the audio `trak` box (track header, media metadata, and sample
/// tables) for all recorded audio samples to `buf`.
fn write_audio_trak(
    &self,
    buf: &mut BytesMut,
    parameters: &AudioParameters,
) -> Result<(), Error> {
    write_box!(buf, b"trak", {
        write_box!(buf, b"tkhd", {
            buf.put_u32((1 << 24) | 7); // version, flags
            buf.put_u64(0); // creation_time
            buf.put_u64(0); // modification_time
            buf.put_u32(2); // track_id
            buf.put_u32(0); // reserved
            buf.put_u64(self.audio_trak.tot_duration);
            buf.put_u64(0); // reserved
            buf.put_u16(0); // layer
            buf.put_u16(0); // alternate_group
            buf.put_u16(0); // volume
            buf.put_u16(0); // reserved
            for v in &[0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000] {
                buf.put_u32(*v); // matrix
            }
            buf.put_u32(0); // width
            buf.put_u32(0); // height
        });
        write_box!(buf, b"mdia", {
            write_box!(buf, b"mdhd", {
                buf.put_u32(1 << 24); // version
                buf.put_u64(0); // creation_time
                buf.put_u64(0); // modification_time
                // Unlike video (fixed 90 kHz), the audio timescale comes from
                // the stream's clock rate.
                buf.put_u32(parameters.clock_rate());
                buf.put_u64(self.audio_trak.tot_duration);
                buf.put_u32(0x55c40000); // language=und + pre-defined
            });
            write_box!(buf, b"hdlr", {
                buf.extend_from_slice(&[
                    0x00, 0x00, 0x00, 0x00, // version + flags
                    0x00, 0x00, 0x00, 0x00, // pre_defined
                    b's', b'o', b'u', b'n', // handler = soun
                    0x00, 0x00, 0x00, 0x00, // reserved[0]
                    0x00, 0x00, 0x00, 0x00, // reserved[1]
                    0x00, 0x00, 0x00, 0x00, // reserved[2]
                    0x00, // name, zero-terminated (empty)
                ]);
            });
            write_box!(buf, b"minf", {
                write_box!(buf, b"smhd", {
                    buf.extend_from_slice(&[
                        0x00, 0x00, 0x00, 0x00, // version + flags
                        0x00, 0x00, // balance
                        0x00, 0x00, // reserved
                    ]);
                });
                write_box!(buf, b"dinf", {
                    write_box!(buf, b"dref", {
                        buf.put_u32(0);
                        buf.put_u32(1); // entry_count
                        write_box!(buf, b"url ", {
                            buf.put_u32(1); // version, flags=self-contained
                        });
                    });
                });
                write_box!(buf, b"stbl", {
                    write_box!(buf, b"stsd", {
                        buf.put_u32(0); // version
                        buf.put_u32(1); // entry_count
                        buf.extend_from_slice(
                            &parameters
                                .sample_entry()
                                .expect("all added streams have sample entries")[..],
                        );
                    });
                    self.audio_trak.write_common_stbl_parts(buf)?;

                    // AAC requires two samples (really, each is a set of 960 or 1024 samples)
                    // to decode accurately. See
                    // https://developer.apple.com/library/archive/documentation/QuickTime/QTFF/QTFFAppenG/QTFFAppenG.html .
                    write_box!(buf, b"sgpd", {
                        // BMFF section 8.9.3: SampleGroupDescriptionBox
                        buf.put_u32(0); // version
                        buf.extend_from_slice(b"roll"); // grouping type
                        buf.put_u32(1); // entry_count
                        // BMFF section 10.1: AudioRollRecoveryEntry
                        buf.put_i16(-1); // roll_distance
                    });
                    write_box!(buf, b"sbgp", {
                        // BMFF section 8.9.2: SampleToGroupBox
                        buf.put_u32(0); // version
                        buf.extend_from_slice(b"roll"); // grouping type
                        buf.put_u32(1); // entry_count
                        buf.put_u32(self.audio_trak.samples);
                        buf.put_u32(1); // group_description_index
                    });
                });
            });
        });
    });
    Ok(())
}
/// Writes the `avc1` visual sample entry (plus the `avcC` decoder configuration
/// taken verbatim from the stream's extra data) into the `stsd` box.
fn write_video_sample_entry(
    &self,
    buf: &mut BytesMut,
    parameters: &VideoParameters,
) -> Result<(), Error> {
    // TODO: this should move to client::VideoParameters::sample_entry() or some such.
    write_box!(buf, b"avc1", {
        buf.put_u32(0);
        buf.put_u32(1); // data_reference_index = 1
        buf.extend_from_slice(&[0; 16]);
        // Pixel dimensions must fit in u16 per the sample entry layout.
        buf.put_u16(u16::try_from(parameters.pixel_dimensions().0)?);
        buf.put_u16(u16::try_from(parameters.pixel_dimensions().1)?);
        buf.extend_from_slice(&[
            0x00, 0x48, 0x00, 0x00, // horizresolution
            0x00, 0x48, 0x00, 0x00, // vertresolution
            0x00, 0x00, 0x00, 0x00, // reserved
            0x00, 0x01, // frame count
            0x00, 0x00, 0x00, 0x00, // compressorname
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x00, 0x00, 0x00, //
            0x00, 0x18, 0xff, 0xff, // depth + pre_defined
        ]);
        write_box!(buf, b"avcC", {
            buf.extend_from_slice(parameters.extra_data());
        });
    });
    Ok(())
}
/// Handles one video frame: records its sample metadata in the video track,
/// advances the `mdat` position, and appends the frame payload to the file.
async fn video(&mut self, mut frame: retina::codec::VideoFrame) -> Result<(), Error> {
    println!(
        "{}: {}-byte video frame",
        &frame.timestamp,
        frame.data().remaining(),
    );
    if let Some(p) = frame.new_parameters.take() {
        // A parameter change after samples were written would need a second
        // sample entry in the moov; that's unimplemented, so fail loudly.
        if self.video_trak.samples > 0 {
            let old = self.video_params.as_ref().unwrap();
            bail!(
                "video parameters change unimplemented.\nold: {:#?}\nnew: {:#?}",
                old,
                p
            );
        }
        self.video_params = Some(p);
    } else if self.video_params.is_none() {
        // Frames before the first parameter set can't be described in the moov.
        debug!("Discarding video frame received before parameters");
        return Ok(());
    }
    let size = u32::try_from(frame.data().remaining())?;
    self.video_trak.add_sample(
        self.mdat_pos,
        size,
        frame.timestamp,
        frame.loss,
        self.allow_loss,
    )?;
    // Keep mdat_pos in sync with the bytes written below.
    self.mdat_pos = self
        .mdat_pos
        .checked_add(size)
        .ok_or_else(|| anyhow!("mdat_pos overflow"))?;
    if frame.is_random_access_point {
        // NOTE(review): assumes `samples` already counts this frame (i.e. it was
        // incremented by add_sample above) and sample numbers are 1-based — confirm
        // against TrakTracker::add_sample.
        self.video_sync_sample_nums
            .push(u32::try_from(self.video_trak.samples)?);
    }
    let mut data = frame.into_data();
    write_all_buf(&mut self.inner, &mut data).await?;
    Ok(())
}
/// Handles one audio frame: records its sample metadata in the audio track,
/// advances the `mdat` position, and appends the frame payload to the file.
async fn audio(&mut self, mut frame: retina::codec::AudioFrame) -> Result<(), Error> {
    println!(
        "{}: {}-byte audio frame",
        &frame.timestamp,
        frame.data.remaining()
    );
    let size = u32::try_from(frame.data.remaining())?;
    self.audio_trak.add_sample(
        self.mdat_pos,
        size,
        frame.timestamp,
        frame.loss,
        self.allow_loss,
    )?;
    // Keep mdat_pos in sync with the bytes written below.
    self.mdat_pos = self
        .mdat_pos
        .checked_add(size)
        .ok_or_else(|| anyhow!("mdat_pos overflow"))?;
    write_all_buf(&mut self.inner, &mut frame.data).await?;
    Ok(())
}
}
/// Copies packets from `session` to `mp4` without handling any cleanup on error.
///
/// Runs until the session ends (EOF → error), a Ctrl-C arrives via
/// `stop_signal`, or the optional `--duration` timer elapses.
async fn copy<'a>(
    opts: &'a Opts,
    session: &'a mut retina::client::Demuxed,
    stop_signal: Pin<Box<dyn Future<Output = Result<(), std::io::Error>>>>,
    mp4: &'a mut Mp4Writer<File>,
) -> Result<(), Error> {
    // With no --duration, use a never-completing future so select! ignores it.
    let sleep = match opts.duration {
        Some(secs) => {
            futures::future::Either::Left(tokio::time::sleep(std::time::Duration::from_secs(secs)))
        }
        None => futures::future::Either::Right(futures::future::pending()),
    };
    tokio::pin!(stop_signal);
    tokio::pin!(sleep);
    loop {
        tokio::select! {
            pkt = session.next() => {
                match pkt.ok_or_else(|| anyhow!("EOF"))?? {
                    CodecItem::VideoFrame(f) => {
                        // Capture the context before `f` is moved into mp4.video().
                        let start_ctx = f.start_ctx();
                        mp4.video(f).await.with_context(
                            || format!("Error processing video frame starting with {}", start_ctx))?;
                    },
                    CodecItem::AudioFrame(f) => {
                        let ctx = f.ctx;
                        mp4.audio(f).await.with_context(
                            || format!("Error processing audio frame, {}", ctx))?;
                    },
                    CodecItem::SenderReport(sr) => {
                        println!("{}: SR ts={}", sr.timestamp, sr.ntp_timestamp);
                    },
                    _ => continue,
                };
            },
            _ = &mut stop_signal => {
                info!("Stopping due to signal");
                break;
            },
            _ = &mut sleep => {
                info!("Stopping after {} seconds", opts.duration.unwrap());
                break;
            },
        }
    }
    Ok(())
}
/// Writes the `.mp4`, including trying to finish or clean up the file.
///
/// On a copy error the partial recording is still finished and moved into
/// place when possible (best effort, failures only logged), and the original
/// copy error is returned. On a successful copy, finish/rename failures are
/// returned to the caller.
async fn write_mp4<'a>(
    opts: &'a Opts,
    session: retina::client::Session<retina::client::Described>,
    video_params: Option<Box<VideoParameters>>,
    audio_params: Option<Box<AudioParameters>>,
    stop_signal: Pin<Box<dyn Future<Output = Result<(), std::io::Error>>>>,
) -> Result<(), Error> {
    let mut session = session
        .play(
            retina::client::PlayOptions::default()
                .initial_timestamp(opts.initial_timestamp)
                .enforce_timestamps_with_max_jump_secs(NonZeroU32::new(10).unwrap()),
        )
        .await?
        .demuxed()?;

    // Append into a filename suffixed with ".partial", then try to either rename it into
    // place if it's complete or delete it otherwise.
    const PARTIAL_SUFFIX: &str = ".partial";
    let mut tmp_filename = opts.out.as_os_str().to_owned();
    tmp_filename.push(PARTIAL_SUFFIX); // OsString::push doesn't put in a '/', unlike PathBuf::.
    let tmp_filename: PathBuf = tmp_filename.into();
    let out = tokio::fs::File::create(&tmp_filename).await?;
    let mut mp4 = Mp4Writer::new(video_params, audio_params, opts.allow_loss, out).await?;
    let result = copy(opts, &mut session, stop_signal, &mut mp4).await;
    if let Err(e) = result {
        // Log errors about finishing, returning the original error.
        if let Err(e) = mp4.finish().await {
            log::error!(".mp4 finish failed: {}", e);
            if let Err(e) = tokio::fs::remove_file(&tmp_filename).await {
                log::error!("and removing .mp4 failed too: {}", e);
            }
        } else {
            if let Err(e) = tokio::fs::rename(&tmp_filename, &opts.out).await {
                log::error!("unable to move completed .mp4 into place: {}", e);
            }
        }
        Err(e)
    } else {
        // Directly return errors about finishing.
        if let Err(e) = mp4.finish().await {
            log::error!(".mp4 finish failed: {}", e);
            if let Err(e) = tokio::fs::remove_file(&tmp_filename).await {
                log::error!("and removing .mp4 failed too: {}", e);
            }
            Err(e)
        } else {
            tokio::fs::rename(&tmp_filename, &opts.out).await?;
            Ok(())
        }
    }
}
/// Entry point: describes the RTSP source, selects at most one supported video
/// (h264) and one supported audio stream, records to `.mp4`, and waits for the
/// session's TEARDOWN to complete before returning.
pub async fn run(opts: Opts) -> Result<(), Error> {
    if matches!(opts.transport, Transport::Udp) && !opts.allow_loss {
        warn!("Using --transport=udp without strongly recommended --allow-loss!");
    }

    let creds = super::creds(opts.src.username.clone(), opts.src.password.clone());
    let stop_signal = Box::pin(tokio::signal::ctrl_c());
    let session_group = Arc::new(retina::client::SessionGroup::default());
    let mut session = retina::client::Session::describe(
        opts.src.url.clone(),
        retina::client::SessionOptions::default()
            .creds(creds)
            .session_group(session_group.clone())
            .user_agent("Retina mp4 example".to_owned())
            .transport(opts.transport)
            .teardown(opts.teardown),
    )
    .await?;

    // Pick the first h264 video stream, unless --no-video was given.
    let (video_stream_i, video_params) = if !opts.no_video {
        let s = session.streams().iter().enumerate().find_map(|(i, s)| {
            if s.media == "video" {
                if s.encoding_name == "h264" {
                    log::info!("Using h264 video stream");
                    return Some((
                        i,
                        match s.parameters() {
                            Some(Parameters::Video(v)) => Some(Box::new(v)),
                            Some(_) => panic!("expected parameters to match stream type video"),
                            None => None,
                        },
                    ));
                }
                log::info!(
                    "Ignoring {} video stream because it's unsupported",
                    &s.encoding_name
                );
            }
            None
        });
        if let Some((i, p)) = s {
            (Some(i), p)
        } else {
            log::info!("No suitable video stream found");
            (None, None)
        }
    } else {
        log::info!("Ignoring video streams (if any) because of --no-video");
        (None, None)
    };
    if let Some(i) = video_stream_i {
        session.setup(i).await?;
    }

    // Pick the first audio stream whose codec can go into a .mp4 sample entry.
    let audio_stream = if !opts.no_audio {
        let s = session
            .streams()
            .iter()
            .enumerate()
            .find_map(|(i, s)| match s.parameters() {
                // Only consider audio streams that can produce a .mp4 sample
                // entry.
                Some(retina::codec::Parameters::Audio(a)) if a.sample_entry().is_some() => {
                    log::info!("Using {} audio stream (rfc 6381 codec {})", &s.encoding_name, a.rfc6381_codec().unwrap());
                    Some((i, Box::new(a.clone())))
                }
                _ if s.media == "audio" => {
                    log::info!("Ignoring {} audio stream because it can't be placed into a .mp4 file without transcoding", &s.encoding_name);
                    None
                }
                _ => None,
            });
        if s.is_none() {
            log::info!("No suitable audio stream found");
        }
        s
    } else {
        log::info!("Ignoring audio streams (if any) because of --no-audio");
        None
    };
    if let Some((i, _)) = audio_stream {
        session.setup(i).await?;
    }
    if video_stream_i.is_none() && audio_stream.is_none() {
        bail!("Exiting because no video or audio stream was selected; see info log messages above");
    }
    let result = write_mp4(
        &opts,
        session,
        video_params,
        audio_stream.map(|(_i, p)| p),
        stop_signal,
    )
    .await;

    // Session has now been dropped, on success or failure. A TEARDOWN should
    // be pending if necessary. session_group.await_teardown() will wait for it.
    if let Err(e) = session_group.await_teardown().await {
        log::error!("TEARDOWN failed: {}", e);
    }
    result
}
| 38.821745 | 141 | 0.491761 |
ab46778a172aaa6c5692f3ce349f2e942c247f81 | 14,476 | use crate::{
api::inject_endpoints, db::DatabasePeer, with_admin_session, AdminSession, Context, ServerError,
};
use shared::PeerContents;
use warp::{
http::{response::Response, StatusCode},
Filter,
};
use wgctrl::DeviceConfigBuilder;
pub mod routes {
    use crate::form_body;

    use super::*;

    /// Aggregates all `/peers` admin routes into a single filter.
    ///
    /// Fix: the previous version chained `list(context.clone())` twice; warp's
    /// `or` tries alternatives in order, so the duplicate filter could never
    /// match and only added overhead. The redundant registration is removed.
    pub fn all(
        context: Context,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        warp::path("peers").and(
            list(context.clone())
                .or(create(context.clone()))
                .or(update(context.clone()))
                .or(delete(context)),
        )
    }

    // POST /v1/admin/peers
    pub fn create(
        context: Context,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        warp::path::end()
            .and(warp::post())
            .and(form_body())
            .and(with_admin_session(context))
            .and_then(handlers::create)
    }

    // PUT /v1/admin/peers/:id
    pub fn update(
        context: Context,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        warp::path::param()
            .and(warp::path::end())
            .and(warp::put())
            .and(form_body())
            .and(with_admin_session(context))
            .and_then(handlers::update)
    }

    // GET /v1/admin/peers
    pub fn list(
        context: Context,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        warp::path::end()
            .and(warp::get())
            .and(with_admin_session(context))
            .and_then(handlers::list)
    }

    // DELETE /v1/admin/peers/:id
    pub fn delete(
        context: Context,
    ) -> impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone {
        warp::path::param()
            .and(warp::path::end())
            .and(warp::delete())
            .and(with_admin_session(context))
            .and_then(handlers::delete)
    }
}
mod handlers {
    use super::*;

    /// Creates a new peer row and (outside of tests) adds it to the running
    /// WireGuard interface. Responds 201 with the created peer as JSON.
    pub async fn create(
        form: PeerContents,
        session: AdminSession,
    ) -> Result<impl warp::Reply, warp::Rejection> {
        // The DB mutex guard is held for the duration of the handler.
        let conn = session.context.db.lock();
        let peer = DatabasePeer::create(&conn, form)?;
        log::info!("adding peer {}", &*peer);

        if cfg!(not(test)) {
            // Update the current WireGuard interface with the new peers.
            DeviceConfigBuilder::new()
                .add_peer((&*peer).into())
                .apply(&session.context.interface)
                .map_err(|_| ServerError::WireGuard)?;
            log::info!("updated WireGuard interface, adding {}", &*peer);
        }

        let response = Response::builder()
            .status(StatusCode::CREATED)
            .body(serde_json::to_string(&*peer).unwrap());

        Ok(response)
    }

    /// Updates an existing peer's contents; 204 on success.
    pub async fn update(
        id: i64,
        form: PeerContents,
        session: AdminSession,
    ) -> Result<impl warp::Reply, warp::Rejection> {
        let conn = session.context.db.lock();
        let mut peer = DatabasePeer::get(&conn, id)?;
        peer.update(&conn, form)?;

        Ok(StatusCode::NO_CONTENT)
    }

    /// List all peers, including disabled ones. This is an admin-only endpoint.
    pub async fn list(session: AdminSession) -> Result<impl warp::Reply, warp::Rejection> {
        let conn = session.context.db.lock();
        let mut peers = DatabasePeer::list(&conn)?
            .into_iter()
            .map(|peer| peer.inner)
            .collect::<Vec<_>>();
        // Fill in endpoint info the DB doesn't carry before serializing.
        inject_endpoints(&session, &mut peers);
        Ok(warp::reply::json(&peers))
    }

    /// Soft-deletes a peer (marks it disabled rather than removing the row);
    /// 204 on success.
    pub async fn delete(
        id: i64,
        session: AdminSession,
    ) -> Result<impl warp::Reply, warp::Rejection> {
        let conn = session.context.db.lock();
        DatabasePeer::disable(&conn, id)?;

        Ok(StatusCode::NO_CONTENT)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test;
use anyhow::Result;
use shared::Peer;
#[tokio::test]
async fn test_add_peer() -> Result<()> {
let server = test::Server::new()?;
let old_peers = DatabasePeer::list(&server.db().lock())?;
let peer = test::developer_peer_contents("developer3", "10.80.64.4")?;
let filter = crate::routes(server.context());
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::CREATED);
// The response contains the new peer information.
let peer_res: Peer = serde_json::from_slice(&res.body())?;
assert_eq!(peer, peer_res.contents);
// The number of peer entries in the database increased by 1.
let new_peers = DatabasePeer::list(&server.db().lock())?;
assert_eq!(old_peers.len() + 1, new_peers.len());
Ok(())
}
#[tokio::test]
async fn test_add_peer_with_invalid_name() -> Result<()> {
let server = test::Server::new()?;
let peer = test::developer_peer_contents("devel oper", "10.80.64.4")?;
let filter = crate::routes(server.context());
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
Ok(())
}
#[tokio::test]
async fn test_add_peer_with_duplicate_name() -> Result<()> {
let server = test::Server::new()?;
let old_peers = DatabasePeer::list(&server.db().lock())?;
// Try to add a peer with a name that is already taken.
let peer = test::developer_peer_contents("developer2", "10.80.64.4")?;
let filter = crate::routes(server.context());
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// The number of peer entries in the database should not change.
let new_peers = DatabasePeer::list(&server.db().lock())?;
assert_eq!(old_peers.len(), new_peers.len());
Ok(())
}
#[tokio::test]
async fn test_add_peer_with_duplicate_ip() -> Result<()> {
let server = test::Server::new()?;
let old_peers = DatabasePeer::list(&server.db().lock())?;
// Try to add a peer with an IP that is already taken.
let peer = test::developer_peer_contents("developer3", "10.80.64.3")?;
let filter = crate::routes(server.context());
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// The number of peer entries in the database should not change.
let new_peers = DatabasePeer::list(&server.db().lock())?;
assert_eq!(old_peers.len(), new_peers.len());
Ok(())
}
#[tokio::test]
async fn test_add_peer_with_outside_cidr_range_ip() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let old_peers = DatabasePeer::list(&server.db().lock())?;
// Try to add IP outside of the CIDR network.
let peer = test::developer_peer_contents("developer3", "10.80.65.4")?;
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// Try to use the network address as peer IP.
let peer = test::developer_peer_contents("developer3", "10.80.64.0")?;
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// Try to use the broadcast address as peer IP.
let peer = test::developer_peer_contents("developer3", "10.80.64.255")?;
let res = test::post_request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
// The number of peer entries in the database should not change.
let new_peers = DatabasePeer::list(&server.db().lock())?;
assert_eq!(old_peers.len(), new_peers.len());
Ok(())
}
#[tokio::test]
async fn test_add_peer_from_non_admin() -> Result<()> {
let server = test::Server::new()?;
let peer = test::developer_peer_contents("developer3", "10.80.64.4")?;
// Try to create a new developer peer from a user peer.
let filter = crate::routes(server.context());
let res = test::post_request_from_ip(test::USER1_PEER_IP)
.path("/v1/admin/peers")
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
Ok(())
}
#[tokio::test]
async fn test_update_peer_from_admin() -> Result<()> {
let server = test::Server::new()?;
let old_peer = DatabasePeer::get(&server.db.lock(), test::DEVELOPER1_PEER_ID)?;
let change = PeerContents {
name: "new-peer-name".to_string(),
..old_peer.contents.clone()
};
// Try to create a new developer peer from a user peer.
let filter = crate::routes(server.context());
let res = test::put_request_from_ip(test::ADMIN_PEER_IP)
.path(&format!("/v1/admin/peers/{}", test::DEVELOPER1_PEER_ID))
.body(serde_json::to_string(&change)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::NO_CONTENT);
let new_peer = DatabasePeer::get(&server.db.lock(), test::DEVELOPER1_PEER_ID)?;
assert_eq!(new_peer.name, "new-peer-name");
Ok(())
}
#[tokio::test]
async fn test_update_peer_from_non_admin() -> Result<()> {
let server = test::Server::new()?;
let peer = test::developer_peer_contents("developer3", "10.80.64.4")?;
// Try to create a new developer peer from a user peer.
let filter = crate::routes(server.context());
let res = test::put_request_from_ip(test::USER1_PEER_IP)
.path(&format!("/v1/admin/peers/{}", test::ADMIN_PEER_ID))
.body(serde_json::to_string(&peer)?)
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
Ok(())
}
#[tokio::test]
async fn test_list_all_peers_from_admin() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let res = test::request_from_ip(test::ADMIN_PEER_IP)
.path("/v1/admin/peers")
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::OK);
let peers: Vec<Peer> = serde_json::from_slice(&res.body())?;
let peer_names = peers.iter().map(|p| &p.contents.name).collect::<Vec<_>>();
// An admin peer should see all the peers.
assert_eq!(
&[
"innernet-server",
"admin",
"developer1",
"developer2",
"user1",
"user2"
],
&peer_names[..]
);
Ok(())
}
#[tokio::test]
async fn test_list_all_peers_from_non_admin() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let res = test::request_from_ip(test::DEVELOPER1_PEER_IP)
.path("/v1/admin/peers")
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
Ok(())
}
#[tokio::test]
async fn test_delete() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let old_peers = DatabasePeer::list(&server.db().lock())?;
let res = test::request_from_ip(test::ADMIN_PEER_IP)
.method("DELETE")
.path(&format!("/v1/admin/peers/{}", test::USER1_PEER_ID))
.reply(&filter)
.await;
assert!(res.status().is_success());
// The number of peer entries in the database decreased by 1.
let all_peers = DatabasePeer::list(&server.db().lock())?;
let new_peers = all_peers.iter().filter(|p| !p.is_disabled).count();
assert_eq!(old_peers.len() - 1, new_peers);
Ok(())
}
#[tokio::test]
async fn test_delete_from_non_admin() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let old_peers = DatabasePeer::list(&server.db().lock())?;
let res = test::request_from_ip(test::DEVELOPER1_PEER_IP)
.method("DELETE")
.path(&format!("/v1/admin/peers/{}", test::USER1_PEER_ID))
.reply(&filter)
.await;
assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
// The number of peer entries in the database hasn't changed.
let new_peers = DatabasePeer::list(&server.db().lock())?;
assert_eq!(old_peers.len(), new_peers.len());
Ok(())
}
#[tokio::test]
async fn test_delete_unknown_id() -> Result<()> {
let server = test::Server::new()?;
let filter = crate::routes(server.context());
let res = test::request_from_ip(test::ADMIN_PEER_IP)
.method("DELETE")
.path(&format!("/v1/admin/peers/{}", test::USER1_PEER_ID + 100))
.reply(&filter)
.await;
// Trying to delete a peer of non-existing ID will result in error.
assert_eq!(res.status(), StatusCode::NOT_FOUND);
Ok(())
}
}
| 32.603604 | 100 | 0.562725 |
892215c3af692c641022b519aa061af47e3074e0 | 44,113 | use std::fmt::{Debug, Formatter};
use std::iter::FusedIterator;
use std::{fmt, iter, marker::PhantomData, ops::Range};
use crate::{
cursor::{self},
Direction, GreenNode, NodeOrToken, SyntaxKind, SyntaxText, TextRange, TextSize, TokenAtOffset,
WalkEvent,
};
/// Describes a concrete syntax language: how language-specific kinds map to
/// and from the raw [SyntaxKind]s stored in the untyped green tree.
pub trait Language: Sized + Clone + Copy + fmt::Debug + Eq + Ord + std::hash::Hash {
    /// The language-specific node/token kind type.
    type Kind: fmt::Debug + PartialEq;

    /// Converts a raw kind from the untyped tree into this language's kind.
    fn kind_from_raw(raw: SyntaxKind) -> Self::Kind;
    /// Converts a language kind back into its raw representation.
    fn kind_to_raw(kind: Self::Kind) -> SyntaxKind;
}
/// A pass-through [Language] whose kind *is* the raw [SyntaxKind]; useful for
/// tests and language-agnostic tooling.
#[derive(Debug, Default, Hash, Copy, Eq, Ord, PartialEq, PartialOrd, Clone)]
pub struct RawLanguage;

impl Language for RawLanguage {
    type Kind = SyntaxKind;

    // Both conversions are the identity.
    fn kind_from_raw(raw: SyntaxKind) -> Self::Kind {
        raw
    }

    fn kind_to_raw(kind: Self::Kind) -> SyntaxKind {
        kind
    }
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum TriviaPiece {
Whitespace(usize),
Comments(usize),
}
impl TriviaPiece {
#[inline]
pub fn text_len(&self) -> TextSize {
match self {
TriviaPiece::Whitespace(n) => (*n as u32).into(),
TriviaPiece::Comments(n) => (*n as u32).into(),
}
}
}
/// A [SyntaxTriviaPiece] statically known to be whitespace.
pub struct SyntaxTriviaPieceWhitespace<L: Language>(SyntaxTriviaPiece<L>);
/// A [SyntaxTriviaPiece] statically known to be a comment.
pub struct SyntaxTriviaPieceComments<L: Language>(SyntaxTriviaPiece<L>);

impl<L: Language> SyntaxTriviaPieceWhitespace<L> {
    /// Returns the text of this whitespace piece.
    pub fn text(&self) -> &str {
        self.0.text()
    }

    /// Returns the length of this piece's text.
    pub fn text_len(&self) -> TextSize {
        self.0.text_len()
    }

    /// Returns the range this piece covers in the source.
    pub fn text_range(&self) -> TextRange {
        self.0.text_range()
    }
}
impl<L: Language> SyntaxTriviaPieceComments<L> {
    /// Returns the text of this comment piece.
    pub fn text(&self) -> &str {
        self.0.text()
    }

    /// Returns the length of this piece's text.
    pub fn text_len(&self) -> TextSize {
        self.0.text_len()
    }

    /// Returns the range this piece covers in the source.
    pub fn text_range(&self) -> TextRange {
        self.0.text_range()
    }
}
/// [SyntaxTriviaPiece] gives access to the most granular information about the trivia
/// that was specified by the lexer at the token creation time.
///
/// For example:
///
/// ```ignore
/// builder.token_with_trivia(
///     SyntaxKind(1),
///     "\n\t /**/let \t\t",
///     vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
///     vec![TriviaPiece::Whitespace(3)],
/// );
/// ```
///
/// This token has two pieces in the leading trivia, and one piece at the trailing trivia. Each
/// piece is defined by the [TriviaPiece]; its content is irrelevant.
#[derive(Clone)]
pub struct SyntaxTriviaPiece<L: Language> {
    // The whole trivia this piece belongs to (shared by all of its pieces).
    raw: cursor::SyntaxTrivia,
    // Absolute offset of this piece within the source text.
    offset: TextSize,
    // Kind and byte length of this piece.
    trivia: TriviaPiece,
    _p: PhantomData<L>,
}
impl<L: Language> SyntaxTriviaPiece<L> {
    /// Returns the associated text just for this trivia piece. This is different from [SyntaxTrivia::text()],
    /// which returns the text of the whole trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// assert_eq!("\n\t ", pieces[0].text());
    /// ```
    pub fn text(&self) -> &str {
        let txt = self.raw.text();
        // This piece's start relative to the start of the whole trivia text.
        let start = self.offset - self.raw.offset();
        let end = start + self.text_len();

        &txt[start.into()..end.into()]
    }

    /// Returns the associated text length just for this trivia piece. This is different from [SyntaxTrivia::text_len()],
    /// which returns the text length of the whole trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// assert_eq!(TextSize::from(3), pieces[0].text_len());
    /// ```
    pub fn text_len(&self) -> TextSize {
        self.trivia.text_len()
    }

    /// Returns the associated text range just for this trivia piece. This is different from [SyntaxTrivia::text_range()],
    /// which returns the text range of the whole trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// assert_eq!(TextRange::new(0.into(), 3.into()), pieces[0].text_range());
    /// ```
    pub fn text_range(&self) -> TextRange {
        TextRange::at(self.offset, self.text_len())
    }

    /// Cast this trivia piece to [SyntaxTriviaPieceWhitespace].
    ///
    /// Returns `None` if this piece is not whitespace.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// let w = pieces[0].as_whitespace();
    /// assert!(w.is_some());
    /// let w = pieces[1].as_whitespace();
    /// assert!(w.is_none());
    /// ```
    pub fn as_whitespace(&self) -> Option<SyntaxTriviaPieceWhitespace<L>> {
        match &self.trivia {
            TriviaPiece::Whitespace(_) => Some(SyntaxTriviaPieceWhitespace(self.clone())),
            _ => None,
        }
    }

    /// Cast this trivia piece to [SyntaxTriviaPieceComments].
    ///
    /// Returns `None` if this piece is not a comment.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// let w = pieces[0].as_comments();
    /// assert!(w.is_none());
    /// let w = pieces[1].as_comments();
    /// assert!(w.is_some());
    /// ```
    pub fn as_comments(&self) -> Option<SyntaxTriviaPieceComments<L>> {
        match &self.trivia {
            TriviaPiece::Comments(_) => Some(SyntaxTriviaPieceComments(self.clone())),
            _ => None,
        }
    }
}
/// Typed wrapper around a cursor trivia, parameterized by [Language].
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxTrivia<L: Language> {
    raw: cursor::SyntaxTrivia,
    _p: PhantomData<L>,
}

/// Typed wrapper around a cursor node, parameterized by [Language].
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxNode<L: Language> {
    raw: cursor::SyntaxNode,
    _p: PhantomData<L>,
}

/// Typed wrapper around a cursor token, parameterized by [Language].
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct SyntaxToken<L: Language> {
    raw: cursor::SyntaxToken,
    _p: PhantomData<L>,
}

/// Either a node or a token of a given language.
pub type SyntaxElement<L> = NodeOrToken<SyntaxNode<L>, SyntaxToken<L>>;
impl<L: Language> fmt::Debug for SyntaxNode<L> {
    /// Plain `{:?}` prints `kind@range`; alternate `{:#?}` prints the whole
    /// subtree, one slot per line, indented by tree depth.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if f.alternate() {
            let mut level = 0;
            for event in self.raw.preorder_slots() {
                match event {
                    WalkEvent::Enter(element) => {
                        // Indent by current depth, then print the slot index
                        // and the slot's contents.
                        for _ in 0..level {
                            write!(f, "  ")?;
                        }
                        match element {
                            cursor::SyntaxSlot::Node(node) => {
                                writeln!(f, "{}: {:?}", node.index(), SyntaxNode::<L>::from(node))?
                            }
                            cursor::SyntaxSlot::Token(token) => writeln!(
                                f,
                                "{}: {:?}",
                                token.index(),
                                SyntaxToken::<L>::from(token)
                            )?,
                            cursor::SyntaxSlot::Empty { index, .. } => {
                                writeln!(f, "{}: (empty)", index)?
                            }
                        }
                        level += 1;
                    }
                    WalkEvent::Leave(_) => level -= 1,
                }
            }
            // Every Enter has a matching Leave, so the depth must return to 0.
            assert_eq!(level, 0);
            Ok(())
        } else {
            write!(f, "{:?}@{:?}", self.kind(), self.text_range())
        }
    }
}
impl<L: Language> fmt::Display for SyntaxNode<L> {
    // Delegates to the underlying cursor node's Display.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.raw, f)
    }
}
/// Writes `text` to `f` in `Debug` (quoted/escaped) form, truncating long strings.
///
/// Strings shorter than 25 bytes are printed in full; longer strings are cut
/// at the first char boundary in `21..25` and suffixed with ` ...` so debug
/// output stays compact. Since a UTF-8 code point is at most 4 bytes long,
/// one of the four candidate indices is always a boundary.
fn print_debug_str<S: AsRef<str>>(text: S, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let text = text.as_ref();
    if text.len() < 25 {
        return write!(f, "{:?}", text);
    }
    for idx in 21..25 {
        if text.is_char_boundary(idx) {
            let truncated = format!("{} ...", &text[..idx]);
            return write!(f, "{:?}", truncated);
        }
    }
    // Unreachable in practice: a char boundary always exists in `21..25`
    // (see above). Kept as a harmless fallback.
    write!(f, "")
}
/// Writes a debug representation of `piece`: its variant name followed by its
/// (possibly truncated) text, e.g. `Whitespace("\n\t ")`.
fn print_debug_trivia_piece<L: Language>(
    piece: SyntaxTriviaPiece<L>,
    f: &mut fmt::Formatter<'_>,
) -> fmt::Result {
    // Both variants print identically except for the label, so resolve the
    // label first and share the printing code instead of duplicating it.
    let label = match &piece.trivia {
        TriviaPiece::Whitespace(_) => "Whitespace",
        TriviaPiece::Comments(_) => "Comments",
    };
    write!(f, "{}(", label)?;
    print_debug_str(piece.text(), f)?;
    write!(f, ")")
}
/// Prints `kind@range "trimmed text"` followed by the leading and trailing
/// trivia pieces in two bracketed, comma-separated lists.
impl<L: Language> fmt::Debug for SyntaxToken<L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{:?}@{:?} {:?} ",
            self.kind(),
            self.text_range(),
            self.text_trimmed()
        )?;
        write!(f, "[")?;
        for (index, piece) in self.leading_trivia().pieces().enumerate() {
            // Separator goes before every piece except the first.
            if index > 0 {
                write!(f, ", ")?;
            }
            print_debug_trivia_piece(piece, f)?;
        }
        write!(f, "] [")?;
        for (index, piece) in self.trailing_trivia().pieces().enumerate() {
            if index > 0 {
                write!(f, ", ")?;
            }
            print_debug_trivia_piece(piece, f)?;
        }
        write!(f, "]")
    }
}
/// Delegates to the `Display` implementation of the underlying cursor token.
impl<L: Language> fmt::Display for SyntaxToken<L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.raw)
    }
}
// Wrap a node into the `Node` variant of a `SyntaxElement`.
impl<L: Language> From<SyntaxNode<L>> for SyntaxElement<L> {
    fn from(node: SyntaxNode<L>) -> SyntaxElement<L> {
        NodeOrToken::Node(node)
    }
}
// Wrap a token into the `Token` variant of a `SyntaxElement`.
impl<L: Language> From<SyntaxToken<L>> for SyntaxElement<L> {
    fn from(token: SyntaxToken<L>) -> SyntaxElement<L> {
        NodeOrToken::Token(token)
    }
}
/// Iterator over the [SyntaxTriviaPiece]s of a [SyntaxTrivia].
pub struct SyntaxTriviaPiecesIterator<L: Language> {
    // Untyped cursor-layer iterator this wrapper delegates to.
    iter: cursor::SyntaxTriviaPiecesIterator,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
impl<L: Language> Iterator for SyntaxTriviaPiecesIterator<L> {
    type Item = SyntaxTriviaPiece<L>;
    fn next(&mut self) -> Option<Self::Item> {
        // Each `(offset, trivia)` pair yielded by the cursor iterator is
        // wrapped into a typed piece that carries its own clone of the raw
        // trivia handle.
        self.iter.next().map(|(offset, trivia)| SyntaxTriviaPiece {
            raw: self.iter.raw.clone(),
            offset,
            trivia,
            _p: PhantomData,
        })
    }
}
impl<L: Language> SyntaxTrivia<L> {
    /// Returns all [SyntaxTriviaPiece] of this trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// use std::iter::Iterator;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t /**/let \t\t",
    ///         vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let pieces: Vec<_> = node.first_leading_trivia().unwrap().pieces().collect();
    /// assert_eq!(2, pieces.len());
    /// let pieces: Vec<_> = node.last_trailing_trivia().unwrap().pieces().collect();
    /// assert_eq!(1, pieces.len());
    /// ```
    pub fn pieces(&self) -> SyntaxTriviaPiecesIterator<L> {
        SyntaxTriviaPiecesIterator {
            iter: self.raw.pieces(),
            _p: PhantomData,
        }
    }
    /// Returns the combined source text of this trivia.
    pub fn text(&self) -> &str {
        self.raw.text()
    }
    /// Returns the range of this trivia in the source text.
    pub fn text_range(&self) -> TextRange {
        self.raw.text_range()
    }
}
impl<L: Language> SyntaxNode<L> {
    /// Creates a typed root node on top of the given green tree.
    pub(crate) fn new_root(green: GreenNode) -> SyntaxNode<L> {
        SyntaxNode::from(cursor::SyntaxNode::new_root(green))
    }
    /// Returns the element stored in the slot with the given index. Returns [None] if the slot is empty.
    ///
    /// ## Panics
    /// If the slot index is out of bounds
    pub fn element_in_slot(&self, slot: u32) -> Option<SyntaxElement<L>> {
        self.raw.element_in_slot(slot).map(SyntaxElement::from)
    }
    /// Returns the kind of this node, converted to the language-specific kind type.
    pub fn kind(&self) -> L::Kind {
        L::kind_from_raw(self.raw.kind())
    }
    /// Returns the text of all descendants tokens combined, including all trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// assert_eq!("\n\t let \t\ta; \t\t", node.text());
    /// ```
    pub fn text(&self) -> SyntaxText {
        self.raw.text()
    }
    /// Returns the text of all descendants tokens combined,
    /// excluding the first token leading trivia, and the last token trailing trivia.
    /// All other trivia is included.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// assert_eq!("let \t\ta;", node.text_trimmed());
    /// ```
    pub fn text_trimmed(&self) -> SyntaxText {
        self.raw.text_trimmed()
    }
    /// Returns the range corresponding for the text of all descendants tokens combined, including all trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let range = node.text_range();
    /// assert_eq!(0u32, range.start().into());
    /// assert_eq!(14u32, range.end().into());
    /// ```
    pub fn text_range(&self) -> TextRange {
        self.raw.text_range()
    }
    /// Returns the range corresponding for the text of all descendants tokens combined,
    /// excluding the first token leading trivia, and the last token trailing trivia.
    /// All other trivia is included.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let range = node.text_trimmed_range();
    /// assert_eq!(3u32, range.start().into());
    /// assert_eq!(11u32, range.end().into());
    /// ```
    pub fn text_trimmed_range(&self) -> TextRange {
        self.raw.text_trimmed_range()
    }
    /// Returns the leading trivia of the [first_token](SyntaxNode::first_token), or [None] if the node does not have any descendant tokens.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let trivia = node.first_leading_trivia();
    /// assert!(trivia.is_some());
    /// assert_eq!("\n\t ", trivia.unwrap().text());
    ///
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {});
    /// let trivia = node.first_leading_trivia();
    /// assert!(trivia.is_none());
    /// ```
    pub fn first_leading_trivia(&self) -> Option<SyntaxTrivia<L>> {
        self.raw.first_leading_trivia().map(|raw| SyntaxTrivia {
            raw,
            _p: PhantomData,
        })
    }
    /// Returns the trailing trivia of the [last_token](SyntaxNode::last_token), or [None] if the node does not have any descendant tokens.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    ///     builder.token(SyntaxKind(1), "a");
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "; \t\t",
    ///         vec![],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// });
    /// let trivia = node.last_trailing_trivia();
    /// assert!(trivia.is_some());
    /// assert_eq!(" \t\t", trivia.unwrap().text());
    ///
    /// let mut node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {});
    /// let trivia = node.last_trailing_trivia();
    /// assert!(trivia.is_none());
    /// ```
    pub fn last_trailing_trivia(&self) -> Option<SyntaxTrivia<L>> {
        self.raw.last_trailing_trivia().map(|raw| SyntaxTrivia {
            raw,
            _p: PhantomData,
        })
    }
    /// Returns the parent of this node, or [None] if the node has no parent.
    pub fn parent(&self) -> Option<SyntaxNode<L>> {
        self.raw.parent().map(Self::from)
    }
    /// Returns an iterator over the ancestors of this node.
    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
        self.raw.ancestors().map(SyntaxNode::from)
    }
    /// Returns an iterator over the child nodes of this node.
    pub fn children(&self) -> SyntaxNodeChildren<L> {
        SyntaxNodeChildren {
            raw: self.raw.children(),
            _p: PhantomData,
        }
    }
    /// Returns an iterator over all the slots of this syntax node.
    pub fn slots(&self) -> SyntaxSlots<L> {
        SyntaxSlots {
            raw: self.raw.slots(),
            _p: PhantomData,
        }
    }
    /// Returns an iterator over the children of this node, nodes and tokens alike.
    pub fn children_with_tokens(&self) -> SyntaxElementChildren<L> {
        SyntaxElementChildren {
            raw: self.raw.children_with_tokens(),
            _p: PhantomData,
        }
    }
    /// Returns the first child node, or [None].
    pub fn first_child(&self) -> Option<SyntaxNode<L>> {
        self.raw.first_child().map(Self::from)
    }
    /// Returns the last child node, or [None].
    pub fn last_child(&self) -> Option<SyntaxNode<L>> {
        self.raw.last_child().map(Self::from)
    }
    /// Returns the first child node or token, or [None].
    pub fn first_child_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.first_child_or_token().map(NodeOrToken::from)
    }
    /// Returns the last child node or token, or [None].
    pub fn last_child_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.last_child_or_token().map(NodeOrToken::from)
    }
    /// Returns the next sibling node, or [None].
    pub fn next_sibling(&self) -> Option<SyntaxNode<L>> {
        self.raw.next_sibling().map(Self::from)
    }
    /// Returns the previous sibling node, or [None].
    pub fn prev_sibling(&self) -> Option<SyntaxNode<L>> {
        self.raw.prev_sibling().map(Self::from)
    }
    /// Returns the next sibling node or token, or [None].
    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.next_sibling_or_token().map(NodeOrToken::from)
    }
    /// Returns the previous sibling node or token, or [None].
    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.prev_sibling_or_token().map(NodeOrToken::from)
    }
    /// Return the leftmost token in the subtree of this node.
    pub fn first_token(&self) -> Option<SyntaxToken<L>> {
        self.raw.first_token().map(SyntaxToken::from)
    }
    /// Return the rightmost token in the subtree of this node.
    pub fn last_token(&self) -> Option<SyntaxToken<L>> {
        self.raw.last_token().map(SyntaxToken::from)
    }
    /// Returns an iterator over the sibling nodes in the given `direction`.
    pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = SyntaxNode<L>> {
        self.raw.siblings(direction).map(SyntaxNode::from)
    }
    /// Returns an iterator over the sibling nodes and tokens in the given `direction`.
    pub fn siblings_with_tokens(
        &self,
        direction: Direction,
    ) -> impl Iterator<Item = SyntaxElement<L>> {
        self.raw
            .siblings_with_tokens(direction)
            .map(SyntaxElement::from)
    }
    /// Returns an iterator over the descendant nodes of this node.
    pub fn descendants(&self) -> impl Iterator<Item = SyntaxNode<L>> {
        self.raw.descendants().map(SyntaxNode::from)
    }
    /// Returns an iterator over all descendant tokens of this node.
    pub fn descendants_tokens(&self) -> impl Iterator<Item = SyntaxToken<L>> {
        self.descendants_with_tokens()
            .filter_map(|x| x.as_token().cloned())
    }
    /// Returns an iterator over all descendant nodes and tokens of this node.
    pub fn descendants_with_tokens(&self) -> impl Iterator<Item = SyntaxElement<L>> {
        self.raw.descendants_with_tokens().map(NodeOrToken::from)
    }
    /// Traverse the subtree rooted at the current node (including the current
    /// node) in preorder, excluding tokens.
    pub fn preorder(&self) -> Preorder<L> {
        Preorder {
            raw: self.raw.preorder(),
            _p: PhantomData,
        }
    }
    /// Traverse the subtree rooted at the current node (including the current
    /// node) in preorder, including tokens.
    pub fn preorder_with_tokens(&self) -> PreorderWithTokens<L> {
        PreorderWithTokens {
            raw: self.raw.preorder_with_tokens(),
            _p: PhantomData,
        }
    }
    /// Find a token in the subtree corresponding to this node, which covers the offset.
    /// Precondition: offset must be within the node's range.
    pub fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken<L>> {
        self.raw.token_at_offset(offset).map(SyntaxToken::from)
    }
    /// Return the deepest node or token in the current subtree that fully
    /// contains the range. If the range is empty and is contained in two leaf
    /// nodes, either one can be returned. Precondition: range must be contained
    /// within the current node
    pub fn covering_element(&self, range: TextRange) -> SyntaxElement<L> {
        NodeOrToken::from(self.raw.covering_element(range))
    }
    /// Finds a [`SyntaxElement`] which intersects with a given `range`. If
    /// there are several intersecting elements, any one can be returned.
    ///
    /// The method uses binary search internally, so its complexity is
    /// `O(log(N))` where `N = self.children_with_tokens().count()`.
    pub fn child_or_token_at_range(&self, range: TextRange) -> Option<SyntaxElement<L>> {
        self.raw
            .child_or_token_at_range(range)
            .map(SyntaxElement::from)
    }
    /// Returns an independent copy of the subtree rooted at this node.
    ///
    /// The parent of the returned node will be `None`, the start offset will be
    /// zero, but, otherwise, it'll be equivalent to the source node.
    pub fn clone_subtree(&self) -> SyntaxNode<L> {
        SyntaxNode::from(self.raw.clone_subtree())
    }
    /// Returns a clone of this node.
    ///
    /// NOTE(review): delegates to `cursor::SyntaxNode::clone_for_update`;
    /// presumably the clone supports in-place mutation (e.g. [SyntaxNode::detach],
    /// [SyntaxNode::splice_children]) — confirm in the `cursor` module.
    pub fn clone_for_update(&self) -> SyntaxNode<L> {
        SyntaxNode::from(self.raw.clone_for_update())
    }
    /// Detaches this node (delegates to the underlying cursor node).
    pub fn detach(&self) {
        self.raw.detach()
    }
    /// Replaces the children in the `to_delete` range with the elements of `to_insert`.
    pub fn splice_children(&self, to_delete: Range<usize>, to_insert: Vec<SyntaxElement<L>>) {
        // Lower the typed elements to cursor elements before splicing.
        let to_insert = to_insert
            .into_iter()
            .map(cursor::SyntaxElement::from)
            .collect::<Vec<_>>();
        self.raw.splice_children(to_delete, to_insert)
    }
    /// Wraps this node into a [SyntaxList].
    pub fn into_list(self) -> SyntaxList<L> {
        SyntaxList::new(self)
    }
}
impl<L: Language> SyntaxToken<L> {
    /// Returns the kind of this token, converted to the language-specific kind type.
    pub fn kind(&self) -> L::Kind {
        L::kind_from_raw(self.raw.kind())
    }
    /// Returns the range of this token, including all trivia.
    pub fn text_range(&self) -> TextRange {
        self.raw.text_range()
    }
    /// Returns the range of this token, excluding its leading and trailing trivia.
    pub fn text_trimmed_range(&self) -> TextRange {
        self.raw.text_trimmed_range()
    }
    /// Returns the index of this token inside its parent
    /// (used as the slot number in the `Debug` output of [SyntaxNode]).
    pub fn index(&self) -> usize {
        self.raw.index()
    }
    /// Returns the text of the token, including all trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut token = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// }).first_token().unwrap();
    /// assert_eq!("\n\t let \t\t", token.text());
    /// ```
    pub fn text(&self) -> &str {
        self.raw.text()
    }
    /// Returns the text of the token, excluding all trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut token = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// }).first_token().unwrap();
    /// assert_eq!("let", token.text_trimmed());
    /// ```
    pub fn text_trimmed(&self) -> &str {
        self.raw.text_trimmed()
    }
    /// Returns the parent node, or [None] if the token has no parent.
    pub fn parent(&self) -> Option<SyntaxNode<L>> {
        self.raw.parent().map(SyntaxNode::from)
    }
    /// Returns an iterator over the ancestors of this token.
    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
        self.raw.ancestors().map(SyntaxNode::from)
    }
    /// Returns the next sibling node or token, or [None].
    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.next_sibling_or_token().map(NodeOrToken::from)
    }
    /// Returns the previous sibling node or token, or [None].
    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        self.raw.prev_sibling_or_token().map(NodeOrToken::from)
    }
    /// Returns an iterator over the sibling nodes and tokens in the given `direction`.
    pub fn siblings_with_tokens(
        &self,
        direction: Direction,
    ) -> impl Iterator<Item = SyntaxElement<L>> {
        self.raw
            .siblings_with_tokens(direction)
            .map(SyntaxElement::from)
    }
    /// Next token in the tree (i.e., not necessarily a sibling).
    pub fn next_token(&self) -> Option<SyntaxToken<L>> {
        self.raw.next_token().map(SyntaxToken::from)
    }
    /// Previous token in the tree (i.e., not necessarily a sibling).
    pub fn prev_token(&self) -> Option<SyntaxToken<L>> {
        self.raw.prev_token().map(SyntaxToken::from)
    }
    /// Detaches this token (delegates to the underlying cursor token).
    pub fn detach(&self) {
        self.raw.detach()
    }
    /// Returns the token leading trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut token = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// }).first_token().unwrap();
    /// assert_eq!("\n\t ", token.leading_trivia().text());
    /// ```
    #[inline]
    pub fn leading_trivia(&self) -> SyntaxTrivia<L> {
        SyntaxTrivia {
            raw: self.raw.leading_trivia(),
            _p: PhantomData,
        }
    }
    /// Returns the token trailing trivia.
    ///
    /// ```
    /// use rome_rowan::*;
    /// use rome_rowan::api::RawLanguage;
    /// let mut token = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0),|builder| {
    ///     builder.token_with_trivia(
    ///         SyntaxKind(1),
    ///         "\n\t let \t\t",
    ///         vec![TriviaPiece::Whitespace(3)],
    ///         vec![TriviaPiece::Whitespace(3)],
    ///     );
    /// }).first_token().unwrap();
    /// assert_eq!(" \t\t", token.trailing_trivia().text());
    /// ```
    #[inline]
    pub fn trailing_trivia(&self) -> SyntaxTrivia<L> {
        SyntaxTrivia {
            raw: self.raw.trailing_trivia(),
            _p: PhantomData,
        }
    }
}
impl<L: Language> SyntaxElement<L> {
    /// Returns the range of the wrapped node or token, including all trivia.
    pub fn text_range(&self) -> TextRange {
        match self {
            NodeOrToken::Node(it) => it.text_range(),
            NodeOrToken::Token(it) => it.text_range(),
        }
    }
    /// Returns the range of the wrapped node or token, excluding leading and trailing trivia.
    pub fn text_trimmed_range(&self) -> TextRange {
        match self {
            NodeOrToken::Node(it) => it.text_trimmed_range(),
            NodeOrToken::Token(it) => it.text_trimmed_range(),
        }
    }
    /// Returns the leading trivia: the first token's leading trivia for a node,
    /// the token's own leading trivia for a token. [None] if a node has no tokens.
    pub fn leading_trivia(&self) -> Option<SyntaxTrivia<L>> {
        match self {
            NodeOrToken::Node(it) => it.first_leading_trivia(),
            NodeOrToken::Token(it) => Some(it.leading_trivia()),
        }
    }
    /// Returns the trailing trivia: the last token's trailing trivia for a node,
    /// the token's own trailing trivia for a token. [None] if a node has no tokens.
    pub fn trailing_trivia(&self) -> Option<SyntaxTrivia<L>> {
        match self {
            NodeOrToken::Node(it) => it.last_trailing_trivia(),
            NodeOrToken::Token(it) => Some(it.trailing_trivia()),
        }
    }
    /// Returns the kind of the wrapped node or token.
    pub fn kind(&self) -> L::Kind {
        match self {
            NodeOrToken::Node(it) => it.kind(),
            NodeOrToken::Token(it) => it.kind(),
        }
    }
    /// Returns the parent node of the wrapped node or token.
    pub fn parent(&self) -> Option<SyntaxNode<L>> {
        match self {
            NodeOrToken::Node(it) => it.parent(),
            NodeOrToken::Token(it) => it.parent(),
        }
    }
    /// Returns an iterator over the ancestors: for a node it starts with the
    /// node itself, for a token with the token's parent.
    pub fn ancestors(&self) -> impl Iterator<Item = SyntaxNode<L>> {
        let first = match self {
            NodeOrToken::Node(it) => Some(it.clone()),
            NodeOrToken::Token(it) => it.parent(),
        };
        iter::successors(first, SyntaxNode::parent)
    }
    /// Returns the next sibling node or token, or [None].
    pub fn next_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        match self {
            NodeOrToken::Node(it) => it.next_sibling_or_token(),
            NodeOrToken::Token(it) => it.next_sibling_or_token(),
        }
    }
    /// Returns the previous sibling node or token, or [None].
    pub fn prev_sibling_or_token(&self) -> Option<SyntaxElement<L>> {
        match self {
            NodeOrToken::Node(it) => it.prev_sibling_or_token(),
            NodeOrToken::Token(it) => it.prev_sibling_or_token(),
        }
    }
    /// Detaches the wrapped node or token.
    pub fn detach(&self) {
        match self {
            NodeOrToken::Node(it) => it.detach(),
            NodeOrToken::Token(it) => it.detach(),
        }
    }
}
/// Iterator over the child nodes of a [SyntaxNode] (tokens are not yielded).
#[derive(Debug, Clone)]
pub struct SyntaxNodeChildren<L: Language> {
    // Untyped cursor-layer iterator this wrapper delegates to.
    raw: cursor::SyntaxNodeChildren,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
impl<L: Language> Iterator for SyntaxNodeChildren<L> {
    type Item = SyntaxNode<L>;
    fn next(&mut self) -> Option<Self::Item> {
        self.raw.next().map(SyntaxNode::from)
    }
}
/// Iterator over the children of a [SyntaxNode], yielding nodes and tokens.
#[derive(Clone)]
pub struct SyntaxElementChildren<L: Language> {
    // Untyped cursor-layer iterator this wrapper delegates to.
    raw: cursor::SyntaxElementChildren,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
// Debug-prints the iterator as a list of its remaining elements
// (iterates a clone, so the iterator itself is not consumed).
impl<L: Language> Debug for SyntaxElementChildren<L> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
impl<L: Language> Default for SyntaxElementChildren<L> {
    // NOTE(review): delegates to `cursor::SyntaxElementChildren::default()`;
    // presumably an iterator that yields no elements — confirm in the cursor module.
    fn default() -> Self {
        SyntaxElementChildren {
            raw: cursor::SyntaxElementChildren::default(),
            _p: PhantomData,
        }
    }
}
impl<L: Language> Iterator for SyntaxElementChildren<L> {
    type Item = SyntaxElement<L>;
    fn next(&mut self) -> Option<Self::Item> {
        self.raw.next().map(NodeOrToken::from)
    }
}
/// Preorder traversal over the nodes of a subtree (see [SyntaxNode::preorder]).
pub struct Preorder<L: Language> {
    // Untyped cursor-layer traversal this wrapper delegates to.
    raw: cursor::Preorder,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
impl<L: Language> Preorder<L> {
    /// Skips the subtree of the most recently yielded node.
    pub fn skip_subtree(&mut self) {
        self.raw.skip_subtree()
    }
}
impl<L: Language> Iterator for Preorder<L> {
    type Item = WalkEvent<SyntaxNode<L>>;
    fn next(&mut self) -> Option<Self::Item> {
        self.raw.next().map(|it| it.map(SyntaxNode::from))
    }
}
/// Preorder traversal over the nodes and tokens of a subtree
/// (see [SyntaxNode::preorder_with_tokens]).
pub struct PreorderWithTokens<L: Language> {
    // Untyped cursor-layer traversal this wrapper delegates to.
    raw: cursor::PreorderWithTokens,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
impl<L: Language> PreorderWithTokens<L> {
    /// Skips the subtree of the most recently yielded element.
    pub fn skip_subtree(&mut self) {
        self.raw.skip_subtree()
    }
}
impl<L: Language> Iterator for PreorderWithTokens<L> {
    type Item = WalkEvent<SyntaxElement<L>>;
    fn next(&mut self) -> Option<Self::Item> {
        self.raw.next().map(|it| it.map(SyntaxElement::from))
    }
}
// Conversions between the typed (`api`) layer and the untyped `cursor` layer.
impl<L: Language> From<cursor::SyntaxNode> for SyntaxNode<L> {
    fn from(raw: cursor::SyntaxNode) -> SyntaxNode<L> {
        SyntaxNode {
            raw,
            _p: PhantomData,
        }
    }
}
impl<L: Language> From<SyntaxNode<L>> for cursor::SyntaxNode {
    fn from(node: SyntaxNode<L>) -> cursor::SyntaxNode {
        node.raw
    }
}
impl<L: Language> From<cursor::SyntaxToken> for SyntaxToken<L> {
    fn from(raw: cursor::SyntaxToken) -> SyntaxToken<L> {
        SyntaxToken {
            raw,
            _p: PhantomData,
        }
    }
}
impl<L: Language> From<SyntaxToken<L>> for cursor::SyntaxToken {
    fn from(token: SyntaxToken<L>) -> cursor::SyntaxToken {
        token.raw
    }
}
// Element conversions are variant-preserving in both directions.
impl<L: Language> From<cursor::SyntaxElement> for SyntaxElement<L> {
    fn from(raw: cursor::SyntaxElement) -> SyntaxElement<L> {
        match raw {
            NodeOrToken::Node(it) => NodeOrToken::Node(it.into()),
            NodeOrToken::Token(it) => NodeOrToken::Token(it.into()),
        }
    }
}
impl<L: Language> From<SyntaxElement<L>> for cursor::SyntaxElement {
    fn from(element: SyntaxElement<L>) -> cursor::SyntaxElement {
        match element {
            NodeOrToken::Node(it) => NodeOrToken::Node(it.into()),
            NodeOrToken::Token(it) => NodeOrToken::Token(it.into()),
        }
    }
}
/// Each node has a slot for each of its children regardless if the child is present or not.
/// A child that isn't present either because it's optional or because of a syntax error
/// is stored in an [SyntaxSlot::Empty] to preserve the index of each child.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SyntaxSlot<L: Language> {
    /// Slot that stores a node child
    Node(SyntaxNode<L>),
    /// Slot that stores a token child
    Token(SyntaxToken<L>),
    /// Slot that marks that the child in this position isn't present in the source code.
    Empty,
}
impl<L: Language> SyntaxSlot<L> {
    /// Consumes the slot and returns the contained node; token and empty
    /// slots yield [None].
    pub fn into_node(self) -> Option<SyntaxNode<L>> {
        if let SyntaxSlot::Node(node) = self {
            Some(node)
        } else {
            None
        }
    }
    /// Returns the kind of the element stored in the slot, or [None] for an
    /// empty slot.
    pub fn kind(&self) -> Option<L::Kind> {
        let kind = match self {
            SyntaxSlot::Node(node) => node.kind(),
            SyntaxSlot::Token(token) => token.kind(),
            SyntaxSlot::Empty => return None,
        };
        Some(kind)
    }
}
// Lifts an untyped cursor slot into the typed slot enum; the empty slot's
// index is dropped because `SyntaxSlot::Empty` carries no payload.
impl<L: Language> From<cursor::SyntaxSlot> for SyntaxSlot<L> {
    fn from(raw: cursor::SyntaxSlot) -> Self {
        match raw {
            cursor::SyntaxSlot::Node(node) => SyntaxSlot::Node(node.into()),
            cursor::SyntaxSlot::Token(token) => SyntaxSlot::Token(token.into()),
            cursor::SyntaxSlot::Empty { .. } => SyntaxSlot::Empty,
        }
    }
}
/// Iterator over the slots of a node.
#[derive(Debug, Clone)]
pub struct SyntaxSlots<L> {
    // Untyped cursor-layer iterator this wrapper delegates to.
    raw: cursor::SyntaxSlots,
    // Carries the language type parameter without storing any `L` value.
    _p: PhantomData<L>,
}
impl<L: Language> Iterator for SyntaxSlots<L> {
    type Item = SyntaxSlot<L>;
    fn next(&mut self) -> Option<Self::Item> {
        self.raw.next().map(SyntaxSlot::from)
    }
    // `nth` and `last` are forwarded to the cursor iterator so skipping does
    // not have to materialize the intermediate typed slots.
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.raw.nth(n).map(SyntaxSlot::from)
    }
    fn last(self) -> Option<Self::Item>
    where
        Self: Sized,
    {
        self.raw.last().map(SyntaxSlot::from)
    }
}
// The lifetime parameter `'a` was unused (clippy: extra_unused_lifetimes) — removed.
impl<L: Language> FusedIterator for SyntaxSlots<L> {}
// The lifetime parameter `'a` was unused (clippy: extra_unused_lifetimes) — removed.
impl<L: Language> ExactSizeIterator for SyntaxSlots<L> {
    /// Returns the exact number of remaining slots, delegating to the cursor iterator.
    fn len(&self) -> usize {
        self.raw.len()
    }
}
/// A list of `SyntaxNode`s and/or `SyntaxToken`s
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct SyntaxList<L: Language> {
    // The list node whose slots are the list's elements.
    list: SyntaxNode<L>,
}
impl<L: Language> SyntaxList<L> {
    /// Creates a new list wrapping a List `SyntaxNode`
    fn new(node: SyntaxNode<L>) -> Self {
        Self { list: node }
    }
    /// Iterates over the elements in the list.
    pub fn iter(&self) -> SyntaxSlots<L> {
        self.list.slots()
    }
    /// Returns the number of items in this list
    pub fn len(&self) -> usize {
        self.list.slots().len()
    }
    /// Returns `true` if the list contains no items.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Returns the first slot of the list, or [None] if the list is empty.
    pub fn first(&self) -> Option<SyntaxSlot<L>> {
        self.list.slots().next()
    }
    /// Returns the last slot of the list, or [None] if the list is empty.
    pub fn last(&self) -> Option<SyntaxSlot<L>> {
        self.list.slots().last()
    }
    /// Returns the underlying list node.
    pub fn node(&self) -> &SyntaxNode<L> {
        &self.list
    }
}
// Both by-reference and by-value iteration yield the list's slots.
impl<L: Language> IntoIterator for &SyntaxList<L> {
    type Item = SyntaxSlot<L>;
    type IntoIter = SyntaxSlots<L>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<L: Language> IntoIterator for SyntaxList<L> {
    type Item = SyntaxSlot<L>;
    type IntoIter = SyntaxSlots<L>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
#[cfg(test)]
mod tests {
use text_size::TextRange;
use crate::api::{RawLanguage, TriviaPiece};
use crate::{Direction, SyntaxKind, TreeBuilder};
const LIST_KIND: SyntaxKind = SyntaxKind(0);
#[test]
fn empty_list() {
let mut builder: TreeBuilder<RawLanguage> = TreeBuilder::new();
builder.start_node(LIST_KIND);
builder.finish_node();
let list = builder.finish().into_list();
assert!(list.is_empty());
assert_eq!(list.len(), 0);
assert_eq!(list.first(), None);
assert_eq!(list.last(), None);
assert_eq!(list.iter().collect::<Vec<_>>(), Vec::default());
}
#[test]
fn node_list() {
let mut builder: TreeBuilder<RawLanguage> = TreeBuilder::new();
builder.start_node(LIST_KIND);
builder.start_node(SyntaxKind(1));
builder.token(SyntaxKind(2), "1");
builder.finish_node();
builder.start_node(SyntaxKind(1));
builder.token(SyntaxKind(2), "2");
builder.finish_node();
builder.finish_node();
let node = builder.finish();
let list = node.into_list();
assert!(!list.is_empty());
assert_eq!(list.len(), 2);
let first = list.first().and_then(|e| e.into_node()).unwrap();
assert_eq!(first.kind(), SyntaxKind(1));
assert_eq!(first.text(), "1");
let last = list.last().and_then(|e| e.into_node()).unwrap();
assert_eq!(last.kind(), SyntaxKind(1));
assert_eq!(last.text(), "2");
let node_texts: Vec<_> = list
.iter()
.map(|e| e.into_node().map(|n| n.text().to_string()))
.collect();
assert_eq!(
node_texts,
vec![Some(String::from("1")), Some(String::from("2"))]
)
}
#[test]
fn node_or_token_list() {
let mut builder: TreeBuilder<RawLanguage> = TreeBuilder::new();
builder.start_node(LIST_KIND);
builder.start_node(SyntaxKind(1));
builder.token(SyntaxKind(2), "1");
builder.finish_node();
builder.token(SyntaxKind(3), ",");
builder.start_node(SyntaxKind(1));
builder.token(SyntaxKind(2), "2");
builder.finish_node();
builder.finish_node();
let node = builder.finish();
let list = node.into_list();
assert!(!list.is_empty());
assert_eq!(list.len(), 3);
let first = list.first().and_then(|e| e.into_node()).unwrap();
assert_eq!(first.kind(), SyntaxKind(1));
assert_eq!(first.text(), "1");
let last = list.last().and_then(|e| e.into_node()).unwrap();
assert_eq!(last.kind(), SyntaxKind(1));
assert_eq!(last.text(), "2");
let kinds: Vec<_> = list.iter().map(|e| e.kind()).collect();
assert_eq!(
kinds,
vec![
Some(SyntaxKind(1)),
Some(SyntaxKind(3)),
Some(SyntaxKind(1))
]
)
}
#[test]
fn siblings() {
let mut builder: TreeBuilder<RawLanguage> = TreeBuilder::new();
// list
builder.start_node(SyntaxKind(1));
// element 1
builder.start_node(SyntaxKind(2));
builder.token(SyntaxKind(3), "a");
builder.finish_node();
// element 2
builder.start_node(SyntaxKind(2));
builder.token(SyntaxKind(3), "b");
builder.finish_node();
// Missing ,
builder.missing();
// element 3
builder.start_node(SyntaxKind(2));
builder.token(SyntaxKind(3), "c");
builder.finish_node();
builder.finish_node();
let root = builder.finish();
let first = root.children().next().unwrap();
assert_eq!(first.text().to_string(), "a");
assert_eq!(
first.next_sibling().map(|e| e.text().to_string()),
Some(String::from("b"))
);
let second = root.children().nth(1).unwrap();
assert_eq!(second.text().to_string(), "b");
// Skips the missing element
assert_eq!(
second.next_sibling().map(|e| e.text().to_string()),
Some(String::from("c"))
);
assert_eq!(
second.prev_sibling().map(|e| e.text().to_string()),
Some(String::from("a"))
);
let last = root.children().last().unwrap();
assert_eq!(last.text(), "c");
assert_eq!(last.next_sibling(), None);
assert_eq!(
last.prev_sibling().map(|e| e.text().to_string()),
Some(String::from("b"))
);
assert_eq!(
first
.siblings(Direction::Next)
.map(|s| s.text().to_string())
.collect::<Vec<_>>(),
vec!["a", "b", "c"]
);
assert_eq!(
last.siblings(Direction::Prev)
.map(|s| s.text().to_string())
.collect::<Vec<_>>(),
vec!["c", "b", "a"]
);
}
#[test]
fn siblings_with_tokens() {
let mut builder: TreeBuilder<RawLanguage> = TreeBuilder::new();
builder.start_node(LIST_KIND);
builder.token(SyntaxKind(1), "for");
builder.token(SyntaxKind(2), "(");
builder.token(SyntaxKind(3), ";");
builder.start_node(SyntaxKind(4));
builder.token(SyntaxKind(5), "x");
builder.finish_node();
builder.token(SyntaxKind(3), ";");
builder.token(SyntaxKind(6), ")");
builder.finish_node();
let root = builder.finish();
let first_semicolon = root
.children_with_tokens()
.nth(2)
.and_then(|e| e.into_token())
.unwrap();
assert_eq!(first_semicolon.text(), ";");
assert_eq!(
first_semicolon
.siblings_with_tokens(Direction::Next)
.map(|e| e.to_string())
.collect::<Vec<_>>(),
vec!["x", ";", ")"]
);
assert_eq!(
first_semicolon.next_sibling_or_token(),
first_semicolon.siblings_with_tokens(Direction::Next).next()
);
assert_eq!(
first_semicolon.prev_sibling_or_token(),
first_semicolon.siblings_with_tokens(Direction::Prev).next()
);
}
#[test]
pub fn syntax_text_and_len() {
let mut builder: crate::TreeBuilder<crate::api::RawLanguage> = crate::TreeBuilder::new();
builder.start_node(crate::SyntaxKind(0));
builder.token_with_trivia(
crate::SyntaxKind(0),
"\n\t let \t\t",
vec![TriviaPiece::Whitespace(3)],
vec![TriviaPiece::Whitespace(3)],
);
builder.finish_node();
// // Node texts
let node = builder.finish();
assert_eq!("\n\t let \t\t", node.text());
assert_eq!("let", node.text_trimmed());
assert_eq!("\n\t ", node.first_leading_trivia().unwrap().text());
assert_eq!(" \t\t", node.last_trailing_trivia().unwrap().text());
// Token texts
let token = node.first_token().unwrap();
assert_eq!("\n\t let \t\t", token.text());
assert_eq!("let", token.text_trimmed());
assert_eq!("\n\t ", token.leading_trivia().text());
assert_eq!(" \t\t", token.trailing_trivia().text());
}
#[test]
pub fn syntax_range() {
    // Builds the source "\n\t let \t\ta \n=\n1;\t\t" (18 characters) out of
    // five tokens, each declaring how many of its characters are leading and
    // trailing trivia:
    //   "\n\t let \t\t" -> offsets 0..9   (trimmed "let" at 3..6)
    //   "a "           -> offsets 9..11  (trimmed "a"   at 9..10)
    //   "\n=\n"        -> offsets 11..14 (trimmed "="   at 12..13)
    //   "1"            -> offsets 14..15
    //   ";\t\t"        -> offsets 15..18 (trimmed ";"   at 15..16)
    let mut builder: crate::TreeBuilder<crate::api::RawLanguage> = crate::TreeBuilder::new();
    builder.start_node(crate::SyntaxKind(0));
    builder.token_with_trivia(
        crate::SyntaxKind(0),
        "\n\t let \t\t",
        vec![TriviaPiece::Whitespace(3)],
        vec![TriviaPiece::Whitespace(3)],
    );
    builder.token_with_trivia(
        crate::SyntaxKind(0),
        "a ",
        vec![TriviaPiece::Whitespace(0)],
        vec![TriviaPiece::Whitespace(1)],
    );
    builder.token_with_trivia(
        crate::SyntaxKind(1),
        "\n=\n",
        vec![TriviaPiece::Whitespace(1)],
        vec![TriviaPiece::Whitespace(1)],
    );
    builder.token(crate::SyntaxKind(0), "1");
    builder.token_with_trivia(
        crate::SyntaxKind(0),
        ";\t\t",
        vec![],
        vec![TriviaPiece::Whitespace(2)],
    );
    builder.finish_node();
    let node = builder.finish();
    // Node Ranges
    // Full range covers every token including trivia; the trimmed range
    // excludes the node's own leading (0..3) and trailing (16..18) trivia.
    assert_eq!(TextRange::new(0.into(), 18.into()), node.text_range());
    assert_eq!(
        TextRange::new(3.into(), 16.into()),
        node.text_trimmed_range()
    );
    assert_eq!(
        TextRange::new(0.into(), 3.into()),
        node.first_leading_trivia().unwrap().text_range()
    );
    assert_eq!(
        TextRange::new(16.into(), 18.into()),
        node.last_trailing_trivia().unwrap().text_range()
    );
    // as NodeOrToken
    // SyntaxKind(1) was only given to the "\n=\n" token, so this finds it.
    let eq_token = node
        .descendants_with_tokens()
        .find(|x| x.kind().0 == 1)
        .unwrap();
    assert_eq!(TextRange::new(11.into(), 14.into()), eq_token.text_range());
    assert_eq!(
        TextRange::new(12.into(), 13.into()),
        eq_token.text_trimmed_range()
    );
    assert_eq!(
        TextRange::new(11.into(), 12.into()),
        eq_token.leading_trivia().unwrap().text_range()
    );
    assert_eq!(
        TextRange::new(13.into(), 14.into()),
        eq_token.trailing_trivia().unwrap().text_range()
    );
    // as Token
    // Same assertions through the concrete token API; here the trivia
    // accessors are infallible (no Option).
    let eq_token = eq_token.as_token().unwrap();
    assert_eq!(TextRange::new(11.into(), 14.into()), eq_token.text_range());
    assert_eq!(
        TextRange::new(12.into(), 13.into()),
        eq_token.text_trimmed_range()
    );
    assert_eq!(
        TextRange::new(11.into(), 12.into()),
        eq_token.leading_trivia().text_range()
    );
    assert_eq!(
        TextRange::new(13.into(), 14.into()),
        eq_token.trailing_trivia().text_range()
    );
}
#[test]
pub fn syntax_trivia_pieces() {
    use crate::*;
    // Build a single-token node whose leading trivia is declared as two
    // pieces: three whitespace characters followed by a 4-character comment.
    let node = TreeBuilder::<RawLanguage>::wrap_with_node(SyntaxKind(0), |builder| {
        builder.token_with_trivia(
            SyntaxKind(1),
            "\n\t /**/let \t\t",
            vec![TriviaPiece::Whitespace(3), TriviaPiece::Comments(4)],
            vec![TriviaPiece::Whitespace(3)],
        );
    });
    let leading = node.first_leading_trivia().unwrap();
    let pieces: Vec<_> = leading.pieces().collect();
    assert_eq!(pieces.len(), 2);
    // First piece: the whitespace run "\n\t " spanning offsets 0..3.
    let whitespace = &pieces[0];
    assert_eq!(whitespace.text(), "\n\t ");
    assert_eq!(whitespace.text_len(), TextSize::from(3));
    assert_eq!(whitespace.text_range(), TextRange::new(0.into(), 3.into()));
    assert!(whitespace.as_whitespace().is_some());
    // Second piece: the comment "/**/" spanning offsets 3..7.
    let comment = &pieces[1];
    assert_eq!(comment.text(), "/**/");
    assert_eq!(comment.text_len(), TextSize::from(4));
    assert_eq!(comment.text_range(), TextRange::new(3.into(), 7.into()));
    assert!(comment.as_comments().is_some());
}
}
| 26.914582 | 137 | 0.630698 |
6afbfb4110908ee3d080d5e06cc975e1682cf89e | 2,700 | use criterion::{black_box, criterion_group, criterion_main, Criterion};
use rdst::utils::bench_utils::{bench_common, bench_medley};
use rdst::utils::test_utils::NumericTest;
use rdst::RadixSort;
use voracious_radix_sort::{RadixKey as VorKey, RadixSort as Vor, Radixable};
/// Builds the set of sort implementations every benchmark in this file
/// compares: `rdst` with the default tuner, `rdst` with the low-memory
/// tuner, and `voracious` (multi-threaded) as an external baseline.
///
/// Extracted so `full_sort_common` and `full_sort_medley_set` share one
/// definition instead of duplicating the three closures.
fn sort_contenders<T>() -> Vec<(&'static str, Box<dyn Fn(Vec<T>)>)>
where
    T: NumericTest<T> + Radixable<T> + VorKey,
{
    vec![
        (
            "rdst",
            Box::new(|mut input: Vec<T>| {
                input.radix_sort_unstable();
                black_box(input);
            }),
        ),
        (
            "rdst_low_mem",
            Box::new(|mut input: Vec<T>| {
                input.radix_sort_builder().with_low_mem_tuner().sort();
                black_box(input);
            }),
        ),
        (
            "voracious",
            Box::new(|mut input: Vec<T>| {
                input.voracious_mt_sort(num_cpus::get());
                black_box(input);
            }),
        ),
    ]
}
/// Benchmarks all sort contenders on the standard input sets, under the
/// `full_sort_<name_suffix>` group. `shift` biases the key distribution
/// (see `bench_utils::bench_common`).
fn full_sort_common<T>(c: &mut Criterion, shift: T, name_suffix: &str)
where
    T: NumericTest<T> + Radixable<T> + VorKey,
{
    let tests = sort_contenders::<T>();
    bench_common(c, shift, &("full_sort_".to_owned() + name_suffix), tests);
}
/// Benchmarks all sort contenders on the "medley" (mixed-pattern) inputs,
/// under the `full_sort_medley_<suffix>` group.
fn full_sort_medley_set<T>(c: &mut Criterion, suffix: &str, shift: T)
where
    T: NumericTest<T> + Radixable<T> + VorKey,
{
    let tests = sort_contenders::<T>();
    bench_medley(c, &("full_sort_medley_".to_owned() + suffix), tests, shift);
}
/// Full-range u32 keys (no bias shift).
fn full_sort_u32(c: &mut Criterion) {
    full_sort_common(c, 0u32, "u32");
}
/// Full-range u64 keys (no bias shift).
fn full_sort_u64(c: &mut Criterion) {
    full_sort_common(c, 0u64, "u64");
}
/// u32 keys with a 16-bit shift ("bimodal" distribution — see `bench_utils`
/// for exactly how the shift is applied).
fn full_sort_u32_bimodal(c: &mut Criterion) {
    full_sort_common(c, 16u32, "u32_bimodal");
}
/// u64 keys with a 32-bit shift ("bimodal" distribution).
fn full_sort_u64_bimodal(c: &mut Criterion) {
    full_sort_common(c, 32u64, "u64_bimodal");
}
/// Runs the medley (mixed input pattern) benchmark for every key
/// type/distribution combination above.
fn full_sort_medley(c: &mut Criterion) {
    full_sort_medley_set(c, "u32", 0u32);
    full_sort_medley_set(c, "u32_bimodal", 16u32);
    full_sort_medley_set(c, "u64", 0u64);
    full_sort_medley_set(c, "u64_bimodal", 32u64);
}
// Register every benchmark entry point defined above with criterion.
criterion_group!(
    benches,
    full_sort_u32,
    full_sort_u64,
    full_sort_u32_bimodal,
    full_sort_u64_bimodal,
    full_sort_medley,
);
| 26.732673 | 78 | 0.557037 |
75d4fe417a7679e0192f3a9441cf5783a2061c04 | 10,435 | //! Synchronous HTTP interactions
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex};
use anyhow::anyhow;
use log::warn;
use serde_json::{json, Map, Value};
use crate::bodies::OptionalBody;
use crate::content_types::ContentType;
use crate::interaction::Interaction;
use crate::json_utils::{is_empty, json_to_string};
use crate::matchingrules::MatchingRules;
use crate::message::Message;
use crate::provider_states::ProviderState;
use crate::sync_interaction::RequestResponseInteraction;
use crate::v4::async_message::AsynchronousMessage;
use crate::v4::http_parts::{HttpRequest, HttpResponse};
use crate::v4::interaction::{InteractionMarkup, parse_plugin_config, V4Interaction};
use crate::v4::sync_message::SynchronousMessage;
use crate::v4::V4InteractionType;
/// V4 HTTP Interaction Type
///
/// Note: `Eq` is derived, but `PartialEq` and `Hash` are implemented
/// manually further below and only consider the fields that describe the
/// request/response exchange (description, provider states, request,
/// response and the pending flag).
#[derive(Debug, Clone, Eq)]
pub struct SynchronousHttp {
  /// Interaction ID. This will only be set if the Pact file was fetched from a Pact Broker
  pub id: Option<String>,
  /// Unique key for this interaction
  pub key: Option<String>,
  /// A description for the interaction. Must be unique within the Pact file
  pub description: String,
  /// Optional provider states for the interaction.
  /// See `<https://docs.pact.io/getting_started/provider_states>` for more info on provider states.
  pub provider_states: Vec<ProviderState>,
  /// Request of the interaction
  pub request: HttpRequest,
  /// Response of the interaction
  pub response: HttpResponse,
  /// Annotations and comments associated with this interaction
  pub comments: HashMap<String, Value>,
  /// If this interaction is pending. Pending interactions will never fail the build if they fail
  pub pending: bool,
  /// Configuration added by plugins
  pub plugin_config: HashMap<String, HashMap<String, Value>>,
  /// Text markup to use to render the interaction in a UI
  pub interaction_markup: InteractionMarkup
}
impl SynchronousHttp {
  /// Hashes the interaction (via the manual `Hash` impl below: description,
  /// provider states, request, response and pending flag) and renders the
  /// result as a lower-case hex string. Used as the interaction key.
  fn calc_hash(&self) -> String {
    let mut s = DefaultHasher::new();
    self.hash(&mut s);
    format!("{:x}", s.finish())
  }
  /// Creates a new version with a calculated key
  pub fn with_key(&self) -> SynchronousHttp {
    SynchronousHttp {
      key: Some(self.calc_hash()),
      .. self.clone()
    }
  }
  /// Parse the JSON into a SynchronousHttp interaction
  ///
  /// `index` is only used to synthesise a default description
  /// (`"Interaction {index}"`) when the JSON has none. Returns an error if
  /// `json` is not a JSON object or the request/response parts fail to
  /// parse.
  pub fn from_json(json: &Value, index: usize) -> anyhow::Result<SynchronousHttp> {
    if json.is_object() {
      let id = json.get("_id").map(|id| json_to_string(id));
      let key = json.get("key").map(|id| json_to_string(id));
      // A non-string description is rendered via its JSON representation.
      let description = match json.get("description") {
        Some(v) => match *v {
          Value::String(ref s) => s.clone(),
          _ => v.to_string()
        },
        None => format!("Interaction {}", index)
      };
      // Comments must be a JSON object; any other type is logged and dropped.
      let comments = match json.get("comments") {
        Some(v) => match v {
          Value::Object(map) => map.iter()
            .map(|(k, v)| (k.clone(), v.clone())).collect(),
          _ => {
            warn!("Interaction comments must be a JSON Object, but received {}. Ignoring", v);
            Default::default()
          }
        },
        None => Default::default()
      };
      let provider_states = ProviderState::from_json(json);
      // Missing request/response sections fall back to their defaults.
      let request = json.get("request").cloned().unwrap_or_default();
      let response = json.get("response").cloned().unwrap_or_default();
      let plugin_config = parse_plugin_config(json);
      let interaction_markup = json.get("interactionMarkup")
        .map(|markup| InteractionMarkup::from_json(markup)).unwrap_or_default();
      Ok(SynchronousHttp {
        id,
        key,
        description,
        provider_states,
        request: HttpRequest::from_json(&request)?,
        response: HttpResponse::from_json(&response)?,
        comments,
        pending: json.get("pending")
          .map(|value| value.as_bool().unwrap_or_default()).unwrap_or_default(),
        plugin_config,
        interaction_markup
      })
    } else {
      Err(anyhow!("Expected a JSON object for the interaction, got '{}'", json))
    }
  }
}
impl V4Interaction for SynchronousHttp {
  /// Serialises this interaction to V4 Pact JSON. The `key` falls back to
  /// the calculated hash when not set, and the optional sections (provider
  /// states, comments, plugin configuration, markup) are only emitted when
  /// they are non-empty.
  fn to_json(&self) -> Value {
    let mut json = json!({
      "type": V4InteractionType::Synchronous_HTTP.to_string(),
      "key": self.key.clone().unwrap_or_else(|| self.calc_hash()),
      "description": self.description.clone(),
      "request": self.request.to_json(),
      "response": self.response.to_json(),
      "pending": self.pending
    });
    if !self.provider_states.is_empty() {
      let map = json.as_object_mut().unwrap();
      map.insert("providerStates".to_string(), Value::Array(
        self.provider_states.iter().map(|p| p.to_json()).collect()));
    }
    // Comments with empty values are filtered out before serialising.
    let comments: Map<String, Value> = self.comments.iter()
      .filter(|(_k, v)| !is_empty(v))
      .map(|(k, v)| (k.clone(), v.clone()))
      .collect();
    if !comments.is_empty() {
      let map = json.as_object_mut().unwrap();
      map.insert("comments".to_string(), Value::Object(comments));
    }
    if !self.plugin_config.is_empty() {
      let map = json.as_object_mut().unwrap();
      map.insert("pluginConfiguration".to_string(), self.plugin_config.iter()
        .map(|(k, v)|
          (k.clone(), Value::Object(v.iter().map(|(k, v)| (k.clone(), v.clone())).collect()))
        ).collect());
    }
    if !self.interaction_markup.is_empty() {
      let map = json.as_object_mut().unwrap();
      map.insert("interactionMarkup".to_string(), self.interaction_markup.to_json());
    }
    json
  }
  /// Upcasts to the base `Interaction` trait.
  fn to_super(&self) -> &dyn Interaction {
    self
  }
  fn key(&self) -> Option<String> {
    self.key.clone()
  }
  fn boxed_v4(&self) -> Box<dyn V4Interaction> {
    Box::new(self.clone())
  }
  fn comments(&self) -> HashMap<String, Value> {
    self.comments.clone()
  }
  fn comments_mut(&mut self) -> &mut HashMap<String, Value> {
    &mut self.comments
  }
  fn v4_type(&self) -> V4InteractionType {
    V4InteractionType::Synchronous_HTTP
  }
  fn plugin_config(&self) -> HashMap<String, HashMap<String, Value>> {
    self.plugin_config.clone()
  }
  fn plugin_config_mut(&mut self) -> &mut HashMap<String, HashMap<String, Value>> {
    &mut self.plugin_config
  }
  fn interaction_markup(&self) -> InteractionMarkup {
    self.interaction_markup.clone()
  }
  fn interaction_markup_mut(&mut self) -> &mut InteractionMarkup {
    &mut self.interaction_markup
  }
}
impl Interaction for SynchronousHttp {
  fn type_of(&self) -> String {
    format!("V4 {}", self.v4_type())
  }
  fn is_request_response(&self) -> bool {
    true
  }
  /// Downcasts to the V3 request/response model, dropping the V4-only
  /// fields (key, comments, pending, plugin config, markup).
  fn as_request_response(&self) -> Option<RequestResponseInteraction> {
    Some(RequestResponseInteraction {
      id: self.id.clone(),
      description: self.description.clone(),
      provider_states: self.provider_states.clone(),
      request: self.request.as_v3_request(),
      response: self.response.as_v3_response()
    })
  }
  fn is_message(&self) -> bool {
    false
  }
  fn as_message(&self) -> Option<Message> {
    None
  }
  fn id(&self) -> Option<String> {
    self.id.clone()
  }
  fn description(&self) -> String {
    self.description.clone()
  }
  fn set_id(&mut self, id: Option<String>) {
    self.id = id;
  }
  fn set_description(&mut self, description: &str) {
    self.description = description.to_string();
  }
  fn provider_states(&self) -> Vec<ProviderState> {
    self.provider_states.clone()
  }
  fn provider_states_mut(&mut self) -> &mut Vec<ProviderState> {
    &mut self.provider_states
  }
  // For an HTTP interaction, "contents" means the response body.
  fn contents(&self) -> OptionalBody {
    self.response.body.clone()
  }
  fn contents_for_verification(&self) -> OptionalBody {
    self.response.body.clone()
  }
  fn content_type(&self) -> Option<ContentType> {
    self.response.content_type()
  }
  fn is_v4(&self) -> bool {
    true
  }
  fn as_v4(&self) -> Option<Box<dyn V4Interaction>> {
    Some(self.boxed_v4())
  }
  fn as_v4_mut(&mut self) -> Option<&mut dyn V4Interaction> {
    Some(self)
  }
  fn is_v4_http(&self) -> bool {
    true
  }
  fn as_v4_http(&self) -> Option<SynchronousHttp> {
    Some(self.clone())
  }
  fn as_v4_async_message(&self) -> Option<AsynchronousMessage> {
    None
  }
  fn as_v4_sync_message(&self) -> Option<SynchronousMessage> {
    None
  }
  fn as_v4_http_mut(&mut self) -> Option<&mut SynchronousHttp> {
    Some(self)
  }
  fn as_v4_async_message_mut(&mut self) -> Option<&mut AsynchronousMessage> {
    None
  }
  fn as_v4_sync_message_mut(&mut self) -> Option<&mut SynchronousMessage> {
    None
  }
  fn boxed(&self) -> Box<dyn Interaction + Send + Sync> {
    Box::new(self.clone())
  }
  fn arced(&self) -> Arc<dyn Interaction + Send + Sync> {
    Arc::new(self.clone())
  }
  fn thread_safe(&self) -> Arc<Mutex<dyn Interaction + Send + Sync>> {
    Arc::new(Mutex::new(self.clone()))
  }
  // NOTE(review): no interaction-level matching rules here — presumably they
  // live on the request/response parts for V4 HTTP; confirm against the
  // HttpRequest/HttpResponse definitions.
  fn matching_rules(&self) -> Option<MatchingRules> {
    None
  }
  fn pending(&self) -> bool {
    self.pending
  }
}
impl Default for SynchronousHttp {
  /// An empty interaction with the default description
  /// "Synchronous/HTTP Interaction" and no id/key set.
  fn default() -> Self {
    SynchronousHttp {
      id: None,
      key: None,
      description: "Synchronous/HTTP Interaction".to_string(),
      provider_states: vec![],
      request: HttpRequest::default(),
      response: HttpResponse::default(),
      comments: Default::default(),
      pending: false,
      plugin_config: Default::default(),
      interaction_markup: Default::default()
    }
  }
}
/// Equality intentionally ignores `id`, `key`, `comments`, `plugin_config`
/// and `interaction_markup`: two interactions compare equal when they
/// describe the same request/response exchange. Must stay consistent with
/// the `Hash` impl below (the struct derives `Eq`).
impl PartialEq for SynchronousHttp {
  fn eq(&self, other: &Self) -> bool {
    self.description == other.description && self.provider_states == other.provider_states &&
      self.request == other.request && self.response == other.response &&
      self.pending == other.pending
  }
}
/// Hashes exactly the fields that `PartialEq` compares, preserving the
/// `Hash`/`Eq` contract (equal values hash equally). This hash is also what
/// `calc_hash` uses to derive the interaction key.
impl Hash for SynchronousHttp {
  fn hash<H: Hasher>(&self, state: &mut H) {
    self.description.hash(state);
    self.provider_states.hash(state);
    self.request.hash(state);
    self.response.hash(state);
    self.pending.hash(state);
  }
}
impl Display for SynchronousHttp {
  /// Renders a one-line human-readable summary; " [PENDING]" is appended to
  /// the type name when the pending flag is set.
  fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
    let pending = if self.pending { " [PENDING]" } else { "" };
    write!(f, "V4 Http Interaction{} ( id: {:?}, description: \"{}\", provider_states: {:?}, request: {}, response: {} )",
           pending, self.id, self.description, self.provider_states, self.request, self.response)
  }
}
#[cfg(test)]
mod tests {
}
| 27.60582 | 122 | 0.644082 |
698cf105ae53c12d6b1a5d4dabec5175f1dc6c63 | 2,655 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* This module contains a simple utility routine
* used by both `typeck` and `const_eval`.
* Almost certainly this could (and should) be refactored out of existence.
*/
use middle::def;
use middle::ty::{self, Ty};
use syntax::ast;
use util::ppaux::Repr;
pub const NO_REGIONS: usize = 1;
pub const NO_TPS: usize = 2;
/// Reports an error for any type parameters (when `NO_TPS` is set in
/// `flags`) or lifetime parameters (when `NO_REGIONS` is set) found on the
/// given path segments. At most one error of each kind is emitted per
/// segment, pointing at the first offending item.
pub fn check_path_args(tcx: &ty::ctxt, segments: &[ast::PathSegment], flags: usize) {
    for segment in segments {
        if (flags & NO_TPS) != 0 {
            if let Some(typ) = segment.parameters.types().into_iter().next() {
                span_err!(tcx.sess, typ.span, E0109,
                          "type parameters are not allowed on this type");
            }
        }
        if (flags & NO_REGIONS) != 0 {
            if let Some(lifetime) = segment.parameters.lifetimes().into_iter().next() {
                span_err!(tcx.sess, lifetime.span, E0110,
                          "lifetime parameters are not allowed on this type");
            }
        }
    }
}
/// Maps an AST primitive-type token to the corresponding interned `Ty`,
/// first rejecting any type or lifetime parameters on the path (primitives
/// never take either).
pub fn prim_ty_to_ty<'tcx>(tcx: &ty::ctxt<'tcx>,
                           segments: &[ast::PathSegment],
                           nty: ast::PrimTy)
                           -> Ty<'tcx> {
    check_path_args(tcx, segments, NO_TPS | NO_REGIONS);
    match nty {
        ast::TyBool => tcx.types.bool,
        ast::TyChar => tcx.types.char,
        ast::TyInt(it) => ty::mk_mach_int(tcx, it),
        ast::TyUint(uit) => ty::mk_mach_uint(tcx, uit),
        ast::TyFloat(ft) => ty::mk_mach_float(tcx, ft),
        ast::TyStr => ty::mk_str(tcx)
    }
}
/// Returns the primitive type an AST type refers to, or `None` when the
/// type is not an unqualified path or does not resolve to a primitive.
pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty)
                               -> Option<Ty<'tcx>> {
    // Only unqualified paths (no `<T as Trait>::` prefix) can name a
    // primitive type.
    let path = match ast_ty.node {
        ast::TyPath(None, ref path) => path,
        _ => return None,
    };
    // Resolution must already have recorded a definition for this path;
    // anything else is a compiler bug (span_bug diverges).
    let def = match tcx.def_map.borrow().get(&ast_ty.id) {
        Some(d) => d.full_def(),
        None => tcx.sess.span_bug(ast_ty.span,
                                  &format!("unbound path {}", path.repr(tcx))),
    };
    match def {
        def::DefPrimTy(nty) => Some(prim_ty_to_ty(tcx, &path.segments, nty)),
        _ => None,
    }
}
| 33.607595 | 85 | 0.550282 |
eb107bff0d37ee4c54c285b5834761c083667dae | 1,490 | use crate::{
errors::{verify, ParrotError},
strings::{FAIL_MINUTES_PARSING, FAIL_SECONDS_PARSING, SEEKED},
utils::create_response,
};
use serenity::{
client::Context, model::application::interaction::application_command::ApplicationCommandInteraction,
};
use std::time::Duration;
/// Seeks the currently playing track to a `mm:ss` timestamp supplied as the
/// first slash-command option. Errors when the timestamp components don't
/// parse or when nothing is playing.
pub async fn seek(
    ctx: &Context,
    interaction: &mut ApplicationCommandInteraction,
) -> Result<(), ParrotError> {
    let guild_id = interaction.guild_id.unwrap();
    let manager = songbird::get(ctx).await.unwrap();
    let call = manager.get(guild_id).unwrap();
    let args = interaction.data.options.clone();
    let seek_time = args.first().unwrap().value.as_ref().unwrap();
    let timestamp = seek_time.as_str().unwrap();
    // Parse "mm:ss"; each component must be a valid u64.
    let mut components = timestamp.split(':');
    let minutes = verify(
        components.next().and_then(|c| c.parse::<u64>().ok()),
        ParrotError::Other(FAIL_MINUTES_PARSING),
    )?;
    let seconds = verify(
        components.next().and_then(|c| c.parse::<u64>().ok()),
        ParrotError::Other(FAIL_SECONDS_PARSING),
    )?;
    let total_seconds = minutes * 60 + seconds;
    // Grab the current track, then release the call lock before seeking.
    let handler = call.lock().await;
    let track = handler
        .queue()
        .current()
        .ok_or(ParrotError::NothingPlaying)?;
    drop(handler);
    track.seek_time(Duration::from_secs(total_seconds)).unwrap();
    create_response(
        &ctx.http,
        interaction,
        &format!("{} **{}**!", SEEKED, seek_time),
    )
    .await
}
| 30.408163 | 105 | 0.656376 |
38d95dba78e01fa81d7587923563137c8b50b700 | 2,005 | /*
Copyright (c) 2017 The swc Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
use anyhow::Context;
use napi::{CallContext, JsBuffer, Status};
use serde::de::DeserializeOwned;
/// Converts an `anyhow`-style `Result` into a `napi::Result`, mapping any
/// error into a generic N-API failure whose message is the error's `Debug`
/// rendering (which includes the anyhow context chain).
pub trait MapErr<T>: Into<Result<T, anyhow::Error>> {
    fn convert_err(self) -> napi::Result<T> {
        self.into()
            .map_err(|err| napi::Error::new(Status::GenericFailure, format!("{:?}", err)))
    }
}
impl<T> MapErr<T> for Result<T, anyhow::Error> {}
/// Extension methods for reading typed arguments off an N-API call context.
pub trait CtxtExt {
    /// Currently this uses JsBuffer
    ///
    /// Reads the call argument at `index` as a `JsBuffer` and deserializes
    /// its bytes as JSON into `T`.
    fn get_deserialized<T>(&self, index: usize) -> napi::Result<T>
    where
        T: DeserializeOwned;
}
impl CtxtExt for CallContext<'_> {
    /// Reads the call argument at `index` as a `JsBuffer` and deserializes
    /// its bytes as JSON into `T`.
    ///
    /// A failure to fetch the buffer propagates as-is via `?`; a JSON
    /// deserialization failure is wrapped with context naming the argument.
    fn get_deserialized<T>(&self, index: usize) -> napi::Result<T>
    where
        T: DeserializeOwned,
    {
        let buffer = self.get::<JsBuffer>(index)?.into_value()?;
        // Bug fix: this context wraps the serde_json error, so the message
        // must describe deserialization. The previous text ("is not
        // JsBuffer") described the buffer fetch above, which has already
        // succeeded (and is handled by `?`) by the time this runs.
        let v = serde_json::from_slice(&buffer)
            .with_context(|| format!("Failed to deserialize argument at `{}` as JSON", index))
            .convert_err()?;
        Ok(v)
    }
}
| 32.33871 | 90 | 0.708728 |
5d879de5cb6e0437a8c9ca2ee261f6bd3b7bed52 | 5,924 | use futures::prelude::*;
use hyper::client;
use hyper::header;
use hyper::{self, Body, Chunk, Method, Request, StatusCode, Uri};
use percent_encoding::{utf8_percent_encode, QUERY_ENCODE_SET};
use serde::de::DeserializeOwned;
use serde_json;
use std::marker::PhantomData;
use std::str::FromStr;
use std::time::Duration;
/// Alias for a future resolving to a raw HTTP status plus the full body.
pub trait ExecFuture: Future<Item = (StatusCode, Chunk), Error = hyper::Error> {}
impl<T: Future<Item = (StatusCode, Chunk), Error = hyper::Error>> ExecFuture for T {}
#[derive(Debug)]
pub enum QueryError {
    /// Transport-level failure from hyper.
    Hyper(hyper::Error),
    /// Non-success HTTP status; carries the status, Consul metadata and body.
    Response(StatusCode, QueryMeta, Chunk),
    /// The response body could not be deserialized as the expected JSON.
    Json(serde_json::Error),
}
#[derive(Debug)]
pub struct QueryOption {
    /// Blocking-query index (sent as `index`); 0 means a plain query.
    pub wait_index: u64,
    /// Maximum time the server should hold a blocking query open (`wait`).
    pub wait_time: Option<Duration>,
    /// Target datacenter (`dc`), if not the agent's default.
    pub dc: Option<String>,
    /// Service tag filter (`tag`).
    pub tag: Option<String>,
}
#[derive(Debug)]
pub struct QueryMeta {
    /// Value of the `X-Consul-Index` response header (0 when absent).
    pub last_index: u64,
}
impl Default for QueryMeta {
    fn default() -> Self {
        Self { last_index: 0 }
    }
}
/// Extracts Consul query metadata from the response headers.
///
/// Returns `Err(())` when an `X-Consul-Index` header is present but is not
/// valid UTF-8 or not a valid integer; a missing header yields
/// `last_index == 0`.
fn parse_query_meta<T>(resp: &hyper::Response<T>) -> Result<QueryMeta, ()> {
    let last_index = match resp.headers().get("X-Consul-Index") {
        Some(value) => {
            let text = value.to_str().map_err(|_| ())?;
            u64::from_str(text).map_err(|_| ())?
        }
        None => 0,
    };
    Ok(QueryMeta { last_index })
}
/// Future for a typed Consul query: wraps the raw HTTP future and
/// deserializes the response body into `T` once it resolves.
pub struct QueryFuture<T> {
    inner: Box<Future<Item = (StatusCode, QueryMeta, Chunk), Error = QueryError> + Send>,
    // Marks the target type without storing a value of it.
    dummy: PhantomData<T>,
}
impl<T> Future for QueryFuture<T>
where
    T: DeserializeOwned,
{
    type Item = (QueryMeta, T);
    type Error = QueryError;
    fn poll(&mut self) -> Result<Async<Self::Item>, Self::Error> {
        let (status, meta, chunk) = try_ready!(self.inner.poll());
        // Non-2xx responses surface as errors carrying the raw body.
        if !status.is_success() {
            Err(QueryError::Response(status, meta, chunk))
        } else {
            match serde_json::from_slice(chunk.as_ref()) {
                Ok(v) => Ok(Async::Ready((meta, v))),
                Err(e) => Err(QueryError::Json(e)),
            }
        }
    }
}
/// A single request to the Consul HTTP API.
pub struct ClientRequest<'a> {
    method: Method,
    // Joined directly after the trimmed base URL by `Client::send`, so it
    // should start with '/'.
    path: &'a str,
    // Query parameters as alternating key/value entries (see the pairwise
    // loop in `Client::send`).
    params: Vec<&'a str>,
    body: Option<Vec<u8>>,
}
/// Consul HTTP API client over a shared keep-alive hyper client.
#[derive(Debug)]
pub struct Client {
    // Base URL with trailing '/' removed.
    base: String,
    // Optional ACL token sent with every request.
    token: Option<String>,
    hc: hyper::Client<client::HttpConnector>,
}
impl Client {
    /// Creates a client for the Consul agent at `base` (trailing slashes are
    /// trimmed), optionally authenticating every request with an ACL token.
    pub fn new(base: &str, token: Option<String>) -> Client {
        Client {
            base: base.trim_right_matches('/').to_owned(),
            token,
            hc: hyper::Client::builder().keep_alive(true).build_http(),
        }
    }
    /// Builds the full URL from the path and the alternating key/value
    /// `params` list (values are percent-encoded), attaches the standard
    /// headers and dispatches the request on the shared hyper client.
    fn send(&self, req: ClientRequest) -> client::ResponseFuture {
        let params = &req.params;
        let n: usize = params.iter().map(|s| s.len()).sum();
        // Pre-size for base + path + '?' plus one '='/'&' per entry.
        let mut u = String::with_capacity(self.base.len() + req.path.len() + params.len() * 2 + n);
        u.push_str(self.base.as_str());
        u.push_str(req.path);
        u.push('?');
        let mut i = 0;
        while i < params.len() {
            if i > 0 {
                u.push('&');
            }
            u.push_str(params[i]);
            u.push('=');
            u.push_str(
                utf8_percent_encode(params[i + 1], QUERY_ENCODE_SET)
                    .to_string()
                    .as_str(),
            );
            i += 2;
        }
        let body = match req.body {
            Some(body) => Body::from(body),
            None => Body::empty(),
        };
        let mut hr = Request::new(body);
        *hr.uri_mut() = Uri::from_str(u.as_str()).unwrap();
        *hr.method_mut() = req.method;
        hr.headers_mut().insert(
            header::CONTENT_TYPE,
            header::HeaderValue::from_static("application/json"),
        );
        if let Some(ref t) = self.token {
            // Bug fix: `HeaderName::from_static` requires a lowercase name
            // and panics otherwise, so "X-Consul-Token" aborted at runtime
            // on every authenticated request. HTTP header names are
            // case-insensitive, so the lowercase form is equivalent on the
            // wire.
            hr.headers_mut().insert(
                header::HeaderName::from_static("x-consul-token"),
                header::HeaderValue::from_str(t.as_str()).unwrap(),
            );
        }
        self.hc.request(hr)
    }
    /// Executes a request and resolves to the response status plus the
    /// fully-buffered body.
    pub(super) fn exec(
        &self,
        method: Method,
        path: &str,
        params: Vec<&str>,
        body: Option<Vec<u8>>,
    ) -> impl ExecFuture {
        self.send(ClientRequest {
            method,
            path,
            params,
            body,
        }).and_then(|resp| {
            let status = resp.status();
            resp.into_body().concat2().map(move |chunk| (status, chunk))
        })
    }
    /// Issues a GET query, translating `QueryOption` into Consul's `dc`,
    /// `tag`, `index` (blocking query) and `wait` parameters, and returns a
    /// future that deserializes the JSON response into `T`.
    pub(super) fn query<T: DeserializeOwned>(
        &self,
        path: &str,
        params: Vec<&str>,
        qo: Option<QueryOption>,
    ) -> QueryFuture<T> {
        // `index` and `wait` own the formatted strings so the &str params
        // can borrow from them until the request is built.
        let (index, wait);
        let mut params = params;
        if let Some(ref qo) = qo {
            if let Some(ref v) = qo.dc {
                params.push("dc");
                params.push(v.as_str());
            }
            if let Some(ref v) = qo.tag {
                params.push("tag");
                params.push(v.as_str());
            }
            if qo.wait_index != 0 {
                index = qo.wait_index.to_string();
                params.push("index");
                params.push(index.as_str());
            }
            if let Some(ref v) = qo.wait_time {
                wait = v.as_secs().to_string() + "s";
                params.push("wait");
                params.push(wait.as_str());
            }
        }
        QueryFuture {
            inner: Box::new(
                self.send(ClientRequest {
                    method: Method::GET,
                    path,
                    params,
                    body: None,
                }).and_then(|resp| {
                    let status = resp.status();
                    // Unparseable metadata falls back to last_index == 0.
                    let meta = parse_query_meta(&resp).unwrap_or_default();
                    resp.into_body()
                        .concat2()
                        .map(move |chunk| (status, meta, chunk))
                }).map_err(|e| QueryError::Hyper(e)),
            ),
            dummy: PhantomData,
        }
    }
}
| 28.897561 | 99 | 0.507427 |
29870ca8dfd8fceac0412dce9cc0efbba6b81d43 | 17,206 | use crate::log::Stopwatch;
use rand::Rng;
use std::sync::Arc;
use futures::Future;
use crate::pairing::{
Engine,
CurveProjective,
CurveAffine
};
use crate::pairing::ff::{
PrimeField,
Field
};
use super::{
ParameterSource,
Proof
};
use crate::{
SynthesisError,
Circuit,
ConstraintSystem,
LinearCombination,
Variable,
Index
};
use crate::domain::{
EvaluationDomain,
Scalar
};
use crate::source::{
DensityTracker,
FullDensity
};
use crate::multiexp::*;
use crate::worker::{
Worker
};
/// Evaluates a linear combination against the current variable assignments,
/// returning `sum(coeff_i * value_i)`. Each variable access is recorded in
/// the matching density tracker when one is supplied.
fn eval<E: Engine>(
    lc: &LinearCombination<E>,
    mut input_density: Option<&mut DensityTracker>,
    mut aux_density: Option<&mut DensityTracker>,
    input_assignment: &[E::Fr],
    aux_assignment: &[E::Fr]
) -> E::Fr
{
    let mut acc = E::Fr::zero();
    for &(index, coeff) in lc.0.iter() {
        // Look up the variable's assigned value, recording the access.
        let value = match index {
            Variable(Index::Input(i)) => {
                if let Some(ref mut tracker) = input_density {
                    tracker.inc(i);
                }
                input_assignment[i]
            },
            Variable(Index::Aux(i)) => {
                if let Some(ref mut tracker) = aux_density {
                    tracker.inc(i);
                }
                aux_assignment[i]
            }
        };
        // Skip the field multiplication when the coefficient is one.
        if coeff == E::Fr::one() {
            acc.add_assign(&value);
        } else {
            let mut term = value;
            term.mul_assign(&coeff);
            acc.add_assign(&term);
        }
    }
    acc
}
// This is a proving assignment with densities precalculated
pub struct PreparedProver<E: Engine>{
    assignment: ProvingAssignment<E>,
}
// Accumulated result of synthesising a circuit: query densities, the A/B/C
// evaluations per constraint, and the variable assignments.
#[derive(Clone)]
struct ProvingAssignment<E: Engine> {
    // Density of queries
    a_aux_density: DensityTracker,
    b_input_density: DensityTracker,
    b_aux_density: DensityTracker,
    // Evaluations of A, B, C polynomials
    a: Vec<Scalar<E>>,
    b: Vec<Scalar<E>>,
    c: Vec<Scalar<E>>,
    // Assignments of variables
    input_assignment: Vec<E::Fr>,
    aux_assignment: Vec<E::Fr>
}
/// Synthesises `circuit` into a `PreparedProver`, so one circuit evaluation
/// can back multiple proof creations.
pub fn prepare_prover<E, C>(
    circuit: C,
) -> Result<PreparedProver<E>, SynthesisError>
    where E: Engine, C: Circuit<E>
{
    let mut prover = ProvingAssignment {
        a_aux_density: DensityTracker::new(),
        b_input_density: DensityTracker::new(),
        b_aux_density: DensityTracker::new(),
        a: vec![],
        b: vec![],
        c: vec![],
        input_assignment: vec![],
        aux_assignment: vec![]
    };
    // The first input is always the constant one.
    prover.alloc_input(|| "", || Ok(E::Fr::one()))?;
    circuit.synthesize(&mut prover)?;
    // Enforce x * 0 = 0 for every input, giving inputs full density in the
    // A query.
    for i in 0..prover.input_assignment.len() {
        prover.enforce(|| "",
            |lc| lc + Variable(Index::Input(i)),
            |lc| lc,
            |lc| lc,
        );
    }
    Ok(PreparedProver { assignment: prover })
}
impl<E:Engine> PreparedProver<E> {
    /// Samples the two blinding factors from `rng` and delegates to
    /// `create_proof`.
    pub fn create_random_proof<R, P: ParameterSource<E>>(
        & self,
        params: P,
        rng: &mut R
    ) -> Result<Proof<E>, SynthesisError>
        where R: Rng
    {
        let r = rng.gen();
        let s = rng.gen();
        self.create_proof(params, r, s)
    }
    /// Creates a proof from the prepared assignment using the blinding
    /// factors `r` and `s`: computes the H query coefficients via
    /// (i)FFTs over a coset, runs the multi-exponentiations for the A/B/L/H
    /// queries, and assembles the three proof elements.
    pub fn create_proof<P: ParameterSource<E>>(
        & self,
        mut params: P,
        r: E::Fr,
        s: E::Fr
    ) -> Result<Proof<E>, SynthesisError>
    {
        let prover = self.assignment.clone();
        let worker = Worker::new();
        let vk = params.get_vk(self.assignment.input_assignment.len())?;
        let stopwatch = Stopwatch::new();
        // Compute h(x) = (a(x)*b(x) - c(x)) / z(x) and kick off the H-query
        // multiexp with its coefficients.
        let h = {
            let mut a = EvaluationDomain::from_coeffs(prover.a)?;
            let mut b = EvaluationDomain::from_coeffs(prover.b)?;
            let mut c = EvaluationDomain::from_coeffs(prover.c)?;
            elog_verbose!("H query domain size is {}", a.as_ref().len());
            // here a coset is a domain where denominator (z) does not vanish
            // inverse FFT is an interpolation
            a.ifft(&worker);
            // evaluate in coset
            a.coset_fft(&worker);
            // same is for B and C
            b.ifft(&worker);
            b.coset_fft(&worker);
            c.ifft(&worker);
            c.coset_fft(&worker);
            // do A*B-C in coset
            a.mul_assign(&worker, &b);
            drop(b);
            a.sub_assign(&worker, &c);
            drop(c);
            // z does not vanish in coset, so we divide by non-zero
            a.divide_by_z_on_coset(&worker);
            // interpolate back in coset
            a.icoset_fft(&worker);
            let mut a = a.into_coeffs();
            let a_len = a.len() - 1;
            a.truncate(a_len);
            // TODO: parallelize if it's even helpful
            // TODO: in large settings it may worth to parallelize
            let a = Arc::new(a.into_iter().map(|s| s.0.into_repr()).collect::<Vec<_>>());
            multiexp(&worker, params.get_h(a.len())?, FullDensity, a)
        };
        elog_verbose!("{} seconds for prover for H evaluation (mostly FFT)", stopwatch.elapsed());
        let stopwatch = Stopwatch::new();
        // TODO: Check that difference in operations for different chunks is small
        // TODO: parallelize if it's even helpful
        // TODO: in large settings it may worth to parallelize
        // Convert assignments to representation form and launch the
        // remaining multi-exponentiations (they run on the worker pool and
        // are awaited further below).
        let input_assignment = Arc::new(prover.input_assignment.into_iter().map(|s| s.into_repr()).collect::<Vec<_>>());
        let aux_assignment = Arc::new(prover.aux_assignment.into_iter().map(|s| s.into_repr()).collect::<Vec<_>>());
        let input_len = input_assignment.len();
        let aux_len = aux_assignment.len();
        elog_verbose!("H query is dense in G1,\nOther queries are {} elements in G1 and {} elements in G2",
            2*(input_len + aux_len) + aux_len, input_len + aux_len);
        // Run a dedicated process for dense vector
        let l = multiexp(&worker, params.get_l(aux_assignment.len())?, FullDensity, aux_assignment.clone());
        let a_aux_density_total = prover.a_aux_density.get_total_density();
        let (a_inputs_source, a_aux_source) = params.get_a(input_assignment.len(), a_aux_density_total)?;
        let a_inputs = multiexp(&worker, a_inputs_source, FullDensity, input_assignment.clone());
        let a_aux = multiexp(&worker, a_aux_source, Arc::new(prover.a_aux_density), aux_assignment.clone())
;
        let b_input_density = Arc::new(prover.b_input_density);
        let b_input_density_total = b_input_density.get_total_density();
        let b_aux_density = Arc::new(prover.b_aux_density);
        let b_aux_density_total = b_aux_density.get_total_density();
        let (b_g1_inputs_source, b_g1_aux_source) = params.get_b_g1(b_input_density_total, b_aux_density_total)?;
        let b_g1_inputs = multiexp(&worker, b_g1_inputs_source, b_input_density.clone(), input_assignment.clone());
        let b_g1_aux = multiexp(&worker, b_g1_aux_source, b_aux_density.clone(), aux_assignment.clone());
        let (b_g2_inputs_source, b_g2_aux_source) = params.get_b_g2(b_input_density_total, b_aux_density_total)?;
        let b_g2_inputs = multiexp(&worker, b_g2_inputs_source, b_input_density, input_assignment);
        let b_g2_aux = multiexp(&worker, b_g2_aux_source, b_aux_density, aux_assignment);
        if vk.delta_g1.is_zero() || vk.delta_g2.is_zero() {
            // If this element is zero, someone is trying to perform a
            // subversion-CRS attack.
            return Err(SynthesisError::UnexpectedIdentity);
        }
        // Assemble the three proof elements from the multiexp results and
        // the blinding factors r and s.
        let mut g_a = vk.delta_g1.mul(r);
        g_a.add_assign_mixed(&vk.alpha_g1);
        let mut g_b = vk.delta_g2.mul(s);
        g_b.add_assign_mixed(&vk.beta_g2);
        let mut g_c;
        {
            let mut rs = r;
            rs.mul_assign(&s);
            g_c = vk.delta_g1.mul(rs);
            g_c.add_assign(&vk.alpha_g1.mul(s));
            g_c.add_assign(&vk.beta_g1.mul(r));
        }
        let mut a_answer = a_inputs.wait()?;
        a_answer.add_assign(&a_aux.wait()?);
        g_a.add_assign(&a_answer);
        a_answer.mul_assign(s);
        g_c.add_assign(&a_answer);
        let mut b1_answer = b_g1_inputs.wait()?;
        b1_answer.add_assign(&b_g1_aux.wait()?);
        let mut b2_answer = b_g2_inputs.wait()?;
        b2_answer.add_assign(&b_g2_aux.wait()?);
        g_b.add_assign(&b2_answer);
        b1_answer.mul_assign(r);
        g_c.add_assign(&b1_answer);
        g_c.add_assign(&h.wait()?);
        g_c.add_assign(&l.wait()?);
        elog_verbose!("{} seconds for prover for point multiplication", stopwatch.elapsed());
        Ok(Proof {
            a: g_a.into_affine(),
            b: g_b.into_affine(),
            c: g_c.into_affine()
        })
    }
}
impl<E: Engine> ConstraintSystem<E> for ProvingAssignment<E> {
    type Root = Self;
    /// Allocates an auxiliary (private) variable with the value produced by
    /// `f`, registering it with the A and B aux density trackers.
    fn alloc<F, A, AR>(
        &mut self,
        _: A,
        f: F
    ) -> Result<Variable, SynthesisError>
        where F: FnOnce() -> Result<E::Fr, SynthesisError>, A: FnOnce() -> AR, AR: Into<String>
    {
        self.aux_assignment.push(f()?);
        self.a_aux_density.add_element();
        self.b_aux_density.add_element();
        Ok(Variable(Index::Aux(self.aux_assignment.len() - 1)))
    }
    /// Allocates a public input variable with the value produced by `f`,
    /// registering it with the B input density tracker.
    fn alloc_input<F, A, AR>(
        &mut self,
        _: A,
        f: F
    ) -> Result<Variable, SynthesisError>
        where F: FnOnce() -> Result<E::Fr, SynthesisError>, A: FnOnce() -> AR, AR: Into<String>
    {
        self.input_assignment.push(f()?);
        self.b_input_density.add_element();
        Ok(Variable(Index::Input(self.input_assignment.len() - 1)))
    }
    /// Records the constraint a * b = c by evaluating each linear
    /// combination against the current assignments and appending the
    /// results to the A, B and C evaluation vectors.
    fn enforce<A, AR, LA, LB, LC>(
        &mut self,
        _: A,
        a: LA,
        b: LB,
        c: LC
    )
        where A: FnOnce() -> AR, AR: Into<String>,
              LA: FnOnce(LinearCombination<E>) -> LinearCombination<E>,
              LB: FnOnce(LinearCombination<E>) -> LinearCombination<E>,
              LC: FnOnce(LinearCombination<E>) -> LinearCombination<E>
    {
        let a = a(LinearCombination::zero());
        let b = b(LinearCombination::zero());
        let c = c(LinearCombination::zero());
        self.a.push(Scalar(eval(
            &a,
            // Inputs have full density in the A query
            // because there are constraints of the
            // form x * 0 = 0 for each input.
            None,
            Some(&mut self.a_aux_density),
            &self.input_assignment,
            &self.aux_assignment
        )));
        self.b.push(Scalar(eval(
            &b,
            Some(&mut self.b_input_density),
            Some(&mut self.b_aux_density),
            &self.input_assignment,
            &self.aux_assignment
        )));
        self.c.push(Scalar(eval(
            &c,
            // There is no C polynomial query,
            // though there is an (beta)A + (alpha)B + C
            // query for all aux variables.
            // However, that query has full density.
            None,
            None,
            &self.input_assignment,
            &self.aux_assignment
        )));
    }
    fn push_namespace<NR, N>(&mut self, _: N)
        where NR: Into<String>, N: FnOnce() -> NR
    {
        // Do nothing; we don't care about namespaces in this context.
    }
    fn pop_namespace(&mut self)
    {
        // Do nothing; we don't care about namespaces in this context.
    }
    fn get_root(&mut self) -> &mut Self::Root {
        self
    }
}
/// Samples the blinding factors `r` then `s` from `rng` (in that order, to
/// keep the RNG call sequence stable) and delegates to `create_proof`.
pub fn create_random_proof<E, C, R, P: ParameterSource<E>>(
    circuit: C,
    params: P,
    rng: &mut R
) -> Result<Proof<E>, SynthesisError>
    where E: Engine, C: Circuit<E>, R: Rng
{
    let (r, s) = (rng.gen(), rng.gen());
    create_proof::<E, C, P>(circuit, params, r, s)
}
/// Create a Groth16 proof with caller-supplied blinding factors `r` and `s`.
///
/// Synthesizes `circuit` into flat assignment vectors, computes the H query
/// via (coset) FFTs, performs the multi-exponentiations against `params`,
/// and assembles the three proof elements. Returns an error if synthesis
/// fails or if the CRS contains a zero delta element.
pub fn create_proof<E, C, P: ParameterSource<E>>(
    circuit: C,
    mut params: P,
    r: E::Fr,
    s: E::Fr
) -> Result<Proof<E>, SynthesisError>
    where E: Engine, C: Circuit<E>
{
    // Phase 1: synthesize the circuit into assignment vectors plus the
    // per-constraint a/b/c scalar vectors.
    let mut prover = ProvingAssignment {
        a_aux_density: DensityTracker::new(),
        b_input_density: DensityTracker::new(),
        b_aux_density: DensityTracker::new(),
        a: vec![],
        b: vec![],
        c: vec![],
        input_assignment: vec![],
        aux_assignment: vec![]
    };
    // The first public input is always the constant ONE.
    prover.alloc_input(|| "", || Ok(E::Fr::one()))?;
    circuit.synthesize(&mut prover)?;
    // Enforce x * 0 = 0 for every input so inputs are fully dense in A.
    for i in 0..prover.input_assignment.len() {
        prover.enforce(|| "",
            |lc| lc + Variable(Index::Input(i)),
            |lc| lc,
            |lc| lc,
        );
    }
    let worker = Worker::new();
    let vk = params.get_vk(prover.input_assignment.len())?;
    let stopwatch = Stopwatch::new();
    // Phase 2: compute the H query, i.e. the coefficients of
    // (A(x)*B(x) - C(x)) / Z(x), evaluated via FFTs over a coset.
    let h = {
        let mut a = EvaluationDomain::from_coeffs(prover.a)?;
        let mut b = EvaluationDomain::from_coeffs(prover.b)?;
        let mut c = EvaluationDomain::from_coeffs(prover.c)?;
        elog_verbose!("H query domain size is {}", a.as_ref().len());
        // here a coset is a domain where denominator (z) does not vanish
        // inverse FFT is an interpolation
        a.ifft(&worker);
        // evaluate in coset
        a.coset_fft(&worker);
        // same is for B and C
        b.ifft(&worker);
        b.coset_fft(&worker);
        c.ifft(&worker);
        c.coset_fft(&worker);
        // do A*B-C in coset
        a.mul_assign(&worker, &b);
        drop(b);
        a.sub_assign(&worker, &c);
        drop(c);
        // z does not vanish in coset, so we divide by non-zero
        a.divide_by_z_on_coset(&worker);
        // interpolate back in coset
        a.icoset_fft(&worker);
        let mut a = a.into_coeffs();
        let a_len = a.len() - 1;
        a.truncate(a_len);
        // TODO: parallelize if it's even helpful
        // TODO: in large settings it may worth to parallelize
        let a = Arc::new(a.into_iter().map(|s| s.0.into_repr()).collect::<Vec<_>>());
        multiexp(&worker, params.get_h(a.len())?, FullDensity, a)
    };
    elog_verbose!("{} seconds for prover for H evaluation (mostly FFT)", stopwatch.elapsed());
    let stopwatch = Stopwatch::new();
    // Phase 3: kick off the remaining multi-exponentiations. Each `multiexp`
    // call returns a future that is awaited with `.wait()` below.
    // TODO: Check that difference in operations for different chunks is small
    // TODO: parallelize if it's even helpful
    // TODO: in large settings it may worth to parallelize
    let input_assignment = Arc::new(prover.input_assignment.into_iter().map(|s| s.into_repr()).collect::<Vec<_>>());
    let aux_assignment = Arc::new(prover.aux_assignment.into_iter().map(|s| s.into_repr()).collect::<Vec<_>>());
    // Run a dedicated process for dense vector
    let l = multiexp(&worker, params.get_l(aux_assignment.len())?, FullDensity, aux_assignment.clone());
    let a_aux_density_total = prover.a_aux_density.get_total_density();
    let (a_inputs_source, a_aux_source) = params.get_a(input_assignment.len(), a_aux_density_total)?;
    let a_inputs = multiexp(&worker, a_inputs_source, FullDensity, input_assignment.clone());
    let a_aux = multiexp(&worker, a_aux_source, Arc::new(prover.a_aux_density), aux_assignment.clone());
    let b_input_density = Arc::new(prover.b_input_density);
    let b_input_density_total = b_input_density.get_total_density();
    let b_aux_density = Arc::new(prover.b_aux_density);
    let b_aux_density_total = b_aux_density.get_total_density();
    let (b_g1_inputs_source, b_g1_aux_source) = params.get_b_g1(b_input_density_total, b_aux_density_total)?;
    let b_g1_inputs = multiexp(&worker, b_g1_inputs_source, b_input_density.clone(), input_assignment.clone());
    let b_g1_aux = multiexp(&worker, b_g1_aux_source, b_aux_density.clone(), aux_assignment.clone());
    let (b_g2_inputs_source, b_g2_aux_source) = params.get_b_g2(b_input_density_total, b_aux_density_total)?;
    let b_g2_inputs = multiexp(&worker, b_g2_inputs_source, b_input_density, input_assignment);
    let b_g2_aux = multiexp(&worker, b_g2_aux_source, b_aux_density, aux_assignment);
    if vk.delta_g1.is_zero() || vk.delta_g2.is_zero() {
        // If this element is zero, someone is trying to perform a
        // subversion-CRS attack.
        return Err(SynthesisError::UnexpectedIdentity);
    }
    // Phase 4: assemble the proof elements from the blinding factors and the
    // awaited multiexp results.
    // g_a starts as r*delta + alpha.
    let mut g_a = vk.delta_g1.mul(r);
    g_a.add_assign_mixed(&vk.alpha_g1);
    // g_b starts as s*delta + beta.
    let mut g_b = vk.delta_g2.mul(s);
    g_b.add_assign_mixed(&vk.beta_g2);
    // g_c starts as r*s*delta + s*alpha + r*beta.
    let mut g_c;
    {
        let mut rs = r;
        rs.mul_assign(&s);
        g_c = vk.delta_g1.mul(rs);
        g_c.add_assign(&vk.alpha_g1.mul(s));
        g_c.add_assign(&vk.beta_g1.mul(r));
    }
    let mut a_answer = a_inputs.wait()?;
    a_answer.add_assign(&a_aux.wait()?);
    g_a.add_assign(&a_answer);
    a_answer.mul_assign(s);
    g_c.add_assign(&a_answer);
    let mut b1_answer = b_g1_inputs.wait()?;
    b1_answer.add_assign(&b_g1_aux.wait()?);
    let mut b2_answer = b_g2_inputs.wait()?;
    b2_answer.add_assign(&b_g2_aux.wait()?);
    g_b.add_assign(&b2_answer);
    b1_answer.mul_assign(r);
    g_c.add_assign(&b1_answer);
    g_c.add_assign(&h.wait()?);
    g_c.add_assign(&l.wait()?);
    elog_verbose!("{} seconds for prover for point multiplication", stopwatch.elapsed());
    Ok(Proof {
        a: g_a.into_affine(),
        b: g_b.into_affine(),
        c: g_c.into_affine()
    })
}
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
Defines a translator that converts an `Ast` to an `Hir`.
*/
use std::cell::{Cell, RefCell};
use std::result;
use ast::{self, Ast, Span, Visitor};
use hir::{self, Error, ErrorKind, Hir};
use unicode::{self, ClassQuery};
type Result<T> = result::Result<T, Error>;
/// A builder for constructing an AST->HIR translator.
#[derive(Clone, Debug)]
pub struct TranslatorBuilder {
    // When true, the resulting translator may produce HIR that matches
    // invalid UTF-8.
    allow_invalid_utf8: bool,
    // Default flag settings, applied before any inline `(?...)` flags.
    flags: Flags,
}
impl Default for TranslatorBuilder {
fn default() -> TranslatorBuilder {
TranslatorBuilder::new()
}
}
impl TranslatorBuilder {
    /// Create a new translator builder with a default configuration.
    pub fn new() -> TranslatorBuilder {
        TranslatorBuilder {
            flags: Flags::default(),
            allow_invalid_utf8: false,
        }
    }
    /// Build a translator from this builder's current configuration.
    pub fn build(&self) -> Translator {
        Translator {
            allow_invalid_utf8: self.allow_invalid_utf8,
            flags: Cell::new(self.flags),
            stack: RefCell::new(vec![]),
        }
    }
    /// When enabled, translation will permit the construction of a regular
    /// expression that may match invalid UTF-8.
    ///
    /// When disabled (the default), the translator is guaranteed to produce
    /// an expression that will only ever match valid UTF-8 (otherwise, the
    /// translator will return an error).
    ///
    /// Perhaps surprisingly, when invalid UTF-8 isn't allowed, a negated
    /// ASCII word boundary (uttered as `(?-u:\B)` in the concrete syntax)
    /// causes the parser to return an error, since it can match at positions
    /// that aren't valid UTF-8 boundaries.
    pub fn allow_invalid_utf8(
        &mut self,
        yes: bool,
    ) -> &mut TranslatorBuilder {
        self.allow_invalid_utf8 = yes;
        self
    }
    /// Enable or disable the case insensitive flag (`i`) by default.
    pub fn case_insensitive(&mut self, yes: bool) -> &mut TranslatorBuilder {
        self.flags.case_insensitive = match yes {
            true => Some(true),
            false => None,
        };
        self
    }
    /// Enable or disable the multi-line matching flag (`m`) by default.
    pub fn multi_line(&mut self, yes: bool) -> &mut TranslatorBuilder {
        self.flags.multi_line = match yes {
            true => Some(true),
            false => None,
        };
        self
    }
    /// Enable or disable the "dot matches any character" flag (`s`) by
    /// default.
    pub fn dot_matches_new_line(
        &mut self,
        yes: bool,
    ) -> &mut TranslatorBuilder {
        self.flags.dot_matches_new_line = match yes {
            true => Some(true),
            false => None,
        };
        self
    }
    /// Enable or disable the "swap greed" flag (`U`) by default.
    pub fn swap_greed(&mut self, yes: bool) -> &mut TranslatorBuilder {
        self.flags.swap_greed = match yes {
            true => Some(true),
            false => None,
        };
        self
    }
    /// Enable or disable the Unicode flag (`u`) by default.
    pub fn unicode(&mut self, yes: bool) -> &mut TranslatorBuilder {
        // Unicode is on by default, so an absent flag (`None`) already means
        // enabled; only an explicit disable needs to be recorded.
        self.flags.unicode = match yes {
            true => None,
            false => Some(false),
        };
        self
    }
}
/// A translator maps abstract syntax to a high level intermediate
/// representation.
///
/// A translator may benefit from reuse. That is, a translator can translate
/// many abstract syntax trees.
///
/// A `Translator` can be configured in more detail via a
/// [`TranslatorBuilder`](struct.TranslatorBuilder.html).
#[derive(Clone, Debug)]
pub struct Translator {
    /// Our call stack, but on the heap.
    ///
    /// `RefCell` (interior mutability) lets visitor helpers that only hold
    /// `&self` push and pop frames.
    stack: RefCell<Vec<HirFrame>>,
    /// The current flag settings.
    flags: Cell<Flags>,
    /// Whether we're allowed to produce HIR that can match arbitrary bytes.
    allow_invalid_utf8: bool,
}
impl Translator {
    /// Build a translator with the default settings.
    pub fn new() -> Translator {
        TranslatorBuilder::new().build()
    }
    /// Convert a parsed abstract syntax tree (AST) into its high level
    /// intermediate representation (HIR).
    ///
    /// On failure, an HIR-specific error describing the problem is returned.
    ///
    /// `pattern` must be the exact pattern string that produced `ast`. It is
    /// consulted only when constructing error values, never during a
    /// successful translation.
    pub fn translate(&mut self, pattern: &str, ast: &Ast) -> Result<Hir> {
        let visitor = TranslatorI::new(self, pattern);
        ast::visit(ast, visitor)
    }
}
/// An HirFrame is a single stack frame, represented explicitly, which is
/// created for each item in the Ast that we traverse.
///
/// Frames are pushed in `visit_pre` and consumed in `visit_post`.
///
/// Note that technically, this type doesn't represent our entire stack
/// frame. In particular, the Ast visitor represents any state associated with
/// traversing the Ast itself.
#[derive(Clone, Debug)]
enum HirFrame {
    /// An arbitrary HIR expression. These get pushed whenever we hit a base
    /// case in the Ast. They get popped after an inductive (i.e., recursive)
    /// step is complete.
    Expr(Hir),
    /// A Unicode character class. This frame is mutated as we descend into
    /// the Ast of a character class (which is itself its own mini recursive
    /// structure).
    ClassUnicode(hir::ClassUnicode),
    /// A byte-oriented character class. This frame is mutated as we descend
    /// into the Ast of a character class (which is itself its own mini
    /// recursive structure).
    ///
    /// Byte character classes are created when Unicode mode (`u`) is disabled.
    /// If `allow_invalid_utf8` is disabled (the default), then a byte
    /// character is only permitted to match ASCII text.
    ClassBytes(hir::ClassBytes),
    /// This is pushed on to the stack upon first seeing any kind of group,
    /// indicated by parentheses (including non-capturing groups). It is popped
    /// upon leaving a group.
    Group {
        /// The old active flags, if any, when this group was opened.
        ///
        /// If this group sets flags, then the new active flags are set to the
        /// result of merging the old flags with the flags introduced by this
        /// group.
        ///
        /// When this group is popped, the active flags should be restored to
        /// the flags set here.
        ///
        /// The "active" flags correspond to whatever flags are set in the
        /// Translator.
        old_flags: Option<Flags>,
    },
    /// This is pushed whenever a concatenation is observed. After visiting
    /// every sub-expression in the concatenation, the translator's stack is
    /// popped until it sees a Concat frame.
    Concat,
    /// This is pushed whenever an alternation is observed. After visiting
    /// every sub-expression in the alternation, the translator's stack is
    /// popped until it sees an Alternation frame.
    Alternation,
}
impl HirFrame {
    /// Consume this frame as an HIR expression, panicking if it is any other
    /// variant.
    fn unwrap_expr(self) -> Hir {
        match self {
            HirFrame::Expr(expr) => expr,
            other => panic!("tried to unwrap expr from HirFrame, got: {:?}", other),
        }
    }
    /// Consume this frame as a Unicode class, panicking if it is any other
    /// variant.
    fn unwrap_class_unicode(self) -> hir::ClassUnicode {
        match self {
            HirFrame::ClassUnicode(cls) => cls,
            other => panic!(
                "tried to unwrap Unicode class from HirFrame, got: {:?}",
                other
            ),
        }
    }
    /// Consume this frame as a byte class, panicking if it is any other
    /// variant.
    fn unwrap_class_bytes(self) -> hir::ClassBytes {
        match self {
            HirFrame::ClassBytes(cls) => cls,
            other => panic!(
                "tried to unwrap byte class from HirFrame, got: {:?}",
                other
            ),
        }
    }
    /// Consume this frame as a group marker, panicking if it is any other
    /// variant. Returns the flags that were active when the group was
    /// entered, if any.
    fn unwrap_group(self) -> Option<Flags> {
        match self {
            HirFrame::Group { old_flags } => old_flags,
            other => panic!("tried to unwrap group from HirFrame, got: {:?}", other),
        }
    }
}
impl<'t, 'p> Visitor for TranslatorI<'t, 'p> {
    type Output = Hir;
    type Err = Error;
    fn finish(self) -> Result<Hir> {
        if self.trans().stack.borrow().is_empty() {
            // This can happen if the Ast given consists of a single set of
            // flags. e.g., `(?i)`. /shrug
            return Ok(Hir::empty());
        }
        // ... otherwise, we should have exactly one HIR on the stack.
        assert_eq!(self.trans().stack.borrow().len(), 1);
        Ok(self.pop().unwrap().unwrap_expr())
    }
    // Called before descending into each Ast node. Pushes a bookkeeping
    // frame for nodes that need one on the way back up: bracketed classes,
    // groups, and non-empty concatenations/alternations.
    fn visit_pre(&mut self, ast: &Ast) -> Result<()> {
        match *ast {
            Ast::Class(ast::Class::Bracketed(_)) => {
                if self.flags().unicode() {
                    let cls = hir::ClassUnicode::empty();
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let cls = hir::ClassBytes::empty();
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            Ast::Group(ref x) => {
                // Save the flags active before this group so they can be
                // restored when the group is popped in `visit_post`.
                let old_flags = x.flags().map(|ast| self.set_flags(ast));
                self.push(HirFrame::Group {
                    old_flags: old_flags,
                });
            }
            Ast::Concat(ref x) if x.asts.is_empty() => {}
            Ast::Concat(_) => {
                self.push(HirFrame::Concat);
            }
            Ast::Alternation(ref x) if x.asts.is_empty() => {}
            Ast::Alternation(_) => {
                self.push(HirFrame::Alternation);
            }
            _ => {}
        }
        Ok(())
    }
    // Called after visiting each Ast node. Consumes any frames pushed by
    // `visit_pre` and pushes the translated expression for this node.
    fn visit_post(&mut self, ast: &Ast) -> Result<()> {
        match *ast {
            Ast::Empty(_) => {
                self.push(HirFrame::Expr(Hir::empty()));
            }
            Ast::Flags(ref x) => {
                self.set_flags(&x.flags);
            }
            Ast::Literal(ref x) => {
                self.push(HirFrame::Expr(self.hir_literal(x)?));
            }
            Ast::Dot(span) => {
                self.push(HirFrame::Expr(self.hir_dot(span)?));
            }
            Ast::Assertion(ref x) => {
                self.push(HirFrame::Expr(self.hir_assertion(x)?));
            }
            Ast::Class(ast::Class::Perl(ref x)) => {
                if self.flags().unicode() {
                    let cls = self.hir_perl_unicode_class(x);
                    let hcls = hir::Class::Unicode(cls);
                    self.push(HirFrame::Expr(Hir::class(hcls)));
                } else {
                    let cls = self.hir_perl_byte_class(x);
                    let hcls = hir::Class::Bytes(cls);
                    self.push(HirFrame::Expr(Hir::class(hcls)));
                }
            }
            Ast::Class(ast::Class::Unicode(ref x)) => {
                let cls = hir::Class::Unicode(self.hir_unicode_class(x)?);
                self.push(HirFrame::Expr(Hir::class(cls)));
            }
            Ast::Class(ast::Class::Bracketed(ref ast)) => {
                if self.flags().unicode() {
                    let mut cls = self.pop().unwrap().unwrap_class_unicode();
                    self.unicode_fold_and_negate(ast.negated, &mut cls);
                    // A class that ends up empty can never match anything.
                    if cls.iter().next().is_none() {
                        return Err(self.error(
                            ast.span, ErrorKind::EmptyClassNotAllowed));
                    }
                    let expr = Hir::class(hir::Class::Unicode(cls));
                    self.push(HirFrame::Expr(expr));
                } else {
                    let mut cls = self.pop().unwrap().unwrap_class_bytes();
                    self.bytes_fold_and_negate(
                        &ast.span, ast.negated, &mut cls)?;
                    if cls.iter().next().is_none() {
                        return Err(self.error(
                            ast.span, ErrorKind::EmptyClassNotAllowed));
                    }
                    let expr = Hir::class(hir::Class::Bytes(cls));
                    self.push(HirFrame::Expr(expr));
                }
            }
            Ast::Repetition(ref x) => {
                let expr = self.pop().unwrap().unwrap_expr();
                self.push(HirFrame::Expr(self.hir_repetition(x, expr)));
            }
            Ast::Group(ref x) => {
                let expr = self.pop().unwrap().unwrap_expr();
                // Restore the flags that were active before this group.
                if let Some(flags) = self.pop().unwrap().unwrap_group() {
                    self.trans().flags.set(flags);
                }
                self.push(HirFrame::Expr(self.hir_group(x, expr)));
            }
            Ast::Concat(_) => {
                let mut exprs = vec![];
                // Pop sub-expressions (consuming the Concat sentinel frame
                // that ends the run); they come off in reverse order.
                while let Some(HirFrame::Expr(expr)) = self.pop() {
                    if !expr.kind().is_empty() {
                        exprs.push(expr);
                    }
                }
                exprs.reverse();
                self.push(HirFrame::Expr(Hir::concat(exprs)));
            }
            Ast::Alternation(_) => {
                let mut exprs = vec![];
                while let Some(HirFrame::Expr(expr)) = self.pop() {
                    exprs.push(expr);
                }
                exprs.reverse();
                self.push(HirFrame::Expr(Hir::alternation(exprs)));
            }
        }
        Ok(())
    }
    fn visit_class_set_item_pre(
        &mut self,
        ast: &ast::ClassSetItem,
    ) -> Result<()> {
        match *ast {
            ast::ClassSetItem::Bracketed(_) => {
                // A nested bracketed class gets its own scratch frame.
                if self.flags().unicode() {
                    let cls = hir::ClassUnicode::empty();
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let cls = hir::ClassBytes::empty();
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            // We needn't handle the Union case here since the visitor will
            // do it for us.
            _ => {}
        }
        Ok(())
    }
    // Fold each class set item into the class frame currently on top of the
    // stack.
    fn visit_class_set_item_post(
        &mut self,
        ast: &ast::ClassSetItem,
    ) -> Result<()> {
        match *ast {
            ast::ClassSetItem::Empty(_) => {}
            ast::ClassSetItem::Literal(ref x) => {
                if self.flags().unicode() {
                    let mut cls = self.pop().unwrap().unwrap_class_unicode();
                    cls.push(hir::ClassUnicodeRange::new(x.c, x.c));
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let mut cls = self.pop().unwrap().unwrap_class_bytes();
                    let byte = self.class_literal_byte(x)?;
                    cls.push(hir::ClassBytesRange::new(byte, byte));
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            ast::ClassSetItem::Range(ref x) => {
                if self.flags().unicode() {
                    let mut cls = self.pop().unwrap().unwrap_class_unicode();
                    cls.push(hir::ClassUnicodeRange::new(x.start.c, x.end.c));
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let mut cls = self.pop().unwrap().unwrap_class_bytes();
                    let start = self.class_literal_byte(&x.start)?;
                    let end = self.class_literal_byte(&x.end)?;
                    cls.push(hir::ClassBytesRange::new(start, end));
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            ast::ClassSetItem::Ascii(ref x) => {
                if self.flags().unicode() {
                    let mut cls = self.pop().unwrap().unwrap_class_unicode();
                    for &(s, e) in ascii_class(&x.kind) {
                        cls.push(hir::ClassUnicodeRange::new(s, e));
                    }
                    self.unicode_fold_and_negate(x.negated, &mut cls);
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let mut cls = self.pop().unwrap().unwrap_class_bytes();
                    for &(s, e) in ascii_class(&x.kind) {
                        cls.push(hir::ClassBytesRange::new(s as u8, e as u8));
                    }
                    self.bytes_fold_and_negate(
                        &x.span, x.negated, &mut cls)?;
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            ast::ClassSetItem::Unicode(ref x) => {
                let xcls = self.hir_unicode_class(x)?;
                let mut cls = self.pop().unwrap().unwrap_class_unicode();
                cls.union(&xcls);
                self.push(HirFrame::ClassUnicode(cls));
            }
            ast::ClassSetItem::Perl(ref x) => {
                if self.flags().unicode() {
                    let xcls = self.hir_perl_unicode_class(x);
                    let mut cls = self.pop().unwrap().unwrap_class_unicode();
                    cls.union(&xcls);
                    self.push(HirFrame::ClassUnicode(cls));
                } else {
                    let xcls = self.hir_perl_byte_class(x);
                    let mut cls = self.pop().unwrap().unwrap_class_bytes();
                    cls.union(&xcls);
                    self.push(HirFrame::ClassBytes(cls));
                }
            }
            ast::ClassSetItem::Bracketed(ref ast) => {
                // Pop the nested class pushed by `visit_class_set_item_pre`
                // and union it into the enclosing class frame.
                if self.flags().unicode() {
                    let mut cls1 = self.pop().unwrap().unwrap_class_unicode();
                    self.unicode_fold_and_negate(ast.negated, &mut cls1);
                    let mut cls2 = self.pop().unwrap().unwrap_class_unicode();
                    cls2.union(&cls1);
                    self.push(HirFrame::ClassUnicode(cls2));
                } else {
                    let mut cls1 = self.pop().unwrap().unwrap_class_bytes();
                    self.bytes_fold_and_negate(
                        &ast.span, ast.negated, &mut cls1)?;
                    let mut cls2 = self.pop().unwrap().unwrap_class_bytes();
                    cls2.union(&cls1);
                    self.push(HirFrame::ClassBytes(cls2));
                }
            }
            // This is handled automatically by the visitor.
            ast::ClassSetItem::Union(_) => {}
        }
        Ok(())
    }
    // Push an empty class frame to collect the operator's left-hand side.
    fn visit_class_set_binary_op_pre(
        &mut self,
        _op: &ast::ClassSetBinaryOp,
    ) -> Result<()> {
        if self.flags().unicode() {
            let cls = hir::ClassUnicode::empty();
            self.push(HirFrame::ClassUnicode(cls));
        } else {
            let cls = hir::ClassBytes::empty();
            self.push(HirFrame::ClassBytes(cls));
        }
        Ok(())
    }
    // Push an empty class frame to collect the operator's right-hand side.
    fn visit_class_set_binary_op_in(
        &mut self,
        _op: &ast::ClassSetBinaryOp,
    ) -> Result<()> {
        if self.flags().unicode() {
            let cls = hir::ClassUnicode::empty();
            self.push(HirFrame::ClassUnicode(cls));
        } else {
            let cls = hir::ClassBytes::empty();
            self.push(HirFrame::ClassBytes(cls));
        }
        Ok(())
    }
    // Pop the rhs/lhs frames, apply the set operation, and union the result
    // into the enclosing class frame.
    fn visit_class_set_binary_op_post(
        &mut self,
        op: &ast::ClassSetBinaryOp,
    ) -> Result<()> {
        use ast::ClassSetBinaryOpKind::*;
        if self.flags().unicode() {
            let mut rhs = self.pop().unwrap().unwrap_class_unicode();
            let mut lhs = self.pop().unwrap().unwrap_class_unicode();
            let mut cls = self.pop().unwrap().unwrap_class_unicode();
            // Fold both operands before combining so that, e.g., `(?i)` set
            // operations behave consistently.
            if self.flags().case_insensitive() {
                rhs.case_fold_simple();
                lhs.case_fold_simple();
            }
            match op.kind {
                Intersection => lhs.intersect(&rhs),
                Difference => lhs.difference(&rhs),
                SymmetricDifference => lhs.symmetric_difference(&rhs),
            }
            cls.union(&lhs);
            self.push(HirFrame::ClassUnicode(cls));
        } else {
            let mut rhs = self.pop().unwrap().unwrap_class_bytes();
            let mut lhs = self.pop().unwrap().unwrap_class_bytes();
            let mut cls = self.pop().unwrap().unwrap_class_bytes();
            if self.flags().case_insensitive() {
                rhs.case_fold_simple();
                lhs.case_fold_simple();
            }
            match op.kind {
                Intersection => lhs.intersect(&rhs),
                Difference => lhs.difference(&rhs),
                SymmetricDifference => lhs.symmetric_difference(&rhs),
            }
            cls.union(&lhs);
            self.push(HirFrame::ClassBytes(cls));
        }
        Ok(())
    }
}
/// The internal implementation of a translator.
///
/// This type is responsible for carrying around the original pattern string,
/// which is not tied to the internal state of a translator.
///
/// A TranslatorI exists for the time it takes to translate a single Ast.
#[derive(Clone, Debug)]
struct TranslatorI<'t, 'p> {
    // The shared translator state: stack, active flags and the UTF-8 policy.
    trans: &'t Translator,
    // The original pattern; used only for error reporting.
    pattern: &'p str,
}
impl<'t, 'p> TranslatorI<'t, 'p> {
    /// Build a new internal translator.
    fn new(trans: &'t Translator, pattern: &'p str) -> TranslatorI<'t, 'p> {
        TranslatorI { trans: trans, pattern: pattern }
    }
    /// Return a reference to the underlying translator.
    fn trans(&self) -> &Translator {
        &self.trans
    }
    /// Push the given frame on to the call stack.
    fn push(&self, frame: HirFrame) {
        self.trans().stack.borrow_mut().push(frame);
    }
    /// Pop the top of the call stack. If the call stack is empty, return None.
    fn pop(&self) -> Option<HirFrame> {
        self.trans().stack.borrow_mut().pop()
    }
    /// Create a new error with the given span and error type.
    fn error(&self, span: Span, kind: ErrorKind) -> Error {
        Error { kind: kind, pattern: self.pattern.to_string(), span: span }
    }
    /// Return a copy of the active flags.
    fn flags(&self) -> Flags {
        self.trans().flags.get()
    }
    /// Set the flags of this translator from the flags set in the given AST.
    /// Then, return the old flags.
    fn set_flags(&self, ast_flags: &ast::Flags) -> Flags {
        let old_flags = self.flags();
        let mut new_flags = Flags::from_ast(ast_flags);
        // Flags not mentioned in the AST keep their previous setting.
        new_flags.merge(&old_flags);
        self.trans().flags.set(new_flags);
        old_flags
    }
    /// Translate a literal AST node into an HIR literal or, when case
    /// insensitivity applies, a character class of its case foldings.
    fn hir_literal(&self, lit: &ast::Literal) -> Result<Hir> {
        let ch = match self.literal_to_char(lit)? {
            // Raw bytes never participate in case folding.
            byte @ hir::Literal::Byte(_) => return Ok(Hir::literal(byte)),
            hir::Literal::Unicode(ch) => ch,
        };
        if self.flags().case_insensitive() {
            self.hir_from_char_case_insensitive(lit.span, ch)
        } else {
            self.hir_from_char(lit.span, ch)
        }
    }
    /// Convert an Ast literal to its scalar representation.
    ///
    /// When Unicode mode is enabled, then this always succeeds and returns a
    /// `char` (Unicode scalar value).
    ///
    /// When Unicode mode is disabled, then a raw byte is returned. If that
    /// byte is not ASCII and invalid UTF-8 is not allowed, then this returns
    /// an error.
    fn literal_to_char(&self, lit: &ast::Literal) -> Result<hir::Literal> {
        if self.flags().unicode() {
            return Ok(hir::Literal::Unicode(lit.c));
        }
        let byte = match lit.byte() {
            None => return Ok(hir::Literal::Unicode(lit.c)),
            Some(byte) => byte,
        };
        if byte <= 0x7F {
            // ASCII bytes are always representable as a `char`.
            return Ok(hir::Literal::Unicode(byte as char));
        }
        if !self.trans().allow_invalid_utf8 {
            return Err(self.error(lit.span, ErrorKind::InvalidUtf8));
        }
        Ok(hir::Literal::Byte(byte))
    }
    /// Build a literal HIR expression for `c`, rejecting non-ASCII characters
    /// when Unicode mode is disabled.
    fn hir_from_char(&self, span: Span, c: char) -> Result<Hir> {
        if !self.flags().unicode() && c.len_utf8() > 1 {
            return Err(self.error(span, ErrorKind::UnicodeNotAllowed));
        }
        Ok(Hir::literal(hir::Literal::Unicode(c)))
    }
    /// Like `hir_from_char`, but expands `c` into a character class closed
    /// under simple case folding.
    fn hir_from_char_case_insensitive(
        &self,
        span: Span,
        c: char,
    ) -> Result<Hir> {
        // If case folding won't do anything, then don't bother trying.
        if !unicode::contains_simple_case_mapping(c, c) {
            return self.hir_from_char(span, c);
        }
        if self.flags().unicode() {
            let mut cls = hir::ClassUnicode::new(vec![
                hir::ClassUnicodeRange::new(c, c),
            ]);
            cls.case_fold_simple();
            Ok(Hir::class(hir::Class::Unicode(cls)))
        } else {
            if c.len_utf8() > 1 {
                return Err(self.error(span, ErrorKind::UnicodeNotAllowed));
            }
            let mut cls = hir::ClassBytes::new(vec![
                hir::ClassBytesRange::new(c as u8, c as u8),
            ]);
            cls.case_fold_simple();
            Ok(Hir::class(hir::Class::Bytes(cls)))
        }
    }
    /// Translate `.`, honoring the `s` (dot matches new line) flag and the
    /// UTF-8 policy.
    fn hir_dot(&self, span: Span) -> Result<Hir> {
        let unicode = self.flags().unicode();
        // A byte-oriented `.` can match arbitrary bytes, which is only
        // permitted when invalid UTF-8 is allowed.
        if !unicode && !self.trans().allow_invalid_utf8 {
            return Err(self.error(span, ErrorKind::InvalidUtf8));
        }
        Ok(if self.flags().dot_matches_new_line() {
            Hir::any(!unicode)
        } else {
            Hir::dot(!unicode)
        })
    }
    /// Translate an assertion (anchor or word boundary), honoring the `m`
    /// (multi line) and `u` (Unicode) flags.
    fn hir_assertion(&self, asst: &ast::Assertion) -> Result<Hir> {
        let unicode = self.flags().unicode();
        let multi_line = self.flags().multi_line();
        Ok(match asst.kind {
            ast::AssertionKind::StartLine => {
                Hir::anchor(if multi_line {
                    hir::Anchor::StartLine
                } else {
                    hir::Anchor::StartText
                })
            }
            ast::AssertionKind::EndLine => {
                Hir::anchor(if multi_line {
                    hir::Anchor::EndLine
                } else {
                    hir::Anchor::EndText
                })
            }
            ast::AssertionKind::StartText => {
                Hir::anchor(hir::Anchor::StartText)
            }
            ast::AssertionKind::EndText => {
                Hir::anchor(hir::Anchor::EndText)
            }
            ast::AssertionKind::WordBoundary => {
                Hir::word_boundary(if unicode {
                    hir::WordBoundary::Unicode
                } else {
                    hir::WordBoundary::Ascii
                })
            }
            ast::AssertionKind::NotWordBoundary => {
                Hir::word_boundary(if unicode {
                    hir::WordBoundary::UnicodeNegate
                } else {
                    // It is possible for negated ASCII word boundaries to
                    // match at invalid UTF-8 boundaries, even when searching
                    // valid UTF-8.
                    if !self.trans().allow_invalid_utf8 {
                        return Err(self.error(
                            asst.span, ErrorKind::InvalidUtf8));
                    }
                    hir::WordBoundary::AsciiNegate
                })
            }
        })
    }
    /// Translate a group node, wrapping the already-translated inner
    /// expression `expr`.
    fn hir_group(&self, group: &ast::Group, expr: Hir) -> Hir {
        let kind = match group.kind {
            ast::GroupKind::CaptureIndex(idx) => {
                hir::GroupKind::CaptureIndex(idx)
            }
            ast::GroupKind::CaptureName(ref capname) => {
                hir::GroupKind::CaptureName {
                    name: capname.name.clone(),
                    index: capname.index,
                }
            }
            ast::GroupKind::NonCapturing(_) => hir::GroupKind::NonCapturing,
        };
        Hir::group(hir::Group {
            kind: kind,
            hir: Box::new(expr),
        })
    }
    /// Translate a repetition operator, wrapping the already-translated inner
    /// expression `expr`. Honors the `U` (swap greed) flag.
    fn hir_repetition(&self, rep: &ast::Repetition, expr: Hir) -> Hir {
        let kind = match rep.op.kind {
            ast::RepetitionKind::ZeroOrOne => hir::RepetitionKind::ZeroOrOne,
            ast::RepetitionKind::ZeroOrMore => hir::RepetitionKind::ZeroOrMore,
            ast::RepetitionKind::OneOrMore => hir::RepetitionKind::OneOrMore,
            ast::RepetitionKind::Range(ast::RepetitionRange::Exactly(m)) => {
                hir::RepetitionKind::Range(hir::RepetitionRange::Exactly(m))
            }
            ast::RepetitionKind::Range(ast::RepetitionRange::AtLeast(m)) => {
                hir::RepetitionKind::Range(hir::RepetitionRange::AtLeast(m))
            }
            ast::RepetitionKind::Range(ast::RepetitionRange::Bounded(m,n)) => {
                hir::RepetitionKind::Range(hir::RepetitionRange::Bounded(m, n))
            }
        };
        let greedy =
            if self.flags().swap_greed() {
                !rep.greedy
            } else {
                rep.greedy
            };
        Hir::repetition(hir::Repetition {
            kind: kind,
            greedy: greedy,
            hir: Box::new(expr),
        })
    }
    /// Look up a Unicode class (e.g., `\p{Greek}`) and apply case folding
    /// and negation as requested. Errors if Unicode mode is disabled or the
    /// property (value) is unknown.
    fn hir_unicode_class(
        &self,
        ast_class: &ast::ClassUnicode,
    ) -> Result<hir::ClassUnicode> {
        use ast::ClassUnicodeKind::*;
        if !self.flags().unicode() {
            return Err(self.error(
                ast_class.span,
                ErrorKind::UnicodeNotAllowed,
            ));
        }
        let query = match ast_class.kind {
            OneLetter(name) => ClassQuery::OneLetter(name),
            Named(ref name) => ClassQuery::Binary(name),
            NamedValue { ref name, ref value, .. } => {
                ClassQuery::ByValue {
                    property_name: name,
                    property_value: value,
                }
            }
        };
        match unicode::class(query) {
            Ok(mut class) => {
                self.unicode_fold_and_negate(ast_class.negated, &mut class);
                Ok(class)
            }
            Err(unicode::Error::PropertyNotFound) => {
                Err(self.error(
                    ast_class.span,
                    ErrorKind::UnicodePropertyNotFound,
                ))
            }
            Err(unicode::Error::PropertyValueNotFound) => {
                Err(self.error(
                    ast_class.span,
                    ErrorKind::UnicodePropertyValueNotFound,
                ))
            }
        }
    }
    /// Translate a Perl class (`\d`, `\s`, `\w`) in Unicode mode.
    fn hir_perl_unicode_class(
        &self,
        ast_class: &ast::ClassPerl,
    ) -> hir::ClassUnicode {
        use ast::ClassPerlKind::*;
        use unicode_tables::perl_word::PERL_WORD;
        assert!(self.flags().unicode());
        let mut class = match ast_class.kind {
            Digit => {
                let query = ClassQuery::Binary("Decimal_Number");
                unicode::class(query).unwrap()
            }
            Space => {
                let query = ClassQuery::Binary("Whitespace");
                unicode::class(query).unwrap()
            }
            Word => unicode::hir_class(PERL_WORD),
        };
        // We needn't apply case folding here because the Perl Unicode classes
        // are already closed under Unicode simple case folding.
        if ast_class.negated {
            class.negate();
        }
        class
    }
    /// Translate a Perl class (`\d`, `\s`, `\w`) in byte (non-Unicode) mode.
    fn hir_perl_byte_class(
        &self,
        ast_class: &ast::ClassPerl,
    ) -> hir::ClassBytes {
        use ast::ClassPerlKind::*;
        assert!(!self.flags().unicode());
        let mut class = match ast_class.kind {
            Digit => hir_ascii_class_bytes(&ast::ClassAsciiKind::Digit),
            Space => hir_ascii_class_bytes(&ast::ClassAsciiKind::Space),
            Word => hir_ascii_class_bytes(&ast::ClassAsciiKind::Word),
        };
        // We needn't apply case folding here because the Perl ASCII classes
        // are already closed (under ASCII case folding).
        if ast_class.negated {
            class.negate();
        }
        class
    }
    /// Apply simple case folding (if the `i` flag is set) and then negation
    /// (if requested) to a Unicode class, in that order.
    fn unicode_fold_and_negate(
        &self,
        negated: bool,
        class: &mut hir::ClassUnicode,
    ) {
        // Note that we must apply case folding before negation!
        // Consider `(?i)[^x]`. If we applied negation first, then
        // the result would be the character class that matched any
        // Unicode scalar value.
        if self.flags().case_insensitive() {
            class.case_fold_simple();
        }
        if negated {
            class.negate();
        }
    }
    /// Apply simple case folding (if the `i` flag is set) and then negation
    /// (if requested) to a byte class, in that order. Errors if the result
    /// can match non-ASCII bytes while invalid UTF-8 is disallowed.
    fn bytes_fold_and_negate(
        &self,
        span: &Span,
        negated: bool,
        class: &mut hir::ClassBytes,
    ) -> Result<()> {
        // Note that we must apply case folding before negation!
        // Consider `(?i)[^x]`. If we applied negation first, then
        // the result would be the character class that matched any
        // Unicode scalar value.
        if self.flags().case_insensitive() {
            class.case_fold_simple();
        }
        if negated {
            class.negate();
        }
        if !self.trans().allow_invalid_utf8 && !class.is_all_ascii() {
            return Err(self.error(span.clone(), ErrorKind::InvalidUtf8));
        }
        Ok(())
    }
    /// Return a scalar byte value suitable for use as a literal in a byte
    /// character class.
    fn class_literal_byte(&self, ast: &ast::Literal) -> Result<u8> {
        match self.literal_to_char(ast)? {
            hir::Literal::Byte(byte) => Ok(byte),
            hir::Literal::Unicode(ch) => {
                if ch <= 0x7F as char {
                    Ok(ch as u8)
                } else {
                    // We can't feasibly support Unicode in
                    // byte oriented classes. Byte classes don't
                    // do Unicode case folding.
                    Err(self.error(ast.span, ErrorKind::UnicodeNotAllowed))
                }
            }
        }
    }
}
/// A translator's representation of a regular expression's flags at any given
/// moment in time.
///
/// Each flag can be in one of three states: absent, present but disabled or
/// present but enabled.
#[derive(Clone, Copy, Debug, Default)]
struct Flags {
    // For every flag: `None` means "absent", while `Some(true)`/`Some(false)`
    // mean explicitly enabled/disabled, respectively.
    case_insensitive: Option<bool>,
    multi_line: Option<bool>,
    dot_matches_new_line: Option<bool>,
    swap_greed: Option<bool>,
    unicode: Option<bool>,
    // Note that `ignore_whitespace` is omitted here because it is handled
    // entirely in the parser.
}
impl Flags {
    /// Extract flag settings from an AST flag group. A negation item (`-`)
    /// causes every flag that follows it to be recorded as disabled.
    fn from_ast(ast: &ast::Flags) -> Flags {
        let mut out = Flags::default();
        let mut enabled = true;
        for item in &ast.items {
            match item.kind {
                ast::FlagsItemKind::Negation => {
                    enabled = false;
                }
                ast::FlagsItemKind::Flag(ast::Flag::CaseInsensitive) => {
                    out.case_insensitive = Some(enabled);
                }
                ast::FlagsItemKind::Flag(ast::Flag::MultiLine) => {
                    out.multi_line = Some(enabled);
                }
                ast::FlagsItemKind::Flag(ast::Flag::DotMatchesNewLine) => {
                    out.dot_matches_new_line = Some(enabled);
                }
                ast::FlagsItemKind::Flag(ast::Flag::SwapGreed) => {
                    out.swap_greed = Some(enabled);
                }
                ast::FlagsItemKind::Flag(ast::Flag::Unicode) => {
                    out.unicode = Some(enabled);
                }
                // Whitespace handling lives entirely in the parser.
                ast::FlagsItemKind::Flag(ast::Flag::IgnoreWhitespace) => {}
            }
        }
        out
    }
    /// Fill in any flag left unset here with its setting from `previous`.
    fn merge(&mut self, previous: &Flags) {
        self.case_insensitive = self.case_insensitive.or(previous.case_insensitive);
        self.multi_line = self.multi_line.or(previous.multi_line);
        self.dot_matches_new_line =
            self.dot_matches_new_line.or(previous.dot_matches_new_line);
        self.swap_greed = self.swap_greed.or(previous.swap_greed);
        self.unicode = self.unicode.or(previous.unicode);
    }
    /// Whether case insensitive matching (`i`) is in effect.
    fn case_insensitive(&self) -> bool {
        self.case_insensitive == Some(true)
    }
    /// Whether multi-line mode (`m`) is in effect.
    fn multi_line(&self) -> bool {
        self.multi_line == Some(true)
    }
    /// Whether "dot matches new line" (`s`) is in effect.
    fn dot_matches_new_line(&self) -> bool {
        self.dot_matches_new_line == Some(true)
    }
    /// Whether greediness swapping (`U`) is in effect.
    fn swap_greed(&self) -> bool {
        self.swap_greed == Some(true)
    }
    /// Whether Unicode mode (`u`) is in effect. Unlike the other flags, this
    /// defaults to enabled when absent.
    fn unicode(&self) -> bool {
        self.unicode != Some(false)
    }
}
/// Builds a byte-oriented class from the `char` ranges of the given ASCII
/// class kind. All ranges in `ascii_class` are ASCII, so the narrowing
/// `as u8` casts are lossless.
fn hir_ascii_class_bytes(kind: &ast::ClassAsciiKind) -> hir::ClassBytes {
    let mut ranges = Vec::new();
    for &(start, end) in ascii_class(kind) {
        ranges.push(hir::ClassBytesRange::new(start as u8, end as u8));
    }
    hir::ClassBytes::new(ranges)
}
/// Returns the sequence of inclusive `char` ranges that make up the given
/// POSIX-style ASCII class (e.g., `[:alnum:]`, `[:digit:]`).
fn ascii_class(kind: &ast::ClassAsciiKind) -> &'static [(char, char)] {
    use ast::ClassAsciiKind::*;
    // The contortions below with `const` appear necessary for older versions
    // of Rust.
    type T = &'static [(char, char)];
    match *kind {
        Alnum => {
            const X: T = &[('0', '9'), ('A', 'Z'), ('a', 'z')];
            X
        }
        Alpha => {
            const X: T = &[('A', 'Z'), ('a', 'z')];
            X
        }
        Ascii => {
            const X: T = &[('\x00', '\x7F')];
            X
        }
        Blank => {
            const X: T = &[(' ', '\t')];
            X
        }
        Cntrl => {
            const X: T = &[('\x00', '\x1F'), ('\x7F', '\x7F')];
            X
        }
        Digit => {
            const X: T = &[('0', '9')];
            X
        }
        Graph => {
            const X: T = &[('!', '~')];
            X
        }
        Lower => {
            const X: T = &[('a', 'z')];
            X
        }
        Print => {
            const X: T = &[(' ', '~')];
            X
        }
        Punct => {
            const X: T = &[('!', '/'), (':', '@'), ('[', '`'), ('{', '~')];
            X
        }
        Space => {
            const X: T = &[
                ('\t', '\t'), ('\n', '\n'), ('\x0B', '\x0B'), ('\x0C', '\x0C'),
                ('\r', '\r'), (' ', ' '),
            ];
            X
        }
        Upper => {
            const X: T = &[('A', 'Z')];
            X
        }
        Word => {
            const X: T = &[('0', '9'), ('A', 'Z'), ('_', '_'), ('a', 'z')];
            X
        }
        Xdigit => {
            const X: T = &[('0', '9'), ('A', 'F'), ('a', 'f')];
            X
        }
    }
}
#[cfg(test)]
mod tests {
use ast::{self, Ast, Position, Span};
use ast::parse::ParserBuilder;
use hir::{self, Hir, HirKind};
use unicode::{self, ClassQuery};
use super::{TranslatorBuilder, ascii_class};
// We create these errors to compare with real hir::Errors in the tests.
// We define equality between TestError and hir::Error to disregard the
// pattern string in hir::Error, which is annoying to provide in tests.
    // An expected-error value used only for test comparisons; equality with
    // `hir::Error` ignores the pattern string (see the impls below).
    #[derive(Clone, Debug)]
    struct TestError {
        span: Span,
        kind: hir::ErrorKind,
    }
    // Symmetric equality between `TestError` and `hir::Error` that compares
    // only the span and error kind, ignoring the pattern text carried by
    // `hir::Error`.
    impl PartialEq<hir::Error> for TestError {
        fn eq(&self, other: &hir::Error) -> bool {
            self.span == other.span && self.kind == other.kind
        }
    }
    impl PartialEq<TestError> for hir::Error {
        fn eq(&self, other: &TestError) -> bool {
            self.span == other.span && self.kind == other.kind
        }
    }
fn parse(pattern: &str) -> Ast {
ParserBuilder::new().octal(true).build().parse(pattern).unwrap()
}
fn t(pattern: &str) -> Hir {
TranslatorBuilder::new()
.allow_invalid_utf8(false)
.build()
.translate(pattern, &parse(pattern))
.unwrap()
}
fn t_err(pattern: &str) -> hir::Error {
TranslatorBuilder::new()
.allow_invalid_utf8(false)
.build()
.translate(pattern, &parse(pattern))
.unwrap_err()
}
fn t_bytes(pattern: &str) -> Hir {
TranslatorBuilder::new()
.allow_invalid_utf8(true)
.build()
.translate(pattern, &parse(pattern))
.unwrap()
}
fn hir_lit(s: &str) -> Hir {
match s.len() {
0 => Hir::empty(),
_ => {
let lits = s
.chars()
.map(hir::Literal::Unicode)
.map(Hir::literal)
.collect();
Hir::concat(lits)
}
}
}
fn hir_blit(s: &[u8]) -> Hir {
match s.len() {
0 => Hir::empty(),
1 => Hir::literal(hir::Literal::Byte(s[0])),
_ => {
let lits = s
.iter()
.cloned()
.map(hir::Literal::Byte)
.map(Hir::literal)
.collect();
Hir::concat(lits)
}
}
}
fn hir_group(i: u32, expr: Hir) -> Hir {
Hir::group(hir::Group {
kind: hir::GroupKind::CaptureIndex(i),
hir: Box::new(expr),
})
}
fn hir_group_name(i: u32, name: &str, expr: Hir) -> Hir {
Hir::group(hir::Group {
kind: hir::GroupKind::CaptureName {
name: name.to_string(),
index: i,
},
hir: Box::new(expr),
})
}
fn hir_group_nocap(expr: Hir) -> Hir {
Hir::group(hir::Group {
kind: hir::GroupKind::NonCapturing,
hir: Box::new(expr),
})
}
fn hir_quest(greedy: bool, expr: Hir) -> Hir {
Hir::repetition(hir::Repetition {
kind: hir::RepetitionKind::ZeroOrOne,
greedy: greedy,
hir: Box::new(expr),
})
}
fn hir_star(greedy: bool, expr: Hir) -> Hir {
Hir::repetition(hir::Repetition {
kind: hir::RepetitionKind::ZeroOrMore,
greedy: greedy,
hir: Box::new(expr),
})
}
fn hir_plus(greedy: bool, expr: Hir) -> Hir {
Hir::repetition(hir::Repetition {
kind: hir::RepetitionKind::OneOrMore,
greedy: greedy,
hir: Box::new(expr),
})
}
fn hir_range(greedy: bool, range: hir::RepetitionRange, expr: Hir) -> Hir {
Hir::repetition(hir::Repetition {
kind: hir::RepetitionKind::Range(range),
greedy: greedy,
hir: Box::new(expr),
})
}
fn hir_alt(alts: Vec<Hir>) -> Hir {
Hir::alternation(alts)
}
fn hir_cat(exprs: Vec<Hir>) -> Hir {
Hir::concat(exprs)
}
    // A Unicode class looked up from the crate's Unicode tables by query;
    // panics if the query has no answer (fine in tests).
    fn hir_uclass_query(query: ClassQuery) -> Hir {
        Hir::class(hir::Class::Unicode(unicode::class(query).unwrap()))
    }
    // The Unicode-aware `\w` class, built from the bundled PERL_WORD table.
    fn hir_uclass_perl_word() -> Hir {
        use unicode_tables::perl_word::PERL_WORD;
        Hir::class(hir::Class::Unicode(unicode::hir_class(PERL_WORD)))
    }
fn hir_uclass(ranges: &[(char, char)]) -> Hir {
let ranges: Vec<hir::ClassUnicodeRange> = ranges
.iter()
.map(|&(s, e)| hir::ClassUnicodeRange::new(s, e))
.collect();
Hir::class(hir::Class::Unicode(hir::ClassUnicode::new(ranges)))
}
fn hir_bclass(ranges: &[(u8, u8)]) -> Hir {
let ranges: Vec<hir::ClassBytesRange> = ranges
.iter()
.map(|&(s, e)| hir::ClassBytesRange::new(s, e))
.collect();
Hir::class(hir::Class::Bytes(hir::ClassBytes::new(ranges)))
}
fn hir_bclass_from_char(ranges: &[(char, char)]) -> Hir {
let ranges: Vec<hir::ClassBytesRange> = ranges
.iter()
.map(|&(s, e)| {
assert!(s as u32 <= 0x7F);
assert!(e as u32 <= 0x7F);
hir::ClassBytesRange::new(s as u8, e as u8)
})
.collect();
Hir::class(hir::Class::Bytes(hir::ClassBytes::new(ranges)))
}
fn hir_case_fold(expr: Hir) -> Hir {
match expr.into_kind() {
HirKind::Class(mut cls) => {
cls.case_fold_simple();
Hir::class(cls)
}
_ => panic!("cannot case fold non-class Hir expr"),
}
}
fn hir_negate(expr: Hir) -> Hir {
match expr.into_kind() {
HirKind::Class(mut cls) => {
cls.negate();
Hir::class(cls)
}
_ => panic!("cannot negate non-class Hir expr"),
}
}
fn hir_union(expr1: Hir, expr2: Hir) -> Hir {
use hir::Class::{Bytes, Unicode};
match (expr1.into_kind(), expr2.into_kind()) {
(
HirKind::Class(Unicode(mut c1)),
HirKind::Class(Unicode(c2)),
) => {
c1.union(&c2);
Hir::class(hir::Class::Unicode(c1))
}
(
HirKind::Class(Bytes(mut c1)),
HirKind::Class(Bytes(c2)),
) => {
c1.union(&c2);
Hir::class(hir::Class::Bytes(c1))
}
_ => panic!("cannot union non-class Hir exprs"),
}
}
fn hir_difference(expr1: Hir, expr2: Hir) -> Hir {
use hir::Class::{Bytes, Unicode};
match (expr1.into_kind(), expr2.into_kind()) {
(
HirKind::Class(Unicode(mut c1)),
HirKind::Class(Unicode(c2)),
) => {
c1.difference(&c2);
Hir::class(hir::Class::Unicode(c1))
}
(
HirKind::Class(Bytes(mut c1)),
HirKind::Class(Bytes(c2)),
) => {
c1.difference(&c2);
Hir::class(hir::Class::Bytes(c1))
}
_ => panic!("cannot difference non-class Hir exprs"),
}
}
    // Thin wrappers kept for symmetry with the other `hir_*` constructors.
    fn hir_anchor(anchor: hir::Anchor) -> Hir {
        Hir::anchor(anchor)
    }
    fn hir_word(wb: hir::WordBoundary) -> Hir {
        Hir::word_boundary(wb)
    }
    // Empty patterns, empty groups and empty alternation branches all
    // translate to empty HIR (possibly wrapped in the appropriate group).
    #[test]
    fn empty() {
        assert_eq!(t(""), Hir::empty());
        assert_eq!(t("(?i)"), Hir::empty());
        assert_eq!(t("()"), hir_group(1, Hir::empty()));
        assert_eq!(t("(?:)"), hir_group_nocap(Hir::empty()));
        assert_eq!(t("(?P<wat>)"), hir_group_name(1, "wat", Hir::empty()));
        assert_eq!(t("|"), hir_alt(vec![Hir::empty(), Hir::empty()]));
        assert_eq!(t("()|()"), hir_alt(vec![
            hir_group(1, Hir::empty()),
            hir_group(2, Hir::empty()),
        ]));
        assert_eq!(t("(|b)"), hir_group(1, hir_alt(vec![
            Hir::empty(),
            hir_lit("b"),
        ])));
        assert_eq!(t("(a|)"), hir_group(1, hir_alt(vec![
            hir_lit("a"),
            Hir::empty(),
        ])));
        assert_eq!(t("(a||c)"), hir_group(1, hir_alt(vec![
            hir_lit("a"),
            Hir::empty(),
            hir_lit("c"),
        ])));
        assert_eq!(t("(||)"), hir_group(1, hir_alt(vec![
            Hir::empty(),
            Hir::empty(),
            Hir::empty(),
        ])));
    }
    // Literal translation in Unicode and byte-oriented modes, including the
    // errors for non-ASCII and invalid UTF-8 when they are not permitted.
    #[test]
    fn literal() {
        assert_eq!(t("a"), hir_lit("a"));
        assert_eq!(t("(?-u)a"), hir_lit("a"));
        assert_eq!(t("β"), hir_lit("β"));
        assert_eq!(t("abcd"), hir_lit("abcd"));
        assert_eq!(t_bytes("(?-u)a"), hir_lit("a"));
        assert_eq!(t_bytes("(?-u)\x61"), hir_lit("a"));
        assert_eq!(t_bytes(r"(?-u)\x61"), hir_lit("a"));
        assert_eq!(t_bytes(r"(?-u)\xFF"), hir_blit(b"\xFF"));
        assert_eq!(t_err("(?-u)β"), TestError {
            kind: hir::ErrorKind::UnicodeNotAllowed,
            span: Span::new(Position::new(5, 1, 6), Position::new(8, 1, 7)),
        });
        assert_eq!(t_err(r"(?-u)\xFF"), TestError {
            kind: hir::ErrorKind::InvalidUtf8,
            span: Span::new(Position::new(5, 1, 6), Position::new(9, 1, 10)),
        });
    }
    // Case-insensitive literals become case-folded classes.
    // NOTE(review): several non-ASCII literals below (e.g. `Ξ²`) appear to be
    // mojibake from a lossy encoding round-trip — `'Ξ²'` is not a single
    // `char` and presumably was `'β'` and its case folds originally. Verify
    // against the upstream file before relying on these exact characters.
    #[test]
    fn literal_case_insensitive() {
        assert_eq!(t("(?i)a"), hir_uclass(&[
            ('A', 'A'), ('a', 'a'),
        ]));
        assert_eq!(t("(?i:a)"), hir_group_nocap(hir_uclass(&[
            ('A', 'A'), ('a', 'a')],
        )));
        assert_eq!(t("a(?i)a(?-i)a"), hir_cat(vec![
            hir_lit("a"),
            hir_uclass(&[('A', 'A'), ('a', 'a')]),
            hir_lit("a"),
        ]));
        assert_eq!(t("(?i)ab@c"), hir_cat(vec![
            hir_uclass(&[('A', 'A'), ('a', 'a')]),
            hir_uclass(&[('B', 'B'), ('b', 'b')]),
            hir_lit("@"),
            hir_uclass(&[('C', 'C'), ('c', 'c')]),
        ]));
        assert_eq!(t("(?i)Ξ²"), hir_uclass(&[
            ('Ξ', 'Ξ'), ('Ξ²', 'Ξ²'), ('Ο', 'Ο'),
        ]));
        assert_eq!(t("(?i-u)a"), hir_bclass(&[
            (b'A', b'A'), (b'a', b'a'),
        ]));
        assert_eq!(t("(?-u)a(?i)a(?-i)a"), hir_cat(vec![
            hir_lit("a"),
            hir_bclass(&[(b'A', b'A'), (b'a', b'a')]),
            hir_lit("a"),
        ]));
        assert_eq!(t("(?i-u)ab@c"), hir_cat(vec![
            hir_bclass(&[(b'A', b'A'), (b'a', b'a')]),
            hir_bclass(&[(b'B', b'B'), (b'b', b'b')]),
            hir_lit("@"),
            hir_bclass(&[(b'C', b'C'), (b'c', b'c')]),
        ]));
        assert_eq!(t_bytes("(?i-u)a"), hir_bclass(&[
            (b'A', b'A'), (b'a', b'a'),
        ]));
        assert_eq!(t_bytes("(?i-u)\x61"), hir_bclass(&[
            (b'A', b'A'), (b'a', b'a'),
        ]));
        assert_eq!(t_bytes(r"(?i-u)\x61"), hir_bclass(&[
            (b'A', b'A'), (b'a', b'a'),
        ]));
        assert_eq!(t_bytes(r"(?i-u)\xFF"), hir_blit(b"\xFF"));
        assert_eq!(t_err("(?i-u)Ξ²"), TestError {
            kind: hir::ErrorKind::UnicodeNotAllowed,
            span: Span::new(
                Position::new(6, 1, 7),
                Position::new(8, 1, 8),
            ),
        });
    }
#[test]
fn dot() {
assert_eq!(t("."), hir_uclass(&[
('\0', '\t'),
('\x0B', '\u{10FFFF}'),
]));
assert_eq!(t("(?s)."), hir_uclass(&[
('\0', '\u{10FFFF}'),
]));
assert_eq!(t_bytes("(?-u)."), hir_bclass(&[
(b'\0', b'\t'),
(b'\x0B', b'\xFF'),
]));
assert_eq!(t_bytes("(?s-u)."), hir_bclass(&[
(b'\0', b'\xFF'),
]));
// If invalid UTF-8 isn't allowed, then non-Unicode `.` isn't allowed.
assert_eq!(t_err("(?-u)."), TestError {
kind: hir::ErrorKind::InvalidUtf8,
span: Span::new(Position::new(5, 1, 6), Position::new(6, 1, 7)),
});
assert_eq!(t_err("(?s-u)."), TestError {
kind: hir::ErrorKind::InvalidUtf8,
span: Span::new(Position::new(6, 1, 7), Position::new(7, 1, 8)),
});
}
#[test]
fn assertions() {
assert_eq!(t("^"), hir_anchor(hir::Anchor::StartText));
assert_eq!(t("$"), hir_anchor(hir::Anchor::EndText));
assert_eq!(t(r"\A"), hir_anchor(hir::Anchor::StartText));
assert_eq!(t(r"\z"), hir_anchor(hir::Anchor::EndText));
assert_eq!(t("(?m)^"), hir_anchor(hir::Anchor::StartLine));
assert_eq!(t("(?m)$"), hir_anchor(hir::Anchor::EndLine));
assert_eq!(t(r"(?m)\A"), hir_anchor(hir::Anchor::StartText));
assert_eq!(t(r"(?m)\z"), hir_anchor(hir::Anchor::EndText));
assert_eq!(t(r"\b"), hir_word(hir::WordBoundary::Unicode));
assert_eq!(t(r"\B"), hir_word(hir::WordBoundary::UnicodeNegate));
assert_eq!(t(r"(?-u)\b"), hir_word(hir::WordBoundary::Ascii));
assert_eq!(
t_bytes(r"(?-u)\B"),
hir_word(hir::WordBoundary::AsciiNegate));
assert_eq!(t_err(r"(?-u)\B"), TestError {
kind: hir::ErrorKind::InvalidUtf8,
span: Span::new(Position::new(5, 1, 6), Position::new(7, 1, 8)),
});
}
#[test]
fn group() {
assert_eq!(t("(a)"), hir_group(1, hir_lit("a")));
assert_eq!(t("(a)(b)"), hir_cat(vec![
hir_group(1, hir_lit("a")),
hir_group(2, hir_lit("b")),
]));
assert_eq!(t("(a)|(b)"), hir_alt(vec![
hir_group(1, hir_lit("a")),
hir_group(2, hir_lit("b")),
]));
assert_eq!(t("(?P<foo>)"), hir_group_name(1, "foo", Hir::empty()));
assert_eq!(t("(?P<foo>a)"), hir_group_name(1, "foo", hir_lit("a")));
assert_eq!(t("(?P<foo>a)(?P<bar>b)"), hir_cat(vec![
hir_group_name(1, "foo", hir_lit("a")),
hir_group_name(2, "bar", hir_lit("b")),
]));
assert_eq!(t("(?:)"), hir_group_nocap(Hir::empty()));
assert_eq!(t("(?:a)"), hir_group_nocap(hir_lit("a")));
assert_eq!(t("(?:a)(b)"), hir_cat(vec![
hir_group_nocap(hir_lit("a")),
hir_group(1, hir_lit("b")),
]));
assert_eq!(t("(a)(?:b)(c)"), hir_cat(vec![
hir_group(1, hir_lit("a")),
hir_group_nocap(hir_lit("b")),
hir_group(2, hir_lit("c")),
]));
assert_eq!(t("(a)(?P<foo>b)(c)"), hir_cat(vec![
hir_group(1, hir_lit("a")),
hir_group_name(2, "foo", hir_lit("b")),
hir_group(3, hir_lit("c")),
]));
}
#[test]
fn flags() {
assert_eq!(t("(?i:a)a"), hir_cat(vec![
hir_group_nocap(hir_uclass(&[('A', 'A'), ('a', 'a')])),
hir_lit("a"),
]));
assert_eq!(t("(?i-u:a)Ξ²"), hir_cat(vec![
hir_group_nocap(hir_bclass(&[(b'A', b'A'), (b'a', b'a')])),
hir_lit("Ξ²"),
]));
assert_eq!(t("(?i)(?-i:a)a"), hir_cat(vec![
hir_group_nocap(hir_lit("a")),
hir_uclass(&[('A', 'A'), ('a', 'a')]),
]));
assert_eq!(t("(?im)a^"), hir_cat(vec![
hir_uclass(&[('A', 'A'), ('a', 'a')]),
hir_anchor(hir::Anchor::StartLine),
]));
assert_eq!(t("(?im)a^(?i-m)a^"), hir_cat(vec![
hir_uclass(&[('A', 'A'), ('a', 'a')]),
hir_anchor(hir::Anchor::StartLine),
hir_uclass(&[('A', 'A'), ('a', 'a')]),
hir_anchor(hir::Anchor::StartText),
]));
assert_eq!(t("(?U)a*a*?(?-U)a*a*?"), hir_cat(vec![
hir_star(false, hir_lit("a")),
hir_star(true, hir_lit("a")),
hir_star(true, hir_lit("a")),
hir_star(false, hir_lit("a")),
]));
assert_eq!(t("(?:a(?i)a)a"), hir_cat(vec![
hir_group_nocap(hir_cat(vec![
hir_lit("a"),
hir_uclass(&[('A', 'A'), ('a', 'a')]),
])),
hir_lit("a"),
]));
assert_eq!(t("(?i)(?:a(?-i)a)a"), hir_cat(vec![
hir_group_nocap(hir_cat(vec![
hir_uclass(&[('A', 'A'), ('a', 'a')]),
hir_lit("a"),
])),
hir_uclass(&[('A', 'A'), ('a', 'a')]),
]));
}
#[test]
fn escape() {
assert_eq!(
t(r"\\\.\+\*\?\(\)\|\[\]\{\}\^\$\#"),
hir_lit(r"\.+*?()|[]{}^$#"));
}
#[test]
fn repetition() {
assert_eq!(t("a?"), hir_quest(true, hir_lit("a")));
assert_eq!(t("a*"), hir_star(true, hir_lit("a")));
assert_eq!(t("a+"), hir_plus(true, hir_lit("a")));
assert_eq!(t("a??"), hir_quest(false, hir_lit("a")));
assert_eq!(t("a*?"), hir_star(false, hir_lit("a")));
assert_eq!(t("a+?"), hir_plus(false, hir_lit("a")));
assert_eq!(
t("a{1}"),
hir_range(
true,
hir::RepetitionRange::Exactly(1),
hir_lit("a"),
));
assert_eq!(
t("a{1,}"),
hir_range(
true,
hir::RepetitionRange::AtLeast(1),
hir_lit("a"),
));
assert_eq!(
t("a{1,2}"),
hir_range(
true,
hir::RepetitionRange::Bounded(1, 2),
hir_lit("a"),
));
assert_eq!(
t("a{1}?"),
hir_range(
false,
hir::RepetitionRange::Exactly(1),
hir_lit("a"),
));
assert_eq!(
t("a{1,}?"),
hir_range(
false,
hir::RepetitionRange::AtLeast(1),
hir_lit("a"),
));
assert_eq!(
t("a{1,2}?"),
hir_range(
false,
hir::RepetitionRange::Bounded(1, 2),
hir_lit("a"),
));
assert_eq!(t("ab?"), hir_cat(vec![
hir_lit("a"),
hir_quest(true, hir_lit("b")),
]));
assert_eq!(t("(ab)?"), hir_quest(true, hir_group(1, hir_cat(vec![
hir_lit("a"),
hir_lit("b"),
]))));
assert_eq!(t("a|b?"), hir_alt(vec![
hir_lit("a"),
hir_quest(true, hir_lit("b")),
]));
}
#[test]
fn cat_alt() {
assert_eq!(t("(ab)"), hir_group(1, hir_cat(vec![
hir_lit("a"),
hir_lit("b"),
])));
assert_eq!(t("a|b"), hir_alt(vec![
hir_lit("a"),
hir_lit("b"),
]));
assert_eq!(t("a|b|c"), hir_alt(vec![
hir_lit("a"),
hir_lit("b"),
hir_lit("c"),
]));
assert_eq!(t("ab|bc|cd"), hir_alt(vec![
hir_lit("ab"),
hir_lit("bc"),
hir_lit("cd"),
]));
assert_eq!(t("(a|b)"), hir_group(1, hir_alt(vec![
hir_lit("a"),
hir_lit("b"),
])));
assert_eq!(t("(a|b|c)"), hir_group(1, hir_alt(vec![
hir_lit("a"),
hir_lit("b"),
hir_lit("c"),
])));
assert_eq!(t("(ab|bc|cd)"), hir_group(1, hir_alt(vec![
hir_lit("ab"),
hir_lit("bc"),
hir_lit("cd"),
])));
assert_eq!(t("(ab|(bc|(cd)))"), hir_group(1, hir_alt(vec![
hir_lit("ab"),
hir_group(2, hir_alt(vec![
hir_lit("bc"),
hir_group(3, hir_lit("cd")),
])),
])));
}
#[test]
fn class_ascii() {
assert_eq!(
t("[[:alnum:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Alnum)));
assert_eq!(
t("[[:alpha:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Alpha)));
assert_eq!(
t("[[:ascii:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Ascii)));
assert_eq!(
t("[[:blank:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Blank)));
assert_eq!(
t("[[:cntrl:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Cntrl)));
assert_eq!(
t("[[:digit:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Digit)));
assert_eq!(
t("[[:graph:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Graph)));
assert_eq!(
t("[[:lower:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Lower)));
assert_eq!(
t("[[:print:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Print)));
assert_eq!(
t("[[:punct:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Punct)));
assert_eq!(
t("[[:space:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Space)));
assert_eq!(
t("[[:upper:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Upper)));
assert_eq!(
t("[[:word:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Word)));
assert_eq!(
t("[[:xdigit:]]"),
hir_uclass(ascii_class(&ast::ClassAsciiKind::Xdigit)));
assert_eq!(
t("[[:^lower:]]"),
hir_negate(hir_uclass(ascii_class(&ast::ClassAsciiKind::Lower))));
assert_eq!(
t("(?i)[[:lower:]]"),
hir_uclass(&[
('A', 'Z'), ('a', 'z'),
('\u{17F}', '\u{17F}'),
('\u{212A}', '\u{212A}'),
]));
assert_eq!(
t("(?-u)[[:lower:]]"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Lower)));
assert_eq!(
t("(?i-u)[[:lower:]]"),
hir_case_fold(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Lower))));
assert_eq!(t_err("(?-u)[[:^lower:]]"), TestError {
kind: hir::ErrorKind::InvalidUtf8,
span: Span::new(Position::new(6, 1, 7), Position::new(16, 1, 17)),
});
assert_eq!(t_err("(?i-u)[[:^lower:]]"), TestError {
kind: hir::ErrorKind::InvalidUtf8,
span: Span::new(Position::new(7, 1, 8), Position::new(17, 1, 18)),
});
}
#[test]
fn class_perl() {
// Unicode
assert_eq!(
t(r"\d"),
hir_uclass_query(ClassQuery::Binary("digit")));
assert_eq!(
t(r"\s"),
hir_uclass_query(ClassQuery::Binary("space")));
assert_eq!(
t(r"\w"),
hir_uclass_perl_word());
assert_eq!(
t(r"(?i)\d"),
hir_uclass_query(ClassQuery::Binary("digit")));
assert_eq!(
t(r"(?i)\s"),
hir_uclass_query(ClassQuery::Binary("space")));
assert_eq!(
t(r"(?i)\w"),
hir_uclass_perl_word());
// Unicode, negated
assert_eq!(
t(r"\D"),
hir_negate(hir_uclass_query(ClassQuery::Binary("digit"))));
assert_eq!(
t(r"\S"),
hir_negate(hir_uclass_query(ClassQuery::Binary("space"))));
assert_eq!(
t(r"\W"),
hir_negate(hir_uclass_perl_word()));
assert_eq!(
t(r"(?i)\D"),
hir_negate(hir_uclass_query(ClassQuery::Binary("digit"))));
assert_eq!(
t(r"(?i)\S"),
hir_negate(hir_uclass_query(ClassQuery::Binary("space"))));
assert_eq!(
t(r"(?i)\W"),
hir_negate(hir_uclass_perl_word()));
// ASCII only
assert_eq!(
t(r"(?-u)\d"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Digit)));
assert_eq!(
t(r"(?-u)\s"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Space)));
assert_eq!(
t(r"(?-u)\w"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Word)));
assert_eq!(
t(r"(?i-u)\d"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Digit)));
assert_eq!(
t(r"(?i-u)\s"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Space)));
assert_eq!(
t(r"(?i-u)\w"),
hir_bclass_from_char(ascii_class(&ast::ClassAsciiKind::Word)));
// ASCII only, negated
assert_eq!(
t(r"(?-u)\D"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Digit))));
assert_eq!(
t(r"(?-u)\S"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Space))));
assert_eq!(
t(r"(?-u)\W"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Word))));
assert_eq!(
t(r"(?i-u)\D"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Digit))));
assert_eq!(
t(r"(?i-u)\S"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Space))));
assert_eq!(
t(r"(?i-u)\W"),
hir_negate(hir_bclass_from_char(ascii_class(
&ast::ClassAsciiKind::Word))));
}
#[test]
fn class_unicode() {
assert_eq!(
t(r"\pZ"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\pz"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\p{Separator}"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\p{se PaRa ToR}"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\p{gc:Separator}"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\p{gc=Separator}"),
hir_uclass_query(ClassQuery::Binary("Z")));
assert_eq!(
t(r"\p{Other}"),
hir_uclass_query(ClassQuery::Binary("Other")));
assert_eq!(
t(r"\pC"),
hir_uclass_query(ClassQuery::Binary("Other")));
assert_eq!(
t(r"\PZ"),
hir_negate(hir_uclass_query(ClassQuery::Binary("Z"))));
assert_eq!(
t(r"\P{separator}"),
hir_negate(hir_uclass_query(ClassQuery::Binary("Z"))));
assert_eq!(
t(r"\P{gc!=separator}"),
hir_negate(hir_uclass_query(ClassQuery::Binary("Z"))));
assert_eq!(
t(r"\p{Greek}"),
hir_uclass_query(ClassQuery::Binary("Greek")));
assert_eq!(
t(r"(?i)\p{Greek}"),
hir_case_fold(hir_uclass_query(ClassQuery::Binary("Greek"))));
assert_eq!(
t(r"(?i)\P{Greek}"),
hir_negate(hir_case_fold(hir_uclass_query(
ClassQuery::Binary("Greek")))));
assert_eq!(
t(r"\p{any}"),
hir_uclass_query(ClassQuery::Binary("Any")));
assert_eq!(
t(r"\p{assigned}"),
hir_uclass_query(ClassQuery::Binary("Assigned")));
assert_eq!(
t(r"\p{ascii}"),
hir_uclass_query(ClassQuery::Binary("ASCII")));
assert_eq!(
t(r"\p{gc:any}"),
hir_uclass_query(ClassQuery::Binary("Any")));
assert_eq!(
t(r"\p{gc:assigned}"),
hir_uclass_query(ClassQuery::Binary("Assigned")));
assert_eq!(
t(r"\p{gc:ascii}"),
hir_uclass_query(ClassQuery::Binary("ASCII")));
assert_eq!(t_err(r"(?-u)\pZ"), TestError {
kind: hir::ErrorKind::UnicodeNotAllowed,
span: Span::new(Position::new(5, 1, 6), Position::new(8, 1, 9)),
});
assert_eq!(t_err(r"(?-u)\p{Separator}"), TestError {
kind: hir::ErrorKind::UnicodeNotAllowed,
span: Span::new(Position::new(5, 1, 6), Position::new(18, 1, 19)),
});
assert_eq!(t_err(r"\pE"), TestError {
kind: hir::ErrorKind::UnicodePropertyNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(3, 1, 4)),
});
assert_eq!(t_err(r"\p{Foo}"), TestError {
kind: hir::ErrorKind::UnicodePropertyNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(7, 1, 8)),
});
assert_eq!(t_err(r"\p{gc:Foo}"), TestError {
kind: hir::ErrorKind::UnicodePropertyValueNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(10, 1, 11)),
});
assert_eq!(t_err(r"\p{sc:Foo}"), TestError {
kind: hir::ErrorKind::UnicodePropertyValueNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(10, 1, 11)),
});
assert_eq!(t_err(r"\p{scx:Foo}"), TestError {
kind: hir::ErrorKind::UnicodePropertyValueNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(11, 1, 12)),
});
assert_eq!(t_err(r"\p{age:Foo}"), TestError {
kind: hir::ErrorKind::UnicodePropertyValueNotFound,
span: Span::new(Position::new(0, 1, 1), Position::new(11, 1, 12)),
});
}
    // Bracketed classes: ranges, escapes, nested Perl/Unicode classes,
    // negation (including double negation via `[^\D]`), case folding, and
    // empty-class / invalid-UTF-8 errors.
    // NOTE(review): the `Ξ²` literals below look mojibake-garbled (probably
    // originally `β` and its case folds) — verify against upstream.
    #[test]
    fn class_bracketed() {
        assert_eq!(t("[a]"), hir_uclass(&[('a', 'a')]));
        assert_eq!(t("[^[a]]"), hir_negate(hir_uclass(&[('a', 'a')])));
        assert_eq!(t("[a-z]"), hir_uclass(&[('a', 'z')]));
        assert_eq!(t("[a-fd-h]"), hir_uclass(&[('a', 'h')]));
        assert_eq!(t("[a-fg-m]"), hir_uclass(&[('a', 'm')]));
        assert_eq!(t(r"[\x00]"), hir_uclass(&[('\0', '\0')]));
        assert_eq!(t(r"[\n]"), hir_uclass(&[('\n', '\n')]));
        assert_eq!(t("[\n]"), hir_uclass(&[('\n', '\n')]));
        assert_eq!(
            t(r"[\d]"),
            hir_uclass_query(ClassQuery::Binary("digit")));
        assert_eq!(
            t(r"[\pZ]"),
            hir_uclass_query(ClassQuery::Binary("separator")));
        assert_eq!(
            t(r"[\p{separator}]"),
            hir_uclass_query(ClassQuery::Binary("separator")));
        assert_eq!(
            t(r"[^\D]"),
            hir_uclass_query(ClassQuery::Binary("digit")));
        assert_eq!(
            t(r"[^\PZ]"),
            hir_uclass_query(ClassQuery::Binary("separator")));
        assert_eq!(
            t(r"[^\P{separator}]"),
            hir_uclass_query(ClassQuery::Binary("separator")));
        assert_eq!(
            t(r"(?i)[^\D]"),
            hir_uclass_query(ClassQuery::Binary("digit")));
        assert_eq!(
            t(r"(?i)[^\P{greek}]"),
            hir_case_fold(hir_uclass_query(ClassQuery::Binary("greek"))));
        assert_eq!(t("(?-u)[a]"), hir_bclass(&[(b'a', b'a')]));
        assert_eq!(t(r"(?-u)[\x00]"), hir_bclass(&[(b'\0', b'\0')]));
        assert_eq!(t_bytes(r"(?-u)[\xFF]"), hir_bclass(&[(b'\xFF', b'\xFF')]));
        assert_eq!(t("(?i)[a]"), hir_uclass(&[('A', 'A'), ('a', 'a')]));
        assert_eq!(t("(?i)[k]"), hir_uclass(&[
            ('K', 'K'), ('k', 'k'), ('\u{212A}', '\u{212A}'),
        ]));
        assert_eq!(t("(?i)[Ξ²]"), hir_uclass(&[
            ('Ξ', 'Ξ'), ('Ξ²', 'Ξ²'), ('Ο', 'Ο'),
        ]));
        assert_eq!(t("(?i-u)[k]"), hir_bclass(&[
            (b'K', b'K'), (b'k', b'k'),
        ]));
        assert_eq!(t("[^a]"), hir_negate(hir_uclass(&[('a', 'a')])));
        assert_eq!(t(r"[^\x00]"), hir_negate(hir_uclass(&[('\0', '\0')])));
        assert_eq!(
            t_bytes("(?-u)[^a]"),
            hir_negate(hir_bclass(&[(b'a', b'a')])));
        assert_eq!(
            t(r"[^\d]"),
            hir_negate(hir_uclass_query(ClassQuery::Binary("digit"))));
        assert_eq!(
            t(r"[^\pZ]"),
            hir_negate(hir_uclass_query(ClassQuery::Binary("separator"))));
        assert_eq!(
            t(r"[^\p{separator}]"),
            hir_negate(hir_uclass_query(ClassQuery::Binary("separator"))));
        assert_eq!(
            t(r"(?i)[^\p{greek}]"),
            hir_negate(hir_case_fold(hir_uclass_query(
                ClassQuery::Binary("greek")))));
        assert_eq!(
            t(r"(?i)[\P{greek}]"),
            hir_negate(hir_case_fold(hir_uclass_query(
                ClassQuery::Binary("greek")))));
        // Test some weird cases.
        assert_eq!(t(r"[\[]"), hir_uclass(&[('[', '[')]));
        assert_eq!(t(r"[&]"), hir_uclass(&[('&', '&')]));
        assert_eq!(t(r"[\&]"), hir_uclass(&[('&', '&')]));
        assert_eq!(t(r"[\&\&]"), hir_uclass(&[('&', '&')]));
        assert_eq!(t(r"[\x00-&]"), hir_uclass(&[('\0', '&')]));
        assert_eq!(t(r"[&-\xFF]"), hir_uclass(&[('&', '\u{FF}')]));
        assert_eq!(t(r"[~]"), hir_uclass(&[('~', '~')]));
        assert_eq!(t(r"[\~]"), hir_uclass(&[('~', '~')]));
        assert_eq!(t(r"[\~\~]"), hir_uclass(&[('~', '~')]));
        assert_eq!(t(r"[\x00-~]"), hir_uclass(&[('\0', '~')]));
        assert_eq!(t(r"[~-\xFF]"), hir_uclass(&[('~', '\u{FF}')]));
        assert_eq!(t(r"[-]"), hir_uclass(&[('-', '-')]));
        assert_eq!(t(r"[\-]"), hir_uclass(&[('-', '-')]));
        assert_eq!(t(r"[\-\-]"), hir_uclass(&[('-', '-')]));
        assert_eq!(t(r"[\x00-\-]"), hir_uclass(&[('\0', '-')]));
        assert_eq!(t(r"[\--\xFF]"), hir_uclass(&[('-', '\u{FF}')]));
        assert_eq!(t_err("(?-u)[^a]"), TestError {
            kind: hir::ErrorKind::InvalidUtf8,
            span: Span::new(Position::new(5, 1, 6), Position::new(9, 1, 10)),
        });
        assert_eq!(t_err(r"[^\s\S]"), TestError {
            kind: hir::ErrorKind::EmptyClassNotAllowed,
            span: Span::new(Position::new(0, 1, 1), Position::new(7, 1, 8)),
        });
        assert_eq!(t_err(r"(?-u)[^\s\S]"), TestError {
            kind: hir::ErrorKind::EmptyClassNotAllowed,
            span: Span::new(Position::new(5, 1, 6), Position::new(12, 1, 13)),
        });
    }
#[test]
fn class_bracketed_union() {
assert_eq!(
t("[a-zA-Z]"),
hir_uclass(&[('A', 'Z'), ('a', 'z')]));
assert_eq!(
t(r"[a\pZb]"),
hir_union(
hir_uclass(&[('a', 'b')]),
hir_uclass_query(ClassQuery::Binary("separator"))));
assert_eq!(
t(r"[\pZ\p{Greek}]"),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator"))));
assert_eq!(
t(r"[\p{age:3.0}\pZ\p{Greek}]"),
hir_union(
hir_uclass_query(ClassQuery::ByValue {
property_name: "age",
property_value: "3.0",
}),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator")))));
assert_eq!(
t(r"[[[\p{age:3.0}\pZ]\p{Greek}][\p{Cyrillic}]]"),
hir_union(
hir_uclass_query(ClassQuery::ByValue {
property_name: "age",
property_value: "3.0",
}),
hir_union(
hir_uclass_query(ClassQuery::Binary("cyrillic")),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator"))))));
assert_eq!(
t(r"(?i)[\p{age:3.0}\pZ\p{Greek}]"),
hir_case_fold(hir_union(
hir_uclass_query(ClassQuery::ByValue {
property_name: "age",
property_value: "3.0",
}),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator"))))));
assert_eq!(
t(r"[^\p{age:3.0}\pZ\p{Greek}]"),
hir_negate(hir_union(
hir_uclass_query(ClassQuery::ByValue {
property_name: "age",
property_value: "3.0",
}),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator"))))));
assert_eq!(
t(r"(?i)[^\p{age:3.0}\pZ\p{Greek}]"),
hir_negate(hir_case_fold(hir_union(
hir_uclass_query(ClassQuery::ByValue {
property_name: "age",
property_value: "3.0",
}),
hir_union(
hir_uclass_query(ClassQuery::Binary("greek")),
hir_uclass_query(ClassQuery::Binary("separator")))))));
}
#[test]
fn class_bracketed_nested() {
assert_eq!(
t(r"[a[^c]]"),
hir_negate(hir_uclass(&[('c', 'c')])));
assert_eq!(
t(r"[a-b[^c]]"),
hir_negate(hir_uclass(&[('c', 'c')])));
assert_eq!(
t(r"[a-c[^c]]"),
hir_negate(hir_uclass(&[])));
assert_eq!(
t(r"[^a[^c]]"),
hir_uclass(&[('c', 'c')]));
assert_eq!(
t(r"[^a-b[^c]]"),
hir_uclass(&[('c', 'c')]));
assert_eq!(
t(r"(?i)[a[^c]]"),
hir_negate(hir_case_fold(hir_uclass(&[('c', 'c')]))));
assert_eq!(
t(r"(?i)[a-b[^c]]"),
hir_negate(hir_case_fold(hir_uclass(&[('c', 'c')]))));
assert_eq!(
t(r"(?i)[^a[^c]]"),
hir_uclass(&[('C', 'C'), ('c', 'c')]));
assert_eq!(
t(r"(?i)[^a-b[^c]]"),
hir_uclass(&[('C', 'C'), ('c', 'c')]));
assert_eq!(t_err(r"[^a-c[^c]]"), TestError {
kind: hir::ErrorKind::EmptyClassNotAllowed,
span: Span::new(Position::new(0, 1, 1), Position::new(10, 1, 11)),
});
assert_eq!(t_err(r"(?i)[^a-c[^c]]"), TestError {
kind: hir::ErrorKind::EmptyClassNotAllowed,
span: Span::new(Position::new(4, 1, 5), Position::new(14, 1, 15)),
});
}
#[test]
fn class_bracketed_intersect() {
assert_eq!(t("[abc&&b-c]"), hir_uclass(&[('b', 'c')]));
assert_eq!(t("[abc&&[b-c]]"), hir_uclass(&[('b', 'c')]));
assert_eq!(t("[[abc]&&[b-c]]"), hir_uclass(&[('b', 'c')]));
assert_eq!(t("[a-z&&b-y&&c-x]"), hir_uclass(&[('c', 'x')]));
assert_eq!(t("[c-da-b&&a-d]"), hir_uclass(&[('a', 'd')]));
assert_eq!(t("[a-d&&c-da-b]"), hir_uclass(&[('a', 'd')]));
assert_eq!(t(r"[a-z&&a-c]"), hir_uclass(&[('a', 'c')]));
assert_eq!(t(r"[[a-z&&a-c]]"), hir_uclass(&[('a', 'c')]));
assert_eq!(t(r"[^[a-z&&a-c]]"), hir_negate(hir_uclass(&[('a', 'c')])));
assert_eq!(t("(?-u)[abc&&b-c]"), hir_bclass(&[(b'b', b'c')]));
assert_eq!(t("(?-u)[abc&&[b-c]]"), hir_bclass(&[(b'b', b'c')]));
assert_eq!(t("(?-u)[[abc]&&[b-c]]"), hir_bclass(&[(b'b', b'c')]));
assert_eq!(t("(?-u)[a-z&&b-y&&c-x]"), hir_bclass(&[(b'c', b'x')]));
assert_eq!(t("(?-u)[c-da-b&&a-d]"), hir_bclass(&[(b'a', b'd')]));
assert_eq!(t("(?-u)[a-d&&c-da-b]"), hir_bclass(&[(b'a', b'd')]));
assert_eq!(
t("(?i)[abc&&b-c]"),
hir_case_fold(hir_uclass(&[('b', 'c')])));
assert_eq!(
t("(?i)[abc&&[b-c]]"),
hir_case_fold(hir_uclass(&[('b', 'c')])));
assert_eq!(
t("(?i)[[abc]&&[b-c]]"),
hir_case_fold(hir_uclass(&[('b', 'c')])));
assert_eq!(
t("(?i)[a-z&&b-y&&c-x]"),
hir_case_fold(hir_uclass(&[('c', 'x')])));
assert_eq!(
t("(?i)[c-da-b&&a-d]"),
hir_case_fold(hir_uclass(&[('a', 'd')])));
assert_eq!(
t("(?i)[a-d&&c-da-b]"),
hir_case_fold(hir_uclass(&[('a', 'd')])));
assert_eq!(
t("(?i-u)[abc&&b-c]"),
hir_case_fold(hir_bclass(&[(b'b', b'c')])));
assert_eq!(
t("(?i-u)[abc&&[b-c]]"),
hir_case_fold(hir_bclass(&[(b'b', b'c')])));
assert_eq!(
t("(?i-u)[[abc]&&[b-c]]"),
hir_case_fold(hir_bclass(&[(b'b', b'c')])));
assert_eq!(
t("(?i-u)[a-z&&b-y&&c-x]"),
hir_case_fold(hir_bclass(&[(b'c', b'x')])));
assert_eq!(
t("(?i-u)[c-da-b&&a-d]"),
hir_case_fold(hir_bclass(&[(b'a', b'd')])));
assert_eq!(
t("(?i-u)[a-d&&c-da-b]"),
hir_case_fold(hir_bclass(&[(b'a', b'd')])));
// In `[a^]`, `^` does not need to be escaped, so it makes sense that
// `^` is also allowed to be unescaped after `&&`.
assert_eq!(t(r"[\^&&^]"), hir_uclass(&[('^', '^')]));
// `]` needs to be escaped after `&&` since it's not at start of class.
assert_eq!(t(r"[]&&\]]"), hir_uclass(&[(']', ']')]));
assert_eq!(t(r"[-&&-]"), hir_uclass(&[('-', '-')]));
assert_eq!(t(r"[\&&&&]"), hir_uclass(&[('&', '&')]));
assert_eq!(t(r"[\&&&\&]"), hir_uclass(&[('&', '&')]));
// Test precedence.
assert_eq!(
t(r"[a-w&&[^c-g]z]"),
hir_uclass(&[('a', 'b'), ('h', 'w')]));
}
#[test]
fn class_bracketed_intersect_negate() {
    // `[^ ... ]` negates the *result* of the intersection. `\w&&\d` reduces to
    // `\d`, so the whole class is "anything that is not a digit".
    assert_eq!(
        t(r"[^\w&&\d]"),
        hir_negate(hir_uclass_query(ClassQuery::Binary("digit"))));
    assert_eq!(
        t(r"[^[a-z&&a-c]]"),
        hir_negate(hir_uclass(&[('a', 'c')])));
    assert_eq!(
        t(r"[^[\w&&\d]]"),
        hir_negate(hir_uclass_query(ClassQuery::Binary("digit"))));
    // Double negation cancels out.
    assert_eq!(
        t(r"[^[^\w&&\d]]"),
        hir_uclass_query(ClassQuery::Binary("digit")));
    // By De Morgan: "non-word AND non-digit" is just "non-word", since every
    // digit is also a word character.
    assert_eq!(
        t(r"[[[^\w]&&[^\d]]]"),
        hir_negate(hir_uclass_perl_word()));
    // Same cases again in byte (non-Unicode) mode.
    assert_eq!(
        t_bytes(r"(?-u)[^\w&&\d]"),
        hir_negate(hir_bclass_from_char(ascii_class(
            &ast::ClassAsciiKind::Digit))));
    assert_eq!(
        t_bytes(r"(?-u)[^[a-z&&a-c]]"),
        hir_negate(hir_bclass(&[(b'a', b'c')])));
    assert_eq!(
        t_bytes(r"(?-u)[^[\w&&\d]]"),
        hir_negate(hir_bclass_from_char(ascii_class(
            &ast::ClassAsciiKind::Digit))));
    assert_eq!(
        t_bytes(r"(?-u)[^[^\w&&\d]]"),
        hir_bclass_from_char(ascii_class(
            &ast::ClassAsciiKind::Digit)));
    assert_eq!(
        t_bytes(r"(?-u)[[[^\w]&&[^\d]]]"),
        hir_negate(hir_bclass_from_char(ascii_class(
            &ast::ClassAsciiKind::Word))));
}
#[test]
fn class_bracketed_difference() {
    // `--` removes the right-hand class from the left-hand one: all letters
    // minus the ASCII range leaves only non-ASCII letters.
    let non_ascii_letters = hir_difference(
        hir_uclass_query(ClassQuery::Binary("letter")),
        hir_uclass(&[('\0', '\x7F')]),
    );
    assert_eq!(t(r"[\pL--[:ascii:]]"), non_ascii_letters);

    // In byte mode, alpha minus lower leaves exactly the upper-case letters.
    let upper = hir_bclass(&[(b'A', b'Z')]);
    assert_eq!(t(r"(?-u)[[:alpha:]--[:lower:]]"), upper);
}
#[test]
fn class_bracketed_symmetric_difference() {
    // `~~` keeps codepoints that appear in exactly one of the two classes.
    let greek_scx_only = hir_uclass(&[
        ('\u{0342}', '\u{0342}'),
        ('\u{0345}', '\u{0345}'),
        ('\u{1DC0}', '\u{1DC1}'),
    ]);
    assert_eq!(t(r"[\p{sc:Greek}~~\p{scx:Greek}]"), greek_scx_only);

    // Overlapping literal ranges: the shared `c-g` portion is dropped.
    assert_eq!(hir_uclass(&[('a', 'b'), ('h', 'j')]), t(r"[a-g~~c-j]"));
    assert_eq!(
        hir_bclass(&[(b'a', b'b'), (b'h', b'j')]),
        t(r"(?-u)[a-g~~c-j]"),
    );
}
#[test]
fn ignore_whitespace() {
    // In `(?x)` mode, literal whitespace and `#`-comments inside the pattern
    // are ignored — even in the middle of escape sequences and repetitions.
    assert_eq!(t(r"(?x)\12 3"), hir_lit("\n3"));
    assert_eq!(t(r"(?x)\x { 53 }"), hir_lit("S"));
    // Whitespace/comments may even split a braced hex escape across lines.
    assert_eq!(t(r"(?x)\x # comment
{ # comment
53 # comment
} #comment"), hir_lit("S"));
    assert_eq!(t(r"(?x)\x 53"), hir_lit("S"));
    assert_eq!(t(r"(?x)\x # comment
53 # comment"), hir_lit("S"));
    assert_eq!(t(r"(?x)\x5 3"), hir_lit("S"));
    // Same for Unicode property escapes...
    assert_eq!(t(r"(?x)\p # comment
{ # comment
Separator # comment
} # comment"), hir_uclass_query(ClassQuery::Binary("separator")));
    // ...and for counted repetitions.
    assert_eq!(t(r"(?x)a # comment
{ # comment
5 # comment
, # comment
10 # comment
} # comment"),
        hir_range(
            true, hir::RepetitionRange::Bounded(5, 10), hir_lit("a")));
    // An escaped space is still a literal space in `(?x)` mode.
    assert_eq!(t(r"(?x)a\ # hi there"), hir_lit("a "));
}
#[test]
fn analysis_is_always_utf8() {
    // Patterns whose matches are guaranteed to be valid UTF-8.
    let always_utf8 = [
        r"a", r"ab", r"(?-u)a", r"(?-u)ab", r"\xFF", r"\xFF\xFF",
        r"[^a]", r"[^a][^a]", r"\b", r"\B", r"(?-u)\b",
    ];
    for pattern in &always_utf8 {
        assert!(t_bytes(pattern).is_always_utf8(), "{}", pattern);
    }

    // Byte-oriented patterns that can match invalid UTF-8.
    let not_always_utf8 = [
        r"(?-u)\xFF", r"(?-u)\xFF\xFF", r"(?-u)[^a]", r"(?-u)[^a][^a]",
        r"(?-u)\B",
    ];
    for pattern in &not_always_utf8 {
        assert!(!t_bytes(pattern).is_always_utf8(), "{}", pattern);
    }
}
#[test]
fn analysis_is_all_assertions() {
    // Patterns composed entirely of zero-width assertions.
    let all_assertions = [
        r"\b", r"\B", r"^", r"$", r"\A", r"\z",
        r"$^\z\A\b\B", r"$|^|\z|\A|\b|\B", r"^$|$^", r"((\b)+())*^",
    ];
    for pattern in &all_assertions {
        assert!(t(pattern).is_all_assertions(), "{}", pattern);
    }

    // A single literal character is enough to break the property.
    assert!(!t(r"^a").is_all_assertions());
}
#[test]
fn analysis_is_anchored() {
    // Checks whether a pattern is *unconditionally* anchored at the start
    // (`is_anchored_start`) or end (`is_anchored_end`) of the haystack.
    // Positive examples.
    assert!(t(r"^").is_anchored_start());
    assert!(t(r"$").is_anchored_end());
    assert!(t(r"^^").is_anchored_start());
    assert!(t(r"$$").is_anchored_end());
    assert!(t(r"^$").is_anchored_start());
    assert!(t(r"^$").is_anchored_end());
    assert!(t(r"^foo").is_anchored_start());
    assert!(t(r"foo$").is_anchored_end());
    // Every alternation branch must be anchored.
    assert!(t(r"^foo|^bar").is_anchored_start());
    assert!(t(r"foo$|bar$").is_anchored_end());
    assert!(t(r"^(foo|bar)").is_anchored_start());
    assert!(t(r"(foo|bar)$").is_anchored_end());
    // One-or-more repetitions of an anchor are still anchored.
    assert!(t(r"^+").is_anchored_start());
    assert!(t(r"$+").is_anchored_end());
    assert!(t(r"^++").is_anchored_start());
    assert!(t(r"$++").is_anchored_end());
    assert!(t(r"(^)+").is_anchored_start());
    assert!(t(r"($)+").is_anchored_end());
    assert!(t(r"$^").is_anchored_start());
    assert!(t(r"$^").is_anchored_end());
    assert!(t(r"$^|^$").is_anchored_start());
    assert!(t(r"$^|^$").is_anchored_end());
    assert!(t(r"\b^").is_anchored_start());
    assert!(t(r"$\b").is_anchored_end());
    assert!(t(r"^(?m:^)").is_anchored_start());
    assert!(t(r"(?m:$)$").is_anchored_end());
    assert!(t(r"(?m:^)^").is_anchored_start());
    assert!(t(r"$(?m:$)").is_anchored_end());
    // Negative examples.
    assert!(!t(r"(?m)^").is_anchored_start());
    assert!(!t(r"(?m)$").is_anchored_end());
    assert!(!t(r"(?m:^$)|$^").is_anchored_start());
    assert!(!t(r"(?m:^$)|$^").is_anchored_end());
    assert!(!t(r"$^|(?m:^$)").is_anchored_start());
    assert!(!t(r"$^|(?m:^$)").is_anchored_end());
    // A trailing `^` or leading `$` does not anchor the start.
    // NOTE(review): the original repeated the following two assertions
    // verbatim; the duplicates (possibly intended as `is_anchored_end`
    // checks) were removed as redundant.
    assert!(!t(r"a^").is_anchored_start());
    assert!(!t(r"$a").is_anchored_start());
    assert!(!t(r"^foo|bar").is_anchored_start());
    assert!(!t(r"foo|bar$").is_anchored_end());
    // Zero-or-more repetitions may match zero anchors, so not anchored.
    assert!(!t(r"^*").is_anchored_start());
    assert!(!t(r"$*").is_anchored_end());
    assert!(!t(r"^*+").is_anchored_start());
    assert!(!t(r"$*+").is_anchored_end());
    assert!(!t(r"^+*").is_anchored_start());
    assert!(!t(r"$+*").is_anchored_end());
    assert!(!t(r"(^)*").is_anchored_start());
    assert!(!t(r"($)*").is_anchored_end());
}
#[test]
fn analysis_is_any_anchored() {
    // Positive: a text anchor anywhere in the pattern counts.
    assert!(t(r"^").is_any_anchored_start());
    assert!(t(r"\A").is_any_anchored_start());
    assert!(t(r"$").is_any_anchored_end());
    assert!(t(r"\z").is_any_anchored_end());

    // Negative: multi-line anchors and anchors of the wrong kind do not.
    assert!(!t(r"(?m)^").is_any_anchored_start());
    assert!(!t(r"$").is_any_anchored_start());
    assert!(!t(r"(?m)$").is_any_anchored_end());
    assert!(!t(r"^").is_any_anchored_end());
}
#[test]
fn analysis_is_match_empty() {
    // Patterns that can match the empty string.
    let can_match_empty = [
        r"", r"()", r"()*", r"()+", r"()?", r"a*", r"a?", r"a{0}",
        r"a{0,}", r"a{0,1}", r"a{0,10}", r"\pL*", r"a*|b", r"b|a*",
        r"a*a?(abcd)*", r"^", r"$", r"(?m)^", r"(?m)$", r"\A", r"\z", r"\B",
    ];
    for pattern in &can_match_empty {
        assert!(t(pattern).is_match_empty(), "{}", pattern);
    }
    assert!(t_bytes(r"(?-u)\B").is_match_empty());

    // Patterns that always require at least one character (or a word
    // boundary, which needs a word character on one side).
    let cannot_match_empty = [
        r"a+", r"a{1}", r"a{1,}", r"a{1,2}", r"a{1,10}", r"b|a",
        r"a*a+(abcd)*", r"\b", r"(?-u)\b",
    ];
    for pattern in &cannot_match_empty {
        assert!(!t(pattern).is_match_empty(), "{}", pattern);
    }
}
}
| 35.463482 | 79 | 0.482595 |
18342524cbf401dd1ba9d61d574f1f8c607ce51a | 581 | use async_std::io::Write;
use crate::broker::{BrokerMessage, ChBrokerSend};
use anyhow::Result;
use async_trait::async_trait;
use kuska_ssb::{api::ApiCaller, rpc::RecvMsg};
/// An input event dispatched to an [`RpcHandler`].
#[derive(Debug)]
pub enum RpcInput {
    /// No input.
    None,
    /// A periodic timer tick.
    Timer,
    /// An RPC message received from the network; the `i32` presumably
    /// identifies the RPC request/stream — TODO confirm against callers.
    Network(i32, RecvMsg),
    /// An internal message relayed from the broker.
    Message(BrokerMessage),
}
/// A handler that reacts to [`RpcInput`] events, writing responses through
/// the given [`ApiCaller`] and publishing to the broker channel.
#[async_trait]
pub trait RpcHandler<W>: Send + Sync
where
    W: Write + Unpin + Send + Sync,
{
    /// A static name identifying this handler (for diagnostics).
    fn name(&self) -> &'static str;

    /// Handles a single input event.
    ///
    /// Returns `Ok(true)` presumably when the event was handled/consumed —
    /// TODO confirm against the dispatch loop that calls this.
    async fn handle(
        &mut self,
        api: &mut ApiCaller<W>,
        op: &RpcInput,
        ch_broker: &mut ChBrokerSend,
    ) -> Result<bool>;
}
| 20.034483 | 49 | 0.628227 |
d68f8563a2d8d243f8571616a6febd92254713f6 | 1,705 | use gw_common::{state::State, H256};
use gw_traits::CodeStore;
use gw_types::{bytes::Bytes, offchain::RunResult};
pub struct RunResultState<'a>(pub &'a mut RunResult);
impl<'a> State for RunResultState<'a> {
fn get_raw(&self, key: &H256) -> Result<H256, gw_common::error::Error> {
self.0
.read_values
.get(key)
.cloned()
.ok_or(gw_common::error::Error::MissingKey)
}
fn update_raw(&mut self, key: H256, value: H256) -> Result<(), gw_common::error::Error> {
self.0.write.write_values.insert(key, value);
Ok(())
}
fn calculate_root(&self) -> Result<H256, gw_common::error::Error> {
// unsupported operation
Err(gw_common::error::Error::MissingKey)
}
fn get_account_count(&self) -> Result<u32, gw_common::error::Error> {
self.0
.write
.account_count
.ok_or(gw_common::error::Error::MissingKey)
}
fn set_account_count(&mut self, count: u32) -> Result<(), gw_common::error::Error> {
self.0.write.account_count = Some(count);
Ok(())
}
}
impl<'a> CodeStore for RunResultState<'a> {
    /// Looks up a data blob recorded in the run result's read set.
    fn get_data(&self, data_hash: &H256) -> Option<Bytes> {
        self.0.read_data.get(data_hash).map(Clone::clone)
    }

    /// Records a data blob into the run result's write set.
    fn insert_data(&mut self, data_hash: H256, code: Bytes) {
        let writes = &mut self.0.write.write_data;
        writes.insert(data_hash, code);
    }

    /// Looks up a script previously fetched during the run.
    fn get_script(&self, script_hash: &H256) -> Option<gw_types::packed::Script> {
        self.0.get_scripts.get(script_hash).map(Clone::clone)
    }

    /// Records a newly created script into the run result's write set.
    fn insert_script(&mut self, script_hash: H256, script: gw_types::packed::Script) {
        let scripts = &mut self.0.write.new_scripts;
        scripts.insert(script_hash, script);
    }
}
| 34.795918 | 93 | 0.614076 |
0106512754fe748a031087ba2489b8998518b5ca | 66,439 | //! Searches, processes and uploads debug information files (DIFs). See
//! `DifUpload` for more information.
use std::collections::{BTreeMap, BTreeSet};
use std::convert::TryInto;
use std::ffi::{OsStr, OsString};
use std::fmt::{self, Display};
use std::fs::{self, File};
use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::iter::IntoIterator;
use std::mem::transmute;
use std::ops::Deref;
use std::path::{Component, Path, PathBuf};
use std::process::Command;
use std::slice::{Chunks, Iter};
use std::str;
use std::thread;
use std::time::{Duration, Instant};
use console::style;
use failure::{bail, err_msg, Error, SyncFailure};
use indicatif::HumanBytes;
use log::{debug, info, warn};
use sha1::Digest;
use symbolic::common::{AsSelf, ByteView, DebugId, SelfCell, Uuid};
use symbolic::debuginfo::macho::{BcSymbolMap, UuidMapping};
use symbolic::debuginfo::sourcebundle::SourceBundleWriter;
use symbolic::debuginfo::{Archive, FileEntry, FileFormat, Object};
use walkdir::WalkDir;
use which::which;
use zip::{write::FileOptions, ZipArchive, ZipWriter};
use crate::api::{
Api, ChunkUploadCapability, ChunkUploadOptions, ChunkedDifRequest, ChunkedFileState,
};
use crate::config::Config;
use crate::constants::{DEFAULT_MAX_DIF_SIZE, DEFAULT_MAX_WAIT};
use crate::utils::chunks::{
upload_chunks, BatchedSliceExt, Chunk, ItemSize, ASSEMBLE_POLL_INTERVAL,
};
use crate::utils::dif::ObjectDifFeatures;
use crate::utils::fs::{get_sha1_checksum, get_sha1_checksums, TempDir, TempFile};
use crate::utils::progress::{ProgressBar, ProgressStyle};
use crate::utils::ui::{copy_with_progress, make_byte_progress_bar};
/// A debug info file on the server.
pub use crate::api::DebugInfoFile;
/// Fallback maximum number of chunks in a batch for the legacy upload.
///
/// NOTE(review): presumably used when the server does not advertise chunk
/// upload options — confirm against the upload call sites.
static MAX_CHUNKS: u64 = 64;
/// An iterator over chunks of data in a `ChunkedDifMatch` object.
///
/// This struct is returned by `ChunkedDifMatch::chunks`.
struct DifChunks<'a> {
    // Per-chunk SHA1 checksums, yielded in lockstep with `iter`.
    checksums: Iter<'a, Digest>,
    // The raw DIF data, split into equally sized chunks.
    iter: Chunks<'a, u8>,
}
impl<'a> Iterator for DifChunks<'a> {
    type Item = Chunk<'a>;

    /// Yields `(checksum, data)` pairs until either sequence is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        let checksum = self.checksums.next()?;
        let data = self.iter.next()?;
        Some(Chunk((*checksum, data)))
    }
}
/// A Debug Information File.
///
/// This is primarily used to store inside the [`DifMatch`] so does not contain any
/// information already present there. You probably should look whether you can use
/// [`DifMatch`] instead of this instead.
enum ParsedDif<'a> {
    /// A parsed object file (Mach-O, ELF, PE, PDB, ...).
    Object(Object<'a>),
    /// A parsed Apple BCSymbolMap.
    BcSymbolMap(BcSymbolMap<'a>),
    /// A parsed dSYM UUID mapping PList.
    UuidMap(UuidMapping),
}
impl<'slf, 'data: 'slf> AsSelf<'slf> for ParsedDif<'data> {
    type Ref = ParsedDif<'data>;

    // Identity projection; allows `ParsedDif` to be stored in a `SelfCell`.
    fn as_self(&'slf self) -> &Self::Ref {
        self
    }
}
/// Contains backing data for a `DifMatch`.
///
/// This can be used to store the actual data that a `FatObject` might be
/// relying upon, such as temporary files or extracted archives. It will be
/// disposed along with a `DifMatch` once it is dropped.
#[derive(Debug)]
enum DifBacking {
    /// A temporary file that is deleted when dropped.
    Temp(TempFile),
}
/// A handle to a debug information file found by `DifUpload`.
///
/// It contains a `FatObject` giving access to the metadata and contents of the
/// debug information file. `DifMatch::attachments` may contain supplemental
/// files used to further process this file, such as dSYM PLists.
struct DifMatch<'data> {
    // Keeps backing storage (e.g. a temp file) alive for as long as `dif`.
    _backing: Option<DifBacking>,
    // The parsed DIF together with the buffer it borrows from.
    dif: SelfCell<ByteView<'data>, ParsedDif<'data>>,
    // Path of this DIF relative to the search origin.
    name: String,
    // Explicit debug id override; `None` lets the server derive it.
    debug_id: Option<DebugId>,
    // Supplemental files keyed by file name (e.g. dSYM UUID PLists).
    attachments: Option<BTreeMap<String, ByteView<'static>>>,
}
impl<'data> DifMatch<'data> {
    /// Creates a `DifMatch` from a temporary object file, taking ownership of
    /// the temp file so it lives as long as this match.
    fn from_temp_object<S>(temp_file: TempFile, name: S) -> Result<Self, Error>
    where
        S: Into<String>,
    {
        let buffer = ByteView::open(temp_file.path()).map_err(SyncFailure::new)?;
        let dif = SelfCell::try_new(buffer, |b| {
            Object::parse(unsafe { &*b }).map(ParsedDif::Object)
        })?;

        // Even though we could supply the debug_id here from the object we do not, the
        // server will do the same anyway and we actually have control over the version of
        // the code running there so can fix bugs more reliably. Additionally supplying
        // Some(...) for debug_id can only be done if the ChunkedUploadCapability::Pdbs is
        // present, which is kind of a protocol bug. Not supplying it means more recent
        // sentry-cli versions keep working with ancient versions of sentry by not
        // triggering this protocol bug in most common situations. See
        // https://github.com/getsentry/sentry-cli/issues/980
        Ok(DifMatch {
            _backing: Some(DifBacking::Temp(temp_file)),
            dif,
            name: name.into(),
            debug_id: None,
            attachments: None,
        })
    }

    /// Creates a [`DifMatch`] from a `.bcsymbolmap` file.
    ///
    /// The `uuid` is the DebugID of the symbolmap while `name` is the filename of the file.
    /// Normally the filename should be the `uuid` with `.bcsymbolmap` appended to it.
    fn from_bcsymbolmap(
        uuid: DebugId,
        name: String,
        data: ByteView<'static>,
    ) -> Result<Self, Error> {
        let dif = SelfCell::try_new(data, |buf| {
            BcSymbolMap::parse(unsafe { &*buf }).map(ParsedDif::BcSymbolMap)
        })?;

        Ok(Self {
            _backing: None,
            dif,
            name,
            debug_id: Some(uuid),
            attachments: None,
        })
    }

    /// Creates a [`DifMatch`] from a dSYM UUID mapping PList.
    ///
    /// The `uuid` is the DebugID the mapping belongs to; `name` is the
    /// filename of the PList.
    fn from_plist(uuid: DebugId, name: String, data: ByteView<'static>) -> Result<Self, Error> {
        let dif = SelfCell::try_new(data, |buf| {
            UuidMapping::parse_plist(uuid, unsafe { &*buf }).map(ParsedDif::UuidMap)
        })?;

        Ok(Self {
            _backing: None,
            dif,
            name,
            debug_id: Some(uuid),
            attachments: None,
        })
    }

    /// Moves the specified temporary debug file to a safe location and assumes
    /// ownership. The file will be deleted in the file system when this
    /// `DifMatch` is dropped.
    ///
    /// The path must point to a `FatObject` containing exactly one `Object`.
    fn take_temp<P, S>(path: P, name: S) -> Result<Self, Error>
    where
        P: AsRef<Path>,
        S: Into<String>,
    {
        let temp_file = TempFile::take(path)?;
        Self::from_temp_object(temp_file, name)
    }

    /// Returns the parsed [`Object`] of this DIF, if it is an object file.
    pub fn object(&self) -> Option<&Object<'data>> {
        match self.dif.get() {
            ParsedDif::Object(ref obj) => Some(obj),
            ParsedDif::BcSymbolMap(_) => None,
            ParsedDif::UuidMap(_) => None,
        }
    }

    /// Returns the format of this DIF (object file format, symbol map, or PList).
    pub fn format(&self) -> DifFormat {
        match self.dif.get() {
            ParsedDif::Object(ref object) => DifFormat::Object(object.file_format()),
            ParsedDif::BcSymbolMap(_) => DifFormat::BcSymbolMap,
            ParsedDif::PList => unreachable!(), // placeholder; see match below
        }
    }

    /// Returns the raw binary data of this DIF.
    pub fn data(&self) -> &[u8] {
        match self.dif.get() {
            ParsedDif::Object(ref obj) => obj.data(),
            ParsedDif::BcSymbolMap(_) => self.dif.owner(),
            ParsedDif::UuidMap(_) => self.dif.owner(),
        }
    }

    /// Returns the size of this DIF in bytes.
    pub fn size(&self) -> u64 {
        self.data().len() as u64
    }

    /// Returns the path of this DIF relative to the search origin.
    pub fn path(&self) -> &str {
        &self.name
    }

    /// Returns the name of this DIF, including its file extension.
    pub fn file_name(&self) -> &str {
        Path::new(self.path())
            .file_name()
            .and_then(OsStr::to_str)
            .unwrap_or("Generic")
    }

    /// Returns attachments of this DIF, if any.
    pub fn attachments(&self) -> Option<&BTreeMap<String, ByteView<'static>>> {
        self.attachments.as_ref()
    }

    /// Determines whether this file needs resolution of hidden symbols.
    pub fn needs_symbol_map(&self) -> bool {
        // XCode release archives and dSYM bundles downloaded from iTunes
        // Connect contain Swift library symbols. These have caused various
        // issues in the past, so we ignore them for now. In particular, there
        // are never any BCSymbolMaps generated for them and the DBGOriginalUUID
        // in the plist is the UUID of the original dsym file.
        //
        // We *might* have to locate the original library in the Xcode
        // distribution, then build a new non-fat dSYM file from it and patch
        // the the UUID.
        if self.file_name().starts_with("libswift") {
            return false;
        }

        match self.object() {
            Some(Object::MachO(ref macho)) => macho.requires_symbolmap(),
            _ => false,
        }
    }
}
impl<'data> fmt::Debug for DifMatch<'data> {
    /// Debug-formats the match, showing the parsed object (if any) and name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("DifMatch");
        dbg.field("object", &self.object());
        dbg.field("name", &self.name);
        dbg.finish()
    }
}
/// A `DifMatch` with computed SHA1 checksum.
#[derive(Debug)]
struct HashedDifMatch<'data> {
    inner: DifMatch<'data>,
    // SHA1 checksum over the entire DIF contents.
    checksum: Digest,
}
impl<'data> HashedDifMatch<'data> {
    /// Calculates the SHA1 checksum for the given DIF and wraps it.
    fn from(inner: DifMatch<'data>) -> Result<Self, Error> {
        get_sha1_checksum(inner.data()).map(|checksum| Self { inner, checksum })
    }

    /// Returns the SHA1 checksum of this DIF.
    fn checksum(&self) -> Digest {
        self.checksum
    }
}
impl<'data> Deref for HashedDifMatch<'data> {
    type Target = DifMatch<'data>;

    // Allows transparent access to the wrapped `DifMatch`'s methods.
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'data> ItemSize for HashedDifMatch<'data> {
    // Size in bytes, used for batching uploads by total size.
    fn size(&self) -> u64 {
        self.deref().size()
    }
}
/// A chunked `DifMatch` with computed SHA1 checksums.
#[derive(Debug)]
struct ChunkedDifMatch<'data> {
    inner: HashedDifMatch<'data>,
    // SHA1 checksum of each `chunk_size`-sized slice of the file.
    chunks: Vec<Digest>,
    // Size in bytes of each chunk (the last chunk may be smaller).
    chunk_size: u64,
}
impl<'data> ChunkedDifMatch<'data> {
    /// Slices the DIF into chunks of `chunk_size` bytes each, and computes SHA1
    /// checksums for every chunk as well as the entire DIF.
    pub fn from(inner: DifMatch<'data>, chunk_size: u64) -> Result<Self, Error> {
        let (checksum, chunks) = get_sha1_checksums(inner.data(), chunk_size)?;
        Ok(ChunkedDifMatch {
            inner: HashedDifMatch { inner, checksum },
            chunks,
            chunk_size,
        })
    }

    /// Returns an iterator over all chunk checksums.
    pub fn checksums(&self) -> Iter<'_, Digest> {
        self.chunks.iter()
    }

    /// Returns an iterator over all `DifChunk`s.
    pub fn chunks(&self) -> DifChunks<'_> {
        // Checksums and data chunks are zipped in lockstep by `DifChunks`.
        DifChunks {
            checksums: self.checksums(),
            iter: self.data().chunks(self.chunk_size as usize),
        }
    }

    /// Creates a tuple which can be collected into a `ChunkedDifRequest`.
    pub fn to_assemble(&self) -> (Digest, ChunkedDifRequest<'_>) {
        (
            self.checksum(),
            ChunkedDifRequest {
                name: self.file_name(),
                debug_id: self.debug_id,
                chunks: &self.chunks,
            },
        )
    }
}
impl<'data> Deref for ChunkedDifMatch<'data> {
    type Target = HashedDifMatch<'data>;

    // Allows transparent access to the wrapped `HashedDifMatch`'s methods.
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'data> ItemSize for ChunkedDifMatch<'data> {
    // Size in bytes, used for batching uploads by total size.
    fn size(&self) -> u64 {
        self.deref().size()
    }
}
type ZipFileArchive = ZipArchive<BufReader<File>>;
/// A handle to the source of a potential `DifMatch` used inside `search_difs`.
///
/// The primary use of this handle is to resolve files relative to the debug
/// information file and store them in `DifMatch::attachments`. These could be
/// companion files or metadata files needed to process the DIFs in sentry-cli,
/// or later even on Sentry.
#[derive(Debug)]
enum DifSource<'a> {
    /// A file located in the file system
    FileSystem(&'a Path),
    /// An entry in a ZIP file (the archive and the entry's name within it)
    Zip(&'a mut ZipFileArchive, &'a str),
}
impl<'a> DifSource<'a> {
    /// Resolves a file relative to the directory of `base`, stripping off the
    /// file name.
    fn get_relative_fs(base: &Path, path: &Path) -> Option<ByteView<'static>> {
        // Use parent() to get to the directory and then move relative from
        // there. ByteView will internally canonicalize the path and resolve
        // symlinks.
        base.parent()
            .and_then(|p| ByteView::open(p.join(path)).ok())
    }

    /// Extracts a file relative to the directory of `name`, stripping off the
    /// file name.
    fn get_relative_zip(
        zip: &mut ZipFileArchive,
        name: &str,
        path: &Path,
    ) -> Option<ByteView<'static>> {
        // There is no built-in utility that normalizes paths without access to
        // the file system. We start by removing the file name from the given
        // path and then start to manually resolve the path components to a
        // final path.
        let mut zip_path = PathBuf::from(name);
        zip_path.pop();

        for component in path.components() {
            match component {
                Component::ParentDir => {
                    zip_path.pop();
                }
                Component::Normal(p) => {
                    zip_path.push(p);
                }
                _ => {
                    // `Component::CurDir` leaves the path as-is, and the
                    // remaining `Component::RootDir` and `Component::Prefix` do
                    // not make sense in ZIP files.
                }
            }
        }

        zip_path
            .to_str()
            .and_then(|name| zip.by_name(name).ok())
            .and_then(|f| ByteView::read(f).ok())
    }

    /// Resolves a file relative to this source and reads it into a `ByteView`.
    ///
    /// The target is always resolved relative to the directory of the source,
    /// excluding its file name. The path "../changed" relative to a source
    /// pointing to "path/to/file" will resolve in "path/changed".
    ///
    /// The returned ByteView will allow random-access to the data until it is
    /// disposed. If the source points to a ZIP file, the target is fully read
    /// into a memory buffer. See `ByteView::from_reader` for more information.
    pub fn get_relative<P>(&mut self, path: P) -> Option<ByteView<'static>>
    where
        P: AsRef<Path>,
    {
        match *self {
            DifSource::FileSystem(base) => Self::get_relative_fs(base, path.as_ref()),
            DifSource::Zip(ref mut zip, name) => Self::get_relative_zip(*zip, name, path.as_ref()),
        }
    }
}
/// Information returned by `assemble_difs` containing flat lists of incomplete
/// DIFs and their missing chunks.
///
/// The first element lists the DIF matches that are not yet complete; the
/// second lists the individual chunks still missing for them.
type MissingDifsInfo<'data, 'm> = (Vec<&'m ChunkedDifMatch<'data>>, Vec<Chunk<'m>>);
/// Verifies that the given path contains a ZIP file and opens it.
///
/// Returns `Ok(None)` when the file does not carry a `.zip` extension or does
/// not start with the ZIP magic bytes (`PK`).
fn try_open_zip<P>(path: P) -> Result<Option<ZipFileArchive>, Error>
where
    P: AsRef<Path>,
{
    let path = path.as_ref();
    if path.extension() != Some("zip".as_ref()) {
        return Ok(None);
    }

    let mut file = File::open(path)?;
    let mut magic = [0u8; 2];

    // Empty or single-byte files cannot be ZIPs; treat them as "not a ZIP"
    // rather than as an error.
    if file.read_exact(&mut magic).is_err() {
        return Ok(None);
    }

    // Rewind so the archive reader sees the file from the beginning.
    file.seek(SeekFrom::Start(0))?;

    if &magic == b"PK" {
        let archive = ZipArchive::new(BufReader::new(file))?;
        Ok(Some(archive))
    } else {
        Ok(None)
    }
}
/// Searches the given ZIP for potential DIFs and passes them to the callback.
///
/// To avoid unnecessary file operations, the file extension is already checked
/// for every entry before opening it.
///
/// This function will not recurse into ZIPs contained in this ZIP.
fn walk_difs_zip<F>(mut zip: ZipFileArchive, options: &DifUpload, mut func: F) -> Result<(), Error>
where
    F: FnMut(DifSource<'_>, String, ByteView<'static>) -> Result<(), Error>,
{
    for index in 0..zip.len() {
        let (name, buffer) = {
            let zip_file = zip.by_index(index)?;
            let name = zip_file.name().to_string();

            // Check the extension before reading the entry's contents.
            if !options.valid_extension(Path::new(&name).extension()) {
                continue;
            }

            (name, ByteView::read(zip_file).map_err(SyncFailure::new)?)
        };

        // The borrow on `zip` from `by_index` has ended here, so the archive
        // can be handed to the callback as a source for sibling lookups.
        func(DifSource::Zip(&mut zip, &name), name.clone(), buffer)?;
    }

    Ok(())
}
/// Recursively searches the given location for potential DIFs and passes them
/// to the callback.
///
/// If `DifUpload::allow_zips` is set, then this function will attempt to open
/// the ZIP and search it for DIFs as well, however not recursing further into
/// nested ZIPs.
///
/// To avoid unnecessary file operations, the file extension is already checked
/// for every entry before opening it.
fn walk_difs_directory<F, P>(location: P, options: &DifUpload, mut func: F) -> Result<(), Error>
where
    P: AsRef<Path>,
    F: FnMut(DifSource<'_>, String, ByteView<'static>) -> Result<(), Error>,
{
    let location = location.as_ref();
    // Relative DIF names are computed against this directory.
    let directory = if location.is_dir() {
        location
    } else {
        location.parent().unwrap_or_else(|| Path::new(""))
    };

    debug!("searching location {}", location.display());
    for entry in WalkDir::new(location)
        .follow_links(true)
        .into_iter()
        .filter_map(Result::ok)
    {
        if !entry.metadata()?.is_file() {
            // Walkdir recurses automatically into folders
            continue;
        }

        let path = entry.path();
        match try_open_zip(path) {
            Ok(Some(zip)) => {
                debug!("searching zip archive {}", path.display());
                walk_difs_zip(zip, options, &mut func)?;
                debug!("finished zip archive {}", path.display());
                continue;
            }
            Err(e) => {
                // A corrupt archive is skipped rather than aborting the scan.
                debug!("skipping zip archive {}", path.display());
                debug!("error: {}", e);
                continue;
            }
            Ok(None) => {
                // this is not a zip archive
            }
        }

        if !options.valid_extension(path.extension()) {
            continue;
        }

        let buffer = ByteView::open(path).map_err(SyncFailure::new)?;
        let name = path
            .strip_prefix(directory)
            .unwrap()
            .to_string_lossy()
            .into_owned();

        func(DifSource::FileSystem(path), name, buffer)?;
    }

    debug!("finished location {}", directory.display());
    Ok(())
}
/// Searches for mapping PLists next to the given `source`. It returns a mapping
/// of Plist name to owning buffer of the file's contents. This function should
/// only be called for dSYMs.
fn find_uuid_plists(
    object: &Object<'_>,
    source: &mut DifSource<'_>,
) -> Option<BTreeMap<String, ByteView<'static>>> {
    let uuid = object.debug_id().uuid();
    if uuid.is_nil() {
        return None;
    }

    // When uploading an XCode build archive to iTunes Connect, Apple will
    // re-build the app for different architectures, causing new UUIDs in the
    // final bundle. To allow mapping back to the original symbols, it adds
    // PList files in the `Resources` folder (one level above the binary) that
    // contains the original UUID, one for each object contained in the fat
    // object.
    //
    // The folder structure looks like this:
    //
    //     App.dSYM
    //     ├─ Info.plist
    //     └─ Resources
    //        ├─ 1B205CD0-67D0-4D69-A0FA-C6BDDDB2A609.plist
    //        ├─ 1C228684-3EE5-472B-AB8D-29B3FBF63A70.plist
    //        └─ DWARF
    //           └─ App
    let plist_name = format!("{:X}.plist", uuid.to_hyphenated_ref());
    // The binary lives in `DWARF/`, so the PList is one directory up.
    let plist = match source.get_relative(format!("../{}", &plist_name)) {
        Some(plist) => plist,
        None => return None,
    };

    let mut plists = BTreeMap::new();
    plists.insert(plist_name, plist);
    Some(plists)
}
/// Patch debug identifiers for PDBs where the corresponding PE specifies a
/// different age.
fn fix_pdb_ages(difs: &mut [DifMatch<'_>], age_overrides: &BTreeMap<Uuid, u32>) {
    for dif in difs {
        // Only PDB objects can carry a mismatched age.
        let debug_id = match dif.object() {
            Some(object) if object.file_format() == FileFormat::Pdb => object.debug_id(),
            _ => continue,
        };

        // Look up the age recorded from the matching PE, if any.
        let age = match age_overrides.get(&debug_id.uuid()) {
            Some(&age) => age,
            None => continue,
        };

        // Nothing to do if the PDB already agrees with the PE.
        if age == debug_id.appendix() {
            continue;
        }

        log::debug!(
            "overriding age for {} ({} -> {})",
            dif.name,
            debug_id.appendix(),
            age
        );

        dif.debug_id = Some(DebugId::from_parts(debug_id.uuid(), age));
    }
}
/// Searches matching debug information files.
fn search_difs(options: &DifUpload) -> Result<Vec<DifMatch<'static>>, Error> {
    let progress_style = ProgressStyle::default_spinner().template(
        "{spinner} Searching for debug symbol files...\
\n found {prefix:.yellow} {msg:.dim}",
    );

    let progress = ProgressBar::new_spinner();
    progress.enable_steady_tick(100);
    progress.set_style(progress_style);

    // Ages of PE files encountered during the scan; used afterwards to patch
    // PDB debug ids that disagree with their PE counterpart.
    let mut age_overrides = BTreeMap::new();
    let mut collected = Vec::new();
    for base_path in &options.paths {
        if base_path == Path::new("") {
            warn!(
                "Skipping uploading from an empty path (\"\"). \
Maybe you expanded an empty shell variable?"
            );
            continue;
        }

        walk_difs_directory(base_path, options, |source, name, buffer| {
            debug!("trying to process {}", name);
            progress.set_message(&name);

            // Dispatch on the detected content: object archive, BCSymbolMap,
            // or (potentially) a UUID-mapping PList.
            if Archive::peek(&buffer) != FileFormat::Unknown {
                let mut difs =
                    collect_object_dif(source, name, buffer, options, &mut age_overrides);
                collected.append(difs.as_mut());
            } else if BcSymbolMap::test(&buffer) {
                if let Some(dif) = collect_auxdif(name, buffer, options, AuxDifKind::BcSymbolMap) {
                    collected.push(dif);
                }
            } else if buffer.starts_with(b"<?xml") {
                if let Some(dif) = collect_auxdif(name, buffer, options, AuxDifKind::UuidMap) {
                    collected.push(dif);
                }
            };

            progress.set_prefix(&collected.len().to_string());
            Ok(())
        })?;
    }

    if !age_overrides.is_empty() {
        fix_pdb_ages(&mut collected, &age_overrides);
    }

    progress.finish_and_clear();
    println!(
        "{} Found {} debug information {}",
        style(">").dim(),
        style(collected.len()).yellow(),
        match collected.len() {
            1 => "file",
            _ => "files",
        }
    );

    Ok(collected)
}
/// The kind of an auxiliary (non-object) debug information file.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
enum AuxDifKind {
    /// An Apple BCSymbolMap.
    BcSymbolMap,
    /// A dSYM UUID mapping PList.
    UuidMap,
}
impl Display for AuxDifKind {
    /// Formats the kind using its user-facing name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            AuxDifKind::BcSymbolMap => "BCSymbolMap",
            AuxDifKind::UuidMap => "UuidMap",
        };
        f.write_str(name)
    }
}
/// Collects a possible BCSymbolmap or PList into a [`DifMatch`].
///
/// The `name` is the relative path of the file processed, while `buffer` contains the
/// actual data. Returns `None` (with a warning where useful) when the file
/// cannot be parsed or fails `options` validation.
fn collect_auxdif<'a>(
    name: String,
    buffer: ByteView<'static>,
    options: &DifUpload,
    kind: AuxDifKind,
) -> Option<DifMatch<'a>> {
    // The file stem must be the DebugId the auxiliary file belongs to.
    let file_stem = Path::new(&name)
        .file_stem()
        .map(|stem| stem.to_string_lossy())
        .unwrap_or_default();
    let uuid: DebugId = match file_stem.parse() {
        Ok(uuid) => uuid,
        Err(_) => {
            if kind == AuxDifKind::BcSymbolMap {
                // There are loads of plists in a normal XCode Archive that are not valid
                // UUID mappings. Warning for all these is pointless.
                warn!(
                    "Skipping {kind} with invalid filename: {name}",
                    kind = kind,
                    name = name
                );
            }
            return None;
        }
    };
    let dif_result = match kind {
        AuxDifKind::BcSymbolMap => DifMatch::from_bcsymbolmap(uuid, name.clone(), buffer),
        AuxDifKind::UuidMap => DifMatch::from_plist(uuid, name.clone(), buffer),
    };
    let dif = match dif_result {
        Ok(dif) => dif,
        Err(err) => {
            warn!(
                "Skipping invalid {kind} file {name}: {err}",
                kind = kind,
                name = name,
                err = err
            );
            return None;
        }
    };

    // Skip this file if we don't want to process it.
    if !options.validate_dif(&dif) {
        return None;
    }

    Some(dif)
}
/// Processes a [`DifSource`] which is expected to be an object file.
///
/// Returns all matching objects found in the (possibly fat) archive. PE ages
/// encountered along the way are recorded in `age_overrides` for later PDB
/// fix-ups, even for PEs that are not themselves uploaded.
fn collect_object_dif<'a>(
    mut source: DifSource<'_>,
    name: String,
    buffer: ByteView<'static>,
    options: &DifUpload,
    age_overrides: &mut BTreeMap<Uuid, u32>,
) -> Vec<DifMatch<'a>> {
    let mut collected = Vec::with_capacity(2);

    // Try to parse a potential object file. If this is not possible,
    // then we're not dealing with an object file, thus silently
    // skipping it.
    let format = Archive::peek(&buffer);

    // Override this behavior for PE files. Their debug identifier is
    // needed in case PDBs should be uploaded to fix an eventual age
    // mismatch
    let should_override_age =
        format == FileFormat::Pe && options.valid_format(DifFormat::Object(FileFormat::Pdb));

    if !should_override_age && !options.valid_format(DifFormat::Object(format)) {
        return collected;
    }

    debug!("trying to parse dif {}", name);
    let archive = match Archive::parse(&buffer) {
        Ok(archive) => archive,
        Err(e) => {
            warn!("Skipping invalid debug file {}: {}", name, e);
            return collected;
        }
    };

    // Each `FatObject` might contain multiple matching objects, each of
    // which needs to retain a reference to the original fat file. We
    // create a shared instance here and clone it into `DifMatch`es
    // below.
    for object in archive.objects() {
        // Silently skip all objects that we cannot process. This can
        // happen due to invalid object files, which we then just
        // discard rather than stopping the scan.
        let object = match object {
            Ok(object) => object,
            Err(_) => continue,
        };

        // Objects without debug id will be skipped altogether. While frames
        // during symbolication might be lacking debug identifiers,
        // Sentry requires object files to have one during upload.
        let id = object.debug_id();
        if id.is_nil() {
            continue;
        }

        // Store a mapping of "age" values for all encountered PE files,
        // regardless of whether they will be uploaded. This is used later
        // to fix up PDB files.
        if should_override_age {
            age_overrides.insert(id.uuid(), id.appendix());
        }

        // Invoke logic to retrieve attachments specific to the kind
        // of object file. These are used for processing. Since only
        // dSYMs require processing currently, all other kinds are
        // skipped.
        let attachments = match object.file_format() {
            FileFormat::MachO => find_uuid_plists(&object, &mut source),
            _ => None,
        };

        // We retain the buffer and the borrowed object in a new SelfCell. This is
        // incredibly unsafe, but in our case it is fine, since the SelfCell owns the same
        // buffer that was used to retrieve the object.
        let cell =
            unsafe { SelfCell::from_raw(buffer.clone(), ParsedDif::Object(transmute(object))) };

        let dif = DifMatch {
            _backing: None,
            dif: cell,
            name: name.clone(),
            debug_id: Some(id),
            attachments,
        };

        // Skip this file if we don't want to process it.
        if !options.validate_dif(&dif) {
            continue;
        }

        collected.push(dif);
    }

    collected
}
/// Resolves BCSymbolMaps and replaces hidden symbols in a `DifMatch` using
/// `dsymutil`. If successful, this will return a new `DifMatch` based on a
/// temporary file. The original dSYM is not touched.
///
/// Note that this process copies the file to a temporary location and might
/// incur significant I/O for larger debug files.
///
/// If the match carries no UUID-plist attachments, a warning is printed and
/// the file is returned unmodified. Requires the external `dsymutil` command.
fn resolve_hidden_symbols<'a>(dif: DifMatch<'a>, symbol_map: &Path) -> Result<DifMatch<'a>, Error> {
    if dif.attachments.is_none() {
        println!(
            "{} {}: Could not locate UUID mapping for {}",
            style(">").dim(),
            style("Warning").red(),
            style(dif.file_name()).yellow(),
        );
        return Ok(dif);
    }
    // We need to rebuild the Resources folder of a dSYM structure in a temp
    // directory that is guaranteed to be deleted after this operation. The
    // Info.plist is not needed for this operation:
    //   Resources
    //   |- 1B205CD0-67D0-4D69-A0FA-C6BDDDB2A609.plist
    //   |- 1C228684-3EE5-472B-AB8D-29B3FBF63A70.plist
    //   `- DWARF
    //      `- ObjectFile
    let temp_dir = TempDir::create()?;
    fs::create_dir_all(temp_dir.path().join("DWARF"))?;
    // Copy the object file binary; sync so dsymutil sees complete data.
    let temp_path = temp_dir.path().join("DWARF").join(dif.file_name());
    let mut temp_file = File::create(&temp_path)?;
    temp_file.write_all(dif.data())?;
    temp_file.sync_data()?;
    // Copy the UUID plists next to the DWARF folder, as in a real dSYM bundle.
    for (name, view) in dif.attachments().unwrap() {
        let mut plist = File::create(temp_dir.path().join(name))?;
        plist.write_all(view)?;
        plist.sync_data()?;
    }
    // Let dsymutil rewrite the object in place using the BCSymbolMaps.
    let output = Command::new("dsymutil")
        .arg("-symbol-map")
        .arg(symbol_map)
        .arg(&temp_path)
        .output()?;
    if !output.status.success() {
        if let Ok(error) = str::from_utf8(&output.stderr) {
            bail!("Could not resolve BCSymbolMaps: {}", error);
        } else {
            bail!("Could not resolve BCSymbolMaps due to an unknown error");
        }
    }
    // Take ownership of the modified (fat) object file and move it somewhere
    // else so it is safe to delete the temp directory.
    DifMatch::take_temp(temp_path, dif.path())
}
/// Runs all `DifMatch` objects through the provided callback and displays a
/// progress bar while doing so.
///
/// ```
/// prepare_difs(processed, |m| HashedDifMatch::from(m))?
/// ```
fn prepare_difs<'data, F, T>(items: Vec<DifMatch<'data>>, mut func: F) -> Result<Vec<T>, Error>
where
F: FnMut(DifMatch<'data>) -> Result<T, Error>,
{
let progress_style = ProgressStyle::default_bar().template(
"{prefix:.dim} Preparing for upload... {msg:.dim}\
\n{wide_bar} {pos}/{len}",
);
let progress = ProgressBar::new(items.len() as u64);
progress.set_style(progress_style);
progress.set_prefix(">");
let mut calculated = Vec::new();
for item in items {
progress.inc(1);
progress.set_message(item.path());
calculated.push(func(item)?);
}
progress.finish_and_clear();
println!(
"{} Prepared debug information {} for upload",
style(">").dim(),
match calculated.len() {
1 => "file",
_ => "files",
}
);
Ok(calculated)
}
/// Resolves BCSymbolMaps for all debug files with hidden symbols. All other
/// files are not touched. Note that this only applies to Apple dSYMs.
///
/// If there are debug files with hidden symbols but no `symbol_map` path is
/// given, a warning is emitted.
fn process_symbol_maps<'a>(
    difs: Vec<DifMatch<'a>>,
    symbol_map: Option<&Path>,
) -> Result<Vec<DifMatch<'a>>, Error> {
    // Split the input into dSYMs that need BCSymbolMap resolution and files
    // that can be passed through untouched.
    let (hidden, mut resolved): (Vec<_>, _) =
        difs.into_iter().partition(|dif| DifMatch::needs_symbol_map(dif));
    if hidden.is_empty() {
        return Ok(resolved);
    }
    // Without a BCSymbolMap location we can only warn and pass the files
    // through with their hidden symbols intact.
    let symbol_map = match symbol_map {
        Some(path) => path,
        None => {
            println!(
                "{} {}: Found {} symbol files with hidden symbols (need BCSymbolMaps)",
                style(">").dim(),
                style("Warning").red(),
                style(hidden.len()).yellow()
            );
            resolved.extend(hidden);
            return Ok(resolved);
        }
    };
    let hidden_count = hidden.len();
    let progress = ProgressBar::new(hidden_count as u64);
    progress.set_style(ProgressStyle::default_bar().template(
        "{prefix:.dim} Resolving BCSymbolMaps... {msg:.dim}\
         \n{wide_bar} {pos}/{len}",
    ));
    progress.set_prefix(">");
    // Resolve each hidden-symbol file; failures abort the whole operation.
    for dif in hidden {
        progress.inc(1);
        progress.set_message(dif.path());
        resolved.push(resolve_hidden_symbols(dif, symbol_map)?);
    }
    progress.finish_and_clear();
    println!(
        "{} Resolved BCSymbolMaps for {} debug information {}",
        style(">").dim(),
        style(hidden_count).yellow(),
        match hidden_count {
            1 => "file",
            _ => "files",
        }
    );
    Ok(resolved)
}
/// Default filter function to skip over bad sources we do not want to include.
///
/// Rejects precompiled headers and files of 1 MB or more. Files whose
/// metadata cannot be read are accepted here; they will be skipped later in
/// the pipeline anyway.
pub fn filter_bad_sources(entry: &FileEntry) -> bool {
    // Precompiled headers are never useful as source context.
    if entry.name_str().ends_with(".pch") {
        return false;
    }
    // Size-gate everything else; unreadable metadata is not treated as a
    // rejection since such files are skipped later regardless.
    match fs::metadata(&entry.abs_path_str()) {
        Ok(meta) => meta.len() < 1_000_000,
        Err(_) => true,
    }
}
/// Creates a standalone source bundle for every debug file that references
/// source code but does not already embed it.
///
/// Returns only the newly created bundles; the input matches are not
/// modified. Each bundle inherits the debug id of the object it was built
/// from. Files without a parsable object, objects that already contain
/// sources, and objects for which no source file could be resolved are
/// skipped.
fn create_source_bundles<'a>(difs: &[DifMatch<'a>]) -> Result<Vec<DifMatch<'a>>, Error> {
    let progress = ProgressBar::new(difs.len() as u64);
    progress.set_style(ProgressStyle::default_bar().template(
        "{prefix:.dim} Resolving source code... {msg:.dim}\
         \n{wide_bar} {pos}/{len}",
    ));
    progress.set_prefix(">");
    let mut source_bundles = Vec::new();
    for dif in difs {
        progress.inc(1);
        progress.set_message(dif.path());
        let object = match dif.object() {
            Some(object) => object,
            None => continue,
        };
        // Objects that already ship their sources would only duplicate
        // information in Sentry, so no standalone bundle is created.
        if object.has_sources() {
            continue;
        }
        let temp_file = TempFile::create()?;
        let writer = SourceBundleWriter::start(BufWriter::new(temp_file.open()?))?;
        // Resolve the referenced source files and write their contents into
        // the archive. If nothing could be written — no file/line info, or
        // none of the files resolve — there is nothing worth uploading.
        let written =
            writer.write_object_with_filter(object, dif.file_name(), filter_bad_sources)?;
        if !written {
            continue;
        }
        let mut source_bundle = DifMatch::from_temp_object(temp_file, dif.path())?;
        source_bundle.debug_id = dif.debug_id;
        source_bundles.push(source_bundle);
    }
    let len = source_bundles.len();
    progress.finish_and_clear();
    println!(
        "{} Resolved source code for {} debug information {}",
        style(">").dim(),
        style(len).yellow(),
        match len {
            1 => "file",
            _ => "files",
        }
    );
    Ok(source_bundles)
}
/// Calls the assemble endpoint and returns the state for every `DifMatch` along
/// with info on missing chunks.
///
/// The returned value contains separate vectors for incomplete DIFs and
/// missing chunks for convenience: the first element lists files that are
/// still erroring, assembling, or missing chunks; the second lists exactly
/// the chunks that still have to be uploaded.
fn try_assemble_difs<'data, 'm>(
    difs: &'m [ChunkedDifMatch<'data>],
    options: &DifUpload,
) -> Result<MissingDifsInfo<'data, 'm>, Error> {
    let api = Api::current();
    let request = difs.iter().map(ChunkedDifMatch::to_assemble).collect();
    let response = api.assemble_difs(&options.org, &options.project, &request)?;
    // We map all DIFs by their checksum, so we can access them faster when
    // iterating through the server response below. Since the caller will invoke
    // this function multiple times (most likely twice), this operation is
    // performed twice with the same data. While this is redundant, it is also
    // fast enough and keeping it here makes the `try_assemble_difs` interface
    // nicer.
    let difs_by_checksum = difs
        .iter()
        .map(|m| (m.checksum, m))
        .collect::<BTreeMap<_, _>>();
    // Shadowing: `difs` now collects the matches that still need attention.
    let mut difs = Vec::new();
    let mut chunks = Vec::new();
    for (checksum, ref file_response) in response {
        // A checksum we never sent indicates a server-side inconsistency.
        let chunked_match = *difs_by_checksum
            .get(&checksum)
            .ok_or_else(|| err_msg("Server returned unexpected checksum"))?;
        match file_response.state {
            ChunkedFileState::Error => {
                // One of the files could not be uploaded properly and resulted
                // in an error. We include this file in the return value so that
                // it shows up in the final report.
                difs.push(chunked_match);
            }
            ChunkedFileState::Assembling => {
                // This file is currently assembling. The caller will have to poll this file later
                // until it either resolves or errors.
                difs.push(chunked_match);
            }
            ChunkedFileState::NotFound => {
                // Assembling for one of the files has not started because some
                // (or all) of its chunks have not been found. We report its
                // missing chunks to the caller and then continue. The caller
                // will have to call `try_assemble_difs` again after uploading
                // them.
                let mut missing_chunks = chunked_match
                    .chunks()
                    .filter(|&Chunk((c, _))| file_response.missing_chunks.contains(&c))
                    .peekable();
                // Usually every file that is NotFound should also contain a set
                // of missing chunks. However, if we tried to upload an empty
                // file or the server returns an invalid response, we need to
                // make sure that this match is not included in the missing
                // difs.
                if missing_chunks.peek().is_some() {
                    difs.push(chunked_match);
                }
                chunks.extend(missing_chunks);
            }
            _ => {
                // This file has already finished. No action required anymore.
            }
        }
    }
    Ok((difs, chunks))
}
/// Concurrently uploads chunks specified in `missing_info` in batches. The
/// batch size and number of concurrent requests is controlled by
/// `chunk_options`.
///
/// This function blocks until all chunks have been uploaded.
fn upload_missing_chunks(
    missing_info: &MissingDifsInfo<'_, '_>,
    chunk_options: &ChunkUploadOptions,
) -> Result<(), Error> {
    // Match ergonomics bind both tuple elements by reference.
    let (difs, chunks) = missing_info;
    // A previous upload may have errored without leaving missing chunks
    // behind. In that case there is nothing to send and no bar to render.
    if chunks.is_empty() {
        return Ok(());
    }
    let progress_style = ProgressStyle::default_bar().template(&format!(
        "{} Uploading {} missing debug information file{}...\
         \n{{wide_bar}} {{bytes}}/{{total_bytes}} ({{eta}})",
        style(">").dim(),
        style(difs.len().to_string()).yellow(),
        if difs.len() == 1 { "" } else { "s" }
    ));
    upload_chunks(chunks, chunk_options, progress_style)?;
    println!(
        "{} Uploaded {} missing debug information {}",
        style(">").dim(),
        style(difs.len().to_string()).yellow(),
        match difs.len() {
            1 => "file",
            _ => "files",
        }
    );
    Ok(())
}
/// Renders the given detail string to the command line. If the `detail` is
/// either missing or empty, the optional fallback will be used.
///
/// Each non-empty line of the chosen text is printed indented and dimmed
/// below the status line it belongs to; if neither a detail nor a fallback
/// is available, nothing is printed.
fn render_detail(detail: &Option<String>, fallback: Option<&str>) {
    // Prefer the detail text; fall back only when it is missing or empty.
    // (Replaces the previous mutate-in-place dance with a single expression.)
    let text = match detail.as_deref() {
        Some(detail) if !detail.is_empty() => detail,
        _ => fallback.unwrap_or(""),
    };
    for line in text.lines() {
        if !line.is_empty() {
            println!("        {}", style(line).dim());
        }
    }
}
/// Polls the assemble endpoint until all DIFs have either completed or errored. Returns a list of
/// `DebugInfoFile`s that have been created successfully and also prints a summary to the user.
///
/// This function assumes that all chunks have been uploaded successfully. If there are still
/// missing chunks in the assemble response, this likely indicates a bug in the server.
///
/// The boolean in the return value indicates whether any file ended in an
/// error state (or, when waiting, was still pending).
fn poll_dif_assemble(
    difs: &[&ChunkedDifMatch<'_>],
    options: &DifUpload,
) -> Result<(Vec<DebugInfoFile>, bool), Error> {
    let progress_style = ProgressStyle::default_bar().template(
        "{prefix:.dim} Processing files...\
         \n{wide_bar} {pos}/{len}",
    );
    let api = Api::current();
    let progress = ProgressBar::new(difs.len() as u64);
    progress.set_style(progress_style);
    progress.set_prefix(">");
    let assemble_start = Instant::now();
    let request = difs.iter().map(|d| d.to_assemble()).collect();
    let response = loop {
        let response = api.assemble_difs(&options.org, &options.project, &request)?;
        // All chunks were uploaded before calling this function; if any are
        // reported missing now, the server lost them.
        let chunks_missing = response
            .values()
            .any(|r| r.state == ChunkedFileState::NotFound);
        if chunks_missing {
            return Err(err_msg(
                "Some uploaded files are now missing on the server. Please retry by running \
                 `sentry-cli upload-dif` again. If this problem persists, please report a bug.",
            ));
        }
        // Poll until there is a response, unless the user has specified to skip polling. In
        // that case, we return the potentially partial response from the server. This might
        // still contain a cached error.
        if !options.wait {
            break response;
        }
        if assemble_start.elapsed() > options.max_wait {
            break response;
        }
        let pending = response
            .iter()
            .filter(|&(_, r)| r.state.is_pending())
            .count();
        progress.set_position((difs.len() - pending) as u64);
        if pending == 0 {
            break response;
        }
        thread::sleep(ASSEMBLE_POLL_INTERVAL);
    };
    progress.finish_and_clear();
    if response.values().any(|r| r.state.is_pending()) {
        println!("{} File upload complete:\n", style(">").dim());
    } else {
        println!("{} File processing complete:\n", style(">").dim());
    }
    // When waiting, still-pending files count as errors; otherwise they are
    // reported as successes (in their PENDING form below).
    let (errors, mut successes): (Vec<_>, _) = response
        .into_iter()
        .partition(|&(_, ref r)| r.state.is_err() || options.wait && r.state.is_pending());
    // Print a summary of all successes first, so that errors show up at the
    // bottom for the user
    successes.sort_by_key(|&(_, ref success)| {
        success
            .dif
            .as_ref()
            .map(|x| x.object_name.as_str())
            .unwrap_or("")
            .to_owned()
    });
    let difs_by_checksum: BTreeMap<_, _> = difs.iter().map(|m| (m.checksum, m)).collect();
    for &(checksum, ref success) in &successes {
        // Silently skip all OK entries without a "dif" record since the server
        // will always return one.
        if let Some(ref dif) = success.dif {
            // Files that have completed processing will contain a `dif` record
            // returned by the server. Use this to show detailed information.
            println!(
                "  {:>7} {} ({}; {}{})",
                style("OK").green(),
                style(&dif.id()).dim(),
                dif.object_name,
                dif.cpu_name,
                dif.data
                    .kind
                    .map(|c| format!(" {:#}", c))
                    .unwrap_or_default()
            );
            render_detail(&success.detail, None);
        } else if let Some(dif) = difs_by_checksum.get(&checksum) {
            // If we skip waiting for the server to finish processing, there
            // are pending entries. We only expect results that have been
            // uploaded in the first place, so we can skip everything else.
            let kind = match dif.dif.get() {
                ParsedDif::Object(ref object) => match object.kind() {
                    symbolic::debuginfo::ObjectKind::None => String::new(),
                    k => format!(" {:#}", k),
                },
                ParsedDif::BcSymbolMap(_) => String::from("bcsymbolmap"),
                ParsedDif::UuidMap(_) => String::from("uuidmap"),
            };
            println!(
                "  {:>7} {} ({}; {}{})",
                style("PENDING").yellow(),
                style(dif.debug_id.map(|id| id.to_string()).unwrap_or_default()).dim(),
                dif.name,
                dif.object()
                    .map(|object| object.arch())
                    .map(|arch| arch.to_string())
                    .unwrap_or_default(),
                kind,
            );
        }
        // All other entries will be in the `errors` list.
    }
    // Print a summary of all errors at the bottom.
    let mut errored = vec![];
    for (checksum, error) in errors {
        let dif = difs_by_checksum
            .get(&checksum)
            .ok_or_else(|| err_msg("Server returned unexpected checksum"))?;
        errored.push((dif, error));
    }
    errored.sort_by_key(|x| x.0.file_name());
    let has_errors = !errored.is_empty();
    for (dif, error) in errored {
        let fallback = match error.state {
            ChunkedFileState::Assembling => Some("The file is still processing and not ready yet"),
            ChunkedFileState::NotFound => Some("The file could not be saved"),
            _ => Some("An unknown error occurred"),
        };
        println!("  {:>7} {}", style("ERROR").red(), dif.file_name());
        render_detail(&error.detail, fallback);
    }
    // Return only successful uploads
    Ok((
        successes.into_iter().filter_map(|(_, r)| r.dif).collect(),
        has_errors,
    ))
}
/// Uploads debug info files using the chunk-upload endpoint.
///
/// Returns the successfully processed files together with a flag indicating
/// whether any file ended in an error state.
fn upload_difs_chunked(
    options: &DifUpload,
    chunk_options: &ChunkUploadOptions,
) -> Result<(Vec<DebugInfoFile>, bool), Error> {
    // Scan the file system (and ZIP archives) for candidate debug files.
    let found = search_difs(options)?;
    if found.is_empty() {
        println!("{} No debug information files found", style(">").dim());
        return Ok(Default::default());
    }
    // Resolve hidden symbols via BCSymbolMaps where configured.
    let mut processed = process_symbol_maps(found, options.symbol_map.as_deref())?;
    // Optionally attach standalone source bundles.
    if options.include_sources {
        let source_bundles = create_source_bundles(&processed)?;
        processed.extend(source_bundles);
    }
    // Compute checksums and split every file into upload chunks.
    let chunked = prepare_difs(processed, |m| {
        ChunkedDifMatch::from(m, chunk_options.chunk_size)
    })?;
    // Ask the server which chunks it lacks and upload exactly those.
    let missing_info = try_assemble_difs(&chunked, options)?;
    upload_missing_chunks(&missing_info, chunk_options)?;
    // Poll for assembly only if something actually had to be uploaded.
    let (missing_difs, _) = missing_info;
    if missing_difs.is_empty() {
        println!(
            "{} Nothing to upload, all files are on the server",
            style(">").dim()
        );
        Ok((Default::default(), false))
    } else {
        poll_dif_assemble(&missing_difs, options)
    }
}
/// Returns debug files missing on the server.
///
/// Sends all checksums to the server and keeps only those objects whose
/// checksum the server reports as missing.
fn get_missing_difs<'data>(
    objects: Vec<HashedDifMatch<'data>>,
    options: &DifUpload,
) -> Result<Vec<HashedDifMatch<'data>>, Error> {
    info!(
        "Checking for missing debug information files: {:#?}",
        &objects
    );
    let api = Api::current();
    let missing_checksums = api.find_missing_dif_checksums(
        &options.org,
        &options.project,
        objects.iter().map(HashedDifMatch::checksum),
    )?;
    let missing: Vec<_> = objects
        .into_iter()
        .filter(|sym| missing_checksums.contains(&sym.checksum()))
        .collect();
    info!("Missing debug information files: {:#?}", &missing);
    Ok(missing)
}
/// Compresses the given batch into a ZIP archive.
///
/// A byte-based progress bar is rendered while the files are written; the
/// returned temp file holds the finished archive.
fn create_batch_archive(difs: &[HashedDifMatch<'_>]) -> Result<TempFile, Error> {
    let total_bytes = difs.iter().map(ItemSize::size).sum();
    let progress = make_byte_progress_bar(total_bytes);
    let archive = TempFile::create()?;
    let mut zip = ZipWriter::new(archive.open()?);
    for dif in difs {
        zip.start_file(dif.file_name(), FileOptions::default())?;
        copy_with_progress(&progress, &mut dif.data(), &mut zip)?;
    }
    // Drop the writer before handing out the archive so the ZIP is finalized.
    drop(zip);
    progress.finish_and_clear();
    Ok(archive)
}
/// Uploads the given DIFs to the server in batched ZIP archives.
///
/// Batches are sized by the configured maximum archive size; each batch is
/// compressed and pushed through the legacy upload endpoint in turn.
fn upload_in_batches(
    objects: &[HashedDifMatch<'_>],
    options: &DifUpload,
) -> Result<Vec<DebugInfoFile>, Error> {
    let api = Api::current();
    let max_size = Config::current().get_max_dif_archive_size()?;
    let mut uploaded = Vec::new();
    for (index, (batch, _)) in objects.batches(max_size, MAX_CHUNKS).enumerate() {
        println!("\n{}", style(format!("Batch {}", index + 1)).bold());
        println!(
            "{} Compressing {} debug symbol files",
            style(">").dim(),
            style(batch.len()).yellow()
        );
        let archive = create_batch_archive(batch)?;
        println!("{} Uploading debug symbol files", style(">").dim());
        uploaded.extend(api.upload_dif_archive(&options.org, &options.project, archive.path())?);
    }
    Ok(uploaded)
}
/// Uploads debug info files using the legacy endpoint.
///
/// Searches for debug files, resolves BCSymbolMaps, filters out files that
/// already exist on the server, and uploads the remainder in batches.
/// Returns the files the server reports as created.
fn upload_difs_batched(options: &DifUpload) -> Result<Vec<DebugInfoFile>, Error> {
    // Search for debug files in the file system and ZIPs
    let found = search_difs(options)?;
    if found.is_empty() {
        println!("{} No debug information files found", style(">").dim());
        return Ok(Default::default());
    }
    // Try to resolve BCSymbolMaps
    let symbol_map = options.symbol_map.as_deref();
    let processed = process_symbol_maps(found, symbol_map)?;
    // Calculate checksums
    let hashed = prepare_difs(processed, HashedDifMatch::from)?;
    // Check which files are missing on the server
    let missing = get_missing_difs(hashed, options)?;
    if missing.is_empty() {
        // BUGFIX: previously a second, redundant "Nothing to upload" line was
        // printed right after this message; the duplicate has been removed.
        println!(
            "{} Nothing to upload, all files are on the server",
            style(">").dim()
        );
        return Ok(Default::default());
    }
    // Upload missing DIFs in batches
    let uploaded = upload_in_batches(&missing, options)?;
    if !uploaded.is_empty() {
        println!("{} File upload complete:\n", style(">").dim());
        for dif in &uploaded {
            println!(
                "  {} ({}; {})",
                style(&dif.id()).dim(),
                &dif.object_name,
                dif.cpu_name
            );
        }
    }
    Ok(uploaded)
}
/// The format of a Debug Information File (DIF).
///
/// Most DIFs are also object files, but we also know of some auxiliary DIF formats.
///
/// Used as the unit of filtering in [`DifUpload`], e.g. via `filter_format`
/// and `valid_format`.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum DifFormat {
    /// An object file of some kind, as per [`symbolic::debuginfo::FileFormat`].
    ///
    /// Not all these are fully objects, but they all implement
    /// [`symbolic::debuginfo::ObjectLike`].
    Object(FileFormat),
    /// An Apple BCSymbolMap.
    BcSymbolMap,
    /// An Apple PList.
    ///
    /// This only considers PLists which contain a
    /// [`symbolic::debuginfo::macho::UuidMapping`] used to map a `dSYM` UUID back to UUID
    /// of the original `BCSymbolMap`.
    PList,
}
/// Searches, processes and uploads debug information files (DIFs).
///
/// This struct is created with the `DifUpload::new` function. Then, set
/// search parameters and start the upload via `DifUpload::upload`.
///
/// ```
/// use utils::dif_upload::DifUpload;
///
/// DifUpload::new("org".into(), "project".into())
///     .search_path(".")
///     .upload()?;
/// ```
///
/// The upload tries to perform a chunked upload by requesting the new
/// `chunk-upload/` endpoint. If chunk uploads are disabled or the server does
/// not support them yet, it falls back to the legacy `files/dsyms/` endpoint.
///
/// The uploader will walk the given `paths` in the file system recursively and
/// search for DIFs. If `allow_zips` is not deactivated, it will also open ZIP
/// files and search there.
///
/// By default, all supported object files will be included. To customize this,
/// use the `filter_id`, `filter_kind`, `filter_class` and `filter_extension`
/// methods.
///
/// If `symbol_map` is set and Apple dSYMs with hidden symbols are found, the
/// uploader will first try to locate BCSymbolMaps and generate new dSYMs with
/// resolved symbols.
#[derive(Debug, Default)]
pub struct DifUpload {
    // Destination Sentry organization and project slugs.
    org: String,
    project: String,
    // File system locations to scan for debug files.
    paths: Vec<PathBuf>,
    // Filters; an empty set means "accept all".
    ids: BTreeSet<DebugId>,
    formats: BTreeSet<DifFormat>,
    features: ObjectDifFeatures,
    extensions: BTreeSet<OsString>,
    // Optional location of BCSymbolMaps for resolving hidden symbols.
    symbol_map: Option<PathBuf>,
    zips_allowed: bool,
    // Upload limits; may be overridden by server-provided chunk options.
    max_file_size: u64,
    max_wait: Duration,
    // Server capability: PDB/PE uploads supported (negotiated in `upload`).
    pdbs_allowed: bool,
    // Server capability: source bundle uploads supported.
    sources_allowed: bool,
    // User option: also build and upload standalone source bundles.
    include_sources: bool,
    // Server capability: BCSymbolMap/PList uploads supported.
    bcsymbolmaps_allowed: bool,
    // User option: block until server-side processing finishes.
    wait: bool,
}
impl DifUpload {
    /// Creates a new `DifUpload` with default parameters.
    ///
    /// To use it, also add paths using `DifUpload::search_path`. It will scan
    /// the paths and contained ZIPs for all supported object files and upload
    /// them.
    ///
    /// Use `DifUpload::symbol_map` to configure a location of BCSymbolMap files
    /// to resolve hidden symbols in dSYMs obtained from iTunes Connect.
    ///
    /// ```
    /// use utils::dif_upload::DifUpload;
    ///
    /// DifUpload::new("org", "project")
    ///     .search_path(".")
    ///     .upload()?;
    /// ```
    pub fn new(org: String, project: String) -> Self {
        DifUpload {
            org,
            project,
            paths: Vec::new(),
            ids: BTreeSet::new(),
            formats: BTreeSet::new(),
            features: ObjectDifFeatures::all(),
            extensions: BTreeSet::new(),
            symbol_map: None,
            zips_allowed: true,
            max_file_size: DEFAULT_MAX_DIF_SIZE,
            max_wait: DEFAULT_MAX_WAIT,
            pdbs_allowed: false,
            sources_allowed: false,
            include_sources: false,
            bcsymbolmaps_allowed: false,
            wait: false,
        }
    }
    /// Adds a path to search for debug information files.
    pub fn search_path<P>(&mut self, path: P) -> &mut Self
    where
        P: Into<PathBuf>,
    {
        self.paths.push(path.into());
        self
    }
    /// Adds paths to search for debug information files.
    pub fn search_paths<I>(&mut self, paths: I) -> &mut Self
    where
        I: IntoIterator,
        I::Item: Into<PathBuf>,
    {
        for path in paths {
            self.paths.push(path.into())
        }
        self
    }
    /// Add a `DebugId` to filter for.
    ///
    /// By default, all DebugIds will be included.
    pub fn filter_id<I>(&mut self, id: I) -> &mut Self
    where
        I: Into<DebugId>,
    {
        self.ids.insert(id.into());
        self
    }
    /// Add `DebugId`s to filter for.
    ///
    /// By default, all DebugIds will be included. If `ids` is empty, this will
    /// not be changed.
    pub fn filter_ids<I>(&mut self, ids: I) -> &mut Self
    where
        I: IntoIterator,
        I::Item: Into<DebugId>,
    {
        for id in ids {
            self.ids.insert(id.into());
        }
        self
    }
    /// Add a `FileFormat` to filter for.
    ///
    /// By default, all object formats will be included.
    pub fn filter_format(&mut self, format: DifFormat) -> &mut Self {
        self.formats.insert(format);
        self
    }
    /// Add `FileFormat`s to filter for.
    ///
    /// By default, all object formats will be included. If `formats` is empty, this
    /// will not be changed.
    pub fn filter_formats<I>(&mut self, formats: I) -> &mut Self
    where
        I: IntoIterator<Item = DifFormat>,
    {
        self.formats.extend(formats);
        self
    }
    /// Add an `ObjectFeature` to filter for.
    ///
    /// By default, all object features will be included.
    pub fn filter_features(&mut self, features: ObjectDifFeatures) -> &mut Self {
        self.features = features;
        self
    }
    /// Add a file extension to filter for.
    ///
    /// By default, all file extensions will be included.
    pub fn filter_extension<S>(&mut self, extension: S) -> &mut Self
    where
        S: Into<OsString>,
    {
        self.extensions.insert(extension.into());
        self
    }
    /// Add file extensions to filter for.
    ///
    /// By default, all file extensions will be included.
    pub fn filter_extensions<I>(&mut self, extensions: I) -> &mut Self
    where
        I: IntoIterator,
        I::Item: Into<OsString>,
    {
        for extension in extensions {
            self.extensions.insert(extension.into());
        }
        self
    }
    /// Set a path containing BCSymbolMaps to resolve hidden symbols in dSYMs
    /// obtained from iTunes Connect. This requires the `dsymutil` command.
    ///
    /// By default, hidden symbol resolution will be skipped.
    pub fn symbol_map<P>(&mut self, path: P) -> Result<&mut Self, Error>
    where
        P: Into<PathBuf>,
    {
        // Fail early if dsymutil is not installed, before any scanning work.
        which("dsymutil").map_err(|_| err_msg("Command `dsymutil` not found"))?;
        self.symbol_map = Some(path.into());
        Ok(self)
    }
    /// Set whether opening and searching ZIPs for debug information files is
    /// allowed or not.
    ///
    /// Defaults to `true`.
    pub fn allow_zips(&mut self, allow: bool) -> &mut Self {
        self.zips_allowed = allow;
        self
    }
    /// Set whether source files should be resolved during the scan process and
    /// uploaded as a separate archive.
    ///
    /// Defaults to `false`.
    pub fn include_sources(&mut self, include: bool) -> &mut Self {
        self.include_sources = include;
        self
    }
    /// Set whether the upload should wait for the server to complete processing
    /// files or exit immediately after the upload.
    ///
    /// Defaults to `false`.
    pub fn wait(&mut self, wait: bool) -> &mut Self {
        self.wait = wait;
        self
    }
    /// Performs the search for DIFs and uploads them.
    ///
    /// ```
    /// use utils::dif_upload::DifUpload;
    ///
    /// DifUpload::new("org", "project")
    ///     .search_path(".")
    ///     .upload()?;
    /// ```
    ///
    /// The okay part of the return value is `(files, has_errors)`. The
    /// latter can be used to indicate a fail state from the upload.
    pub fn upload(&mut self) -> Result<(Vec<DebugInfoFile>, bool), Error> {
        if self.paths.is_empty() {
            println!("{}: No paths were provided.", style("Warning").yellow());
            return Ok(Default::default());
        }
        let api = Api::current();
        if let Some(ref chunk_options) = api.get_chunk_upload_options(&self.org)? {
            // Adopt server-side limits where the server reports them.
            if chunk_options.max_file_size > 0 {
                self.max_file_size = chunk_options.max_file_size;
            }
            if chunk_options.max_wait > 0 {
                self.max_wait = Duration::from_secs(chunk_options.max_wait);
            }
            // Record negotiated capabilities for use by the format filters.
            self.pdbs_allowed = chunk_options.supports(ChunkUploadCapability::Pdbs);
            self.sources_allowed = chunk_options.supports(ChunkUploadCapability::Sources);
            self.bcsymbolmaps_allowed = chunk_options.supports(ChunkUploadCapability::BcSymbolmap);
            if chunk_options.supports(ChunkUploadCapability::DebugFiles) {
                self.validate_capabilities();
                return upload_difs_chunked(self, chunk_options);
            }
        }
        // Fall back to the legacy batched upload when the server does not
        // support chunked debug file uploads.
        self.validate_capabilities();
        Ok((upload_difs_batched(self)?, false))
    }
    /// Validate that the server supports all requested capabilities.
    fn validate_capabilities(&mut self) {
        // Checks whether source bundles are *explicitly* requested on the command line.
        if (self
            .formats
            .contains(&DifFormat::Object(FileFormat::SourceBundle))
            || self.include_sources)
            && !self.sources_allowed
        {
            warn!("Source uploads are not supported by the configured Sentry server");
            self.include_sources = false;
        }
        // Checks whether PDBs or PEs were *explicitly* requested on the command line.
        if (self.formats.contains(&DifFormat::Object(FileFormat::Pdb))
            || self.formats.contains(&DifFormat::Object(FileFormat::Pe)))
            && !self.pdbs_allowed
        {
            warn!("PDBs and PEs are not supported by the configured Sentry server");
            // This is validated additionally in .valid_format()
        }
        // Checks whether BCSymbolMaps and PLists are **explicitly** requested on the command line.
        if (self.formats.contains(&DifFormat::BcSymbolMap)
            || self.formats.contains(&DifFormat::PList))
            && !self.bcsymbolmaps_allowed
        {
            warn!("BCSymbolMaps are not supported by the configured Sentry server");
        }
    }
    /// Determines if this `DebugId` matches the search criteria.
    fn valid_id(&self, id: DebugId) -> bool {
        self.ids.is_empty() || self.ids.contains(&id)
    }
    /// Determines if this file extension matches the search criteria.
    fn valid_extension(&self, ext: Option<&OsStr>) -> bool {
        self.extensions.is_empty() || ext.map_or(false, |e| self.extensions.contains(e))
    }
    /// Determines if this [`DifFormat`] matches the search criteria.
    ///
    /// Formats requiring a server capability (PDB/PE, source bundles,
    /// BCSymbolMaps/PLists) are rejected unless the capability was negotiated.
    fn valid_format(&self, format: DifFormat) -> bool {
        match format {
            DifFormat::Object(FileFormat::Unknown) => false,
            DifFormat::Object(FileFormat::Pdb) if !self.pdbs_allowed => false,
            DifFormat::Object(FileFormat::Pe) if !self.pdbs_allowed => false,
            DifFormat::Object(FileFormat::SourceBundle) if !self.sources_allowed => false,
            DifFormat::BcSymbolMap | DifFormat::PList if !self.bcsymbolmaps_allowed => false,
            format => self.formats.is_empty() || self.formats.contains(&format),
        }
    }
    /// Determines if the given `Object` matches the features search criteria.
    ///
    /// If this is not an Object DIF then the object features filter does not apply so this
    /// always returns that it is valid.
    ///
    /// An object is valid if it has at least one of the requested feature
    /// classes (symtab, debug info, unwind info, sources).
    fn valid_features(&self, dif: &DifMatch) -> bool {
        let object = match dif.object() {
            Some(object) => object,
            None => return true,
        };
        self.features.symtab && object.has_symbols()
            || self.features.debug && object.has_debug_info()
            || self.features.unwind && object.has_unwind_info()
            || self.features.sources && object.has_sources()
    }
    /// Checks if a file is too large and logs skip message if so.
    fn valid_size(&self, name: &str, size: usize) -> bool {
        // A usize that does not fit into u64 can only be oversized, so a
        // failed conversion counts as "too large".
        let file_size: Result<u64, _> = size.try_into();
        let too_large = match file_size {
            Ok(file_size) => file_size > self.max_file_size,
            Err(_) => true,
        };
        if too_large {
            warn!(
                "Skipping debug file since it exceeds {}: {} ({})",
                HumanBytes(self.max_file_size),
                name,
                HumanBytes(file_size.unwrap_or(u64::MAX)),
            );
        }
        !too_large
    }
    /// Validates DIF on whether it should be processed.
    ///
    /// This takes all the filters configured in the [`DifUpload`] into account and returns
    /// whether a file should be skipped or not. It also takes care of logging such a skip
    /// if required.
    fn validate_dif(&self, dif: &DifMatch) -> bool {
        // Skip if we didn't want this kind of DIF.
        if !self.valid_format(dif.format()) {
            debug!("skipping {} because of format", dif.name);
            return false;
        }
        // Skip if this DIF does not have features we want.
        if !self.valid_features(dif) {
            debug!("skipping {} because of features", dif.name);
            return false;
        }
        // Skip if this DIF has no DebugId or we are only looking for certain IDs.
        let id = dif.debug_id.unwrap_or_default();
        if id.is_nil() || !self.valid_id(id) {
            debug!("skipping {} because of debugid", dif.name);
            return false;
        }
        // Skip if file exceeds the maximum allowed file size.
        if !self.valid_size(&dif.name, dif.data().len()) {
            debug!("skipping {} because of size", dif.name);
            return false;
        }
        true
    }
}
| 33.811196 | 100 | 0.590015 |
f5016b31bec9963da37462f73cc8b5eb8b083da9 | 572 | // interface-types
use wasm_bindgen::prelude::*;
/// Accepts one argument of every scalar numeric type; the body is
/// intentionally empty — only the exported signature matters here.
#[wasm_bindgen]
pub fn integers(
    _a1: u8,
    _a2: i8,
    _a3: u16,
    _a4: i16,
    _a5: u32,
    _a6: i32,
    _a7: f32,
    _a8: f64,
) {
}
/// Exported function returning the constant `0` as an `i8`.
#[wasm_bindgen]
pub fn ret_i8() -> i8 {
    0i8
}
/// Exported function returning the constant `1` as a `u8`.
#[wasm_bindgen]
pub fn ret_u8() -> u8 {
    1u8
}
/// Exported function returning the constant `2` as an `i16`.
#[wasm_bindgen]
pub fn ret_i16() -> i16 {
    2i16
}
/// Exported function returning the constant `3` as a `u16`.
#[wasm_bindgen]
pub fn ret_u16() -> u16 {
    3u16
}
/// Exported function returning the constant `4` as an `i32`.
#[wasm_bindgen]
pub fn ret_i32() -> i32 {
    4i32
}
/// Exported function returning the constant `5` as a `u32`.
#[wasm_bindgen]
pub fn ret_u32() -> u32 {
    5u32
}
/// Exported function returning the constant `6.0` as an `f32`.
#[wasm_bindgen]
pub fn ret_f32() -> f32 {
    6.0f32
}
/// Exported function returning the constant `7.0` as an `f64`.
#[wasm_bindgen]
pub fn ret_f64() -> f64 {
    7.0f64
}
| 12.170213 | 96 | 0.575175 |
23a6de909da537bf9e51d6b1bde5f59edc024919 | 1,676 | /* automatically generated by rust-bindgen */
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
#![cfg(target_os = "macos")]
#[macro_use]
extern crate objc;
/// Raw pointer to an Objective-C object, mirroring the runtime's `id` type.
/// The lowercase name intentionally matches the Objective-C spelling, hence
/// the lint allowance.
#[allow(non_camel_case_types)]
pub type id = *mut objc::runtime::Object;
/// Generated binding for the Objective-C `Foo` class.
///
/// `#[repr(transparent)]` guarantees this struct has exactly the layout of
/// the wrapped object pointer, so it can be passed across the FFI boundary
/// wherever an `id` is expected.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct Foo(pub id);
impl std::ops::Deref for Foo {
    type Target = objc::runtime::Object;
    fn deref(&self) -> &Self::Target {
        // SAFETY assumption (generated code): `self.0` is a non-null pointer
        // to a live Objective-C object — TODO confirm at construction sites.
        unsafe { &*self.0 }
    }
}
// Lets `Foo` be used as a `msg_send!` receiver. Presumably sound because the
// type is a transparent wrapper over an object pointer — generated binding.
unsafe impl objc::Message for Foo {}
impl Foo {
pub fn alloc() -> Self {
Self(unsafe { msg_send!(objc::class!(Foo), alloc) })
}
}
impl<ObjectType: 'static> IFoo<ObjectType> for Foo {}
/// Generated interface trait exposing the methods of the Objective-C `Foo`
/// class; implemented for `Foo` via the blanket impl above.
pub trait IFoo<ObjectType>: Sized + std::ops::Deref {
    /// Sends the `get` message to the receiver and returns the raw result.
    ///
    /// # Safety
    ///
    /// The receiver must be a valid Objective-C object that responds to
    /// `get` — presumably guaranteed by the generated class wrapper; this is
    /// FFI glue with no runtime checks.
    unsafe fn get(self) -> id
    where
        <Self as std::ops::Deref>::Target: objc::Message + Sized,
    {
        msg_send!(self, get)
    }
}
/// Generated binding for the Objective-C `FooMultiGeneric` class.
///
/// Like `Foo`, this is a `#[repr(transparent)]` wrapper over a raw object
/// pointer, so it is layout-compatible with `id`.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct FooMultiGeneric(pub id);
impl std::ops::Deref for FooMultiGeneric {
    type Target = objc::runtime::Object;
    fn deref(&self) -> &Self::Target {
        // SAFETY assumption (generated code): `self.0` is a non-null pointer
        // to a live Objective-C object — TODO confirm at construction sites.
        unsafe { &*self.0 }
    }
}
// Lets `FooMultiGeneric` be used as a `msg_send!` receiver — generated
// binding; presumably sound for the same reason as `Foo`.
unsafe impl objc::Message for FooMultiGeneric {}
impl FooMultiGeneric {
pub fn alloc() -> Self {
Self(unsafe { msg_send!(objc::class!(FooMultiGeneric), alloc) })
}
}
impl<KeyType: 'static, ObjectType: 'static>
    IFooMultiGeneric<KeyType, ObjectType> for FooMultiGeneric
{
}
/// Generated interface trait for the Objective-C `FooMultiGeneric` class,
/// generic over both its key and object type parameters.
pub trait IFooMultiGeneric<KeyType, ObjectType>:
    Sized + std::ops::Deref
{
    /// Sends `objectForKey:` to the receiver with the given raw key.
    ///
    /// # Safety
    ///
    /// The receiver and `key` must be valid Objective-C objects and the
    /// receiver must respond to `objectForKey:` — generated FFI glue with no
    /// runtime checks; confirm against the Objective-C class definition.
    unsafe fn objectForKey_(self, key: id) -> id
    where
        <Self as std::ops::Deref>::Target: objc::Message + Sized,
    {
        msg_send!(self, objectForKey: key)
    }
}
| 26.1875 | 82 | 0.643198 |
2653be2f77c870440bd073bf172a728ca7b40551 | 1,904 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{account_address::AccountAddress, value::MoveValue};
use alloc::vec::Vec;
use core::fmt;
use serde::{Deserialize, Serialize};
/// A single argument supplied to a transaction script.
///
/// Each variant carries one of the externally-constructible value kinds; see
/// `convert_txn_args` below for the mapping to `MoveValue`.
#[derive(Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub enum TransactionArgument {
    // Unsigned integers of the three supported widths.
    U8(u8),
    U64(u64),
    U128(u128),
    // An account address.
    Address(AccountAddress),
    // A raw byte vector.
    U8Vector(Vec<u8>),
    // A boolean flag.
    Bool(bool),
}
impl fmt::Debug for TransactionArgument {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
TransactionArgument::U8(value) => write!(f, "{{U8: {}}}", value),
TransactionArgument::U64(value) => write!(f, "{{U64: {}}}", value),
TransactionArgument::U128(value) => write!(f, "{{U128: {}}}", value),
TransactionArgument::Bool(boolean) => write!(f, "{{BOOL: {}}}", boolean),
TransactionArgument::Address(address) => write!(f, "{{ADDRESS: {:?}}}", address),
TransactionArgument::U8Vector(vector) => {
write!(f, "{{U8Vector: 0x{}}}", hex::encode(vector))
}
}
}
}
/// Convert the transaction arguments into Move values.
///
/// Each argument is mapped to its corresponding [`MoveValue`] and serialized
/// via `simple_serialize`; panics if serialization fails, since transaction
/// arguments are expected to always serialize.
pub fn convert_txn_args(args: &[TransactionArgument]) -> Vec<Vec<u8>> {
    let mut serialized = Vec::with_capacity(args.len());
    for arg in args {
        let value = match arg {
            TransactionArgument::U8(i) => MoveValue::U8(*i),
            TransactionArgument::U64(i) => MoveValue::U64(*i),
            TransactionArgument::U128(i) => MoveValue::U128(*i),
            TransactionArgument::Address(a) => MoveValue::Address(*a),
            TransactionArgument::Bool(b) => MoveValue::Bool(*b),
            TransactionArgument::U8Vector(v) => MoveValue::vector_u8(v.clone()),
        };
        serialized.push(
            value
                .simple_serialize()
                .expect("transaction arguments must serialize"),
        );
    }
    serialized
}
| 37.333333 | 93 | 0.573004 |
9ca2017272694a7011a5c6b0bf2a9671fc80f2b4 | 278,139 | /// Emit binary machine code for `inst` for the x86 ISA.
#[allow(unused_variables, unreachable_code)]
pub fn emit_inst<CS: CodeSink + ?Sized>(
func: &Function,
inst: Inst,
divert: &mut RegDiversions,
sink: &mut CS,
isa: &dyn TargetIsa,
) {
let encoding = func.encodings[inst];
let bits = encoding.bits();
let inst_data = &func.dfg[inst];
match encoding.recipe() {
// Recipe get_pinned_reg
0 => {
if let InstructionData::NullAry {
opcode,
..
} = *inst_data {
return;
}
}
// Recipe RexOp1set_pinned_reg
1 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let r15 = RU::r15.into();
put_rexop1(bits, rex2(r15, in_reg0), sink);
modrm_rr(r15, in_reg0, sink);
return;
}
}
// Recipe DynRexOp1umr
2 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexop1(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink);
return;
}
}
// Recipe RexOp1umr
3 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink);
return;
}
}
// Recipe Op1umr
4 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink);
return;
}
}
// Recipe Op1rmov
5 => {
if let InstructionData::RegMove {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
put_op1(bits, rex2(dst, src), sink);
modrm_rr(dst, src, sink);
return;
}
}
// Recipe RexOp1rmov
6 => {
if let InstructionData::RegMove {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
put_rexop1(bits, rex2(dst, src), sink);
modrm_rr(dst, src, sink);
return;
}
}
// Recipe Op1pu_id
7 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe RexOp1pu_id
8 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe RexOp1u_id
9 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex1(out_reg0), sink);
modrm_r_bits(out_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe RexOp1pu_iq
10 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
let imm: i64 = imm.into();
sink.put8(imm as u64);
return;
}
}
// Recipe Op1pu_id_bool
11 => {
if let InstructionData::UnaryBool {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
let imm: u32 = if imm { 1 } else { 0 };
sink.put4(imm);
return;
}
}
// Recipe RexOp1pu_id_bool
12 => {
if let InstructionData::UnaryBool {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
let imm: u32 = if imm { 1 } else { 0 };
sink.put4(imm);
return;
}
}
// Recipe Op1u_id_z
13 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp1u_id_z
14 => {
if let InstructionData::UnaryImm {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe null
15 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
return;
}
}
// Recipe Op2urm_noflags_abcd
16 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp2urm_noflags
17 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe Op2urm_noflags
18 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp1urm_noflags
19 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp1copysp
20 => {
if let InstructionData::CopySpecial {
opcode,
src,
dst,
..
} = *inst_data {
put_rexop1(bits, rex2(dst, src), sink);
modrm_rr(dst, src, sink);
return;
}
}
// Recipe Op1copysp
21 => {
if let InstructionData::CopySpecial {
opcode,
src,
dst,
..
} = *inst_data {
put_op1(bits, rex2(dst, src), sink);
modrm_rr(dst, src, sink);
return;
}
}
// Recipe Op1umr_reg_to_ssa
22 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex2(out_reg0, src), sink);
modrm_rr(out_reg0, src, sink);
return;
}
}
// Recipe RexOp1umr_reg_to_ssa
23 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(out_reg0, src), sink);
modrm_rr(out_reg0, src, sink);
return;
}
}
// Recipe Mp2furm_reg_to_ssa
24 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp2(bits, rex2(src, out_reg0), sink);
modrm_rr(src, out_reg0, sink);
return;
}
}
// Recipe RexMp2furm_reg_to_ssa
25 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp2(bits, rex2(src, out_reg0), sink);
modrm_rr(src, out_reg0, sink);
return;
}
}
// Recipe dummy_sarg_t
26 => {
if let InstructionData::NullAry {
opcode,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_stk0 = StackRef::masked(
divert.stack(results[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
return;
}
}
// Recipe Op1ldWithIndex
27 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe RexOp1ldWithIndex
28 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Op2ldWithIndex
29 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe RexOp2ldWithIndex
30 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Op1ldWithIndexDisp8
31 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexOp1ldWithIndexDisp8
32 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op2ldWithIndexDisp8
33 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexOp2ldWithIndexDisp8
34 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op1ldWithIndexDisp32
35 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexOp1ldWithIndexDisp32
36 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op2ldWithIndexDisp32
37 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexOp2ldWithIndexDisp32
38 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op1stWithIndex
39 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe RexOp1stWithIndex
40 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe Mp1stWithIndex
41 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe RexMp1stWithIndex
42 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe Op1stWithIndexDisp8
43 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexOp1stWithIndexDisp8
44 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Mp1stWithIndexDisp8
45 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexMp1stWithIndexDisp8
46 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op1stWithIndexDisp32
47 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexOp1stWithIndexDisp32
48 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Mp1stWithIndexDisp32
49 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexMp1stWithIndexDisp32
50 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op1stWithIndex_abcd
51 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe RexOp1stWithIndex_abcd
52 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
                        // Recipe Op1stWithIndexDisp8_abcd
                        // Store with base+index addressing and an 8-bit displacement.
                        // The `_abcd` suffix presumably restricts the stored register
                        // to the ABCD (AL/BL/CL/DL-encodable) class — confirm in the
                        // recipe definitions.
                        53 => {
                            if let InstructionData::StoreComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                // args[0] is the stored value (goes in the ModR/M reg
                                // field); args[1] and args[2] form the address and are
                                // encoded in the SIB byte.
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let in_reg2 = divert.reg(args[2], &func.locations);
                                // Record a HeapOutOfBounds trap site for this access
                                // unless the memory flags mark it as non-trapping.
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
                                modrm_sib_disp8(in_reg0, sink);
                                sib(0, in_reg2, in_reg1, sink);
                                // Truncating the offset to one byte is presumably
                                // lossless because this recipe is only selected when
                                // the offset fits in a signed 8-bit range — TODO
                                // confirm via the recipe's selection predicate.
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexOp1stWithIndexDisp8_abcd
                        // As recipe 53, but emitted with a mandatory REX prefix.
                        54 => {
                            if let InstructionData::StoreComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let in_reg2 = divert.reg(args[2], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
                                modrm_sib_disp8(in_reg0, sink);
                                sib(0, in_reg2, in_reg1, sink);
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Op1stWithIndexDisp32_abcd
                        // As recipe 53, but with a full 32-bit displacement.
                        55 => {
                            if let InstructionData::StoreComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let in_reg2 = divert.reg(args[2], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
                                modrm_sib_disp32(in_reg0, sink);
                                sib(0, in_reg2, in_reg1, sink);
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1stWithIndexDisp32_abcd
                        // As recipe 55, but emitted with a mandatory REX prefix.
                        56 => {
                            if let InstructionData::StoreComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let in_reg2 = divert.reg(args[2], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
                                modrm_sib_disp32(in_reg0, sink);
                                sib(0, in_reg2, in_reg1, sink);
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1st
                        // Register store with a single base register and no
                        // displacement. args[0] is the stored value, args[1] the base
                        // address register.
                        57 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                // Record a HeapOutOfBounds trap site unless the memory
                                // flags mark this access as non-trapping.
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                // Some base registers cannot be encoded with a plain
                                // ModR/M byte: bases needing a SIB byte (presumably
                                // the RSP/R12 class) get a no-index SIB, and bases
                                // needing an offset (presumably the RBP/R13 class) get
                                // an explicit zero disp8 — confirm against
                                // needs_sib_byte/needs_offset.
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else if needs_offset(in_reg1) {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexOp1st
                        // As recipe 57, but emitted with a mandatory REX prefix.
                        58 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else if needs_offset(in_reg1) {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Mp1st
                        // As recipe 57, but with a mandatory prefix byte (put_mp1).
                        59 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else if needs_offset(in_reg1) {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexMp1st
                        // As recipe 59, plus a mandatory REX prefix.
                        60 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else if needs_offset(in_reg1) {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Op1stDisp8
                        // Store with an 8-bit displacement. The disp8 truncation is
                        // presumably guarded by the recipe's offset-size predicate —
                        // TODO confirm.
                        61 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                // With a displacement present, only the SIB-requiring
                                // bases need special handling.
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp8(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexOp1stDisp8
                        // As recipe 61, plus a mandatory REX prefix.
                        62 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp8(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Mp1stDisp8
                        // As recipe 61, but with a mandatory prefix byte.
                        63 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp8(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexMp1stDisp8
                        // As recipe 63, plus a mandatory REX prefix.
                        64 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp8(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Op1stDisp32
                        // Store with a full 32-bit displacement.
                        65 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp32(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp32(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1stDisp32
                        // As recipe 65, plus a mandatory REX prefix.
                        66 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp32(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp32(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Mp1stDisp32
                        // As recipe 65, but with a mandatory prefix byte.
                        67 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp32(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp32(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexMp1stDisp32
                        // As recipe 67, plus a mandatory REX prefix.
                        68 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp32(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp32(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1st_abcd
                        // As recipe 57; `_abcd` presumably restricts the stored value
                        // to ABCD-class registers — confirm in the recipe definitions.
                        69 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else if needs_offset(in_reg1) {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Op1stDisp8_abcd
                        // As recipe 61, restricted to ABCD-class stored registers.
                        70 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp8(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp8(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Op1stDisp32_abcd
                        // As recipe 65, restricted to ABCD-class stored registers.
                        71 => {
                            if let InstructionData::Store {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg1, in_reg0), sink);
                                if needs_sib_byte(in_reg1) {
                                    modrm_sib_disp32(in_reg0, sink);
                                    sib_noindex(in_reg1, sink);
                                } else {
                                    modrm_disp32(in_reg1, in_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1spillSib32
                        // Spill a register to a stack slot: store through a
                        // stack-base register with a SIB byte and 32-bit
                        // displacement (the slot offset).
                        72 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                // Resolve the result's stack slot; StackBaseMask(1)
                                // presumably restricts which stack bases the recipe
                                // accepts — confirm against StackRef::masked.
                                let out_stk0 = StackRef::masked(
                                    divert.stack(results[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                // A stack store may hit the guard page, so a
                                // StackOverflow trap site is always recorded.
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                let base = stk_base(out_stk0.base);
                                put_op1(bits, rex2(base, in_reg0), sink);
                                modrm_sib_disp32(in_reg0, sink);
                                sib_noindex(base, sink);
                                sink.put4(out_stk0.offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1spillSib32
                        // As recipe 72, plus a mandatory REX prefix.
                        73 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_stk0 = StackRef::masked(
                                    divert.stack(results[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                let base = stk_base(out_stk0.base);
                                put_rexop1(bits, rex2(base, in_reg0), sink);
                                modrm_sib_disp32(in_reg0, sink);
                                sib_noindex(base, sink);
                                sink.put4(out_stk0.offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1regspill32
                        // Spill an explicitly named register (RegSpill) to an
                        // explicitly named slot; `dst` is resolved SP-relative.
                        74 => {
                            if let InstructionData::RegSpill {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                // Keep the register-diversion tracking in sync with
                                // this explicit register move.
                                divert.apply(inst_data);
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                let dst = StackRef::sp(dst, &func.stack_slots);
                                let base = stk_base(dst.base);
                                put_op1(bits, rex2(base, src), sink);
                                modrm_sib_disp32(src, sink);
                                sib_noindex(base, sink);
                                sink.put4(dst.offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1regspill32
                        // As recipe 74, plus a mandatory REX prefix.
                        75 => {
                            if let InstructionData::RegSpill {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                divert.apply(inst_data);
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                let dst = StackRef::sp(dst, &func.stack_slots);
                                let base = stk_base(dst.base);
                                put_rexop1(bits, rex2(base, src), sink);
                                modrm_sib_disp32(src, sink);
                                sib_noindex(base, sink);
                                sink.put4(dst.offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1ld
                        // Register load with a single base register and no
                        // displacement; the loaded value goes in the ModR/M reg
                        // field (out_reg0), the address base in the r/m field.
                        76 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                // Record a HeapOutOfBounds trap site unless the
                                // memory flags mark this access as non-trapping.
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg0, out_reg0), sink);
                                // Base registers that cannot be encoded with a plain
                                // ModR/M byte get a no-index SIB or a zero disp8
                                // (presumably the RSP/R12 and RBP/R13 classes —
                                // confirm against needs_sib_byte/needs_offset).
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexOp1ld
                        // As recipe 76, plus a mandatory REX prefix.
                        77 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Op2ld
                        // As recipe 76, but a two-byte opcode (put_op2).
                        78 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexOp2ld
                        // As recipe 78, plus a mandatory REX prefix.
                        79 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Op1ldDisp8
                        // Load with an 8-bit displacement; the disp8 truncation is
                        // presumably guarded by the recipe's offset-size predicate —
                        // TODO confirm.
                        80 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexOp1ldDisp8
                        // As recipe 80, plus a mandatory REX prefix.
                        81 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Op2ldDisp8
                        // As recipe 80, but a two-byte opcode.
                        82 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexOp2ldDisp8
                        // As recipe 82, plus a mandatory REX prefix.
                        83 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Op1ldDisp32
                        // Load with a full 32-bit displacement.
                        84 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op1(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1ldDisp32
                        // As recipe 84, plus a mandatory REX prefix.
                        85 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop1(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Op2ldDisp32
                        // As recipe 84, but a two-byte opcode.
                        86 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_op2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp2ldDisp32
                        // As recipe 86, plus a mandatory REX prefix.
                        87 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1fillSib32
                        // Fill (reload) a spilled value from its stack slot into a
                        // register: load through a stack-base register with a SIB
                        // byte and 32-bit displacement. No trap record is emitted —
                        // the slot was already probed by the spill.
                        88 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                // Resolve the argument's stack slot; StackBaseMask(1)
                                // presumably restricts which stack bases this recipe
                                // accepts — confirm against StackRef::masked.
                                let in_stk0 = StackRef::masked(
                                    divert.stack(args[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                let base = stk_base(in_stk0.base);
                                put_op1(bits, rex2(base, out_reg0), sink);
                                modrm_sib_disp32(out_reg0, sink);
                                sib_noindex(base, sink);
                                sink.put4(in_stk0.offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1fillSib32
                        // As recipe 88, plus a mandatory REX prefix.
                        89 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_stk0 = StackRef::masked(
                                    divert.stack(args[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                let base = stk_base(in_stk0.base);
                                put_rexop1(bits, rex2(base, out_reg0), sink);
                                modrm_sib_disp32(out_reg0, sink);
                                sib_noindex(base, sink);
                                sink.put4(in_stk0.offset as u32);
                                return;
                            }
                        }
                        // Recipe Op1regfill32
                        // Fill from an explicitly named slot (RegFill) into an
                        // explicitly named register; `src` is resolved SP-relative.
                        90 => {
                            if let InstructionData::RegFill {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                // Keep the register-diversion tracking in sync with
                                // this explicit register move.
                                divert.apply(inst_data);
                                let src = StackRef::sp(src, &func.stack_slots);
                                let base = stk_base(src.base);
                                put_op1(bits, rex2(base, dst), sink);
                                modrm_sib_disp32(dst, sink);
                                sib_noindex(base, sink);
                                sink.put4(src.offset as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1regfill32
                        // As recipe 90, plus a mandatory REX prefix.
                        91 => {
                            if let InstructionData::RegFill {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                divert.apply(inst_data);
                                let src = StackRef::sp(src, &func.stack_slots);
                                let base = stk_base(src.base);
                                put_rexop1(bits, rex2(base, dst), sink);
                                modrm_sib_disp32(dst, sink);
                                sib_noindex(base, sink);
                                sink.put4(src.offset as u32);
                                return;
                            }
                        }
                        // Recipe fillnull
                        // No-op fill: resolves the source slot and destination
                        // register but emits no bytes. Presumably used when the
                        // value needs no actual code (null recipe) — confirm in the
                        // recipe definitions.
                        92 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_stk0 = StackRef::masked(
                                    divert.stack(args[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                return;
                            }
                        }
                        // Recipe ffillnull
                        // Floating-point counterpart of recipe 92; also emits no
                        // bytes.
                        93 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_stk0 = StackRef::masked(
                                    divert.stack(args[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                return;
                            }
                        }
                        // Recipe Op1pushq
                        // PUSH r64: the register's low three bits are folded into the
                        // opcode byte itself (`bits | (in_reg0 & 7)`); the remaining
                        // register bit presumably travels in the REX prefix via
                        // rex1. The push may grow the stack into the guard page, so
                        // a StackOverflow trap site is always recorded.
                        94 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                put_op1(bits | (in_reg0 & 7), rex1(in_reg0), sink);
                                return;
                            }
                        }
                        // Recipe RexOp1pushq
                        // As recipe 94, plus a mandatory REX prefix.
                        95 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                                put_rexop1(bits | (in_reg0 | 7), rex1(in_reg0), sink);
                                return;
                            }
                        }
                        // Recipe Op1popq
                        // POP r64: register encoded in the opcode byte like PUSH.
                        // Popping shrinks the stack, so no trap record is needed.
                        96 => {
                            if let InstructionData::NullAry {
                                opcode,
                                ..
                            } = *inst_data {
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
                                return;
                            }
                        }
                        // Recipe RexOp1popq
                        // As recipe 96, plus a mandatory REX prefix.
                        97 => {
                            if let InstructionData::NullAry {
                                opcode,
                                ..
                            } = *inst_data {
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
                                return;
                            }
                        }
                        // Recipe stacknull
                        // Stack-to-stack no-op: resolves both slots but emits no
                        // bytes. Presumably the two locations are the same slot —
                        // confirm in the recipe definitions.
                        98 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_stk0 = StackRef::masked(
                                    divert.stack(args[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                let results = [func.dfg.first_result(inst)];
                                let out_stk0 = StackRef::masked(
                                    divert.stack(results[0], &func.locations),
                                    StackBaseMask(1),
                                    &func.stack_slots,
                                ).unwrap();
                                return;
                            }
                        }
                        // Recipe Op1adjustsp
                        // Adjust the stack pointer by a register amount: RSP is
                        // hard-coded as the ModR/M reg operand, the argument register
                        // as the other operand.
                        99 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                put_op1(bits, rex2(RU::rsp.into(), in_reg0), sink);
                                modrm_rr(RU::rsp.into(), in_reg0, sink);
                                return;
                            }
                        }
                        // Recipe RexOp1adjustsp
                        // As recipe 99, plus a mandatory REX prefix.
                        100 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                put_rexop1(bits, rex2(RU::rsp.into(), in_reg0), sink);
                                modrm_rr(RU::rsp.into(), in_reg0, sink);
                                return;
                            }
                        }
                        // Recipe Op1adjustsp_ib
                        // Adjust RSP by an 8-bit immediate. The opcode extension is
                        // carried in `bits` (modrm_r_bits); the i64 immediate is
                        // truncated to one byte — presumably guarded by the recipe's
                        // immediate-size predicate, TODO confirm.
                        101 => {
                            if let InstructionData::UnaryImm {
                                opcode,
                                imm,
                                ..
                            } = *inst_data {
                                put_op1(bits, rex1(RU::rsp.into()), sink);
                                modrm_r_bits(RU::rsp.into(), bits, sink);
                                let imm: i64 = imm.into();
                                sink.put1(imm as u8);
                                return;
                            }
                        }
                        // Recipe Op1adjustsp_id
                        // As recipe 101, but with a 32-bit immediate.
                        102 => {
                            if let InstructionData::UnaryImm {
                                opcode,
                                imm,
                                ..
                            } = *inst_data {
                                put_op1(bits, rex1(RU::rsp.into()), sink);
                                modrm_r_bits(RU::rsp.into(), bits, sink);
                                let imm: i64 = imm.into();
                                sink.put4(imm as u32);
                                return;
                            }
                        }
                        // Recipe RexOp1adjustsp_ib
                        // As recipe 101, plus a mandatory REX prefix.
                        103 => {
                            if let InstructionData::UnaryImm {
                                opcode,
                                imm,
                                ..
                            } = *inst_data {
                                put_rexop1(bits, rex1(RU::rsp.into()), sink);
                                modrm_r_bits(RU::rsp.into(), bits, sink);
                                let imm: i64 = imm.into();
                                sink.put1(imm as u8);
                                return;
                            }
                        }
                        // Recipe RexOp1adjustsp_id
                        // As recipe 102, plus a mandatory REX prefix.
                        104 => {
                            if let InstructionData::UnaryImm {
                                opcode,
                                imm,
                                ..
                            } = *inst_data {
                                put_rexop1(bits, rex1(RU::rsp.into()), sink);
                                modrm_r_bits(RU::rsp.into(), bits, sink);
                                let imm: i64 = imm.into();
                                sink.put4(imm as u32);
                                return;
                            }
                        }
                        // Recipe Mp2frurm
                        // Register-to-register move with a mandatory prefix and
                        // two-byte opcode. The operand order rex2(in, out) /
                        // modrm_rr(in, out) puts the result in the ModR/M reg field;
                        // presumably this is the GPR→FPR direction (e.g. MOVD) —
                        // confirm against the recipe definitions.
                        105 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_mp2(bits, rex2(in_reg0, out_reg0), sink);
                                modrm_rr(in_reg0, out_reg0, sink);
                                return;
                            }
                        }
                        // Recipe RexMp2frurm
                        // As recipe 105, plus a mandatory REX prefix.
                        106 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
                                modrm_rr(in_reg0, out_reg0, sink);
                                return;
                            }
                        }
                        // Recipe Mp2rfumr
                        // Mirror of recipe 105: the operand order is reversed
                        // (rex2(out, in) / modrm_rr(out, in)), so the source sits in
                        // the ModR/M reg field — presumably the FPR→GPR direction.
                        107 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_mp2(bits, rex2(out_reg0, in_reg0), sink);
                                modrm_rr(out_reg0, in_reg0, sink);
                                return;
                            }
                        }
                        // Recipe RexMp2rfumr
                        // As recipe 107, plus a mandatory REX prefix.
                        108 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_rexmp2(bits, rex2(out_reg0, in_reg0), sink);
                                modrm_rr(out_reg0, in_reg0, sink);
                                return;
                            }
                        }
                        // Recipe Op2furm
                        // Two-byte-opcode unary register operation, result in the
                        // ModR/M reg field.
                        109 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_op2(bits, rex2(in_reg0, out_reg0), sink);
                                modrm_rr(in_reg0, out_reg0, sink);
                                return;
                            }
                        }
                        // Recipe RexOp2furm
                        // As recipe 109, plus a mandatory REX prefix.
                        110 => {
                            if let InstructionData::Unary {
                                opcode,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
                                modrm_rr(in_reg0, out_reg0, sink);
                                return;
                            }
                        }
                        // Recipe Op2frmov
                        // Explicit register-to-register move (RegMove) with a
                        // two-byte opcode; the diversion tracker is updated so later
                        // instructions see the value in its new register.
                        111 => {
                            if let InstructionData::RegMove {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                divert.apply(inst_data);
                                put_op2(bits, rex2(src, dst), sink);
                                modrm_rr(src, dst, sink);
                                return;
                            }
                        }
                        // Recipe RexOp2frmov
                        // As recipe 111, plus a mandatory REX prefix.
                        112 => {
                            if let InstructionData::RegMove {
                                opcode,
                                src,
                                dst,
                                arg,
                                ..
                            } = *inst_data {
                                divert.apply(inst_data);
                                put_rexop2(bits, rex2(src, dst), sink);
                                modrm_rr(src, dst, sink);
                                return;
                            }
                        }
                        // Recipe Mp2fld
                        // Floating-point load (mandatory prefix + two-byte opcode)
                        // with a single base register and no displacement; same
                        // ModR/M special-casing as the integer load recipes.
                        113 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                // Record a HeapOutOfBounds trap site unless the
                                // memory flags mark this access as non-trapping.
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp2(bits, rex2(in_reg0, out_reg0), sink);
                                // Bases that need a SIB byte or a zero disp8 cannot
                                // be encoded with a plain ModR/M byte — see
                                // needs_sib_byte/needs_offset.
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexMp2fld
                        // As recipe 113, plus a mandatory REX prefix.
                        114 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else if needs_offset(in_reg0) {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_rm(in_reg0, out_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Mp2fldDisp8
                        // As recipe 113, but with an 8-bit displacement; the disp8
                        // truncation is presumably guarded by the recipe's
                        // offset-size predicate — TODO confirm.
                        115 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexMp2fldDisp8
                        // As recipe 115, plus a mandatory REX prefix.
                        116 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp8(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe Mp2fldDisp32
                        // As recipe 113, but with a full 32-bit displacement.
                        117 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe RexMp2fldDisp32
                        // As recipe 117, plus a mandatory REX prefix.
                        118 => {
                            if let InstructionData::Load {
                                opcode,
                                flags,
                                offset,
                                arg,
                                ..
                            } = *inst_data {
                                let args = [arg];
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
                                if needs_sib_byte(in_reg0) {
                                    modrm_sib_disp32(out_reg0, sink);
                                    sib_noindex(in_reg0, sink);
                                } else {
                                    modrm_disp32(in_reg0, out_reg0, sink);
                                }
                                let offset: i32 = offset.into();
                                sink.put4(offset as u32);
                                return;
                            }
                        }
                        // Recipe Mp2fldWithIndex
                        // Floating-point load with base+index addressing and no
                        // static displacement. args[0] is the base, args[1] the
                        // index; both go in the SIB byte, the loaded register in the
                        // ModR/M reg field.
                        119 => {
                            if let InstructionData::LoadComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                // Record a HeapOutOfBounds trap site unless the
                                // memory flags mark this access as non-trapping.
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
                                // The else branch always inserts an SIB byte.
                                // Bases needing an offset (presumably the RBP/R13
                                // class) cannot use mod=00 and get a zero disp8.
                                if needs_offset(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib(0, in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_sib(out_reg0, sink);
                                    sib(0, in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe RexMp2fldWithIndex
                        // As recipe 119, plus a mandatory REX prefix.
                        120 => {
                            if let InstructionData::LoadComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
                                // The else branch always inserts an SIB byte.
                                if needs_offset(in_reg0) {
                                    modrm_sib_disp8(out_reg0, sink);
                                    sib(0, in_reg1, in_reg0, sink);
                                    sink.put1(0);
                                } else {
                                    modrm_sib(out_reg0, sink);
                                    sib(0, in_reg1, in_reg0, sink);
                                }
                                return;
                            }
                        }
                        // Recipe Mp2fldWithIndexDisp8
                        // As recipe 119, but with an 8-bit displacement; the disp8
                        // truncation is presumably guarded by the recipe's
                        // offset-size predicate — TODO confirm.
                        121 => {
                            if let InstructionData::LoadComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_mp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
                                modrm_sib_disp8(out_reg0, sink);
                                sib(0, in_reg1, in_reg0, sink);
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
                        // Recipe RexMp2fldWithIndexDisp8
                        // As recipe 121, plus a mandatory REX prefix.
                        122 => {
                            if let InstructionData::LoadComplex {
                                opcode,
                                flags,
                                offset,
                                ref args,
                                ..
                            } = *inst_data {
                                let args = args.as_slice(&func.dfg.value_lists);
                                let in_reg0 = divert.reg(args[0], &func.locations);
                                let in_reg1 = divert.reg(args[1], &func.locations);
                                let results = [func.dfg.first_result(inst)];
                                let out_reg0 = divert.reg(results[0], &func.locations);
                                if !flags.notrap() {
                                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                                }
                                put_rexmp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
                                modrm_sib_disp8(out_reg0, sink);
                                sib(0, in_reg1, in_reg0, sink);
                                let offset: i32 = offset.into();
                                sink.put1(offset as u8);
                                return;
                            }
                        }
// Recipe Mp2fldWithIndexDisp32
123 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexMp2fldWithIndexDisp32
124 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Mp2fst
125 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else if needs_offset(in_reg1) {
modrm_disp8(in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe RexMp2fst
126 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else if needs_offset(in_reg1) {
modrm_disp8(in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Mp2fstDisp8
127 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp8(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexMp2fstDisp8
128 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp8(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Mp2fstDisp32
129 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp32(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp32(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexMp2fstDisp32
130 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp32(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp32(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Mp2fstWithIndex
131 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe RexMp2fstWithIndex
132 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe Mp2fstWithIndexDisp8
133 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexMp2fstWithIndexDisp8
134 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Mp2fstWithIndexDisp32
135 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexMp2fstWithIndexDisp32
136 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Mp2ffillSib32
137 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_stk0 = StackRef::masked(
divert.stack(args[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let base = stk_base(in_stk0.base);
put_mp2(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
sink.put4(in_stk0.offset as u32);
return;
}
}
// Recipe RexMp2ffillSib32
138 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_stk0 = StackRef::masked(
divert.stack(args[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let base = stk_base(in_stk0.base);
put_rexmp2(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
sink.put4(in_stk0.offset as u32);
return;
}
}
// Recipe Mp2fregfill32
139 => {
if let InstructionData::RegFill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
let src = StackRef::sp(src, &func.stack_slots);
let base = stk_base(src.base);
put_mp2(bits, rex2(base, dst), sink);
modrm_sib_disp32(dst, sink);
sib_noindex(base, sink);
sink.put4(src.offset as u32);
return;
}
}
// Recipe RexMp2fregfill32
140 => {
if let InstructionData::RegFill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
let src = StackRef::sp(src, &func.stack_slots);
let base = stk_base(src.base);
put_rexmp2(bits, rex2(base, dst), sink);
modrm_sib_disp32(dst, sink);
sib_noindex(base, sink);
sink.put4(src.offset as u32);
return;
}
}
            // Recipe Mp2fspillSib32
            //
            // Spill a float register to a stack slot addressed via SIB with a
            // 32-bit displacement.
            141 => {
                if let InstructionData::Unary {
                    opcode,
                    arg,
                    ..
                } = *inst_data {
                    let args = [arg];
                    let in_reg0 = divert.reg(args[0], &func.locations);
                    let results = [func.dfg.first_result(inst)];
                    // Resolve the result's stack slot; the mask restricts which
                    // stack base the recipe can encode — NOTE(review): confirm
                    // StackBaseMask(1) selects the SP base as elsewhere.
                    // `unwrap()` asserts the slot matched the mask (an encoding
                    // invariant, not a runtime condition).
                    let out_stk0 = StackRef::masked(
                        divert.stack(results[0], &func.locations),
                        StackBaseMask(1),
                        &func.stack_slots,
                    ).unwrap();
                    // Trap site is recorded before any instruction bytes, so a
                    // fault on the store reports as stack overflow here.
                    sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
                    let base = stk_base(out_stk0.base);
                    put_mp2(bits, rex2(base, in_reg0), sink);
                    modrm_sib_disp32(in_reg0, sink);
                    sib_noindex(base, sink);
                    // Slot offset as the 32-bit displacement.
                    sink.put4(out_stk0.offset as u32);
                    return;
                }
            }
// Recipe RexMp2fspillSib32
142 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_stk0 = StackRef::masked(
divert.stack(results[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let base = stk_base(out_stk0.base);
put_rexmp2(bits, rex2(base, in_reg0), sink);
modrm_sib_disp32(in_reg0, sink);
sib_noindex(base, sink);
sink.put4(out_stk0.offset as u32);
return;
}
}
// Recipe Mp2fregspill32
143 => {
if let InstructionData::RegSpill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let dst = StackRef::sp(dst, &func.stack_slots);
let base = stk_base(dst.base);
put_mp2(bits, rex2(base, src), sink);
modrm_sib_disp32(src, sink);
sib_noindex(base, sink);
sink.put4(dst.offset as u32);
return;
}
}
// Recipe RexMp2fregspill32
144 => {
if let InstructionData::RegSpill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let dst = StackRef::sp(dst, &func.stack_slots);
let base = stk_base(dst.base);
put_rexmp2(bits, rex2(base, src), sink);
modrm_sib_disp32(src, sink);
sib_noindex(base, sink);
sink.put4(dst.offset as u32);
return;
}
}
            // Recipe Op2f32imm_z
            //
            // Materialize an f32 constant that must be zero: the `imm` field is
            // destructured but intentionally unused, because the recipe encodes
            // the destination register with itself (presumably an XORPS-style
            // self-zeroing idiom — confirm against the recipe/legalization that
            // selects `_z` recipes only for the zero constant).
            145 => {
                if let InstructionData::UnaryIeee32 {
                    opcode,
                    imm,
                    ..
                } = *inst_data {
                    let results = [func.dfg.first_result(inst)];
                    let out_reg0 = divert.reg(results[0], &func.locations);
                    // Both ModRM operands are the destination register.
                    put_op2(bits, rex2(out_reg0, out_reg0), sink);
                    modrm_rr(out_reg0, out_reg0, sink);
                    return;
                }
            }
// Recipe Mp2f64imm_z
146 => {
if let InstructionData::UnaryIeee64 {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp2(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp2f32imm_z
147 => {
if let InstructionData::UnaryIeee32 {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe RexMp2f64imm_z
148 => {
if let InstructionData::UnaryIeee64 {
opcode,
imm,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp2(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe DynRexMp2frurm
149 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe Mp2furm
150 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexMp2furm
151 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe Mp2rfurm
152 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexMp2rfurm
153 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe Mp3furmi_rnd
154 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp3(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
sink.put1(match opcode {
Opcode::Nearest => 0b00,
Opcode::Floor => 0b01,
Opcode::Ceil => 0b10,
Opcode::Trunc => 0b11,
x => panic!("{} unexpected for furmi_rnd", opcode),
});
return;
}
}
// Recipe RexMp3furmi_rnd
155 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp3(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
sink.put1(match opcode {
Opcode::Nearest => 0b00,
Opcode::Floor => 0b01,
Opcode::Ceil => 0b10,
Opcode::Trunc => 0b11,
x => panic!("{} unexpected for furmi_rnd", opcode),
});
return;
}
}
// Recipe Mp2fa
156 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe RexMp2fa
157 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
            // Recipe Op2fcscc
            //
            // Float compare + set-on-condition: emits the comparison, then a
            // two-byte `setCC` writing the boolean result register. Conditions
            // whose flag combination cannot be expressed by a single `setCC`
            // are rejected at encoding time (panic).
            158 => {
                if let InstructionData::FloatCompare {
                    opcode,
                    cond,
                    ref args,
                    ..
                } = *inst_data {
                    let in_reg0 = divert.reg(args[0], &func.locations);
                    let in_reg1 = divert.reg(args[1], &func.locations);
                    let results = [func.dfg.first_result(inst)];
                    let out_reg0 = divert.reg(results[0], &func.locations);
                    // Comparison instruction.
                    put_op2(bits, rex2(in_reg1, in_reg0), sink);
                    modrm_rr(in_reg1, in_reg0, sink);
                    // `setCC` instruction, no REX.
                    use crate::ir::condcodes::FloatCC::*;
                    // Second opcode byte of the 0x0f-prefixed setCC for each
                    // supported condition; the trailing comments give the flag
                    // combination each one tests.
                    let setcc = match cond {
                        Ordered                    => 0x9b, // EQ|LT|GT => setnp (P=0)
                        Unordered                  => 0x9a, // UN => setp (P=1)
                        OrderedNotEqual            => 0x95, // LT|GT => setne (Z=0),
                        UnorderedOrEqual           => 0x94, // UN|EQ => sete (Z=1)
                        GreaterThan                => 0x97, // GT => seta (C=0&Z=0)
                        GreaterThanOrEqual         => 0x93, // GT|EQ => setae (C=0)
                        UnorderedOrLessThan        => 0x92, // UN|LT => setb (C=1)
                        UnorderedOrLessThanOrEqual => 0x96, // UN|LT|EQ => setbe (Z=1|C=1)
                        Equal |                       // EQ
                        NotEqual |                    // UN|LT|GT
                        LessThan |                    // LT
                        LessThanOrEqual |             // LT|EQ
                        UnorderedOrGreaterThan |      // UN|GT
                        UnorderedOrGreaterThanOrEqual // UN|GT|EQ
                        => panic!("{} not supported by fcscc", cond),
                    };
                    sink.put1(0x0f);
                    sink.put1(setcc);
                    // setCC takes only an r/m operand; the reg field is
                    // presumably don't-care, hence 0 — confirm vs. Intel SDM.
                    modrm_rr(out_reg0, 0, sink);
                    return;
                }
            }
// Recipe RexOp2fcscc
159 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// `setCC` instruction, no REX.
use crate::ir::condcodes::FloatCC::*;
let setcc = match cond {
Ordered => 0x9b, // EQ|LT|GT => setnp (P=0)
Unordered => 0x9a, // UN => setp (P=1)
OrderedNotEqual => 0x95, // LT|GT => setne (Z=0),
UnorderedOrEqual => 0x94, // UN|EQ => sete (Z=1)
GreaterThan => 0x97, // GT => seta (C=0&Z=0)
GreaterThanOrEqual => 0x93, // GT|EQ => setae (C=0)
UnorderedOrLessThan => 0x92, // UN|LT => setb (C=1)
UnorderedOrLessThanOrEqual => 0x96, // UN|LT|EQ => setbe (Z=1|C=1)
Equal | // EQ
NotEqual | // UN|LT|GT
LessThan | // LT
LessThanOrEqual | // LT|EQ
UnorderedOrGreaterThan | // UN|GT
UnorderedOrGreaterThanOrEqual // UN|GT|EQ
=> panic!("{} not supported by fcscc", cond),
};
sink.put1(0x0f);
sink.put1(setcc);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe Mp2fcscc
160 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// `setCC` instruction, no REX.
use crate::ir::condcodes::FloatCC::*;
let setcc = match cond {
Ordered => 0x9b, // EQ|LT|GT => setnp (P=0)
Unordered => 0x9a, // UN => setp (P=1)
OrderedNotEqual => 0x95, // LT|GT => setne (Z=0),
UnorderedOrEqual => 0x94, // UN|EQ => sete (Z=1)
GreaterThan => 0x97, // GT => seta (C=0&Z=0)
GreaterThanOrEqual => 0x93, // GT|EQ => setae (C=0)
UnorderedOrLessThan => 0x92, // UN|LT => setb (C=1)
UnorderedOrLessThanOrEqual => 0x96, // UN|LT|EQ => setbe (Z=1|C=1)
Equal | // EQ
NotEqual | // UN|LT|GT
LessThan | // LT
LessThanOrEqual | // LT|EQ
UnorderedOrGreaterThan | // UN|GT
UnorderedOrGreaterThanOrEqual // UN|GT|EQ
=> panic!("{} not supported by fcscc", cond),
};
sink.put1(0x0f);
sink.put1(setcc);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexMp2fcscc
161 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// `setCC` instruction, no REX.
use crate::ir::condcodes::FloatCC::*;
let setcc = match cond {
Ordered => 0x9b, // EQ|LT|GT => setnp (P=0)
Unordered => 0x9a, // UN => setp (P=1)
OrderedNotEqual => 0x95, // LT|GT => setne (Z=0),
UnorderedOrEqual => 0x94, // UN|EQ => sete (Z=1)
GreaterThan => 0x97, // GT => seta (C=0&Z=0)
GreaterThanOrEqual => 0x93, // GT|EQ => setae (C=0)
UnorderedOrLessThan => 0x92, // UN|LT => setb (C=1)
UnorderedOrLessThanOrEqual => 0x96, // UN|LT|EQ => setbe (Z=1|C=1)
Equal | // EQ
NotEqual | // UN|LT|GT
LessThan | // LT
LessThanOrEqual | // LT|EQ
UnorderedOrGreaterThan | // UN|GT
UnorderedOrGreaterThanOrEqual // UN|GT|EQ
=> panic!("{} not supported by fcscc", cond),
};
sink.put1(0x0f);
sink.put1(setcc);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe Op2fcmp
162 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_op2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe RexOp2fcmp
163 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Mp2fcmp
164 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe RexMp2fcmp
165 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexOp1rr
166 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp1rr
167 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp1rout
168 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp1rout
169 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp1rin
170 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp1rin
171 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp1rio
172 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp1rio
173 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp1r_ib
174 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe RexOp1r_ib
175 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexOp1r_id
176 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe RexOp1r_id
177 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe DynRexOp1ur
178 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe RexOp1ur
179 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe Op1ur
180 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_op1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe Op1rr
181 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_op1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp2rrx
182 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe RexOp2rrx
183 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexOp1div
184 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg2 = divert.reg(args[2], &func.locations);
sink.trap(TrapCode::IntegerDivisionByZero, func.srclocs[inst]);
put_dynrexop1(bits, rex1(in_reg2), sink);
modrm_r_bits(in_reg2, bits, sink);
return;
}
}
// Recipe RexOp1div
185 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg2 = divert.reg(args[2], &func.locations);
sink.trap(TrapCode::IntegerDivisionByZero, func.srclocs[inst]);
put_rexop1(bits, rex1(in_reg2), sink);
modrm_r_bits(in_reg2, bits, sink);
return;
}
}
// Recipe DynRexOp1mulx
186 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex1(in_reg1), sink);
modrm_r_bits(in_reg1, bits, sink);
return;
}
}
// Recipe RexOp1mulx
187 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex1(in_reg1), sink);
modrm_r_bits(in_reg1, bits, sink);
return;
}
}
// Recipe Op2fa
188 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_op2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe RexOp2fa
189 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Op2fax
190 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_op2(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp2fax
191 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop2(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe Op1rc
192 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
put_op1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe RexOp1rc
193 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
        // Recipe Mp2urm: unary reg -> reg op with a mandatory prefix and a
        // two-byte (0F xx) opcode. Encodes as `op out_reg0, in_reg0` where the
        // input register goes in the ModRM r/m field and the output in the reg field.
        194 => {
            if let InstructionData::Unary {
                opcode,
                arg,
                ..
            } = *inst_data {
                let args = [arg];
                // Resolve the SSA value to its current physical register,
                // honoring any active register diversions.
                let in_reg0 = divert.reg(args[0], &func.locations);
                let results = [func.dfg.first_result(inst)];
                let out_reg0 = divert.reg(results[0], &func.locations);
                // Emit prefix/opcode bytes; rex2(rm, reg) supplies the REX bits
                // for both registers if either needs an extended encoding.
                put_mp2(bits, rex2(in_reg0, out_reg0), sink);
                // ModRM with mod=11: register-direct operand.
                modrm_rr(in_reg0, out_reg0, sink);
                return;
            }
        }
// Recipe RexMp2urm
195 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe DynRexOp2bsf_and_bsr
196 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = func.dfg.inst_results(inst);
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexop2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe RexOp2bsf_and_bsr
197 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = func.dfg.inst_results(inst);
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe DynRexOp1icscc
198 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexOp1icscc
199 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe DynRexOp1icscc_ib
200 => {
if let InstructionData::IntCompareImm {
opcode,
cond,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexOp1icscc_ib
201 => {
if let InstructionData::IntCompareImm {
opcode,
cond,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe DynRexOp1icscc_id
202 => {
if let InstructionData::IntCompareImm {
opcode,
cond,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexOp1icscc_id
203 => {
if let InstructionData::IntCompareImm {
opcode,
cond,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
// `setCC` instruction, no REX.
let setcc = 0x90 | icc2opc(cond);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe DynRexOp1rcmp
204 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe RexOp1rcmp
205 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexop1(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexOp1rcmp_ib
206 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe RexOp1rcmp_ib
207 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexOp1rcmp_id
208 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe RexOp1rcmp_id
209 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put4(imm as u32);
return;
}
}
// Recipe Op1rcmp_sp
210 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_op1(bits, rex2(in_reg0, RU::rsp.into()), sink);
modrm_rr(in_reg0, RU::rsp.into(), sink);
return;
}
}
// Recipe RexOp1rcmp_sp
211 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex2(in_reg0, RU::rsp.into()), sink);
modrm_rr(in_reg0, RU::rsp.into(), sink);
return;
}
}
        // Recipe Op2seti_abcd: materialize an integer condition flag into a
        // register via the setCC family. The condition code is folded directly
        // into the low opcode bits (`bits | icc2opc(cond)`), selecting the
        // concrete 0F 9x setCC opcode.
        // NOTE(review): the `_abcd` suffix presumably means the output is
        // constrained to the AL/BL/CL/DL-encodable registers (no REX prefix
        // emitted here); that constraint must be enforced by register
        // allocation elsewhere — confirm against the recipe definitions.
        212 => {
            if let InstructionData::IntCond {
                opcode,
                cond,
                ..
            } = *inst_data {
                let results = [func.dfg.first_result(inst)];
                let out_reg0 = divert.reg(results[0], &func.locations);
                // Opcode with condition folded in; rex1 covers the single
                // register operand.
                put_op2(bits | icc2opc(cond), rex1(out_reg0), sink);
                // setCC takes only an r/m operand; the /r digit comes from `bits`.
                modrm_r_bits(out_reg0, bits, sink);
                return;
            }
        }
// Recipe RexOp2seti
213 => {
if let InstructionData::IntCond {
opcode,
cond,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits | icc2opc(cond), rex1(out_reg0), sink);
modrm_r_bits(out_reg0, bits, sink);
return;
}
}
// Recipe Op2setf_abcd
214 => {
if let InstructionData::FloatCond {
opcode,
cond,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op2(bits | fcc2opc(cond), rex1(out_reg0), sink);
modrm_r_bits(out_reg0, bits, sink);
return;
}
}
// Recipe RexOp2setf
215 => {
if let InstructionData::FloatCond {
opcode,
cond,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits | fcc2opc(cond), rex1(out_reg0), sink);
modrm_r_bits(out_reg0, bits, sink);
return;
}
}
// Recipe DynRexOp2cmov
216 => {
if let InstructionData::IntSelect {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
put_dynrexop2(bits | icc2opc(cond), rex2(in_reg1, in_reg2), sink);
modrm_rr(in_reg1, in_reg2, sink);
return;
}
}
// Recipe RexOp2cmov
217 => {
if let InstructionData::IntSelect {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
put_rexop2(bits | icc2opc(cond), rex2(in_reg1, in_reg2), sink);
modrm_rr(in_reg1, in_reg2, sink);
return;
}
}
// Recipe Mp3fa
218 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexMp3fa
219 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Mp2r_ib_unsigned_fpr
220 => {
if let InstructionData::BinaryImm8 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp2r_ib_unsigned_fpr
221 => {
if let InstructionData::BinaryImm8 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe Mp3blend
222 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
put_mp3(bits, rex2(in_reg1, in_reg2), sink);
modrm_rr(in_reg1, in_reg2, sink);
return;
}
}
// Recipe DynRexMp3blend
223 => {
if let InstructionData::Ternary {
opcode,
ref args,
..
} = *inst_data {
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
put_dynrexmp3(bits, rex2(in_reg1, in_reg2), sink);
modrm_rr(in_reg1, in_reg2, sink);
return;
}
}
// Recipe Mp3fa_ib
224 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp3fa_ib
225 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
        // Recipe null_fpr: emits NO machine code bytes — note there are no
        // `sink` calls before the `return`. This encodes a value that is
        // presumably already in the correct FPR (a no-op copy/nop recipe);
        // `in_reg0` is computed only to mirror the shape of the other
        // generated arms and is intentionally unused.
        226 => {
            if let InstructionData::Unary {
                opcode,
                arg,
                ..
            } = *inst_data {
                let args = [arg];
                let in_reg0 = divert.reg(args[0], &func.locations);
                return;
            }
        }
// Recipe Mp3r_ib_unsigned_r
227 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp3r_ib_unsigned_r
228 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe Mp2r_ib_unsigned_r
229 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp2r_ib_unsigned_r
230 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe RexMp3r_ib_unsigned_r
231 => {
if let InstructionData::TernaryImm8 {
opcode,
imm,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_rexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp2fa
232 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexOp2fa
233 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Mp3r_ib_unsigned_gpr
234 => {
if let InstructionData::BinaryImm8 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp3(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink); // note the flipped register in the ModR/M byte
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp3r_ib_unsigned_gpr
235 => {
if let InstructionData::BinaryImm8 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp3(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink); // note the flipped register in the ModR/M byte
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe RexMp3r_ib_unsigned_gpr
236 => {
if let InstructionData::BinaryImm8 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexmp3(bits, rex2(out_reg0, in_reg0), sink);
modrm_rr(out_reg0, in_reg0, sink); // note the flipped register in the ModR/M byte
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe Mp3furm
237 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_mp3(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe DynRexMp3furm
238 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp3(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
        // Recipe EvexMp2evex_reg_rm_128: unary reg -> reg op emitted with an
        // AVX-512 EVEX prefix, fixed 128-bit vector length, and no opmask.
        239 => {
            if let InstructionData::Unary {
                opcode,
                arg,
                ..
            } = *inst_data {
                let args = [arg];
                let in_reg0 = divert.reg(args[0], &func.locations);
                let results = [func.dfg.first_result(inst)];
                let out_reg0 = divert.reg(results[0], &func.locations);
                // instruction encoding operands: reg (op1, w), rm (op2, r)
                // this maps to: out_reg0, in_reg0
                // EVEX.LL selects 128-bit operation; no rounding/broadcast context.
                let context = EvexContext::Other { length: EvexVectorLength::V128 };
                // No write-mask register (k0 / merging semantics irrelevant here).
                let masking = EvexMasking::None;
                // vvvv operand is unused for this two-operand form, hence 0.
                put_evex(bits, out_reg0, 0, in_reg0, context, masking, sink); // params: reg, vvvv, rm
                modrm_rr(in_reg0, out_reg0, sink); // params: rm, reg
                return;
            }
        }
// Recipe DynRexMp2furm
240 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe DynRexMp2vconst_optimized
241 => {
if let InstructionData::UnaryConst {
opcode,
constant_handle,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexmp2(bits, rex2(out_reg0, out_reg0), sink);
modrm_rr(out_reg0, out_reg0, sink);
return;
}
}
// Recipe Op2vconst
242 => {
if let InstructionData::UnaryConst {
opcode,
constant_handle,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op2(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
const_disp4(constant_handle, func, sink);
return;
}
}
// Recipe DynRexOp2vconst
243 => {
if let InstructionData::UnaryConst {
opcode,
constant_handle,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexop2(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
const_disp4(constant_handle, func, sink);
return;
}
}
        // Recipe Op2fst: float/vector store `[in_reg1] <- in_reg0` with a
        // two-byte opcode and zero displacement. Representative of all the
        // fst/fld recipe variants below, which differ only in prefix helper
        // and displacement width.
        244 => {
            if let InstructionData::Store {
                opcode,
                flags,
                offset,
                ref args,
                ..
            } = *inst_data {
                let in_reg0 = divert.reg(args[0], &func.locations);
                let in_reg1 = divert.reg(args[1], &func.locations);
                // Unless the store is marked notrap, record a trap site so a
                // faulting access reports HeapOutOfBounds at this srcloc.
                if !flags.notrap() {
                    sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
                }
                put_op2(bits, rex2(in_reg1, in_reg0), sink);
                // Addressing-mode selection for the base register:
                if needs_sib_byte(in_reg1) {
                    // Base cannot be encoded directly in ModRM (rsp/r12 class):
                    // use a SIB byte with no index.
                    modrm_sib(in_reg0, sink);
                    sib_noindex(in_reg1, sink);
                } else if needs_offset(in_reg1) {
                    // Base collides with the RIP-relative/disp0 encoding
                    // (rbp/r13 class): force a disp8 of zero.
                    modrm_disp8(in_reg1, in_reg0, sink);
                    sink.put1(0);
                } else {
                    // Plain [base] with no displacement.
                    modrm_rm(in_reg1, in_reg0, sink);
                }
                return;
            }
        }
// Recipe DynRexOp2fst
245 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else if needs_offset(in_reg1) {
modrm_disp8(in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Op2fstDisp8
246 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp8(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe DynRexOp2fstDisp8
247 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp8(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op2fstDisp32
248 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp32(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp32(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe DynRexOp2fstDisp32
249 => {
if let InstructionData::Store {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
if needs_sib_byte(in_reg1) {
modrm_sib_disp32(in_reg0, sink);
sib_noindex(in_reg1, sink);
} else {
modrm_disp32(in_reg1, in_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op2fstWithIndex
250 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe RexOp2fstWithIndex
251 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg1) {
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
}
return;
}
}
// Recipe Op2fstWithIndexDisp8
252 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexOp2fstWithIndexDisp8
253 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp8(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op2fstWithIndexDisp32
254 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexOp2fstWithIndexDisp32
255 => {
if let InstructionData::StoreComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let in_reg2 = divert.reg(args[2], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg1, in_reg0, in_reg2), sink);
modrm_sib_disp32(in_reg0, sink);
sib(0, in_reg2, in_reg1, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op2fld
256 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else if needs_offset(in_reg0) {
modrm_disp8(in_reg0, out_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg0, out_reg0, sink);
}
return;
}
}
// Recipe DynRexOp2fld
257 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else if needs_offset(in_reg0) {
modrm_disp8(in_reg0, out_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg0, out_reg0, sink);
}
return;
}
}
// Recipe Op2fldDisp8
258 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp8(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe DynRexOp2fldDisp8
259 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp8(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op2fldDisp32
260 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp32(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp32(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe DynRexOp2fldDisp32
261 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexop2(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp32(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp32(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op2fldWithIndex
262 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe RexOp2fldWithIndex
263 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Op2fldWithIndexDisp8
264 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexOp2fldWithIndexDisp8
265 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Op2fldWithIndexDisp32
266 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_op2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexOp2fldWithIndexDisp32
267 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexop2(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Op2fspillSib32
268 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_stk0 = StackRef::masked(
divert.stack(results[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let base = stk_base(out_stk0.base);
put_op2(bits, rex2(base, in_reg0), sink);
modrm_sib_disp32(in_reg0, sink);
sib_noindex(base, sink);
sink.put4(out_stk0.offset as u32);
return;
}
}
// Recipe RexOp2fspillSib32
269 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_stk0 = StackRef::masked(
divert.stack(results[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let base = stk_base(out_stk0.base);
put_rexop2(bits, rex2(base, in_reg0), sink);
modrm_sib_disp32(in_reg0, sink);
sib_noindex(base, sink);
sink.put4(out_stk0.offset as u32);
return;
}
}
// Recipe Op2fregspill32
270 => {
if let InstructionData::RegSpill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let dst = StackRef::sp(dst, &func.stack_slots);
let base = stk_base(dst.base);
put_op2(bits, rex2(base, src), sink);
modrm_sib_disp32(src, sink);
sib_noindex(base, sink);
sink.put4(dst.offset as u32);
return;
}
}
// Recipe RexOp2fregspill32
271 => {
if let InstructionData::RegSpill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
let dst = StackRef::sp(dst, &func.stack_slots);
let base = stk_base(dst.base);
put_rexop2(bits, rex2(base, src), sink);
modrm_sib_disp32(src, sink);
sib_noindex(base, sink);
sink.put4(dst.offset as u32);
return;
}
}
// Recipe Op2ffillSib32
272 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_stk0 = StackRef::masked(
divert.stack(args[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let base = stk_base(in_stk0.base);
put_op2(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
sink.put4(in_stk0.offset as u32);
return;
}
}
// Recipe RexOp2ffillSib32
273 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_stk0 = StackRef::masked(
divert.stack(args[0], &func.locations),
StackBaseMask(1),
&func.stack_slots,
).unwrap();
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let base = stk_base(in_stk0.base);
put_rexop2(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
sink.put4(in_stk0.offset as u32);
return;
}
}
// Recipe Op2fregfill32
274 => {
if let InstructionData::RegFill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
let src = StackRef::sp(src, &func.stack_slots);
let base = stk_base(src.base);
put_op2(bits, rex2(base, dst), sink);
modrm_sib_disp32(dst, sink);
sib_noindex(base, sink);
sink.put4(src.offset as u32);
return;
}
}
// Recipe RexOp2fregfill32
275 => {
if let InstructionData::RegFill {
opcode,
src,
dst,
arg,
..
} = *inst_data {
divert.apply(inst_data);
let src = StackRef::sp(src, &func.stack_slots);
let base = stk_base(src.base);
put_rexop2(bits, rex2(base, dst), sink);
modrm_sib_disp32(dst, sink);
sib_noindex(base, sink);
sink.put4(src.offset as u32);
return;
}
}
// Recipe Op2furm_reg_to_ssa
276 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op2(bits, rex2(src, out_reg0), sink);
modrm_rr(src, out_reg0, sink);
return;
}
}
// Recipe RexOp2furm_reg_to_ssa
277 => {
if let InstructionData::CopyToSsa {
opcode,
src,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop2(bits, rex2(src, out_reg0), sink);
modrm_rr(src, out_reg0, sink);
return;
}
}
// Recipe Mp3fld
278 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else if needs_offset(in_reg0) {
modrm_disp8(in_reg0, out_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg0, out_reg0, sink);
}
return;
}
}
// Recipe DynRexMp3fld
279 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexmp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else if needs_offset(in_reg0) {
modrm_disp8(in_reg0, out_reg0, sink);
sink.put1(0);
} else {
modrm_rm(in_reg0, out_reg0, sink);
}
return;
}
}
// Recipe Mp3fldDisp8
280 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp8(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe DynRexMp3fldDisp8
281 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexmp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp8(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Mp3fldDisp32
282 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp32(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp32(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe DynRexMp3fldDisp32
283 => {
if let InstructionData::Load {
opcode,
flags,
offset,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_dynrexmp3(bits, rex2(in_reg0, out_reg0), sink);
if needs_sib_byte(in_reg0) {
modrm_sib_disp32(out_reg0, sink);
sib_noindex(in_reg0, sink);
} else {
modrm_disp32(in_reg0, out_reg0, sink);
}
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe Mp3fldWithIndex
284 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe RexMp3fldWithIndex
285 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
// The else branch always inserts an SIB byte.
if needs_offset(in_reg0) {
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
}
return;
}
}
// Recipe Mp3fldWithIndexDisp8
286 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe RexMp3fldWithIndexDisp8
287 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp8(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put1(offset as u8);
return;
}
}
// Recipe Mp3fldWithIndexDisp32
288 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_mp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe RexMp3fldWithIndexDisp32
289 => {
if let InstructionData::LoadComplex {
opcode,
flags,
offset,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
if !flags.notrap() {
sink.trap(TrapCode::HeapOutOfBounds, func.srclocs[inst]);
}
put_rexmp3(bits, rex3(in_reg0, out_reg0, in_reg1), sink);
modrm_sib_disp32(out_reg0, sink);
sib(0, in_reg1, in_reg0, sink);
let offset: i32 = offset.into();
sink.put4(offset as u32);
return;
}
}
// Recipe EvexMp3evex_reg_vvvv_rm_128
290 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// instruction encoding operands: reg (op1, w), vvvv (op2, r), rm (op3, r)
// this maps to: out_reg0, in_reg0, in_reg1
let context = EvexContext::Other { length: EvexVectorLength::V128 };
let masking = EvexMasking::None;
put_evex(bits, out_reg0, in_reg0, in_reg1, context, masking, sink); // params: reg, vvvv, rm
modrm_rr(in_reg1, out_reg0, sink); // params: rm, reg
return;
}
}
// Recipe Mp2fax
291 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp2(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe DynRexMp2fax
292 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp2(bits, rex2(in_reg0, in_reg1), sink);
modrm_rr(in_reg0, in_reg1, sink);
return;
}
}
// Recipe Mp3fcmp
293 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_mp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexMp3fcmp
294 => {
if let InstructionData::Binary {
opcode,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
put_dynrexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Mp2f_ib
295 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_mp2(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe DynRexMp2f_ib
296 => {
if let InstructionData::BinaryImm64 {
opcode,
imm,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_dynrexmp2(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
let imm: i64 = imm.into();
sink.put1(imm as u8);
return;
}
}
// Recipe Mp2icscc_fpr
297 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexMp2icscc_fpr
298 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_dynrexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Mp3icscc_fpr
299 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_mp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe DynRexMp3icscc_fpr
300 => {
if let InstructionData::IntCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_dynrexmp3(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
return;
}
}
// Recipe Op2pfcmp
301 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_op2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// Add immediate byte indicating what type of comparison.
use crate::ir::condcodes::FloatCC::*;
let imm = match cond {
Equal => 0x00,
LessThan => 0x01,
LessThanOrEqual => 0x02,
Unordered => 0x03,
NotEqual => 0x04,
UnorderedOrGreaterThanOrEqual => 0x05,
UnorderedOrGreaterThan => 0x06,
Ordered => 0x07,
_ => panic!("{} not supported by pfcmp", cond),
};
sink.put1(imm);
return;
}
}
// Recipe DynRexOp2pfcmp
302 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_dynrexop2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// Add immediate byte indicating what type of comparison.
use crate::ir::condcodes::FloatCC::*;
let imm = match cond {
Equal => 0x00,
LessThan => 0x01,
LessThanOrEqual => 0x02,
Unordered => 0x03,
NotEqual => 0x04,
UnorderedOrGreaterThanOrEqual => 0x05,
UnorderedOrGreaterThan => 0x06,
Ordered => 0x07,
_ => panic!("{} not supported by pfcmp", cond),
};
sink.put1(imm);
return;
}
}
// Recipe Mp2pfcmp
303 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_mp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// Add immediate byte indicating what type of comparison.
use crate::ir::condcodes::FloatCC::*;
let imm = match cond {
Equal => 0x00,
LessThan => 0x01,
LessThanOrEqual => 0x02,
Unordered => 0x03,
NotEqual => 0x04,
UnorderedOrGreaterThanOrEqual => 0x05,
UnorderedOrGreaterThan => 0x06,
Ordered => 0x07,
_ => panic!("{} not supported by pfcmp", cond),
};
sink.put1(imm);
return;
}
}
// Recipe DynRexMp2pfcmp
304 => {
if let InstructionData::FloatCompare {
opcode,
cond,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
// Comparison instruction.
put_dynrexmp2(bits, rex2(in_reg1, in_reg0), sink);
modrm_rr(in_reg1, in_reg0, sink);
// Add immediate byte indicating what type of comparison.
use crate::ir::condcodes::FloatCC::*;
let imm = match cond {
Equal => 0x00,
LessThan => 0x01,
LessThanOrEqual => 0x02,
Unordered => 0x03,
NotEqual => 0x04,
UnorderedOrGreaterThanOrEqual => 0x05,
UnorderedOrGreaterThan => 0x06,
Ordered => 0x07,
_ => panic!("{} not supported by pfcmp", cond),
};
sink.put1(imm);
return;
}
}
// Recipe DynRexOp2furm
305 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_dynrexop2(bits, rex2(in_reg0, out_reg0), sink);
modrm_rr(in_reg0, out_reg0, sink);
return;
}
}
// Recipe Op1fnaddr4
306 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs4,
&func.dfg.ext_funcs[func_ref].name,
0);
sink.put4(0);
return;
}
}
// Recipe RexOp1fnaddr8
307 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs8,
&func.dfg.ext_funcs[func_ref].name,
0);
sink.put8(0);
return;
}
}
// Recipe Op1allones_fnaddr4
308 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs4,
&func.dfg.ext_funcs[func_ref].name,
0);
// Write the immediate as `!0` for the benefit of BaldrMonkey.
sink.put4(!0);
return;
}
}
// Recipe RexOp1allones_fnaddr8
309 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs8,
&func.dfg.ext_funcs[func_ref].name,
0);
// Write the immediate as `!0` for the benefit of BaldrMonkey.
sink.put8(!0);
return;
}
}
// Recipe RexOp1pcrel_fnaddr8
310 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
// The addend adjusts for the difference between the end of the
// instruction and the beginning of the immediate field.
sink.reloc_external(func.srclocs[inst],
Reloc::X86PCRel4,
&func.dfg.ext_funcs[func_ref].name,
-4);
sink.put4(0);
return;
}
}
// Recipe RexOp1got_fnaddr8
311 => {
if let InstructionData::FuncAddr {
opcode,
func_ref,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
// The addend adjusts for the difference between the end of the
// instruction and the beginning of the immediate field.
sink.reloc_external(func.srclocs[inst],
Reloc::X86GOTPCRel4,
&func.dfg.ext_funcs[func_ref].name,
-4);
sink.put4(0);
return;
}
}
// Recipe Op1gvaddr4
312 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs4,
&func.global_values[global_value].symbol_name(),
0);
sink.put4(0);
return;
}
}
// Recipe RexOp1gvaddr8
313 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.reloc_external(func.srclocs[inst],
Reloc::Abs8,
&func.global_values[global_value].symbol_name(),
0);
sink.put8(0);
return;
}
}
// Recipe RexOp1pcrel_gvaddr8
314 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_rm(5, out_reg0, sink);
// The addend adjusts for the difference between the end of the
// instruction and the beginning of the immediate field.
sink.reloc_external(func.srclocs[inst],
Reloc::X86PCRel4,
&func.global_values[global_value].symbol_name(),
-4);
sink.put4(0);
return;
}
}
// Recipe RexOp1got_gvaddr8
315 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_rm(5, out_reg0, sink);
// The addend adjusts for the difference between the end of the
// instruction and the beginning of the immediate field.
sink.reloc_external(func.srclocs[inst],
Reloc::X86GOTPCRel4,
&func.global_values[global_value].symbol_name(),
-4);
sink.put4(0);
return;
}
}
// Recipe RexOp1spaddr_id
316 => {
if let InstructionData::StackLoad {
opcode,
stack_slot,
offset,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let sp = StackRef::sp(stack_slot, &func.stack_slots);
let base = stk_base(sp.base);
put_rexop1(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
let imm : i32 = offset.into();
sink.put4(sp.offset.checked_add(imm).unwrap() as u32);
return;
}
}
// Recipe Op1spaddr_id
317 => {
if let InstructionData::StackLoad {
opcode,
stack_slot,
offset,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
let sp = StackRef::sp(stack_slot, &func.stack_slots);
let base = stk_base(sp.base);
put_op1(bits, rex2(base, out_reg0), sink);
modrm_sib_disp32(out_reg0, sink);
sib_noindex(base, sink);
let imm : i32 = offset.into();
sink.put4(sp.offset.checked_add(imm).unwrap() as u32);
return;
}
}
// Recipe RexOp1const_addr
318 => {
if let InstructionData::UnaryConst {
opcode,
constant_handle,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
const_disp4(constant_handle, func, sink);
return;
}
}
// Recipe Op1const_addr
319 => {
if let InstructionData::UnaryConst {
opcode,
constant_handle,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
const_disp4(constant_handle, func, sink);
return;
}
}
// Recipe Op1call_id
320 => {
if let InstructionData::Call {
opcode,
func_ref,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
put_op1(bits, BASE_REX, sink);
// The addend adjusts for the difference between the end of the
// instruction and the beginning of the immediate field.
sink.reloc_external(func.srclocs[inst],
Reloc::X86CallPCRel4,
&func.dfg.ext_funcs[func_ref].name,
-4);
sink.put4(0);
sink.add_call_site(opcode, func.srclocs[inst]);
return;
}
}
// Recipe Op1call_plt_id
321 => {
if let InstructionData::Call {
opcode,
func_ref,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
put_op1(bits, BASE_REX, sink);
sink.reloc_external(func.srclocs[inst],
Reloc::X86CallPLTRel4,
&func.dfg.ext_funcs[func_ref].name,
-4);
sink.put4(0);
sink.add_call_site(opcode, func.srclocs[inst]);
return;
}
}
// Recipe Op1call_r
322 => {
if let InstructionData::CallIndirect {
opcode,
sig_ref,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
put_op1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
sink.add_call_site(opcode, func.srclocs[inst]);
return;
}
}
// Recipe RexOp1call_r
323 => {
if let InstructionData::CallIndirect {
opcode,
sig_ref,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
sink.trap(TrapCode::StackOverflow, func.srclocs[inst]);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
sink.add_call_site(opcode, func.srclocs[inst]);
return;
}
}
// Recipe Op1ret
324 => {
if let InstructionData::MultiAry {
opcode,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op1(bits, BASE_REX, sink);
return;
}
}
// Recipe Op1jmpb
325 => {
if let InstructionData::Jump {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op1(bits, BASE_REX, sink);
disp1(destination, func, sink);
return;
}
}
// Recipe Op1jmpd
326 => {
if let InstructionData::Jump {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op1(bits, BASE_REX, sink);
disp4(destination, func, sink);
return;
}
}
// Recipe Op1brib
327 => {
if let InstructionData::BranchInt {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op1(bits | icc2opc(cond), BASE_REX, sink);
disp1(destination, func, sink);
return;
}
}
// Recipe RexOp1brib
328 => {
if let InstructionData::BranchInt {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_rexop1(bits | icc2opc(cond), BASE_REX, sink);
disp1(destination, func, sink);
return;
}
}
// Recipe Op2brid
329 => {
if let InstructionData::BranchInt {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op2(bits | icc2opc(cond), BASE_REX, sink);
disp4(destination, func, sink);
return;
}
}
// Recipe RexOp2brid
330 => {
if let InstructionData::BranchInt {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_rexop2(bits | icc2opc(cond), BASE_REX, sink);
disp4(destination, func, sink);
return;
}
}
// Recipe Op1brfb
331 => {
if let InstructionData::BranchFloat {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op1(bits | fcc2opc(cond), BASE_REX, sink);
disp1(destination, func, sink);
return;
}
}
// Recipe RexOp1brfb
332 => {
if let InstructionData::BranchFloat {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_rexop1(bits | fcc2opc(cond), BASE_REX, sink);
disp1(destination, func, sink);
return;
}
}
// Recipe Op2brfd
333 => {
if let InstructionData::BranchFloat {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_op2(bits | fcc2opc(cond), BASE_REX, sink);
disp4(destination, func, sink);
return;
}
}
// Recipe RexOp2brfd
334 => {
if let InstructionData::BranchFloat {
opcode,
cond,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
put_rexop2(bits | fcc2opc(cond), BASE_REX, sink);
disp4(destination, func, sink);
return;
}
}
// Recipe Op1tjccb
335 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test r, r.
put_op1((bits & 0xff00) | 0x85, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(bits as u8);
disp1(destination, func, sink);
return;
}
}
// Recipe RexOp1tjccb
336 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test r, r.
put_rexop1((bits & 0xff00) | 0x85, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(bits as u8);
disp1(destination, func, sink);
return;
}
}
// Recipe Op1tjccd
337 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test r, r.
put_op1((bits & 0xff00) | 0x85, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(0x0f);
sink.put1(bits as u8);
disp4(destination, func, sink);
return;
}
}
// Recipe RexOp1tjccd
338 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test r, r.
put_rexop1((bits & 0xff00) | 0x85, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(0x0f);
sink.put1(bits as u8);
disp4(destination, func, sink);
return;
}
}
// Recipe Op1t8jccd_long
339 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test32 r, 0xff.
put_op1((bits & 0xff00) | 0xf7, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
sink.put4(0xff);
// Jcc instruction.
sink.put1(0x0f);
sink.put1(bits as u8);
disp4(destination, func, sink);
return;
}
}
// Recipe Op1t8jccb_abcd
340 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test8 r, r.
put_op1((bits & 0xff00) | 0x84, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(bits as u8);
disp1(destination, func, sink);
return;
}
}
// Recipe RexOp1t8jccb
341 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test8 r, r.
put_rexop1((bits & 0xff00) | 0x84, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(bits as u8);
disp1(destination, func, sink);
return;
}
}
// Recipe Op1t8jccd_abcd
342 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test8 r, r.
put_op1((bits & 0xff00) | 0x84, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(0x0f);
sink.put1(bits as u8);
disp4(destination, func, sink);
return;
}
}
// Recipe RexOp1t8jccd
343 => {
if let InstructionData::Branch {
opcode,
destination,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
let in_reg0 = divert.reg(args[0], &func.locations);
// test8 r, r.
put_rexop1((bits & 0xff00) | 0x84, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Jcc instruction.
sink.put1(0x0f);
sink.put1(bits as u8);
disp4(destination, func, sink);
return;
}
}
// Recipe RexOp1jt_entry
344 => {
if let InstructionData::BranchTableEntry {
opcode,
imm,
table,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex3(in_reg1, out_reg0, in_reg0), sink);
if needs_offset(in_reg1) {
modrm_sib_disp8(out_reg0, sink);
sib(imm.trailing_zeros() as u8, in_reg0, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(imm.trailing_zeros() as u8, in_reg0, in_reg1, sink);
}
return;
}
}
// Recipe Op1jt_entry
345 => {
if let InstructionData::BranchTableEntry {
opcode,
imm,
table,
ref args,
..
} = *inst_data {
let in_reg0 = divert.reg(args[0], &func.locations);
let in_reg1 = divert.reg(args[1], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex3(in_reg1, out_reg0, in_reg0), sink);
if needs_offset(in_reg1) {
modrm_sib_disp8(out_reg0, sink);
sib(imm.trailing_zeros() as u8, in_reg0, in_reg1, sink);
sink.put1(0);
} else {
modrm_sib(out_reg0, sink);
sib(imm.trailing_zeros() as u8, in_reg0, in_reg1, sink);
}
return;
}
}
// Recipe RexOp1jt_base
346 => {
if let InstructionData::BranchTableBase {
opcode,
table,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_rexop1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
// No reloc is needed here as the jump table is emitted directly after
// the function body.
jt_disp4(table, func, sink);
return;
}
}
// Recipe Op1jt_base
347 => {
if let InstructionData::BranchTableBase {
opcode,
table,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
put_op1(bits, rex2(0, out_reg0), sink);
modrm_riprel(out_reg0, sink);
// No reloc is needed here as the jump table is emitted directly after
// the function body.
jt_disp4(table, func, sink);
return;
}
}
// Recipe RexOp1indirect_jmp
348 => {
if let InstructionData::IndirectJump {
opcode,
table,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe Op1indirect_jmp
349 => {
if let InstructionData::IndirectJump {
opcode,
table,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
put_op1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
return;
}
}
// Recipe Op2trap
350 => {
if let InstructionData::Trap {
opcode,
code,
..
} = *inst_data {
sink.trap(code, func.srclocs[inst]);
put_op2(bits, BASE_REX, sink);
return;
}
}
// Recipe debugtrap
351 => {
if let InstructionData::NullAry {
opcode,
..
} = *inst_data {
sink.put1(0xcc);
return;
}
}
// Recipe trapif
352 => {
if let InstructionData::IntCondTrap {
opcode,
cond,
code,
..
} = *inst_data {
// Jump over a 2-byte ud2.
sink.put1(0x70 | (icc2opc(cond.inverse()) as u8));
sink.put1(2);
// ud2.
sink.trap(code, func.srclocs[inst]);
sink.put1(0x0f);
sink.put1(0x0b);
return;
}
}
// Recipe trapff
353 => {
if let InstructionData::FloatCondTrap {
opcode,
cond,
code,
..
} = *inst_data {
// Jump over a 2-byte ud2.
sink.put1(0x70 | (fcc2opc(cond.inverse()) as u8));
sink.put1(2);
// ud2.
sink.trap(code, func.srclocs[inst]);
sink.put1(0x0f);
sink.put1(0x0b);
return;
}
}
// Recipe Op1pu_id_ref
354 => {
if let InstructionData::NullAry {
opcode,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_op1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.put4(0);
return;
}
}
// Recipe RexOp1pu_id_ref
355 => {
if let InstructionData::NullAry {
opcode,
..
} = *inst_data {
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// The destination register is encoded in the low bits of the opcode.
// No ModR/M.
put_rexop1(bits | (out_reg0 & 7), rex1(out_reg0), sink);
sink.put4(0);
return;
}
}
// Recipe Op1is_zero
356 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Test instruction.
put_op1(bits, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Check ZF = 1 flag to see if register holds 0.
sink.put1(0x0f);
sink.put1(0x94);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexOp1is_zero
357 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Test instruction.
put_rexop1(bits, rex2(in_reg0, in_reg0), sink);
modrm_rr(in_reg0, in_reg0, sink);
// Check ZF = 1 flag to see if register holds 0.
sink.put1(0x0f);
sink.put1(0x94);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe Op1is_invalid
358 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_op1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
sink.put1(0xff);
// `setCC` instruction, no REX.
use crate::ir::condcodes::IntCC::*;
let setcc = 0x90 | icc2opc(Equal);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe RexOp1is_invalid
359 => {
if let InstructionData::Unary {
opcode,
arg,
..
} = *inst_data {
let args = [arg];
let in_reg0 = divert.reg(args[0], &func.locations);
let results = [func.dfg.first_result(inst)];
let out_reg0 = divert.reg(results[0], &func.locations);
// Comparison instruction.
put_rexop1(bits, rex1(in_reg0), sink);
modrm_r_bits(in_reg0, bits, sink);
sink.put1(0xff);
// `setCC` instruction, no REX.
use crate::ir::condcodes::IntCC::*;
let setcc = 0x90 | icc2opc(Equal);
sink.put1(0x0f);
sink.put1(setcc as u8);
modrm_rr(out_reg0, 0, sink);
return;
}
}
// Recipe safepoint
360 => {
if let InstructionData::MultiAry {
opcode,
ref args,
..
} = *inst_data {
let args = args.as_slice(&func.dfg.value_lists);
sink.add_stack_map(args, func, isa);
return;
}
}
// Recipe elf_tls_get_addr
361 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
// output %rax
// clobbers %rdi
// Those data16 prefixes are necessary to pad to 16 bytes.
// data16 lea gv@tlsgd(%rip),%rdi
sink.put1(0x66); // data16
sink.put1(0b01001000); // rex.w
const LEA: u8 = 0x8d;
sink.put1(LEA); // lea
modrm_riprel(0b111/*out_reg0*/, sink); // 0x3d
sink.reloc_external(func.srclocs[inst],
Reloc::ElfX86_64TlsGd,
&func.global_values[global_value].symbol_name(),
-4);
sink.put4(0);
// data16 data16 callq __tls_get_addr-4
sink.put1(0x66); // data16
sink.put1(0x66); // data16
sink.put1(0b01001000); // rex.w
sink.put1(0xe8); // call
sink.reloc_external(func.srclocs[inst],
Reloc::X86CallPLTRel4,
&ExternalName::LibCall(LibCall::ElfTlsGetAddr),
-4);
sink.put4(0);
return;
}
}
// Recipe macho_tls_get_addr
362 => {
if let InstructionData::UnaryGlobalValue {
opcode,
global_value,
..
} = *inst_data {
// output %rax
// clobbers %rdi
// movq gv@tlv(%rip), %rdi
sink.put1(0x48); // rex
sink.put1(0x8b); // mov
modrm_riprel(0b111/*out_reg0*/, sink); // 0x3d
sink.reloc_external(func.srclocs[inst],
Reloc::MachOX86_64Tlv,
&func.global_values[global_value].symbol_name(),
-4);
sink.put4(0);
// callq *(%rdi)
sink.put1(0xff);
sink.put1(0x17);
return;
}
}
_ => {},
}
if encoding.is_legal() {
bad_encoding(func, inst);
}
}
| 37.359167 | 108 | 0.427581 |
fc013501ee52668c5c9d85f47748e9d8b4a4004e | 8,241 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub fn serialize_operation_cancel_rotate_secret(
input: &crate::input::CancelRotateSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_cancel_rotate_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_create_secret(
input: &crate::input::CreateSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_create_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_resource_policy(
input: &crate::input::DeleteResourcePolicyInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_resource_policy_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_delete_secret(
input: &crate::input::DeleteSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_delete_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_describe_secret(
input: &crate::input::DescribeSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_describe_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_random_password(
input: &crate::input::GetRandomPasswordInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_get_random_password_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_resource_policy(
input: &crate::input::GetResourcePolicyInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_get_resource_policy_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_get_secret_value(
input: &crate::input::GetSecretValueInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_get_secret_value_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_secrets(
input: &crate::input::ListSecretsInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_secrets_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_list_secret_version_ids(
input: &crate::input::ListSecretVersionIdsInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_list_secret_version_ids_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_put_resource_policy(
input: &crate::input::PutResourcePolicyInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_put_resource_policy_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_put_secret_value(
input: &crate::input::PutSecretValueInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_put_secret_value_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_restore_secret(
input: &crate::input::RestoreSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_restore_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_rotate_secret(
input: &crate::input::RotateSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_rotate_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_tag_resource(
input: &crate::input::TagResourceInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_tag_resource_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_untag_resource(
input: &crate::input::UntagResourceInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_untag_resource_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_update_secret(
input: &crate::input::UpdateSecretInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_update_secret_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_update_secret_version_stage(
input: &crate::input::UpdateSecretVersionStageInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_update_secret_version_stage_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
pub fn serialize_operation_validate_resource_policy(
input: &crate::input::ValidateResourcePolicyInput,
) -> Result<smithy_http::body::SdkBody, serde_json::error::Error> {
let mut out = String::new();
let mut object = smithy_json::serialize::JsonObjectWriter::new(&mut out);
crate::json_ser::serialize_structure_validate_resource_policy_input(&mut object, input);
object.finish();
Ok(smithy_http::body::SdkBody::from(out))
}
| 43.146597 | 95 | 0.730494 |
e5c636d64ae5c7a5e6b54f2756cdf4b7dd25f38b | 4,510 | s! {
pub struct termios2 {
pub c_iflag: ::tcflag_t,
pub c_oflag: ::tcflag_t,
pub c_cflag: ::tcflag_t,
pub c_lflag: ::tcflag_t,
pub c_line: ::cc_t,
pub c_cc: [::cc_t; 19],
pub c_ispeed: ::speed_t,
pub c_ospeed: ::speed_t,
}
}
// include/uapi/asm-generic/socket.h
// arch/alpha/include/uapi/asm/socket.h
// tools/include/uapi/asm-generic/socket.h
// arch/mips/include/uapi/asm/socket.h
pub const SOL_SOCKET: ::c_int = 1;
// Defined in unix/linux_like/mod.rs
// pub const SO_DEBUG: ::c_int = 1;
pub const SO_REUSEADDR: ::c_int = 2;
pub const SO_TYPE: ::c_int = 3;
pub const SO_ERROR: ::c_int = 4;
pub const SO_DONTROUTE: ::c_int = 5;
pub const SO_BROADCAST: ::c_int = 6;
pub const SO_SNDBUF: ::c_int = 7;
pub const SO_RCVBUF: ::c_int = 8;
pub const SO_KEEPALIVE: ::c_int = 9;
pub const SO_OOBINLINE: ::c_int = 10;
pub const SO_NO_CHECK: ::c_int = 11;
pub const SO_PRIORITY: ::c_int = 12;
pub const SO_LINGER: ::c_int = 13;
pub const SO_BSDCOMPAT: ::c_int = 14;
pub const SO_REUSEPORT: ::c_int = 15;
pub const SO_PASSCRED: ::c_int = 16;
pub const SO_PEERCRED: ::c_int = 17;
pub const SO_RCVLOWAT: ::c_int = 18;
pub const SO_SNDLOWAT: ::c_int = 19;
pub const SO_RCVTIMEO: ::c_int = 20;
pub const SO_SNDTIMEO: ::c_int = 21;
// pub const SO_RCVTIMEO_OLD: ::c_int = 20;
// pub const SO_SNDTIMEO_OLD: ::c_int = 21;
pub const SO_SECURITY_AUTHENTICATION: ::c_int = 22;
pub const SO_SECURITY_ENCRYPTION_TRANSPORT: ::c_int = 23;
pub const SO_SECURITY_ENCRYPTION_NETWORK: ::c_int = 24;
pub const SO_BINDTODEVICE: ::c_int = 25;
pub const SO_ATTACH_FILTER: ::c_int = 26;
pub const SO_DETACH_FILTER: ::c_int = 27;
pub const SO_GET_FILTER: ::c_int = SO_ATTACH_FILTER;
pub const SO_PEERNAME: ::c_int = 28;
pub const SO_TIMESTAMP: ::c_int = 29;
// pub const SO_TIMESTAMP_OLD: ::c_int = 29;
pub const SO_ACCEPTCONN: ::c_int = 30;
pub const SO_PEERSEC: ::c_int = 31;
pub const SO_SNDBUFFORCE: ::c_int = 32;
pub const SO_RCVBUFFORCE: ::c_int = 33;
pub const SO_PASSSEC: ::c_int = 34;
pub const SO_TIMESTAMPNS: ::c_int = 35;
// pub const SO_TIMESTAMPNS_OLD: ::c_int = 35;
pub const SO_MARK: ::c_int = 36;
pub const SO_TIMESTAMPING: ::c_int = 37;
// pub const SO_TIMESTAMPING_OLD: ::c_int = 37;
pub const SO_PROTOCOL: ::c_int = 38;
pub const SO_DOMAIN: ::c_int = 39;
pub const SO_RXQ_OVFL: ::c_int = 40;
pub const SO_WIFI_STATUS: ::c_int = 41;
pub const SCM_WIFI_STATUS: ::c_int = SO_WIFI_STATUS;
pub const SO_PEEK_OFF: ::c_int = 42;
pub const SO_NOFCS: ::c_int = 43;
pub const SO_LOCK_FILTER: ::c_int = 44;
pub const SO_SELECT_ERR_QUEUE: ::c_int = 45;
pub const SO_BUSY_POLL: ::c_int = 46;
pub const SO_MAX_PACING_RATE: ::c_int = 47;
pub const SO_BPF_EXTENSIONS: ::c_int = 48;
pub const SO_INCOMING_CPU: ::c_int = 49;
pub const SO_ATTACH_BPF: ::c_int = 50;
pub const SO_DETACH_BPF: ::c_int = SO_DETACH_FILTER;
pub const SO_ATTACH_REUSEPORT_CBPF: ::c_int = 51;
pub const SO_ATTACH_REUSEPORT_EBPF: ::c_int = 52;
pub const SO_CNX_ADVICE: ::c_int = 53;
pub const SCM_TIMESTAMPING_OPT_STATS: ::c_int = 54;
pub const SO_MEMINFO: ::c_int = 55;
pub const SO_INCOMING_NAPI_ID: ::c_int = 56;
pub const SO_COOKIE: ::c_int = 57;
pub const SCM_TIMESTAMPING_PKTINFO: ::c_int = 58;
pub const SO_PEERGROUPS: ::c_int = 59;
pub const SO_ZEROCOPY: ::c_int = 60;
pub const SO_TXTIME: ::c_int = 61;
pub const SCM_TXTIME: ::c_int = SO_TXTIME;
pub const SO_BINDTOIFINDEX: ::c_int = 62;
cfg_if! {
// Some of these platforms in CI already have these constants.
// But they may still not have those _OLD ones.
if #[cfg(all(any(target_arch = "x86",
target_arch = "x86_64",
target_arch = "aarch64"),
not(target_env = "musl")))] {
pub const SO_TIMESTAMP_NEW: ::c_int = 63;
pub const SO_TIMESTAMPNS_NEW: ::c_int = 64;
pub const SO_TIMESTAMPING_NEW: ::c_int = 65;
pub const SO_RCVTIMEO_NEW: ::c_int = 66;
pub const SO_SNDTIMEO_NEW: ::c_int = 67;
pub const SO_DETACH_REUSEPORT_BPF: ::c_int = 68;
}
}
// pub const SO_PREFER_BUSY_POLL: ::c_int = 69;
// pub const SO_BUSY_POLL_BUDGET: ::c_int = 70;
// Defined in unix/linux_like/mod.rs
// pub const SCM_TIMESTAMP: ::c_int = SO_TIMESTAMP;
pub const SCM_TIMESTAMPNS: ::c_int = SO_TIMESTAMPNS;
pub const SCM_TIMESTAMPING: ::c_int = SO_TIMESTAMPING;
pub const TCGETS2: ::c_ulong = 0x802c542a;
pub const TCSETS2: ::c_ulong = 0x402c542b;
pub const TCSETSW2: ::c_ulong = 0x402c542c;
pub const TCSETSF2: ::c_ulong = 0x402c542d;
| 37.89916 | 66 | 0.692018 |
7606282c544cea3b77e2c5f9f47774b0903b23c4 | 2,426 | // WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "android-app-AliasActivity"))]
__jni_bindgen! {
/// public class [AliasActivity](https://developer.android.com/reference/android/app/AliasActivity.html)
///
/// Required feature: android-app-AliasActivity
public class AliasActivity ("android/app/AliasActivity") extends crate::android::app::Activity {
/// [AliasActivity](https://developer.android.com/reference/android/app/AliasActivity.html#AliasActivity())
pub fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::app::AliasActivity>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/app/AliasActivity", java.flags == PUBLIC, .name == "<init>", .descriptor == "()V"
unsafe {
let __jni_args = [];
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/AliasActivity\0", "<init>\0", "()V\0");
__jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
}
}
// // Not emitting: Non-public method
// /// [onCreate](https://developer.android.com/reference/android/app/AliasActivity.html#onCreate(android.os.Bundle))
// ///
// /// Required features: "android-os-Bundle"
// #[cfg(any(feature = "all", all(feature = "android-os-Bundle")))]
// fn onCreate<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::os::Bundle>>) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// // class.path == "android/app/AliasActivity", java.flags == PROTECTED, .name == "onCreate", .descriptor == "(Landroid/os/Bundle;)V"
// unsafe {
// let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())];
// let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
// let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/AliasActivity\0", "onCreate\0", "(Landroid/os/Bundle;)V\0");
// __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
// }
// }
}
}
| 65.567568 | 258 | 0.632317 |
64703f4dc355e562cdf531341cd801848645ea29 | 6,527 | use crate::BASE_DIR_FLAG;
use account_utils::{random_password, strip_off_newlines};
use clap::{App, Arg, ArgMatches};
use eth2_wallet::{
bip39::{Language, Mnemonic, MnemonicType},
PlainText,
};
use eth2_wallet_manager::{WalletManager, WalletType};
use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::prelude::*;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
pub const CMD: &str = "create";
pub const HD_TYPE: &str = "hd";
pub const NAME_FLAG: &str = "name";
pub const PASSPHRASE_FLAG: &str = "passphrase-file";
pub const TYPE_FLAG: &str = "type";
pub const MNEMONIC_FLAG: &str = "mnemonic-output-path";
pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
App::new(CMD)
.about("Creates a new HD (hierarchical-deterministic) EIP-2386 wallet.")
.arg(
Arg::with_name(NAME_FLAG)
.long(NAME_FLAG)
.value_name("WALLET_NAME")
.help(
"The wallet will be created with this name. It is not allowed to \
create two wallets with the same name for the same --base-dir.",
)
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(PASSPHRASE_FLAG)
.long(PASSPHRASE_FLAG)
.value_name("WALLET_PASSWORD_PATH")
.help(
"A path to a file containing the password which will unlock the wallet. \
If the file does not exist, a random password will be generated and \
saved at that path. To avoid confusion, if the file does not already \
exist it must include a '.pass' suffix.",
)
.takes_value(true)
.required(true),
)
.arg(
Arg::with_name(TYPE_FLAG)
.long(TYPE_FLAG)
.value_name("WALLET_TYPE")
.help(
"The type of wallet to create. Only HD (hierarchical-deterministic) \
wallets are supported presently..",
)
.takes_value(true)
.possible_values(&[HD_TYPE])
.default_value(HD_TYPE),
)
.arg(
Arg::with_name(MNEMONIC_FLAG)
.long(MNEMONIC_FLAG)
.value_name("MNEMONIC_PATH")
.help(
"If present, the mnemonic will be saved to this file. DO NOT SHARE THE MNEMONIC.",
)
.takes_value(true)
)
}
pub fn cli_run(matches: &ArgMatches, base_dir: PathBuf) -> Result<(), String> {
let name: String = clap_utils::parse_required(matches, NAME_FLAG)?;
let wallet_password_path: PathBuf = clap_utils::parse_required(matches, PASSPHRASE_FLAG)?;
let mnemonic_output_path: Option<PathBuf> = clap_utils::parse_optional(matches, MNEMONIC_FLAG)?;
let type_field: String = clap_utils::parse_required(matches, TYPE_FLAG)?;
let wallet_type = match type_field.as_ref() {
HD_TYPE => WalletType::Hd,
unknown => return Err(format!("--{} {} is not supported", TYPE_FLAG, unknown)),
};
let mgr = WalletManager::open(&base_dir)
.map_err(|e| format!("Unable to open --{}: {:?}", BASE_DIR_FLAG, e))?;
// Create a new random mnemonic.
//
// The `tiny-bip39` crate uses `thread_rng()` for this entropy.
let mnemonic = Mnemonic::new(MnemonicType::Words12, Language::English);
// Create a random password if the file does not exist.
if !wallet_password_path.exists() {
// To prevent users from accidentally supplying their password to the PASSPHRASE_FLAG and
// create a file with that name, we require that the password has a .pass suffix.
if wallet_password_path.extension() != Some(&OsStr::new("pass")) {
return Err(format!(
"Only creates a password file if that file ends in .pass: {:?}",
wallet_password_path
));
}
create_with_600_perms(&wallet_password_path, random_password().as_bytes())
.map_err(|e| format!("Unable to write to {:?}: {:?}", wallet_password_path, e))?;
}
let wallet_password = fs::read(&wallet_password_path)
.map_err(|e| format!("Unable to read {:?}: {:?}", wallet_password_path, e))
.map(|bytes| PlainText::from(strip_off_newlines(bytes)))?;
let wallet = mgr
.create_wallet(name, wallet_type, &mnemonic, wallet_password.as_bytes())
.map_err(|e| format!("Unable to create wallet: {:?}", e))?;
if let Some(path) = mnemonic_output_path {
create_with_600_perms(&path, mnemonic.phrase().as_bytes())
.map_err(|e| format!("Unable to write mnemonic to {:?}: {:?}", path, e))?;
}
println!("Your wallet's 12-word BIP-39 mnemonic is:");
println!();
println!("\t{}", mnemonic.phrase());
println!();
println!("This mnemonic can be used to fully restore your wallet, should ");
println!("you lose the JSON file or your password. ");
println!();
println!("It is very important that you DO NOT SHARE this mnemonic as it will ");
println!("reveal the private keys of all validators and keys generated with ");
println!("this wallet. That would be catastrophic.");
println!();
println!("It is also important to store a backup of this mnemonic so you can ");
println!("recover your private keys in the case of data loss. Writing it on ");
println!("a piece of paper and storing it in a safe place would be prudent.");
println!();
println!("Your wallet's UUID is:");
println!();
println!("\t{}", wallet.wallet().uuid());
println!();
println!("You do not need to backup your UUID or keep it secret.");
Ok(())
}
/// Creates a file at `path` containing `bytes`, with `600 (-rw-------)`
/// permissions.
///
/// The file is opened with `OpenOptions::mode(0o600)`, so a *newly created*
/// file never exists with the default (usually world-readable) permissions —
/// the previous implementation created the file first and tightened the mode
/// afterwards, leaving a short window in which another process could open it.
/// If the file already exists it is truncated and its permissions are reset
/// to `0o600`, matching the old behavior.
///
/// Returns a human-readable error `String` on any I/O failure.
pub fn create_with_600_perms<P: AsRef<Path>>(path: P, bytes: &[u8]) -> Result<(), String> {
    use std::os::unix::fs::OpenOptionsExt;
    let path = path.as_ref();
    let mut file = std::fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .mode(0o600) // applied atomically at creation time
        .open(path)
        .map_err(|e| format!("Unable to create {:?}: {}", path, e))?;
    // `mode` only affects newly created files; enforce 0o600 explicitly in
    // case the file pre-existed with looser permissions (old behavior).
    let mut perm = file
        .metadata()
        .map_err(|e| format!("Unable to get {:?} metadata: {}", path, e))?
        .permissions();
    perm.set_mode(0o600);
    file.set_permissions(perm)
        .map_err(|e| format!("Unable to set {:?} permissions: {}", path, e))?;
    file.write_all(bytes)
        .map_err(|e| format!("Unable to write to {:?}: {}", path, e))?;
    Ok(())
}
| 39.557576 | 102 | 0.587713 |
decb7b8af9f3ad2d88116db43a2fbc3972c1701c | 858 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern: reference is not valid outside of its lifetime
extern mod extra;
use extra::arc;
fn main() {
    // Compile-fail fixture (pre-1.0 syntax): `write_downgrade` hands the
    // closure a `write_mode` token whose lifetime is tied to the borrow of
    // `x`. Smuggling it out through `y` makes the reference outlive that
    // borrow, triggering the expected diagnostic declared above:
    // "reference is not valid outside of its lifetime".
    let x = ~arc::RWARC(1);
    let mut y = None;
    do x.write_downgrade |write_mode| {
        y = Some(write_mode);
    }
    y.get();
    // Adding this line causes a method unification failure instead
    // do (&option::unwrap(y)).write |state| { assert!(*state == 1); }
}
| 35.75 | 70 | 0.687646 |
23c19f7e2c7b7857e6366553e231b5057787d491 | 5,484 | use headers_serializer::ToMaps;
use crate::types::Metas;
#[derive(Clone, Debug, Default, PartialEq, ToMaps)]
// #[cfg_attr(feature = "deserialize_structs", derive(Deserialize))]
pub struct PutObjectOptions {
/// <p>The web page caching behavior that is specified when the object is downloaded.</p>
#[label("opts")]
pub cache_control: Option<String>,
    /// <p>The Content-Disposition header to apply when the object is downloaded (for example, to suggest a filename). NOTE(review): the upstream doc for this field was empty — confirm exact semantics against the OSS API reference.</p>
#[label("opts")]
pub content_disposition: Option<String>,
/// <p>The content encoding type of the object during the download. </p>
#[label("opts")]
pub content_encoding: Option<String>,
/// <p>The MD5 hash of the object you want to upload. The value of Content-MD5 is calculated based on the MD5 algorithm. After the Content-MD5 request header is uploaded, OSS calculates the MD5 hash of the received object and checks whether the calculated MD5 hash is the same as the Content-MD5 value provided in the request.</p>
/// <p>To ensure data integrity, OSS provides multiple methods for you to check the MD5 hashes of the data. To perform MD5 verification based on the Content-MD5 header, add the Content-MD5 header to the request.</p>
#[label("opts")]
pub content_md5: Option<String>,
/// <p>The ETag that is generated when an object is created. ETags are used to identify the content of the objects.</p>
/// <p><li>If an object is created by using a PutObject request, the ETag value is the MD5 hash of the object content.</li>
/// <li>If an object is created by using other methods, the ETag value is the UUID of the object content.</li></p>
/// <p> Note: <li>The ETag value of the object can be used to check whether the object content is modified. To verify data integrity, we recommend that you do not use the ETag of an object as the MD5 hash of the object.</li></p>
#[label("opts")]
pub e_tag: Option<String>,
/// <p>The time period after which the response is considered expired.</p>
#[label("opts")]
pub expires: Option<String>,
/// <p>Specifies whether the PutObject operation overwrites objects of the same name. When the versioning status of the requested bucket is enabled or suspended, the x-oss-forbid-overwrite request header is invalid. In this case, the PutObject operation overwrites objects of the same name.</p>
/// <p><li>If x-oss-forbid-overwrite is not specified or the value of x-oss-forbid-overwrite is set to false, an existing object has the same name as that of the object you want to upload can be overwritten.</li>
/// <li>If the value of x-oss-forbid-overwrite is set to true, an existing object that has the same name as that of the object you want to upload cannot be overwritten.</li></p>
/// <p>If you specify the x-oss-forbid-overwrite request header, the queries per second (QPS) performance of OSS may be degraded. If you want to use the x-oss-forbid-overwrite request header to perform a large number of operations (QPS greater than 1,000), submit a ticket.</p>
#[label("opts")]
pub x_oss_forbid_overwrite: Option<String>,
/// <p>The server-side encryption method that is used when OSS creates the object.</p>
/// <p>Valid values: AES256 and KMS</p>
/// <p>If you specify this parameter, this parameter is returned in the response header and the uploaded object is encrypted and stored. When you download the encrypted object, the x-oss-server-side-encryption header is included in the response and the header value is set to the algorithm used to encrypt the object.</p>
#[label("opts")]
pub x_oss_server_side_encryption: Option<String>,
/// <p>The ID of the customer master key (CMK) hosted in KMS.</p>
/// <p>This parameter is valid only when x-oss-server-side-encryption is set to KMS.</p>
#[label("opts")]
pub x_oss_server_side_encryption_key_id: Option<String>,
/// <p>The access control list (ACL) of the object you want to create.</p>
/// <p>Valid values: public-read, private, and public-read-write</p>
#[label("opts")]
pub x_oss_object_acl: Option<String>,
/// <p>The storage class of an object.</p>
/// <p>If you specify the storage class when you upload the object, the specified storage class applies regardless of the storage class of the bucket that contains the object. If you set x-oss-storage-class to Standard when you upload an object to an IA bucket, the object is stored as a Standard object.</p>
/// <p>Valid values: Standard, IA, Archive, and ColdArchive.</p>
/// <p>Supported operations: PutObject, InitiateMultipartUpload, AppendObject, PutObjectSymlink, and CopyObject.</p>
#[label("opts")]
pub x_oss_storage_class: Option<String>,
/// <p>The object tag. You can configure multiple tags for the object. Example: TagA=A&TagB=B.</p>
/// <p>Note: <li>The tag key and value must be URL-encoded. If a key-value pair does not contain an equal sign (=), the tag value is considered an empty string.</li></p>
#[label("opts")]
pub x_oss_tagging: Option<String>,
/// <p>If the PutObject request contains a parameter prefixed with x-oss-meta-*, the parameter is considered to be user metadata. Example: x-oss-meta-location. An object can have multiple similar parameters. However, the total size of the user metadata cannot exceed 8 KB.</p>
/// <p>Metadata supports hyphens (-), digits, and letters. Uppercase letters are converted to lowercase letters, and other characters such as underscores (_) are not supported.</p>
pub metas: Option<Metas>,
}
| 87.047619 | 334 | 0.719548 |
fe1443dbc5f6a1a3043cea4f7d53ea200e17c42d | 903 | #[doc = "DEVDMANXTDSC register accessor: an alias for `Reg<DEVDMANXTDSC_SPEC>`"]
pub type DEVDMANXTDSC = crate::Reg<devdmanxtdsc::DEVDMANXTDSC_SPEC>;
#[doc = "Device DMA Channel Next Descriptor Address Register"]
pub mod devdmanxtdsc;
#[doc = "DEVDMAADDRESS register accessor: an alias for `Reg<DEVDMAADDRESS_SPEC>`"]
pub type DEVDMAADDRESS = crate::Reg<devdmaaddress::DEVDMAADDRESS_SPEC>;
#[doc = "Device DMA Channel Address Register"]
pub mod devdmaaddress;
#[doc = "DEVDMACONTROL register accessor: an alias for `Reg<DEVDMACONTROL_SPEC>`"]
pub type DEVDMACONTROL = crate::Reg<devdmacontrol::DEVDMACONTROL_SPEC>;
#[doc = "Device DMA Channel Control Register"]
pub mod devdmacontrol;
#[doc = "DEVDMASTATUS register accessor: an alias for `Reg<DEVDMASTATUS_SPEC>`"]
pub type DEVDMASTATUS = crate::Reg<devdmastatus::DEVDMASTATUS_SPEC>;
#[doc = "Device DMA Channel Status Register"]
pub mod devdmastatus;
| 53.117647 | 82 | 0.776301 |
ebb354454524df14e493f1b9538b622088f81246 | 17,287 | #![allow(non_camel_case_types, non_upper_case_globals)]
const_ordinary! { DROPEFFECT: u32: "shell";
	/// [`DROPEFFECT`](https://docs.microsoft.com/en-us/windows/win32/com/dropeffect-constants)
	/// constants (`u32`).
	=>
	=>
	/// Drop target cannot accept the data.
	NONE 0
	/// Drop results in a copy.
	COPY 1
	/// Drag source should remove the data.
	MOVE 2
	/// Drag source should create a link to the original data.
	LINK 4
	/// Scrolling is about to start or is occurring in the target.
	SCROLL 0x8000_0000
}
const_ordinary! { FO: u32: "shell";
	/// [`SHFILEOPSTRUCT`](crate::SHFILEOPSTRUCT) `wFunc` (`u32`).
	///
	/// Selects which file operation `SHFileOperation` performs.
	=>
	=>
	/// Move the source files to the destination.
	MOVE 0x0001
	/// Copy the source files to the destination.
	COPY 0x0002
	/// Delete the source files.
	DELETE 0x0003
	/// Rename the source file.
	RENAME 0x0004
}
const_bitflag! { FOF: u16: "shell";
/// [`SHFILEOPSTRUCT`](crate::SHFILEOPSTRUCT) `fFlags` (`u16`).
=>
=>
MULTIDESTFILES 0x0001
CONFIRMMOUSE 0x0002
SILENT 0x0004
RENAMEONCOLLISION 0x0008
NOCONFIRMATION 0x0010
WANTMAPPINGHANDLE 0x0020
ALLOWUNDO 0x0040
FILESONLY 0x0080
SIMPLEPROGRESS 0x0100
NOCONFIRMMKDIR 0x0200
NOERRORUI 0x0400
NOCOPYSECURITYATTRIBS 0x0800
NORECURSION 0x1000
NO_CONNECTED_ELEMENTS 0x2000
WANTNUKEWARNING 0x4000
NORECURSEREPARSE 0x8000
NO_UI Self::SILENT.0 | Self::NOCONFIRMATION.0 | Self::NOERRORUI.0 | Self::NOCONFIRMMKDIR.0
}
const_bitflag! { FOS: u32: "shell";
/// [`_FILEOPENDIALOGOPTIONS`](https://docs.microsoft.com/en-us/windows/win32/api/shobjidl_core/ne-shobjidl_core-_fileopendialogoptions)
/// enumeration (`u32`).
=>
=>
/// When saving a file prompt before overwriting an existing file of the
/// same name. This is a default value for the Save dialog.
OVERWRITEPROMPT 0x2
/// In the Save dialog only allow the user to choose a file that has one of
/// the file name extensions specified through
/// [`IFileDialog::SetFileTypes`](crate::prelude::ShellIFileDialog::SetFileTypes).
STRICTFILETYPES 0x4
/// Don't change the current working directory.
NOCHANGEDIR 0x8
/// Present an Open dialog that offers a choice of folders rather than
/// files.
PICKFOLDERS 0x20
/// Ensures that returned items are file system items
/// ([`SFGAO::FILESYSTEM`](crate::co::SFGAO::FILESYSTEM)). Note that this
/// does not apply to items returned by
/// [`IFileDialog::GetCurrentSelection`](crate::prelude::ShellIFileDialog::GetCurrentSelection).
FORCEFILESYSTEM 0x40
/// Enables the user to choose any item in the Shell namespace not just
/// those with [`SFGAO::STREAM`](crate::co::SFGAO::STREAM) or
/// [`SFAGO::FILESYSTEM`](crate::co::SFGAO::FILESYSTEM) attributes. This
/// flag cannot be combined with
/// [`FOS::FORCEFILESYSTEM`](crate::co::FOS::FORCEFILESYSTEM).
ALLNONSTORAGEITEMS 0x80
/// Do not check for situations that would prevent an application from
/// opening the selected file such as sharing violations or access denied
/// errors.
NOVALIDATE 0x100
/// Enables the user to select multiple items in the open dialog. Note that
/// when this flag is set the [`IFileOpenDialog`](crate::IFileOpenDialog)
/// interface must be used to retrieve those items.
ALLOWMULTISELECT 0x200
/// The item returned must be in an existing folder. This is a default
/// value.
PATHMUSTEXIST 0x800
/// The item returned must exist. This is a default value for the Open
/// dialog.
FILEMUSTEXIST 0x1000
/// Prompt for creation if the item returned in the save dialog does not
/// exist. Note that this does not actually create the item.
CREATEPROMPT 0x2000
/// In the case of a sharing violation when an application is opening a
/// file call the application back through
/// [`OnShareViolation`](crate::prelude::ShellIFileDialogEvents::OnShareViolation)
/// for guidance. This flag is overridden by
/// [`FOS::NOVALIDATE`](crate::co::FOS::NOVALIDATE).
SHAREAWARE 0x4000
/// Do not return read-only items. This is a default value for the Save
/// dialog.
NOREADONLYRETURN 0x8000
/// Do not test whether creation of the item as specified in the Save dialog
/// will be successful. If this flag is not set the calling application
/// must handle errors such as denial of access discovered when the item
/// is created.
NOTESTFILECREATE 0x1_0000
/// Hide the list of places from which the user has recently opened or saved
/// items. This value is not supported as of Windows 7.
HIDEMRUPLACES 0x2_0000
/// Hide items shown by default in the view's navigation pane. This flag is
/// often used in conjunction with the
/// [`IFileDialog::AddPlace`](crate::prelude::ShellIFileDialog::AddPlace)
/// method, to hide standard locations and replace them with custom
/// locations.
///
/// Windows 7 and later. Hide all of the standard namespace locations (such
/// as Favorites Libraries Computer and Network) shown in the navigation
/// pane.
///
/// Windows Vista. Hide the contents of the Favorite Links tree in the
/// navigation pane. Note that the category itself is still displayed but
/// shown as empty.
HIDEPINNEDPLACES 0x4_0000
/// Shortcuts should not be treated as their target items. This allows an
/// application to open a .lnk file rather than what that file is a shortcut
/// to.
NODEREFERENCELINKS 0x10_0000
/// (This constant has no official documentation.)
OKBUTTONNEEDSINTERACTION 0x20_0000
/// Do not add the item being opened or saved to the recent documents list
/// ([`SHAddToRecentDocs`](crate::SHAddToRecentDocs)).
DONTADDTORECENT 0x200_0000
/// Include hidden and system items.
FORCESHOWHIDDEN 0x1000_0000
/// Indicates to the Save As dialog box that it should open in expanded
/// mode. Expanded mode is the mode that is set and unset by clicking the
/// button in the lower-left corner of the Save As dialog box that switches
/// between Browse Folders and Hide Folders when clicked. This value is not
/// supported as of Windows 7.
DEFAULTNOMINIMODE 0x2000_0000
/// Indicates to the Open dialog box that the preview pane should always be
/// displayed.
FORCEPREVIEWPANEON 0x4000_0000
/// Indicates that the caller is opening a file as a stream
/// ([`BHID_Stream`](crate::prelude::ShellIShellItem::BindToHandler)) so
/// there is no need to download that file.
SUPPORTSTREAMABLEITEMS 0x8000_0000
}
const_ordinary! { FDAP: u32: "shell";
/// [`FDAP`](https://docs.microsoft.com/en-us/windows/win32/api/shobjidl_core/ne-shobjidl_core-fdap)
/// enumeration (`u32`).
=>
=>
BOTTOM 0
TOP 1
}
const_bitflag! { KF: u32: "shell";
/// [`KNOWN_FOLDER_FLAG`](https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/ne-shlobj_core-known_folder_flag)
/// enumeration (`u32`).
=>
=>
DEFAULT 0x0000_0000
FORCE_APP_DATA_REDIRECTION 0x0008_0000
RETURN_FILTER_REDIRECTION_TARGET 0x0004_0000
FORCE_PACKAGE_REDIRECTION 0x0002_0000
NO_PACKAGE_REDIRECTION 0x0001_0000
FORCE_APPCONTAINER_REDIRECTION 0x0002_0000
NO_APPCONTAINER_REDIRECTION 0x0001_0000
CREATE 0x0000_8000
DONT_VERIFY 0x0000_4000
DONT_UNEXPAND 0x0000_2000
NO_ALIAS 0x0000_1000
INIT 0x0000_0800
DEFAULT_PATH 0x0000_0400
NOT_PARENT_RELATIVE 0x0000_0200
SIMPLE_IDLIST 0x0000_0100
ALIAS_ONLY 0x8000_0000
}
const_bitflag! { NIF: u32: "shell";
/// [`NOTIFYICONDATA`](crate::NOTIFYICONDATA) `uFlags` (`u32`).
=>
=>
MESSAGE 0x0000_0001
ICON 0x0000_0002
TIP 0x0000_0004
STATE 0x0000_0008
INFO 0x0000_0010
GUID 0x0000_0020
REALTIME 0x0000_0040
SHOWTIP 0x0000_0080
}
const_bitflag! { NIIF: u32: "shell";
/// [`NOTIFYICONDATA`](crate::NOTIFYICONDATA) `dwInfoFlags` (`u32`).
=>
=>
NONE 0x0000_0000
INFO 0x0000_0001
WARNING 0x0000_0002
ERROR 0x0000_0003
USER 0x0000_0004
NOSOUND 0x0000_0010
LARGE_ICON 0x0000_0020
RESPECT_QUIET_TIME 0x0000_0080
}
const_ordinary! { NIM: u32: "shell";
	/// [`Shell_NotifyIcon`](crate::Shell_NotifyIcon) `message` (`u32`).
	=>
	=>
	/// Add an icon to the status area.
	ADD 0x0000_0000
	/// Modify an existing icon in the status area.
	MODIFY 0x0000_0001
	/// Delete an icon from the status area.
	DELETE 0x0000_0002
	/// Return focus to the taskbar notification area.
	SETFOCUS 0x0000_0003
	/// Apply the behavior version specified in `NOTIFYICONDATA`.
	SETVERSION 0x0000_0004
}
const_bitflag! { NIS: u32: "shell";
/// [`NOTIFYICONDATA`](crate::NOTIFYICONDATA) `dwState` and `dwStateFlags`
/// (`u32`).
=>
=>
HIDDEN 0x0000_0001
SHAREDICON 0x0000_0002
}
const_ordinary! { SE_ERR: u32: "shell";
	/// [`HWND::ShellExecute`](crate::prelude::ShellHwnd::ShellExecute) return
	/// value (`u32`).
	///
	/// `ShellExecute` signals success with a value greater than 32; these are
	/// the documented failure codes (all ≤ 32).
	=>
	=>
	FILE_NOT_FOUND 2
	PATH_NOT_FOUND 3
	BAD_FORMAT 11
	ACCESSDENIED 5
	OOM 8
	DLLNOTFOUND 32
	SHARE 26
	ASSOCINCOMPLETE 27
	DDETIMEOUT 28
	DDEFAIL 29
	DDEBUSY 30
	NOASSOC 31
}
const_bitflag! { SFGAO: u32: "shell";
/// [`SFGAO`](https://docs.microsoft.com/en-us/windows/win32/shell/sfgao)
/// constants (`u32`).
=>
=>
CANCOPY DROPEFFECT::COPY.0
CANMOVE DROPEFFECT::MOVE.0
CANLINK DROPEFFECT::LINK.0
STORAGE 0x0000_0008
CANRENAME 0x0000_0010
CANDELETE 0x0000_0020
HASPROPSHEET 0x0000_0040
DROPTARGET 0x0000_0100
CAPABILITYMASK 0x0000_0177
SYSTEM 0x0000_1000
ENCRYPTED 0x0000_2000
ISSLOW 0x0000_4000
GHOSTED 0x0000_8000
LINK 0x0001_0000
SHARE 0x0002_0000
READONLY 0x0004_0000
HIDDEN 0x0008_0000
FILESYSANCESTOR 0x1000_0000
FOLDER 0x2000_0000
FILESYSTEM 0x4000_0000
HASSUBFOLDER 0x8000_0000
CONTENTSMASK 0x8000_0000
VALIDATE 0x0100_0000
REMOVABLE 0x0200_0000
COMPRESSED 0x0400_0000
BROWSABLE 0x0800_0000
NONENUMERATED 0x0010_0000
NEWCONTENT 0x0020_0000
CANMONIKER 0x0040_0000
HASSTORAGE 0x0040_0000
STREAM 0x0040_0000
STORAGEANCESTOR 0x0080_0000
STORAGECAPMASK 0x70c5_0008
PKEYSFGAOMASK 0x8104_4000
}
const_ordinary! { SHARD: u32: "shell";
/// [`SHARD`](https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/ne-shlobj_core-shard)
/// enumeration (`u32`).
=>
=>
PIDL 0x0000_0001
PATHA 0x0000_0002
PATHW 0x0000_0003
APPIDINFO 0x0000_0004
APPIDINFOIDLIST 0x0000_0005
LINK 0x0000_0006
APPIDINFOLINK 0x0000_0007
SHELLITEM 0x0000_0008
}
const_bitflag! { SHGFI: u32: "shell";
/// [`SHGetFileInfo`](crate::SHGetFileInfo) `flags` (`u32`).
=>
=>
ICON 0x0000_0100
DISPLAYNAME 0x0000_0200
TYPENAME 0x0000_0400
ATTRIBUTES 0x0000_0800
ICONLOCATION 0x0000_1000
EXETYPE 0x0000_2000
SYSICONINDEX 0x0000_4000
LINKOVERLAY 0x0000_8000
SELECTED 0x0001_0000
ATTR_SPECIFIED 0x0002_0000
LARGEICON 0x0000_0000
SMALLICON 0x0000_0001
OPENICON 0x0000_0002
SHELLICONSIZE 0x0000_0004
PIDL 0x0000_0008
USEFILEATTRIBUTES 0x0000_0010
ADDOVERLAYS 0x0000_0020
OVERLAYINDEX 0x0000_0040
}
const_bitflag! { SHGSI: u32: "shell";
/// [`SHGetStockIconInfo`](crate::SHGetStockIconInfo) `flags` (`u32`).
=>
=>
/// None of the actual values (zero).
NoValue 0
ICONLOCATION 0
ICON SHGFI::ICON.0
SYSICONINDEX SHGFI::SYSICONINDEX.0
LINKOVERLAY SHGFI::LINKOVERLAY.0
SELECTED SHGFI::SELECTED.0
LARGEICON SHGFI::LARGEICON.0
SMALLICON SHGFI::SMALLICON.0
SHELLICONSIZE SHGFI::SHELLICONSIZE.0
}
const_ordinary! { SIGDN: u32: "shell";
/// [`SIGDN`](https://docs.microsoft.com/en-us/windows/win32/api/shobjidl_core/ne-shobjidl_core-sigdn)
/// enumeration (`u32`).
=>
=>
/// Returns the display name relative to the parent folder. In UI this name
/// is generally ideal for display to the user.
NORMALDISPLAY 0
/// Returns the parsing name relative to the parent folder. This name is not
/// suitable for use in UI.
PARENTRELATIVEPARSING 0x8001_8001
/// Returns the parsing name relative to the desktop. This name is not
/// suitable for use in UI.
DESKTOPABSOLUTEPARSING 0x8002_8000
/// Returns the editing name relative to the parent folder. In UI this name
/// is suitable for display to the user.
PARENTRELATIVEEDITING 0x8003_1001
/// Returns the editing name relative to the desktop. In UI this name is
/// suitable for display to the user.
DESKTOPABSOLUTEEDITING 0x8004_c000
/// Returns the item's file system path if it has one. Only items that
/// report [`SFGAO::FILESYSTEM`](crate::co::SFGAO::FILESYSTEM) have a file
/// system path. When an item does not have a file system path a call to
/// [`IShellItem::GetDisplayName`](crate::prelude::ShellIShellItem::GetDisplayName)
/// on that item will fail. In UI this name is suitable for display to the
/// user in some cases but note that it might not be specified for all
/// items.
FILESYSPATH 0x8005_8000
/// Returns the item's URL if it has one. Some items do not have a URL and
/// in those cases a call to
/// [`IShellItem::GetDisplayName`](crate::prelude::ShellIShellItem::GetDisplayName)
/// will fail. This name is suitable for display to the user in some cases,
/// but note that it might not be specified for all items.
URL 0x8006_8000
/// Returns the path relative to the parent folder in a friendly format as
/// displayed in an address bar. This name is suitable for display to the
/// user.
PARENTRELATIVEFORADDRESSBAR 0x8007_c001
/// Returns the path relative to the parent folder.
PARENTRELATIVE 0x8008_0001
/// Introduced in Windows 8.
PARENTRELATIVEFORUI 0x8009_4001
}
const_ordinary! { SIID: u32: "shell";
/// [`SHSTOCKICONID`](https://docs.microsoft.com/en-us/windows/win32/api/shellapi/ne-shellapi-shstockiconid)
/// enumeration, [`SHGetStockIconInfo`](crate::SHGetStockIconInfo) `siid`
/// (`u32`).
=>
=>
DOCNOASSOC 0
DOCASSOC 1
APPLICATION 2
FOLDER 3
FOLDEROPEN 4
DRIVE525 5
DRIVE35 6
DRIVEREMOVE 7
DRIVEFIXED 8
DRIVENET 9
DRIVENETDISABLED 10
DRIVECD 11
DRIVERAM 12
WORLD 13
SERVER 15
PRINTER 16
MYNETWORK 17
FIND 22
HELP 23
SHARE 28
LINK 29
SLOWFILE 30
RECYCLER 31
RECYCLERFULL 32
MEDIACDAUDIO 40
LOCK 47
AUTOLIST 49
PRINTERNET 50
SERVERSHARE 51
PRINTERFAX 52
PRINTERFAXNET 53
PRINTERFILE 54
STACK 55
MEDIASVCD 56
STUFFEDFOLDER 57
DRIVEUNKNOWN 58
DRIVEDVD 59
MEDIADVD 60
MEDIADVDRAM 61
MEDIADVDRW 62
MEDIADVDR 63
MEDIADVDROM 64
MEDIACDAUDIOPLUS 65
MEDIACDRW 66
MEDIACDR 67
MEDIACDBURN 68
MEDIABLANKCD 69
MEDIACDROM 70
AUDIOFILES 71
IMAGEFILES 72
VIDEOFILES 73
MIXEDFILES 74
FOLDERBACK 75
FOLDERFRONT 76
SHIELD 77
WARNING 78
INFO 79
ERROR 80
KEY 81
SOFTWARE 82
RENAME 83
DELETE 84
MEDIAAUDIODVD 85
MEDIAMOVIEDVD 86
MEDIAENHANCEDCD 87
MEDIAENHANCEDDVD 88
MEDIAHDDVD 89
MEDIABLURAY 90
MEDIAVCD 91
MEDIADVDPLUSR 92
MEDIADVDPLUSRW 93
DESKTOPPC 94
MOBILEPC 95
USERS 96
MEDIASMARTMEDIA 97
MEDIACOMPACTFLASH 98
DEVICECELLPHONE 99
DEVICECAMERA 100
DEVICEVIDEOCAMERA 101
DEVICEAUDIOPLAYER 102
NETWORKCONNECT 103
INTERNET 104
ZIPFILE 105
SETTINGS 106
DRIVEHDDVD 132
DRIVEBD 133
MEDIAHDDVDROM 134
MEDIAHDDVDR 135
MEDIAHDDVDRAM 136
MEDIABDROM 137
MEDIABDR 138
MEDIABDRE 139
CLUSTEREDDRIVE 140
MAX_ICONS 181
}
const_bitflag! { SLGP: u32: "shell";
/// [`IShellLink::GetPath`](crate::prelude::ShellIShellLink::GetPath)
/// `flags` (`u32`).
=>
=>
SHORTPATH 0x1
UNCPRIORITY 0x2
RAWPATH 0x4
RELATIVEPRIORITY 0x8
}
const_bitflag! { SLR: u32: "shell";
/// [`IShellLink::Resolve`](crate::prelude::ShellIShellLink::GetPath)
/// `flags` (`u32`).
=>
=>
NONE 0
NO_UI 0x1
ANY_MATCH 0x2
UPDATE 0x4
NOUPDATE 0x8
NOSEARCH 0x10
NOTRACK 0x20
NOLINKINFO 0x40
INVOKE_MSI 0x80
NO_UI_WITH_MSG_PUMP 0x101
OFFER_DELETE_WITHOUT_FILE 0x200
KNOWNFOLDER 0x400
MACHINE_IN_LOCAL_TARGET 0x800
UPDATE_MACHINE_AND_SID 0x1000
NO_OBJECT_ID 0x2000
}
const_ordinary! { STPFLAG: u32: "shell";
/// [`STPFLAG`](https://docs.microsoft.com/en-us/windows/win32/api/shobjidl_core/ne-shobjidl_core-stpflag)
/// enumeration (`u32`).
=>
=>
NONE 0
USEAPPTHUMBNAILALWAYS 0x1
USEAPPTHUMBNAILWHENACTIVE 0x2
USEAPPPEEKALWAYS 0x4
USEAPPPEEKWHENACTIVE 0x8
}
const_ordinary! { TBPF: u32: "shell";
/// [`ITaskbarList3::SetProgressState`](crate::prelude::ShellITaskbarList3::SetProgressState)
/// `tbpFlags` (`u32`).
=>
=>
/// Stops displaying progress and returns the button to its normal state.
/// Call this method with this flag to dismiss the progress bar when the
/// operation is complete or canceled.
NOPROGRESS 0
/// The progress indicator does not grow in size but cycles repeatedly
/// along the length of the taskbar button. This indicates activity without
/// specifying what proportion of the progress is complete. Progress is
/// taking place but there is no prediction as to how long the operation
/// will take.
INDETERMINATE 0x1
/// The progress indicator grows in size from left to right in proportion to
/// the estimated amount of the operation completed. This is a determinate
/// progress indicator; a prediction is being made as to the duration of the
/// operation.
NORMAL 0x2
/// The progress indicator turns red to show that an error has occurred in
/// one of the windows that is broadcasting progress. This is a determinate
/// state. If the progress indicator is in the indeterminate state it
/// switches to a red determinate display of a generic percentage not
/// indicative of actual progress.
ERROR 0x4
/// The progress indicator turns yellow to show that progress is currently
/// stopped in one of the windows but can be resumed by the user. No error
/// condition exists and nothing is preventing the progress from continuing.
/// This is a determinate state. If the progress indicator is in the
/// indeterminate state it switches to a yellow determinate display of a
/// generic percentage not indicative of actual progress.
PAUSED 0x8
}
| 30.222028 | 138 | 0.731012 |
4a678f5de34334b48da276024f913639b83cd63e | 6,254 | use std::ffi::OsString;
use std::io::Read;
use std::os::unix::ffi::OsStringExt;
use std::os::unix::process::{CommandExt, ExitStatusExt};
use std::process::{Child, Command, ExitStatus, Output};
use crate::base::error;
use crate::base::{Error, Result};
use log::warn;
/// Extension trait for `std::process::Command`: mark the spawned child to be
/// ptrace-traced by this process.
pub trait CommandTraceExt {
    /// Arranges for the child to call `ptrace(PTRACE_TRACEME)` between
    /// `fork` and `exec`, so the parent can trace its syscalls.
    fn traceme(&mut self) -> &mut Command;
}
impl CommandTraceExt for Command {
    fn traceme(&mut self) -> &mut Command {
        // SAFETY: `pre_exec` closures run in the forked child before exec.
        // `ptrace::traceme()` is a single raw syscall. NOTE(review): this
        // assumes `error::nix_to_io` is safe to run post-fork (no allocation
        // or lock acquisition) — confirm its implementation.
        unsafe { self.pre_exec(|| nix::sys::ptrace::traceme().map_err(error::nix_to_io)) }
    }
}
/// Callbacks invoked at each traced `open`/`openat` syscall stop.
pub struct SyscallHandler<FOpen, FOpenAt> {
    /// Called with `(pathname, flags)` for `open(2)`.
    pub open: FOpen,
    /// Called with `(dirfd, pathname, flags)` for `openat(2)`.
    pub openat: FOpenAt,
}
/// Extension trait: run an already-spawned child under ptrace and report its
/// file-open syscalls.
pub trait ChildTraceExt {
    /// Consumes the child, resumes it syscall-stop by syscall-stop, and
    /// invokes `handler` for every `open`/`openat` observed. Returns the
    /// child's collected `Output` once it exits, or an error if the child is
    /// killed or stopped by a signal.
    fn trace_syscalls<FOpen, FOpenAt>(
        self,
        handler: SyscallHandler<FOpen, FOpenAt>,
    ) -> Result<Output>
    where
        FOpen: FnMut(OsString, i32) -> (),
        FOpenAt: FnMut(i32, OsString, i32) -> ();
}
impl ChildTraceExt for Child {
    fn trace_syscalls<FOpen, FOpenAt>(
        mut self,
        mut handler: SyscallHandler<FOpen, FOpenAt>,
    ) -> Result<Output>
    where
        FOpen: FnMut(OsString, i32) -> (),
        FOpenAt: FnMut(i32, OsString, i32) -> (),
    {
        use nix::sys::signal::Signal;
        use nix::sys::wait::WaitStatus;
        let child_pid = nix::unistd::Pid::from_raw(self.id() as i32);
        // First stop: a child spawned with `traceme()` stops with SIGTRAP
        // when it execs. Anything else means it died or was signaled before
        // tracing could begin.
        match nix::sys::wait::waitpid(child_pid, None)? {
            WaitStatus::Stopped(_, Signal::SIGTRAP) => (),
            WaitStatus::Signaled(_, sig, _) | WaitStatus::Stopped(_, sig) => {
                return Err(Error::DynamicSignaled(sig))
            }
            WaitStatus::Exited(_, code) => {
                // Exited before exec completed; still collect any output.
                let status = ExitStatus::from_raw(code);
                return output_of_child(&mut self, status);
            }
            _ => unreachable!(),
        }
        // TODO: should we handle forks?
        use nix::sys::ptrace::Options;
        // TRACESYSGOOD makes syscall stops arrive as distinct
        // `PtraceSyscall` events (vs plain signal stops); EXITKILL kills the
        // tracee if this tracer exits first.
        nix::sys::ptrace::setoptions(
            child_pid,
            Options::PTRACE_O_TRACESYSGOOD | Options::PTRACE_O_EXITKILL,
        )?;
        // Resume the child until its next syscall boundary.
        nix::sys::ptrace::syscall(child_pid, None)?;
        loop {
            match nix::sys::wait::waitpid(child_pid, None)? {
                WaitStatus::Exited(_, code) => {
                    let status = ExitStatus::from_raw(code);
                    return output_of_child(&mut self, status);
                }
                WaitStatus::Stopped(pid, sig) => {
                    // A non-syscall stop (signal delivery); log and resume.
                    warn!(
                        "trace_syscalls: stopped with {}, we attempt to continue",
                        sig
                    );
                    nix::sys::ptrace::syscall(pid, None)?;
                }
                WaitStatus::PtraceSyscall(pid) => {
                    // x86_64 syscall ABI: number in orig_rax, args in
                    // rdi/rsi/rdx. NOTE(review): PTRACE_SYSCALL stops fire on
                    // both syscall entry and exit, so each open/openat is
                    // likely reported twice — confirm callers tolerate that.
                    let regs = getregs(pid)?;
                    match regs.orig_rax as i64 {
                        libc::SYS_openat => {
                            let dirfd = regs.rdi as i32;
                            let pathname = read_string_at(pid, regs.rsi)?;
                            let flags = regs.rdx as i32;
                            (handler.openat)(dirfd, pathname, flags);
                        }
                        libc::SYS_open => {
                            let pathname = read_string_at(pid, regs.rdi)?;
                            let flags = regs.rsi as i32;
                            (handler.open)(pathname, flags);
                        }
                        _ => (),
                    }
                    nix::sys::ptrace::syscall(pid, None)?;
                }
                WaitStatus::Signaled(_, sig, _) => return Err(Error::DynamicSignaled(sig)),
                _ => unreachable!(),
            }
        }
    }
}
fn output_of_child(child: &mut Child, status: ExitStatus) -> Result<Output> {
let mut stdout = Vec::new();
let mut stderr = Vec::new();
if let Some(mut child_stdout) = child.stdout.take() {
child_stdout.read_to_end(&mut stdout)?;
}
if let Some(mut child_stderr) = child.stderr.take() {
child_stderr.read_to_end(&mut stderr)?;
}
let output = Output {
status,
stdout,
stderr,
};
Ok(output)
}
/// Reads the traced process's CPU registers.
///
/// musl variant: presumably nix's safe `getregs` wrapper is unavailable on
/// musl targets (hence the cfg split) — TODO confirm; here the raw
/// `PTRACE_GETREGS` request is issued directly.
#[cfg(target_env = "musl")]
fn getregs(pid: nix::unistd::Pid) -> Result<libc::user_regs_struct> {
    use nix::sys::ptrace::Request;
    use std::ffi::c_void;
    use std::{mem, ptr};
    let mut data = mem::MaybeUninit::uninit();
    unsafe {
        // SAFETY: on success PTRACE_GETREGS fills the provided buffer with a
        // complete `user_regs_struct`; `assume_init` is only reached after
        // the `?` confirms the call succeeded.
        nix::sys::ptrace::ptrace(
            Request::PTRACE_GETREGS,
            pid,
            ptr::null_mut(),
            data.as_mut_ptr() as *mut c_void,
        )?;
        Ok(data.assume_init())
    }
}
/// Reads the traced process's CPU registers (non-musl: nix's safe wrapper).
#[cfg(not(target_env = "musl"))]
fn getregs(pid: nix::unistd::Pid) -> Result<libc::user_regs_struct> {
    nix::sys::ptrace::getregs(pid).map_err(Into::into)
}
/// Reads a NUL-terminated C string from the traced process `pid`, starting at
/// remote address `addr`, one machine word at a time via
/// `PTRACE_PEEKDATA` (`nix::sys::ptrace::read`).
///
/// Compared to the previous version this:
/// * uses `to_ne_bytes` instead of an endianness-fragile `transmute` through
///   `u32` (the old code picked the wrong half of the word on big-endian
///   targets);
/// * consumes the whole peeked word per syscall instead of re-reading
///   overlapping 4-byte windows, halving the number of ptrace calls on
///   64-bit targets;
/// * contains no `unsafe`.
fn read_string_at(pid: nix::unistd::Pid, mut addr: u64) -> Result<OsString> {
    use std::ffi::c_void;
    let mut result = Vec::new();
    loop {
        let word = nix::sys::ptrace::read(pid, addr as *mut c_void)?;
        // `to_ne_bytes` yields the word's bytes in target memory order on
        // either endianness.
        let bytes = word.to_ne_bytes();
        for &byte in bytes.iter() {
            if byte == 0 {
                return Ok(OsString::from_vec(result));
            }
            result.push(byte);
        }
        addr += bytes.len() as u64;
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use assert_cmd::prelude::*;
    use assert_fs::prelude::*;
    use std::cell::RefCell;
    use std::rc::Rc;
    // End-to-end check: trace `cat <tmpfile>` and assert that the
    // open/openat callbacks observed the file's path.
    #[test]
    fn test_trace() -> std::result::Result<(), Box<dyn std::error::Error>> {
        let test_path = assert_fs::NamedTempFile::new("test")?;
        test_path.touch()?;
        let child = Command::new("cat")
            .arg(test_path.path())
            .traceme()
            .spawn()?;
        // Rc<RefCell<..>> lets both FnMut callbacks share one accumulator.
        let paths = Rc::new(RefCell::new(Vec::new()));
        child
            .trace_syscalls(SyscallHandler {
                open: |pathname, _| paths.borrow_mut().push(pathname),
                openat: |_, pathname, _| paths.borrow_mut().push(pathname),
            })?
            .assert()
            .success();
        // `cat` likely opens other files too (shared libraries etc.), so
        // only membership of our path is asserted, not the full list.
        assert_eq!(
            true,
            Rc::try_unwrap(paths)
                .unwrap()
                .into_inner()
                .iter()
                .any(|p| p == test_path.path())
        );
        Ok(())
    }
}
e28a759807de188de1c51b0f70216a5ce54a5ddd | 2,729 | // This file was generated by gir (https://github.com/gtk-rs/gir @ 8b9d0bb)
// from gir-files (https://github.com/gtk-rs/gir-files @ 77d1f70)
// DO NOT EDIT
use Actionable;
use Bin;
use Buildable;
use Button;
use Container;
use Orientable;
use ScaleButton;
use Widget;
use ffi;
use glib;
use glib::StaticType;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::mem;
use std::mem::transmute;
use std::ptr;
glib_wrapper! {
pub struct VolumeButton(Object<ffi::GtkVolumeButton, ffi::GtkVolumeButtonClass>): ScaleButton, Button, Bin, Container, Widget, Buildable, Actionable, Orientable;
match fn {
get_type => || ffi::gtk_volume_button_get_type(),
}
}
impl VolumeButton {
    /// Creates a new `GtkVolumeButton` via `ffi::gtk_volume_button_new`.
    ///
    /// The FFI constructor returns a generic widget pointer, which is wrapped
    /// and then downcast (unchecked) to `VolumeButton`.
    pub fn new() -> VolumeButton {
        // Panics if GTK has not been initialized on the main thread.
        assert_initialized_main_thread!();
        unsafe {
            // SAFETY(review): assumes the FFI call returns a valid widget
            // pointer whose ownership matches `from_glib_none` semantics —
            // this is generated gir code, do not hand-tune.
            Widget::from_glib_none(ffi::gtk_volume_button_new()).downcast_unchecked()
        }
    }
}
impl Default for VolumeButton {
    /// Equivalent to [`VolumeButton::new`].
    fn default() -> Self {
        Self::new()
    }
}
pub trait VolumeButtonExt {
fn get_property_use_symbolic(&self) -> bool;
fn set_property_use_symbolic(&self, use_symbolic: bool);
fn connect_property_use_symbolic_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<VolumeButton> + IsA<glib::object::Object>> VolumeButtonExt for O {
fn get_property_use_symbolic(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0, "use-symbolic".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_use_symbolic(&self, use_symbolic: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0, "use-symbolic".to_glib_none().0, Value::from(&use_symbolic).to_glib_none().0);
}
}
fn connect_property_use_symbolic_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::use-symbolic",
transmute(notify_use_symbolic_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
// C callback invoked by GLib when the `use-symbolic` property changes; `f`
// is the raw pointer to the boxed Rust closure registered via `connect`.
unsafe extern "C" fn notify_use_symbolic_trampoline<P>(this: *mut ffi::GtkVolumeButton, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<VolumeButton> {
    // SAFETY(review): `f` must originate from `Box_::into_raw` on a
    // `Box<Box<Fn(&P)>>` so the transmute recovers the original closure
    // type; this is upheld by the generated connect function.
    let f: &&(Fn(&P) + 'static) = transmute(f);
    f(&VolumeButton::from_glib_borrow(this).downcast_unchecked())
}
| 30.662921 | 165 | 0.66801 |
48c6d87885a51eaa8692ceb19b0f6c24b2847f44 | 28,975 | use std::cmp;
use std::fmt;
use std::io::{self, IoSlice};
use std::marker::Unpin;
use std::mem::MaybeUninit;
use bytes::{Buf, BufMut, Bytes, BytesMut};
use tokio::io::{AsyncRead, AsyncWrite, ReadBuf};
use super::{Http1Transaction, ParseContext, ParsedMessage};
use crate::common::buf::BufList;
use crate::common::{task, Pin, Poll};
/// The initial buffer size allocated before trying to read from IO.
pub(crate) const INIT_BUFFER_SIZE: usize = 8192;
/// The minimum value that can be set to max buffer size.
pub(crate) const MINIMUM_MAX_BUFFER_SIZE: usize = INIT_BUFFER_SIZE;
/// The default maximum read buffer size. If the buffer gets this big and
/// a message is still not complete, a `TooLarge` error is triggered.
// Note: if this changes, update server::conn::Http::max_buf_size docs.
pub(crate) const DEFAULT_MAX_BUFFER_SIZE: usize = 8192 + 4096 * 100; // ~408 KiB
/// The maximum number of distinct `Buf`s to hold in a list before requiring
/// a flush. Only affects when the buffer strategy is to queue buffers.
///
/// Note that a flush can happen before reaching the maximum. This simply
/// forces a flush if the queue gets this big.
const MAX_BUF_LIST_BUFFERS: usize = 16;
/// Buffers reads and writes around an underlying IO object `T`, with
/// outgoing body buffers of type `B`.
pub(crate) struct Buffered<T, B> {
    /// When true, `poll_flush` is a no-op while unread request bytes remain
    /// in `read_buf` (pipelining optimization).
    flush_pipeline: bool,
    /// The underlying transport.
    io: T,
    /// True if the last read attempt returned `Pending`.
    read_blocked: bool,
    /// Bytes read from `io` but not yet parsed.
    read_buf: BytesMut,
    /// Controls how much capacity to request for the next read.
    read_buf_strategy: ReadStrategy,
    /// Pending outgoing bytes (headers plus flattened or queued bodies).
    write_buf: WriteBuf<B>,
}
impl<T, B> fmt::Debug for Buffered<T, B>
where
    B: Buf,
{
    /// Debug output shows only the two buffers; the IO object itself is
    /// intentionally omitted (it is not required to be `Debug`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("Buffered");
        builder.field("read_buf", &self.read_buf);
        builder.field("write_buf", &self.write_buf);
        builder.finish()
    }
}
impl<T, B> Buffered<T, B>
where
    T: AsyncRead + AsyncWrite + Unpin,
    B: Buf,
{
    /// Wrap `io`; picks the queued write strategy when the transport
    /// supports vectored writes, and the flattening strategy otherwise.
    pub(crate) fn new(io: T) -> Buffered<T, B> {
        let strategy = if io.is_write_vectored() {
            WriteStrategy::Queue
        } else {
            WriteStrategy::Flatten
        };
        let write_buf = WriteBuf::new(strategy);
        Buffered {
            flush_pipeline: false,
            io,
            read_blocked: false,
            read_buf: BytesMut::with_capacity(0),
            read_buf_strategy: ReadStrategy::default(),
            write_buf,
        }
    }
    /// Enable or disable pipeline flushing; enabling also forces the
    /// flattening write strategy.
    #[cfg(feature = "server")]
    pub(crate) fn set_flush_pipeline(&mut self, enabled: bool) {
        debug_assert!(!self.write_buf.has_remaining());
        self.flush_pipeline = enabled;
        if enabled {
            self.set_write_strategy_flatten();
        }
    }
    /// Set the maximum buffer size for both reading and writing.
    ///
    /// Panics if `max` is below `MINIMUM_MAX_BUFFER_SIZE`.
    pub(crate) fn set_max_buf_size(&mut self, max: usize) {
        assert!(
            max >= MINIMUM_MAX_BUFFER_SIZE,
            "The max_buf_size cannot be smaller than {}.",
            MINIMUM_MAX_BUFFER_SIZE,
        );
        self.read_buf_strategy = ReadStrategy::with_max(max);
        self.write_buf.max_buf_size = max;
    }
    /// Make every subsequent read request exactly `sz` bytes.
    #[cfg(feature = "client")]
    pub(crate) fn set_read_buf_exact_size(&mut self, sz: usize) {
        self.read_buf_strategy = ReadStrategy::Exact(sz);
    }
    #[cfg(feature = "server")]
    pub(crate) fn set_write_strategy_flatten(&mut self) {
        // this should always be called only at construction time,
        // so this assert is here to catch myself
        debug_assert!(self.write_buf.queue.bufs_cnt() == 0);
        self.write_buf.set_strategy(WriteStrategy::Flatten);
    }
    /// Bytes that have been read from IO but not yet parsed.
    pub(crate) fn read_buf(&self) -> &[u8] {
        self.read_buf.as_ref()
    }
    #[cfg(test)]
    #[cfg(feature = "nightly")]
    pub(super) fn read_buf_mut(&mut self) -> &mut BytesMut {
        &mut self.read_buf
    }
    /// Return the "allocated" available space, not the potential space
    /// that could be allocated in the future.
    fn read_buf_remaining_mut(&self) -> usize {
        self.read_buf.capacity() - self.read_buf.len()
    }
    /// Mutable access to the raw headers bytes, for serializing a head into.
    pub(crate) fn headers_buf(&mut self) -> &mut Vec<u8> {
        let buf = self.write_buf.headers_mut();
        &mut buf.bytes
    }
    pub(super) fn write_buf(&mut self) -> &mut WriteBuf<B> {
        &mut self.write_buf
    }
    /// Queue (or flatten, depending on strategy) an outgoing body buffer.
    pub(crate) fn buffer<BB: Buf + Into<B>>(&mut self, buf: BB) {
        self.write_buf.buffer(buf)
    }
    /// Whether more outgoing data may be buffered before a flush is needed.
    pub(crate) fn can_buffer(&self) -> bool {
        self.flush_pipeline || self.write_buf.can_buffer()
    }
    /// Skip any leading CR/LF bytes left over before the next message head.
    pub(crate) fn consume_leading_lines(&mut self) {
        if !self.read_buf.is_empty() {
            let mut i = 0;
            while i < self.read_buf.len() {
                match self.read_buf[i] {
                    b'\r' | b'\n' => i += 1,
                    _ => break,
                }
            }
            self.read_buf.advance(i);
        }
    }
    /// Parse a message head out of the read buffer, reading more from IO as
    /// needed. Fails with `TooLarge` if the buffer reaches its max while the
    /// head is still incomplete, and with `Incomplete` on EOF mid-message.
    pub(super) fn parse<S>(
        &mut self,
        cx: &mut task::Context<'_>,
        parse_ctx: ParseContext<'_>,
    ) -> Poll<crate::Result<ParsedMessage<S::Incoming>>>
    where
        S: Http1Transaction,
    {
        loop {
            match super::role::parse_headers::<S>(
                &mut self.read_buf,
                ParseContext {
                    cached_headers: parse_ctx.cached_headers,
                    req_method: parse_ctx.req_method,
                    h1_parser_config: parse_ctx.h1_parser_config.clone(),
                    preserve_header_case: parse_ctx.preserve_header_case,
                    h09_responses: parse_ctx.h09_responses,
                },
            )? {
                Some(msg) => {
                    debug!("parsed {} headers", msg.head.headers.len());
                    return Poll::Ready(Ok(msg));
                }
                None => {
                    let max = self.read_buf_strategy.max();
                    if self.read_buf.len() >= max {
                        debug!("max_buf_size ({}) reached, closing", max);
                        return Poll::Ready(Err(crate::Error::new_too_large()));
                    }
                }
            }
            // Head not complete yet: pull more bytes; 0 read means EOF.
            if ready!(self.poll_read_from_io(cx)).map_err(crate::Error::new_io)? == 0 {
                trace!("parse eof");
                return Poll::Ready(Err(crate::Error::new_incomplete()));
            }
        }
    }
    /// Read once from the transport into `read_buf`, reserving capacity per
    /// the read strategy; returns the number of bytes read (0 = EOF).
    pub(crate) fn poll_read_from_io(
        &mut self,
        cx: &mut task::Context<'_>,
    ) -> Poll<io::Result<usize>> {
        self.read_blocked = false;
        let next = self.read_buf_strategy.next();
        if self.read_buf_remaining_mut() < next {
            self.read_buf.reserve(next);
        }
        let dst = self.read_buf.chunk_mut();
        let dst = unsafe { &mut *(dst as *mut _ as *mut [MaybeUninit<u8>]) };
        let mut buf = ReadBuf::uninit(dst);
        match Pin::new(&mut self.io).poll_read(cx, &mut buf) {
            Poll::Ready(Ok(_)) => {
                let n = buf.filled().len();
                unsafe {
                    // Safety: we just read that many bytes into the
                    // uninitialized part of the buffer, so this is okay.
                    // @tokio pls give me back `poll_read_buf` thanks
                    self.read_buf.advance_mut(n);
                }
                self.read_buf_strategy.record(n);
                Poll::Ready(Ok(n))
            }
            Poll::Pending => {
                self.read_blocked = true;
                Poll::Pending
            }
            Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
        }
    }
    /// Deconstruct into the IO object plus any unparsed leftover bytes.
    pub(crate) fn into_inner(self) -> (T, Bytes) {
        (self.io, self.read_buf.freeze())
    }
    pub(crate) fn io_mut(&mut self) -> &mut T {
        &mut self.io
    }
    /// True if the last read attempt returned `Pending`.
    pub(crate) fn is_read_blocked(&self) -> bool {
        self.read_blocked
    }
    /// Flush all buffered writes to the transport. With pipelining enabled,
    /// flushing is deferred while unread request bytes are still buffered.
    pub(crate) fn poll_flush(&mut self, cx: &mut task::Context<'_>) -> Poll<io::Result<()>> {
        if self.flush_pipeline && !self.read_buf.is_empty() {
            Poll::Ready(Ok(()))
        } else if self.write_buf.remaining() == 0 {
            Pin::new(&mut self.io).poll_flush(cx)
        } else {
            if let WriteStrategy::Flatten = self.write_buf.strategy {
                return self.poll_flush_flattened(cx);
            }
            const MAX_WRITEV_BUFS: usize = 64;
            loop {
                let n = {
                    let mut iovs = [IoSlice::new(&[]); MAX_WRITEV_BUFS];
                    let len = self.write_buf.chunks_vectored(&mut iovs);
                    ready!(Pin::new(&mut self.io).poll_write_vectored(cx, &iovs[..len]))?
                };
                // TODO(eliza): we have to do this manually because
                // `poll_write_buf` doesn't exist in Tokio 0.3 yet...when
                // `poll_write_buf` comes back, the manual advance will need to leave!
                self.write_buf.advance(n);
                debug!("flushed {} bytes", n);
                if self.write_buf.remaining() == 0 {
                    break;
                } else if n == 0 {
                    trace!(
                        "write returned zero, but {} bytes remaining",
                        self.write_buf.remaining()
                    );
                    return Poll::Ready(Err(io::ErrorKind::WriteZero.into()));
                }
            }
            Pin::new(&mut self.io).poll_flush(cx)
        }
    }
    /// Specialized version of `flush` when strategy is Flatten.
    ///
    /// Since all buffered bytes are flattened into the single headers buffer,
    /// that skips some bookkeeping around using multiple buffers.
    fn poll_flush_flattened(&mut self, cx: &mut task::Context<'_>) -> Poll<io::Result<()>> {
        loop {
            let n = ready!(Pin::new(&mut self.io).poll_write(cx, self.write_buf.headers.chunk()))?;
            debug!("flushed {} bytes", n);
            self.write_buf.headers.advance(n);
            if self.write_buf.headers.remaining() == 0 {
                self.write_buf.headers.reset();
                break;
            } else if n == 0 {
                trace!(
                    "write returned zero, but {} bytes remaining",
                    self.write_buf.remaining()
                );
                return Poll::Ready(Err(io::ErrorKind::WriteZero.into()));
            }
        }
        Pin::new(&mut self.io).poll_flush(cx)
    }
    /// Future adapter over `poll_flush`, used by the tests below.
    #[cfg(test)]
    fn flush<'a>(&'a mut self) -> impl std::future::Future<Output = io::Result<()>> + 'a {
        futures_util::future::poll_fn(move |cx| self.poll_flush(cx))
    }
}
// The `B` is a `Buf`, we never project a pin to it, so `Buffered` is
// `Unpin` whenever the IO type is.
impl<T: Unpin, B> Unpin for Buffered<T, B> {}
// TODO: This trait is old... at least rename to PollBytes or something...
/// A poll-based source of owned byte chunks.
pub(crate) trait MemRead {
    /// Attempt to read at most `len` bytes, yielding them as owned `Bytes`.
    fn read_mem(&mut self, cx: &mut task::Context<'_>, len: usize) -> Poll<io::Result<Bytes>>;
}
impl<T, B> MemRead for Buffered<T, B>
where
T: AsyncRead + AsyncWrite + Unpin,
B: Buf,
{
fn read_mem(&mut self, cx: &mut task::Context<'_>, len: usize) -> Poll<io::Result<Bytes>> {
if !self.read_buf.is_empty() {
let n = std::cmp::min(len, self.read_buf.len());
Poll::Ready(Ok(self.read_buf.split_to(n).freeze()))
} else {
let n = ready!(self.poll_read_from_io(cx))?;
Poll::Ready(Ok(self.read_buf.split_to(::std::cmp::min(len, n)).freeze()))
}
}
}
/// Policy for sizing the next read from the transport.
#[derive(Clone, Copy, Debug)]
enum ReadStrategy {
    /// Grow by powers of two after full reads; shrink only after two
    /// consecutive small reads; clamp between `INIT_BUFFER_SIZE` and `max`.
    Adaptive {
        // Set after one "small" read; a second small read triggers a shrink.
        decrease_now: bool,
        // Size to request for the next read.
        next: usize,
        // Upper bound for `next` and for total buffering before `TooLarge`.
        max: usize,
    },
    /// Always read exactly this many bytes (client-configured).
    #[cfg(feature = "client")]
    Exact(usize),
}
impl ReadStrategy {
    /// Adaptive strategy starting at `INIT_BUFFER_SIZE`, capped at `max`.
    fn with_max(max: usize) -> ReadStrategy {
        ReadStrategy::Adaptive {
            decrease_now: false,
            next: INIT_BUFFER_SIZE,
            max,
        }
    }
    /// How many bytes the next read should request.
    fn next(&self) -> usize {
        match *self {
            ReadStrategy::Adaptive { next, .. } => next,
            #[cfg(feature = "client")]
            ReadStrategy::Exact(exact) => exact,
        }
    }
    /// Maximum bytes to buffer before the message is considered too large.
    fn max(&self) -> usize {
        match *self {
            ReadStrategy::Adaptive { max, .. } => max,
            #[cfg(feature = "client")]
            ReadStrategy::Exact(exact) => exact,
        }
    }
    /// Feed back how many bytes the last read produced, adjusting `next`:
    /// grow when the read filled the whole step, shrink only after two
    /// consecutive reads below the previous power-of-two step.
    fn record(&mut self, bytes_read: usize) {
        match *self {
            ReadStrategy::Adaptive {
                ref mut decrease_now,
                ref mut next,
                max,
                ..
            } => {
                if bytes_read >= *next {
                    *next = cmp::min(incr_power_of_two(*next), max);
                    *decrease_now = false;
                } else {
                    let decr_to = prev_power_of_two(*next);
                    if bytes_read < decr_to {
                        if *decrease_now {
                            *next = cmp::max(decr_to, INIT_BUFFER_SIZE);
                            *decrease_now = false;
                        } else {
                            // Decreasing is a two "record" process.
                            *decrease_now = true;
                        }
                    } else {
                        // A read within the current range should cancel
                        // a potential decrease, since we just saw proof
                        // that we still need this size.
                        *decrease_now = false;
                    }
                }
            }
            #[cfg(feature = "client")]
            ReadStrategy::Exact(_) => (),
        }
    }
}
/// Double `n`, clamping at `usize::MAX` instead of overflowing.
fn incr_power_of_two(n: usize) -> usize {
    n.checked_mul(2).unwrap_or(::std::usize::MAX)
}
/// Half of the highest power of two at or below `n`,
/// i.e. `2^(floor(log2 n) - 1)`.
fn prev_power_of_two(n: usize) -> usize {
    // The subtraction below can only underflow if the top set bit sits in
    // position 0 or 1, i.e. if n is less than 4.
    debug_assert!(n >= 4);
    // Index of the highest set bit of n.
    let top_bit = ::std::mem::size_of::<usize>() * 8 - 1 - n.leading_zeros() as usize;
    1usize << (top_bit - 1)
}
impl Default for ReadStrategy {
    /// Adaptive strategy capped at `DEFAULT_MAX_BUFFER_SIZE`.
    fn default() -> ReadStrategy {
        ReadStrategy::with_max(DEFAULT_MAX_BUFFER_SIZE)
    }
}
/// A `Buf` over an owned byte container, tracking how far it has been read.
#[derive(Clone)]
pub(crate) struct Cursor<T> {
    // The underlying bytes.
    bytes: T,
    // Number of bytes already consumed from the front.
    pos: usize,
}
impl<T: AsRef<[u8]>> Cursor<T> {
    /// Create a cursor positioned at the start of `bytes`.
    #[inline]
    pub(crate) fn new(bytes: T) -> Cursor<T> {
        Cursor { bytes, pos: 0 }
    }
}
impl Cursor<Vec<u8>> {
    /// If some bytes have already been read out of this cursor and the
    /// vector lacks room for `additional` more bytes, shift the unread
    /// bytes to the front so existing capacity can be reused.
    fn maybe_unshift(&mut self, additional: usize) {
        let spare = self.bytes.capacity() - self.bytes.len();
        // Either nothing has been read yet, or there is already enough
        // room: no shuffling required.
        if self.pos == 0 || spare >= additional {
            return;
        }
        self.bytes.drain(0..self.pos);
        self.pos = 0;
    }
    /// Clear the buffer and rewind the read position.
    fn reset(&mut self) {
        self.bytes.clear();
        self.pos = 0;
    }
}
impl<T: AsRef<[u8]>> fmt::Debug for Cursor<T> {
    /// Debug-print the position and total length, not the raw bytes.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Cursor")
            .field("pos", &self.pos)
            .field("len", &self.bytes.as_ref().len())
            .finish()
    }
}
impl<T: AsRef<[u8]>> Buf for Cursor<T> {
    /// Bytes not yet consumed.
    #[inline]
    fn remaining(&self) -> usize {
        self.bytes.as_ref().len() - self.pos
    }
    /// View of the unread bytes.
    #[inline]
    fn chunk(&self) -> &[u8] {
        &self.bytes.as_ref()[self.pos..]
    }
    /// Mark `cnt` more bytes as consumed.
    #[inline]
    fn advance(&mut self, cnt: usize) {
        debug_assert!(self.pos + cnt <= self.bytes.as_ref().len());
        self.pos += cnt;
    }
}
// an internal buffer to collect writes before flushes
pub(super) struct WriteBuf<B> {
    /// Re-usable buffer that holds message headers
    headers: Cursor<Vec<u8>>,
    /// Soft cap used by `can_buffer` to decide when a flush is required.
    max_buf_size: usize,
    /// Deque of user buffers if strategy is Queue
    queue: BufList<B>,
    /// Whether user bodies are flattened into `headers` or queued separately.
    strategy: WriteStrategy,
}
impl<B: Buf> WriteBuf<B> {
    /// Empty write buffer using the given strategy, with the default
    /// headers capacity and the default maximum size.
    fn new(strategy: WriteStrategy) -> WriteBuf<B> {
        WriteBuf {
            headers: Cursor::new(Vec::with_capacity(INIT_BUFFER_SIZE)),
            max_buf_size: DEFAULT_MAX_BUFFER_SIZE,
            queue: BufList::new(),
            strategy,
        }
    }
}
impl<B> WriteBuf<B>
where
    B: Buf,
{
    #[cfg(feature = "server")]
    fn set_strategy(&mut self, strategy: WriteStrategy) {
        self.strategy = strategy;
    }
    /// Add an outgoing buffer: copied into the headers buffer when
    /// flattening, or pushed onto the queue otherwise.
    pub(super) fn buffer<BB: Buf + Into<B>>(&mut self, mut buf: BB) {
        debug_assert!(buf.has_remaining());
        match self.strategy {
            WriteStrategy::Flatten => {
                let head = self.headers_mut();
                head.maybe_unshift(buf.remaining());
                //perf: This is a little faster than <Vec as BufMut>>::put,
                //but accomplishes the same result.
                loop {
                    let adv = {
                        let slice = buf.chunk();
                        if slice.is_empty() {
                            return;
                        }
                        head.bytes.extend_from_slice(slice);
                        slice.len()
                    };
                    buf.advance(adv);
                }
            }
            WriteStrategy::Queue => {
                self.queue.push(buf.into());
            }
        }
    }
    /// Whether more data may be buffered before a flush is required.
    fn can_buffer(&self) -> bool {
        match self.strategy {
            WriteStrategy::Flatten => self.remaining() < self.max_buf_size,
            WriteStrategy::Queue => {
                self.queue.bufs_cnt() < MAX_BUF_LIST_BUFFERS && self.remaining() < self.max_buf_size
            }
        }
    }
    /// The headers cursor; only valid to use while the queue is empty.
    fn headers_mut(&mut self) -> &mut Cursor<Vec<u8>> {
        debug_assert!(!self.queue.has_remaining());
        &mut self.headers
    }
}
impl<B: Buf> fmt::Debug for WriteBuf<B> {
    /// Debug-print the remaining byte count and strategy, not the contents.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("WriteBuf")
            .field("remaining", &self.remaining())
            .field("strategy", &self.strategy)
            .finish()
    }
}
impl<B: Buf> Buf for WriteBuf<B> {
    #[inline]
    fn remaining(&self) -> usize {
        self.headers.remaining() + self.queue.remaining()
    }
    /// Headers are always yielded before any queued body buffers.
    #[inline]
    fn chunk(&self) -> &[u8] {
        let headers = self.headers.chunk();
        if !headers.is_empty() {
            headers
        } else {
            self.queue.chunk()
        }
    }
    /// Consume `cnt` bytes: drains the headers first, then the queue.
    #[inline]
    fn advance(&mut self, cnt: usize) {
        let hrem = self.headers.remaining();
        match hrem.cmp(&cnt) {
            cmp::Ordering::Equal => self.headers.reset(),
            cmp::Ordering::Greater => self.headers.advance(cnt),
            cmp::Ordering::Less => {
                let qcnt = cnt - hrem;
                self.headers.reset();
                self.queue.advance(qcnt);
            }
        }
    }
    #[inline]
    fn chunks_vectored<'t>(&'t self, dst: &mut [IoSlice<'t>]) -> usize {
        let n = self.headers.chunks_vectored(dst);
        self.queue.chunks_vectored(&mut dst[n..]) + n
    }
}
/// How outgoing body buffers are combined with the headers buffer.
#[derive(Debug)]
enum WriteStrategy {
    /// Copy everything into one contiguous buffer (single write call).
    Flatten,
    /// Keep buffers separate and write them with vectored IO.
    Queue,
}
#[cfg(test)]
mod tests {
    //! Unit tests for the buffered IO layer: read-strategy adaptation,
    //! write-buffer strategies, and parse/read interaction with mock IO.
    use super::*;
    use std::time::Duration;
    use tokio_test::io::Builder as Mock;
    // #[cfg(feature = "nightly")]
    // use test::Bencher;
    /*
    impl<T: Read> MemRead for AsyncIo<T> {
        fn read_mem(&mut self, len: usize) -> Poll<Bytes, io::Error> {
            let mut v = vec![0; len];
            let n = try_nb!(self.read(v.as_mut_slice()));
            Ok(Async::Ready(BytesMut::from(&v[..n]).freeze()))
        }
    }
    */
    #[tokio::test]
    #[ignore]
    async fn iobuf_write_empty_slice() {
        // TODO(eliza): can i have writev back pls T_T
        // // First, let's just check that the Mock would normally return an
        // // error on an unexpected write, even if the buffer is empty...
        // let mut mock = Mock::new().build();
        // futures_util::future::poll_fn(|cx| {
        //     Pin::new(&mut mock).poll_write_buf(cx, &mut Cursor::new(&[]))
        // })
        // .await
        // .expect_err("should be a broken pipe");
        // // underlying io will return the logic error upon write,
        // // so we are testing that the io_buf does not trigger a write
        // // when there is nothing to flush
        // let mock = Mock::new().build();
        // let mut io_buf = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        // io_buf.flush().await.expect("should short-circuit flush");
    }
    // A partial head (missing the final blank line) should leave `parse`
    // pending while the bytes read so far stay buffered.
    #[tokio::test]
    async fn parse_reads_until_blocked() {
        use crate::proto::h1::ClientTransaction;
        let _ = pretty_env_logger::try_init();
        let mock = Mock::new()
            // Split over multiple reads will read all of it
            .read(b"HTTP/1.1 200 OK\r\n")
            .read(b"Server: hyper\r\n")
            // missing last line ending
            .wait(Duration::from_secs(1))
            .build();
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        // We expect a `parse` to be not ready, and so can't await it directly.
        // Rather, this `poll_fn` will wrap the `Poll` result.
        futures_util::future::poll_fn(|cx| {
            let parse_ctx = ParseContext {
                cached_headers: &mut None,
                req_method: &mut None,
                h1_parser_config: Default::default(),
                preserve_header_case: false,
                h09_responses: false,
            };
            assert!(buffered
                .parse::<ClientTransaction>(cx, parse_ctx)
                .is_pending());
            Poll::Ready(())
        })
        .await;
        assert_eq!(
            buffered.read_buf,
            b"HTTP/1.1 200 OK\r\nServer: hyper\r\n"[..]
        );
    }
    #[test]
    fn read_strategy_adaptive_increments() {
        let mut strategy = ReadStrategy::default();
        assert_eq!(strategy.next(), 8192);
        // Grows if record == next
        strategy.record(8192);
        assert_eq!(strategy.next(), 16384);
        strategy.record(16384);
        assert_eq!(strategy.next(), 32768);
        // Enormous records still increment at same rate
        strategy.record(::std::usize::MAX);
        assert_eq!(strategy.next(), 65536);
        let max = strategy.max();
        while strategy.next() < max {
            strategy.record(max);
        }
        assert_eq!(strategy.next(), max, "never goes over max");
        strategy.record(max + 1);
        assert_eq!(strategy.next(), max, "never goes over max");
    }
    #[test]
    fn read_strategy_adaptive_decrements() {
        let mut strategy = ReadStrategy::default();
        strategy.record(8192);
        assert_eq!(strategy.next(), 16384);
        strategy.record(1);
        assert_eq!(
            strategy.next(),
            16384,
            "first smaller record doesn't decrement yet"
        );
        strategy.record(8192);
        assert_eq!(strategy.next(), 16384, "record was with range");
        strategy.record(1);
        assert_eq!(
            strategy.next(),
            16384,
            "in-range record should make this the 'first' again"
        );
        strategy.record(1);
        assert_eq!(strategy.next(), 8192, "second smaller record decrements");
        strategy.record(1);
        assert_eq!(strategy.next(), 8192, "first doesn't decrement");
        strategy.record(1);
        assert_eq!(strategy.next(), 8192, "doesn't decrement under minimum");
    }
    #[test]
    fn read_strategy_adaptive_stays_the_same() {
        let mut strategy = ReadStrategy::default();
        strategy.record(8192);
        assert_eq!(strategy.next(), 16384);
        strategy.record(8193);
        assert_eq!(
            strategy.next(),
            16384,
            "first smaller record doesn't decrement yet"
        );
        strategy.record(8193);
        assert_eq!(
            strategy.next(),
            16384,
            "with current step does not decrement"
        );
    }
    // Fuzz the shrink path across many max values: every decremented step
    // must remain a power of two.
    #[test]
    fn read_strategy_adaptive_max_fuzz() {
        fn fuzz(max: usize) {
            let mut strategy = ReadStrategy::with_max(max);
            while strategy.next() < max {
                strategy.record(::std::usize::MAX);
            }
            let mut next = strategy.next();
            while next > 8192 {
                strategy.record(1);
                strategy.record(1);
                next = strategy.next();
                assert!(
                    next.is_power_of_two(),
                    "decrement should be powers of two: {} (max = {})",
                    next,
                    max,
                );
            }
        }
        let mut max = 8192;
        while max < std::usize::MAX {
            fuzz(max);
            max = (max / 2).saturating_mul(3);
        }
        fuzz(::std::usize::MAX);
    }
    #[test]
    #[should_panic]
    #[cfg(debug_assertions)] // needs to trigger a debug_assert
    fn write_buf_requires_non_empty_bufs() {
        let mock = Mock::new().build();
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        buffered.buffer(Cursor::new(Vec::new()));
    }
    /*
    TODO: needs tokio_test::io to allow configure write_buf calls
    #[test]
    fn write_buf_queue() {
        let _ = pretty_env_logger::try_init();
        let mock = AsyncIo::new_buf(vec![], 1024);
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        buffered.headers_buf().extend(b"hello ");
        buffered.buffer(Cursor::new(b"world, ".to_vec()));
        buffered.buffer(Cursor::new(b"it's ".to_vec()));
        buffered.buffer(Cursor::new(b"hyper!".to_vec()));
        assert_eq!(buffered.write_buf.queue.bufs_cnt(), 3);
        buffered.flush().unwrap();
        assert_eq!(buffered.io, b"hello world, it's hyper!");
        assert_eq!(buffered.io.num_writes(), 1);
        assert_eq!(buffered.write_buf.queue.bufs_cnt(), 0);
    }
    */
    // Flatten strategy copies every body into the headers buffer, so the
    // mock expects one contiguous write and the queue stays empty.
    #[tokio::test]
    async fn write_buf_flatten() {
        let _ = pretty_env_logger::try_init();
        let mock = Mock::new()
            .write(b"hello world, it's hyper!")
            .build();
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        buffered.write_buf.set_strategy(WriteStrategy::Flatten);
        buffered.headers_buf().extend(b"hello ");
        buffered.buffer(Cursor::new(b"world, ".to_vec()));
        buffered.buffer(Cursor::new(b"it's ".to_vec()));
        buffered.buffer(Cursor::new(b"hyper!".to_vec()));
        assert_eq!(buffered.write_buf.queue.bufs_cnt(), 0);
        buffered.flush().await.expect("flush");
    }
    #[test]
    fn write_buf_flatten_partially_flushed() {
        let _ = pretty_env_logger::try_init();
        let b = |s: &str| Cursor::new(s.as_bytes().to_vec());
        let mut write_buf = WriteBuf::<Cursor<Vec<u8>>>::new(WriteStrategy::Flatten);
        write_buf.buffer(b("hello "));
        write_buf.buffer(b("world, "));
        assert_eq!(write_buf.chunk(), b"hello world, ");
        // advance most of the way, but not all
        write_buf.advance(11);
        assert_eq!(write_buf.chunk(), b", ");
        assert_eq!(write_buf.headers.pos, 11);
        assert_eq!(write_buf.headers.bytes.capacity(), INIT_BUFFER_SIZE);
        // there's still room in the headers buffer, so just push on the end
        write_buf.buffer(b("it's hyper!"));
        assert_eq!(write_buf.chunk(), b", it's hyper!");
        assert_eq!(write_buf.headers.pos, 11);
        let rem1 = write_buf.remaining();
        let cap = write_buf.headers.bytes.capacity();
        // but when this would go over capacity, don't copy the old bytes
        write_buf.buffer(Cursor::new(vec![b'X'; cap]));
        assert_eq!(write_buf.remaining(), cap + rem1);
        assert_eq!(write_buf.headers.pos, 0);
    }
    #[tokio::test]
    async fn write_buf_queue_disable_auto() {
        let _ = pretty_env_logger::try_init();
        let mock = Mock::new()
            .write(b"hello ")
            .write(b"world, ")
            .write(b"it's ")
            .write(b"hyper!")
            .build();
        let mut buffered = Buffered::<_, Cursor<Vec<u8>>>::new(mock);
        buffered.write_buf.set_strategy(WriteStrategy::Queue);
        // we have 4 buffers, and vec IO disabled, but explicitly said
        // don't try to auto detect (via setting strategy above)
        buffered.headers_buf().extend(b"hello ");
        buffered.buffer(Cursor::new(b"world, ".to_vec()));
        buffered.buffer(Cursor::new(b"it's ".to_vec()));
        buffered.buffer(Cursor::new(b"hyper!".to_vec()));
        assert_eq!(buffered.write_buf.queue.bufs_cnt(), 3);
        buffered.flush().await.expect("flush");
        assert_eq!(buffered.write_buf.queue.bufs_cnt(), 0);
    }
    // #[cfg(feature = "nightly")]
    // #[bench]
    // fn bench_write_buf_flatten_buffer_chunk(b: &mut Bencher) {
    //     let s = "Hello, World!";
    //     b.bytes = s.len() as u64;
    //     let mut write_buf = WriteBuf::<bytes::Bytes>::new();
    //     write_buf.set_strategy(WriteStrategy::Flatten);
    //     b.iter(|| {
    //         let chunk = bytes::Bytes::from(s);
    //         write_buf.buffer(chunk);
    //         ::test::black_box(&write_buf);
    //         write_buf.headers.bytes.clear();
    //     })
    // }
}
| 31.460369 | 100 | 0.531734 |
b9a40cf7db2540b64e3086998c559b0a44a1af4a | 9,596 | use crate::render::svg::*;
use crate::shape::axis::{Axis, AxisPosition};
use crate::view::View;
use crate::{BandScale, Error, LinearScale};
use std::path::Path;
use svg::Node;
// Default margins (in pixels) between the document edges and the view area;
// the larger top margin leaves room for the title.
const DEFAULT_MARGIN_TOP: i32 = 90;
const DEFAULT_MARGIN_BOTTOM: i32 = 50;
const DEFAULT_MARGIN_LEFT: i32 = 60;
const DEFAULT_MARGIN_RIGHT: i32 = 40;
// Default overall document size in pixels.
const DEFAULT_WIDTH: i32 = 800;
const DEFAULT_HEIGHT: i32 = 600;
// Title font size and its vertical offset from the document's top edge.
const DEFAULT_TITLE_FONT_SIZE: &str = "24px";
const DEFAULT_TITLE_Y_TRANSFORM: i32 = 25;
/// Chart represents a single document with one or more views, axes and a title.
/// It will also contain grid and legend in the future.
pub struct Chart<'a> {
    // Margins (px) between the document edges and the drawable view area.
    margin_top: i32,
    margin_bottom: i32,
    margin_left: i32,
    margin_right: i32,
    // Total document size in pixels.
    width: i32,
    height: i32,
    // Optional axes on each side of the view area.
    x_axis_top: Option<Axis>,
    x_axis_bottom: Option<Axis>,
    y_axis_left: Option<Axis>,
    y_axis_right: Option<Axis>,
    // Views rendered inside the margins, in insertion order.
    views: Vec<&'a dyn View>,
    // Title text drawn centered at the top; an empty string means no title.
    title: String,
}
impl<'a> Chart<'a> {
    /// Create a new chart.
    pub fn new() -> Self {
        Chart {
            margin_top: DEFAULT_MARGIN_TOP,
            margin_bottom: DEFAULT_MARGIN_BOTTOM,
            margin_left: DEFAULT_MARGIN_LEFT,
            margin_right: DEFAULT_MARGIN_RIGHT,
            width: DEFAULT_WIDTH,
            height: DEFAULT_HEIGHT,
            x_axis_top: None,
            x_axis_bottom: None,
            y_axis_left: None,
            y_axis_right: None,
            views: Vec::new(),
            title: String::new(),
        }
    }
    /// Get chart width that can be used for views.
    pub fn view_width(&self) -> i32 {
        self.width - self.margin_left - self.margin_right
    }
    /// Get chart height that can be used for views.
    pub fn view_height(&self) -> i32 {
        self.height - self.margin_top - self.margin_bottom
    }
    /// Set chart top margin.
    pub fn set_margin_top(mut self, margin_top: i32) -> Self {
        self.margin_top = margin_top;
        self
    }
    /// Set chart bottom margin.
    pub fn set_margin_bottom(mut self, margin_bottom: i32) -> Self {
        self.margin_bottom = margin_bottom;
        self
    }
    /// Set chart left margin.
    pub fn set_margin_left(mut self, margin_left: i32) -> Self {
        self.margin_left = margin_left;
        self
    }
    /// Set chart right margin.
    pub fn set_margin_right(mut self, margin_right: i32) -> Self {
        self.margin_right = margin_right;
        self
    }
    /// Set chart width.
    pub fn set_width(mut self, width: i32) -> Self {
        self.width = width;
        self
    }
    /// Set chart height.
    pub fn set_height(mut self, height: i32) -> Self {
        self.height = height;
        self
    }
    /// Set BandScale for top axis.
    ///
    /// Note: the axis is sized from the chart's *current* view dimensions,
    /// so configure size and margins before adding axes.
    pub fn set_axis_top_band(mut self, scale: BandScale) -> Self {
        self.x_axis_top = Some(Axis::new(
            &scale,
            AxisPosition::Top,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set LinearScale for top axis.
    pub fn set_axis_top_linear(mut self, scale: LinearScale) -> Self {
        self.x_axis_top = Some(Axis::new(
            &scale,
            AxisPosition::Top,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set BandScale for bottom axis.
    pub fn set_axis_bottom_band(mut self, scale: BandScale) -> Self {
        self.x_axis_bottom = Some(Axis::new(
            &scale,
            AxisPosition::Bottom,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set LinearScale for bottom axis.
    pub fn set_axis_bottom_linear(mut self, scale: LinearScale) -> Self {
        self.x_axis_bottom = Some(Axis::new(
            &scale,
            AxisPosition::Bottom,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set BandScale for left axis.
    pub fn set_axis_left_band(mut self, scale: BandScale) -> Self {
        self.y_axis_left = Some(Axis::new(
            &scale,
            AxisPosition::Left,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set LinearScale for left axis.
    pub fn set_axis_left_linear(mut self, scale: LinearScale) -> Self {
        self.y_axis_left = Some(Axis::new(
            &scale,
            AxisPosition::Left,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set BandScale for right axis.
    pub fn set_axis_right_band(mut self, scale: BandScale) -> Self {
        self.y_axis_right = Some(Axis::new(
            &scale,
            AxisPosition::Right,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set LinearScale for right axis.
    pub fn set_axis_right_linear(mut self, scale: LinearScale) -> Self {
        self.y_axis_right = Some(Axis::new(
            &scale,
            AxisPosition::Right,
            self.view_width(),
            self.view_height(),
        ));
        self
    }
    /// Set label for top axis. No-op if the top axis has not been set yet.
    pub fn set_axis_top_label(mut self, label: &str) -> Self {
        if let Some(ref mut axis) = self.x_axis_top {
            axis.set_label(label);
        }
        self
    }
    /// Set label for bottom axis. No-op if the bottom axis has not been set yet.
    pub fn set_axis_bottom_label(mut self, label: &str) -> Self {
        if let Some(ref mut axis) = self.x_axis_bottom {
            axis.set_label(label);
        }
        self
    }
    /// Set label for left axis. No-op if the left axis has not been set yet.
    pub fn set_axis_left_label(mut self, label: &str) -> Self {
        if let Some(ref mut axis) = self.y_axis_left {
            axis.set_label(label);
        }
        self
    }
    /// Set label for right axis. No-op if the right axis has not been set yet.
    pub fn set_axis_right_label(mut self, label: &str) -> Self {
        if let Some(ref mut axis) = self.y_axis_right {
            axis.set_label(label);
        }
        self
    }
    /// Set chart title.
    pub fn set_title(mut self, title: &str) -> Self {
        self.title = title.to_string();
        self
    }
    /// Add a view to chart.
    pub fn add_view(mut self, view: &'a dyn View) -> Self {
        self.views.push(view);
        self
    }
    /// Set chart views.
    pub fn set_views(mut self, views: Vec<&'a dyn View>) -> Self {
        self.views = views;
        self
    }
    /// Get chart SVG representation: axes first, then views (translated
    /// inside the margins), then the optional centered title.
    pub fn to_svg(&self) -> svg::Document {
        let mut res = svg::node::element::Group::new().set(CLASS_ATTR, CLASS_CHART);
        // Add axes.
        if let Some(ref axis) = self.x_axis_top {
            let mut axis_group = axis.to_svg();
            axis_group.assign(
                TRANSFORM_ATTR,
                translate_x_y(self.margin_left, self.margin_top),
            );
            res.append(axis_group);
        };
        if let Some(ref axis) = self.x_axis_bottom {
            let mut axis_group = axis.to_svg();
            axis_group.assign(
                TRANSFORM_ATTR,
                translate_x_y(self.margin_left, self.height - self.margin_bottom),
            );
            res.append(axis_group);
        };
        if let Some(ref axis) = self.y_axis_left {
            let mut axis_group = axis.to_svg();
            axis_group.assign(
                TRANSFORM_ATTR,
                translate_x_y(self.margin_left, self.margin_top),
            );
            res.append(axis_group);
        };
        if let Some(ref axis) = self.y_axis_right {
            let mut axis_group = axis.to_svg();
            axis_group.assign(
                TRANSFORM_ATTR,
                translate_x_y(self.width - self.margin_right, self.margin_top),
            );
            res.append(axis_group);
        };
        // Add views.
        let mut views_group = svg::node::element::Group::new()
            .set(CLASS_ATTR, CLASS_VIEWS)
            .set(
                TRANSFORM_ATTR,
                translate_x_y(self.margin_left, self.margin_top),
            );
        for view in self.views.iter() {
            views_group.append(view.to_svg());
        }
        res.append(views_group);
        // Add title.
        if !self.title.is_empty() {
            let title_group = svg::node::element::Group::new()
                .set(CLASS_ATTR, CLASS_TITLE)
                .set(
                    TRANSFORM_ATTR,
                    translate_x_y(self.width / 2, DEFAULT_TITLE_Y_TRANSFORM),
                )
                .add(
                    svg::node::element::Text::new()
                        .set(X_ATTR, START)
                        .set(Y_ATTR, START)
                        .set(DY_ATTR, DEFAULT_DY)
                        .set(FILL_ATTR, DEFAULT_FONT_COLOR)
                        .set(TEXT_ANCHOR_ATTR, TEXT_ANCHOR_MIDDLE)
                        .set(FONT_SIZE_ATTR, DEFAULT_TITLE_FONT_SIZE)
                        .set(FONT_FAMILY_ATTR, DEFAULT_FONT_FAMILY)
                        .add(svg::node::Text::new(&self.title)),
                );
            res.append(title_group);
        }
        svg::Document::new()
            .set(WIDTH_ATTR, self.width)
            .set(HEIGHT_ATTR, self.height)
            .set(VIEW_BOX_ATTR, (START, START, self.width, self.height))
            .add(res)
    }
    /// Save chart to SVG file at the specified path.
    pub fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), Error> {
        svg::save(path, &self.to_svg())?;
        Ok(())
    }
}
impl<'a> Default for Chart<'a> {
fn default() -> Self {
Self::new()
}
}
| 29.078788 | 84 | 0.542309 |
f80d14ad4ad695f63023a7daa7f70802e697a32a | 48,578 | //! # Tokens Module
//!
//! ## Overview
//!
//! The tokens module provides fungible multi-currency functionality that
//! implements `MultiCurrency` trait.
//!
//! The tokens module provides functions for:
//!
//! - Querying and setting the balance of a given account.
//! - Getting and managing total issuance.
//! - Balance transfer between accounts.
//! - Depositing and withdrawing balance.
//! - Slashing an account balance.
//!
//! ### Implementations
//!
//! The tokens module provides implementations for following traits.
//!
//! - `MultiCurrency` - Abstraction over a fungible multi-currency system.
//! - `MultiCurrencyExtended` - Extended `MultiCurrency` with additional helper
//! types and methods, like updating balance
//! by a given signed integer amount.
//!
//! ## Interface
//!
//! ### Dispatchable Functions
//!
//! - `transfer` - Transfer some balance to another account.
//! - `transfer_all` - Transfer all balance to another account.
//!
//! ### Genesis Config
//!
//! The tokens module depends on the `GenesisConfig`. Endowed accounts could be
//! configured in genesis configs.
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::unused_unit)]
pub use crate::imbalances::{NegativeImbalance, PositiveImbalance};
use frame_support::{
ensure, log,
pallet_prelude::*,
traits::{
tokens::{fungible, fungibles, DepositConsequence, WithdrawConsequence},
BalanceStatus as Status, Currency as PalletCurrency, ExistenceRequirement, Get, Imbalance,
LockableCurrency as PalletLockableCurrency, MaxEncodedLen, ReservableCurrency as PalletReservableCurrency,
SignedImbalance, WithdrawReasons,
},
transactional, BoundedVec, PalletId,
};
use frame_system::{ensure_signed, pallet_prelude::*};
use orml_traits::{
arithmetic::{self, Signed},
currency::TransferAll,
BalanceStatus, GetByKey, LockIdentifier, MultiCurrency, MultiCurrencyExtended, MultiLockableCurrency,
MultiReservableCurrency, OnDust,
};
use sp_runtime::{
traits::{
AccountIdConversion, AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, Member,
Saturating, StaticLookup, Zero,
},
ArithmeticError, DispatchError, DispatchResult, RuntimeDebug,
};
use sp_std::{
convert::{Infallible, TryFrom, TryInto},
marker,
prelude::*,
vec::Vec,
};
mod imbalances;
mod mock;
mod tests;
mod weights;
pub use weights::WeightInfo;
/// `OnDust` handler that moves dust balances into the account returned by
/// `GetAccountId` (typically a treasury account).
pub struct TransferDust<T, GetAccountId>(marker::PhantomData<(T, GetAccountId)>);
impl<T, GetAccountId> OnDust<T::AccountId, T::CurrencyId, T::Balance> for TransferDust<T, GetAccountId>
where
    T: Config,
    GetAccountId: Get<T::AccountId>,
{
    fn on_dust(who: &T::AccountId, currency_id: T::CurrencyId, amount: T::Balance) {
        // transfer the dust to treasury account, ignore the result,
        // if failed will leave some dust which still could be recycled.
        let _ = <Pallet<T> as MultiCurrency<T::AccountId>>::transfer(currency_id, who, &GetAccountId::get(), amount);
    }
}
/// `OnDust` handler that destroys dust balances by withdrawing them,
/// reducing total issuance.
pub struct BurnDust<T>(marker::PhantomData<T>);
impl<T: Config> OnDust<T::AccountId, T::CurrencyId, T::Balance> for BurnDust<T> {
    fn on_dust(who: &T::AccountId, currency_id: T::CurrencyId, amount: T::Balance) {
        // burn the dust, ignore the result,
        // if failed will leave some dust which still could be recycled.
        let _ = Pallet::<T>::withdraw(currency_id, who, amount);
    }
}
/// A single lock on a balance. There can be many of these on an account and
/// they "overlap", so the same balance is frozen by multiple locks.
///
/// NOTE: the account's `frozen` amount is maintained as the maximum over all
/// of its lock amounts (see `Pallet::update_locks`), not their sum.
#[derive(Encode, Decode, Clone, PartialEq, Eq, MaxEncodedLen, RuntimeDebug)]
pub struct BalanceLock<Balance> {
	/// An identifier for this lock. Only one lock may be in existence for
	/// each identifier.
	pub id: LockIdentifier,
	/// The amount which the free balance may not drop below when this lock
	/// is in effect.
	pub amount: Balance,
}
/// Balance information for an account, stored per (account, currency) pair
/// in the `Accounts` storage double-map.
#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, MaxEncodedLen, RuntimeDebug)]
pub struct AccountData<Balance> {
	/// Non-reserved part of the balance. There may still be restrictions on
	/// this, but it is the total pool what may in principle be transferred,
	/// reserved.
	///
	/// This is the only balance that matters in terms of most operations on
	/// tokens.
	pub free: Balance,
	/// Balance which is reserved and may not be used at all.
	///
	/// This can still get slashed, but gets slashed last of all.
	///
	/// This balance is a 'reserve' balance that other subsystems use in
	/// order to set aside tokens that are still 'owned' by the account
	/// holder, but which are suspendable.
	pub reserved: Balance,
	/// The amount that `free` may not drop below when withdrawing.
	/// Maintained as the maximum of all lock amounts (see `update_locks`).
	pub frozen: Balance,
}
impl<Balance: Saturating + Copy + Ord> AccountData<Balance> {
	/// The amount that this account's free balance may not be reduced
	/// beyond.
	pub(crate) fn frozen(&self) -> Balance {
		self.frozen
	}
	/// The total balance in this account including any that is reserved and
	/// ignoring any frozen.
	fn total(&self) -> Balance {
		// Saturating add avoids an overflow panic for pathological balances.
		self.free.saturating_add(self.reserved)
	}
}
pub use module::*;
#[frame_support::pallet]
pub mod module {
	use super::*;
	#[pallet::config]
	pub trait Config: frame_system::Config {
		/// The overarching event type.
		type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
		/// The balance type
		type Balance: Parameter
			+ Member
			+ AtLeast32BitUnsigned
			+ Default
			+ Copy
			+ MaybeSerializeDeserialize
			+ MaxEncodedLen;
		/// The amount type, should be signed version of `Balance`
		type Amount: Signed
			+ TryInto<Self::Balance>
			+ TryFrom<Self::Balance>
			+ Parameter
			+ Member
			+ arithmetic::SimpleArithmetic
			+ Default
			+ Copy
			+ MaybeSerializeDeserialize;
		/// The currency ID type
		// TODO: remove `Default` after https://github.com/paritytech/substrate/pull/9062
		type CurrencyId: Parameter + Member + Copy + MaybeSerializeDeserialize + Ord + Default;
		/// Weight information for extrinsics in this module.
		type WeightInfo: WeightInfo;
		/// The minimum amount required to keep an account.
		type ExistentialDeposits: GetByKey<Self::CurrencyId, Self::Balance>;
		/// Handler to burn or transfer account's dust
		type OnDust: OnDust<Self::AccountId, Self::CurrencyId, Self::Balance>;
		/// The maximum number of locks a single (account, currency) pair may hold.
		type MaxLocks: Get<u32>;
	}
	#[pallet::error]
	pub enum Error<T> {
		/// The balance is too low
		BalanceTooLow,
		/// Cannot convert Amount into Balance type
		AmountIntoBalanceFailed,
		/// Failed because liquidity restrictions due to locking
		LiquidityRestrictions,
		/// Failed because the maximum locks was exceeded
		MaxLocksExceeded,
		/// Transfer/payment would kill account
		KeepAlive,
	}
	#[pallet::event]
	#[pallet::generate_deposit(pub(crate) fn deposit_event)]
	pub enum Event<T: Config> {
		/// An account was created with some free balance. \[currency_id,
		/// account, free_balance\]
		Endowed(T::CurrencyId, T::AccountId, T::Balance),
		/// An account was removed whose balance was non-zero but below
		/// ExistentialDeposit, resulting in an outright loss. \[currency_id,
		/// account, balance\]
		DustLost(T::CurrencyId, T::AccountId, T::Balance),
		/// Transfer succeeded. \[currency_id, from, to, value\]
		Transfer(T::CurrencyId, T::AccountId, T::AccountId, T::Balance),
		/// Some balance was reserved (moved from free to reserved).
		/// \[currency_id, who, value\]
		Reserved(T::CurrencyId, T::AccountId, T::Balance),
		/// Some balance was unreserved (moved from reserved to free).
		/// \[currency_id, who, value\]
		Unreserved(T::CurrencyId, T::AccountId, T::Balance),
	}
	/// The total issuance of a token type.
	#[pallet::storage]
	#[pallet::getter(fn total_issuance)]
	pub type TotalIssuance<T: Config> = StorageMap<_, Twox64Concat, T::CurrencyId, T::Balance, ValueQuery>;
	/// Any liquidity locks of a token type under an account.
	/// NOTE: Should only be accessed when setting, changing and freeing a lock.
	#[pallet::storage]
	#[pallet::getter(fn locks)]
	pub type Locks<T: Config> = StorageDoubleMap<
		_,
		Blake2_128Concat,
		T::AccountId,
		Twox64Concat,
		T::CurrencyId,
		BoundedVec<BalanceLock<T::Balance>, T::MaxLocks>,
		ValueQuery,
	>;
	/// The balance of a token type under an account.
	///
	/// NOTE: If the total is ever zero, decrease account ref account.
	///
	/// NOTE: This is only used in the case that this module is used to store
	/// balances.
	#[pallet::storage]
	#[pallet::getter(fn accounts)]
	pub type Accounts<T: Config> = StorageDoubleMap<
		_,
		Blake2_128Concat,
		T::AccountId,
		Twox64Concat,
		T::CurrencyId,
		AccountData<T::Balance>,
		ValueQuery,
	>;
	/// Initial balances endowed at genesis, as (account, currency, amount).
	#[pallet::genesis_config]
	pub struct GenesisConfig<T: Config> {
		pub balances: Vec<(T::AccountId, T::CurrencyId, T::Balance)>,
	}
	#[cfg(feature = "std")]
	impl<T: Config> Default for GenesisConfig<T> {
		fn default() -> Self {
			GenesisConfig { balances: vec![] }
		}
	}
	#[pallet::genesis_build]
	impl<T: Config> GenesisBuild<T> for GenesisConfig<T> {
		fn build(&self) {
			// ensure no duplicates exist.
			let unique_endowed_accounts = self
				.balances
				.iter()
				.map(|(account_id, currency_id, _)| (account_id, currency_id))
				.collect::<std::collections::BTreeSet<_>>();
			assert!(
				unique_endowed_accounts.len() == self.balances.len(),
				"duplicate endowed accounts in genesis."
			);
			self.balances
				.iter()
				.for_each(|(account_id, currency_id, initial_balance)| {
					assert!(
						*initial_balance >= T::ExistentialDeposits::get(&currency_id),
						"the balance of any account should always be more than existential deposit.",
					);
					Pallet::<T>::mutate_account(account_id, *currency_id, |account_data, _| {
						account_data.free = *initial_balance
					});
					TotalIssuance::<T>::mutate(*currency_id, |total_issuance| {
						*total_issuance = total_issuance
							.checked_add(initial_balance)
							.expect("total issuance cannot overflow when building genesis")
					});
				});
		}
	}
	#[pallet::pallet]
	pub struct Pallet<T>(_);
	#[pallet::hooks]
	impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}
	#[pallet::call]
	impl<T: Config> Pallet<T> {
		/// Transfer some balance to another account.
		///
		/// The dispatch origin for this call must be `Signed` by the
		/// transactor.
		#[pallet::weight(T::WeightInfo::transfer())]
		pub fn transfer(
			origin: OriginFor<T>,
			dest: <T::Lookup as StaticLookup>::Source,
			currency_id: T::CurrencyId,
			#[pallet::compact] amount: T::Balance,
		) -> DispatchResultWithPostInfo {
			let from = ensure_signed(origin)?;
			let to = T::Lookup::lookup(dest)?;
			<Self as MultiCurrency<_>>::transfer(currency_id, &from, &to, amount)?;
			Self::deposit_event(Event::Transfer(currency_id, from, to, amount));
			Ok(().into())
		}
		/// Transfer all remaining balance to the given account.
		///
		/// The dispatch origin for this call must be `Signed` by the
		/// transactor.
		#[pallet::weight(T::WeightInfo::transfer_all())]
		pub fn transfer_all(
			origin: OriginFor<T>,
			dest: <T::Lookup as StaticLookup>::Source,
			currency_id: T::CurrencyId,
		) -> DispatchResultWithPostInfo {
			let from = ensure_signed(origin)?;
			let to = T::Lookup::lookup(dest)?;
			let balance = <Self as MultiCurrency<T::AccountId>>::free_balance(currency_id, &from);
			<Self as MultiCurrency<T::AccountId>>::transfer(currency_id, &from, &to, balance)?;
			Self::deposit_event(Event::Transfer(currency_id, from, to, balance));
			Ok(().into())
		}
	}
}
impl<T: Config> Pallet<T> {
/// Check whether account_id is a module account
pub(crate) fn is_module_account_id(account_id: &T::AccountId) -> bool {
PalletId::try_from_account(account_id).is_some()
}
pub(crate) fn deposit_consequence(
_who: &T::AccountId,
currency_id: T::CurrencyId,
amount: T::Balance,
account: &AccountData<T::Balance>,
) -> DepositConsequence {
if amount.is_zero() {
return DepositConsequence::Success;
}
if TotalIssuance::<T>::get(currency_id).checked_add(&amount).is_none() {
return DepositConsequence::Overflow;
}
let new_total_balance = match account.total().checked_add(&amount) {
Some(x) => x,
None => return DepositConsequence::Overflow,
};
if new_total_balance < T::ExistentialDeposits::get(¤cy_id) {
return DepositConsequence::BelowMinimum;
}
// NOTE: We assume that we are a provider, so don't need to do any checks in the
// case of account creation.
DepositConsequence::Success
}
pub(crate) fn withdraw_consequence(
who: &T::AccountId,
currency_id: T::CurrencyId,
amount: T::Balance,
account: &AccountData<T::Balance>,
) -> WithdrawConsequence<T::Balance> {
if amount.is_zero() {
return WithdrawConsequence::Success;
}
if TotalIssuance::<T>::get(currency_id).checked_sub(&amount).is_none() {
return WithdrawConsequence::Underflow;
}
let new_total_balance = match account.total().checked_sub(&amount) {
Some(x) => x,
None => return WithdrawConsequence::NoFunds,
};
// Provider restriction - total account balance cannot be reduced to zero if it
// cannot sustain the loss of a provider reference.
// NOTE: This assumes that the pallet is a provider (which is true). Is this
// ever changes, then this will need to adapt accordingly.
let ed = T::ExistentialDeposits::get(¤cy_id);
let success = if new_total_balance < ed {
if frame_system::Pallet::<T>::can_dec_provider(who) {
WithdrawConsequence::ReducedToZero(new_total_balance)
} else {
return WithdrawConsequence::WouldDie;
}
} else {
WithdrawConsequence::Success
};
// Enough free funds to have them be reduced.
let new_free_balance = match account.free.checked_sub(&amount) {
Some(b) => b,
None => return WithdrawConsequence::NoFunds,
};
// Eventual free funds must be no less than the frozen balance.
if new_free_balance < account.frozen() {
return WithdrawConsequence::Frozen;
}
success
}
pub(crate) fn try_mutate_account<R, E>(
who: &T::AccountId,
currency_id: T::CurrencyId,
f: impl FnOnce(&mut AccountData<T::Balance>, bool) -> sp_std::result::Result<R, E>,
) -> sp_std::result::Result<R, E> {
Accounts::<T>::try_mutate_exists(who, currency_id, |maybe_account| {
let existed = maybe_account.is_some();
let mut account = maybe_account.take().unwrap_or_default();
f(&mut account, existed).map(move |result| {
let maybe_endowed = if !existed { Some(account.free) } else { None };
let mut maybe_dust: Option<T::Balance> = None;
let total = account.total();
*maybe_account = if total.is_zero() {
None
} else {
// if non_zero total is below existential deposit and the account is not a
// module account, should handle the dust.
if total < T::ExistentialDeposits::get(¤cy_id) && !Self::is_module_account_id(who) {
maybe_dust = Some(total);
}
Some(account)
};
(maybe_endowed, existed, maybe_account.is_some(), maybe_dust, result)
})
})
.map(|(maybe_endowed, existed, exists, maybe_dust, result)| {
if existed && !exists {
// If existed before, decrease account provider.
// Ignore the result, because if it failed then there are remaining consumers,
// and the account storage in frame_system shouldn't be reaped.
let _ = frame_system::Pallet::<T>::dec_providers(who);
} else if !existed && exists {
// if new, increase account provider
frame_system::Pallet::<T>::inc_providers(who);
}
if let Some(endowed) = maybe_endowed {
Self::deposit_event(Event::Endowed(currency_id, who.clone(), endowed));
}
if let Some(dust_amount) = maybe_dust {
// `OnDust` maybe get/set storage `Accounts` of `who`, trigger handler here
// to avoid some unexpected errors.
T::OnDust::on_dust(who, currency_id, dust_amount);
Self::deposit_event(Event::DustLost(currency_id, who.clone(), dust_amount));
}
result
})
}
pub(crate) fn mutate_account<R>(
who: &T::AccountId,
currency_id: T::CurrencyId,
f: impl FnOnce(&mut AccountData<T::Balance>, bool) -> R,
) -> R {
Self::try_mutate_account(who, currency_id, |account, existed| -> Result<R, Infallible> {
Ok(f(account, existed))
})
.expect("Error is infallible; qed")
}
/// Set free balance of `who` to a new value.
///
/// Note this will not maintain total issuance, and the caller is
/// expected to do it.
pub(crate) fn set_free_balance(currency_id: T::CurrencyId, who: &T::AccountId, amount: T::Balance) {
Self::mutate_account(who, currency_id, |account, _| {
account.free = amount;
});
}
/// Set reserved balance of `who` to a new value.
///
/// Note this will not maintain total issuance, and the caller is
/// expected to do it.
pub(crate) fn set_reserved_balance(currency_id: T::CurrencyId, who: &T::AccountId, amount: T::Balance) {
Self::mutate_account(who, currency_id, |account, _| {
account.reserved = amount;
});
}
/// Update the account entry for `who` under `currency_id`, given the
/// locks.
pub(crate) fn update_locks(
currency_id: T::CurrencyId,
who: &T::AccountId,
locks: &[BalanceLock<T::Balance>],
) -> DispatchResult {
// update account data
Self::mutate_account(who, currency_id, |account, _| {
account.frozen = Zero::zero();
for lock in locks.iter() {
account.frozen = account.frozen.max(lock.amount);
}
});
// update locks
let existed = <Locks<T>>::contains_key(who, currency_id);
if locks.is_empty() {
<Locks<T>>::remove(who, currency_id);
if existed {
// decrease account ref count when destruct lock
frame_system::Pallet::<T>::dec_consumers(who);
}
} else {
let bounded_locks: BoundedVec<BalanceLock<T::Balance>, T::MaxLocks> =
locks.to_vec().try_into().map_err(|_| Error::<T>::MaxLocksExceeded)?;
<Locks<T>>::insert(who, currency_id, bounded_locks);
if !existed {
// increase account ref count when initialize lock
if frame_system::Pallet::<T>::inc_consumers(who).is_err() {
// No providers for the locks. This is impossible under normal circumstances
// since the funds that are under the lock will themselves be stored in the
// account and therefore will need a reference.
log::warn!(
"Warning: Attempt to introduce lock consumer reference, yet no providers. \
This is unexpected but should be safe."
);
}
}
}
Ok(())
}
/// Transfer some free balance from `from` to `to`.
/// Is a no-op if value to be transferred is zero or the `from` is the
/// same as `to`.
pub(crate) fn do_transfer(
currency_id: T::CurrencyId,
from: &T::AccountId,
to: &T::AccountId,
amount: T::Balance,
existence_requirement: ExistenceRequirement,
) -> DispatchResult {
if amount.is_zero() || from == to {
return Ok(());
}
Pallet::<T>::try_mutate_account(to, currency_id, |to_account, _is_new| -> DispatchResult {
Pallet::<T>::try_mutate_account(from, currency_id, |from_account, _is_new| -> DispatchResult {
from_account.free = from_account
.free
.checked_sub(&amount)
.ok_or(Error::<T>::BalanceTooLow)?;
to_account.free = to_account.free.checked_add(&amount).ok_or(ArithmeticError::Overflow)?;
Self::ensure_can_withdraw(currency_id, from, amount)?;
let ed = T::ExistentialDeposits::get(¤cy_id);
let allow_death = existence_requirement == ExistenceRequirement::AllowDeath;
let allow_death = allow_death && !frame_system::Pallet::<T>::is_provider_required(from);
ensure!(allow_death || from_account.total() >= ed, Error::<T>::KeepAlive);
Ok(())
})?;
Ok(())
})
}
}
impl<T: Config> MultiCurrency<T::AccountId> for Pallet<T> {
	type CurrencyId = T::CurrencyId;
	type Balance = T::Balance;
	/// The existential deposit of `currency_id`.
	fn minimum_balance(currency_id: Self::CurrencyId) -> Self::Balance {
		T::ExistentialDeposits::get(&currency_id)
	}
	fn total_issuance(currency_id: Self::CurrencyId) -> Self::Balance {
		<TotalIssuance<T>>::get(currency_id)
	}
	/// Free + reserved balance of `who`.
	fn total_balance(currency_id: Self::CurrencyId, who: &T::AccountId) -> Self::Balance {
		Self::accounts(who, currency_id).total()
	}
	fn free_balance(currency_id: Self::CurrencyId, who: &T::AccountId) -> Self::Balance {
		Self::accounts(who, currency_id).free
	}
	// Ensure that an account can withdraw from their free balance given any
	// existing withdrawal restrictions like locks and vesting balance.
	// Is a no-op if amount to be withdrawn is zero.
	fn ensure_can_withdraw(currency_id: Self::CurrencyId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		let new_balance = Self::free_balance(currency_id, who)
			.checked_sub(&amount)
			.ok_or(Error::<T>::BalanceTooLow)?;
		ensure!(
			new_balance >= Self::accounts(who, currency_id).frozen(),
			Error::<T>::LiquidityRestrictions
		);
		Ok(())
	}
	/// Transfer some free balance from `from` to `to`.
	/// Is a no-op if value to be transferred is zero or the `from` is the
	/// same as `to`.
	fn transfer(
		currency_id: Self::CurrencyId,
		from: &T::AccountId,
		to: &T::AccountId,
		amount: Self::Balance,
	) -> DispatchResult {
		Self::do_transfer(currency_id, from, to, amount, ExistenceRequirement::AllowDeath)
	}
	/// Deposit some `amount` into the free balance of account `who`.
	///
	/// Is a no-op if the `amount` to be deposited is zero.
	fn deposit(currency_id: Self::CurrencyId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		TotalIssuance::<T>::try_mutate(currency_id, |total_issuance| -> DispatchResult {
			*total_issuance = total_issuance.checked_add(&amount).ok_or(ArithmeticError::Overflow)?;
			Self::set_free_balance(currency_id, who, Self::free_balance(currency_id, who) + amount);
			Ok(())
		})
	}
	/// Remove `amount` from the free balance of `who`, shrinking total
	/// issuance. Is a no-op if `amount` is zero.
	fn withdraw(currency_id: Self::CurrencyId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		Self::ensure_can_withdraw(currency_id, who, amount)?;
		// Cannot underflow because ensure_can_withdraw check
		<TotalIssuance<T>>::mutate(currency_id, |v| *v -= amount);
		Self::set_free_balance(currency_id, who, Self::free_balance(currency_id, who) - amount);
		Ok(())
	}
	// Check if `value` amount of free balance can be slashed from `who`.
	fn can_slash(currency_id: Self::CurrencyId, who: &T::AccountId, value: Self::Balance) -> bool {
		if value.is_zero() {
			return true;
		}
		Self::free_balance(currency_id, who) >= value
	}
	/// Is a no-op if `value` to be slashed is zero.
	///
	/// NOTE: `slash()` prefers free balance, but assumes that reserve
	/// balance can be drawn from in extreme circumstances. `can_slash()`
	/// should be used prior to `slash()` to avoid having to draw from
	/// reserved funds, however we err on the side of punishment if things
	/// are inconsistent or `can_slash` wasn't used appropriately.
	fn slash(currency_id: Self::CurrencyId, who: &T::AccountId, amount: Self::Balance) -> Self::Balance {
		if amount.is_zero() {
			return amount;
		}
		let account = Self::accounts(who, currency_id);
		let free_slashed_amount = account.free.min(amount);
		// Cannot underflow because free_slashed_amount can never be greater than amount
		let mut remaining_slash = amount - free_slashed_amount;
		// slash free balance
		if !free_slashed_amount.is_zero() {
			// Cannot underflow because free_slashed_amount can never be greater than
			// account.free
			Self::set_free_balance(currency_id, who, account.free - free_slashed_amount);
		}
		// slash reserved balance
		if !remaining_slash.is_zero() {
			let reserved_slashed_amount = account.reserved.min(remaining_slash);
			// Cannot underflow due to above line
			remaining_slash -= reserved_slashed_amount;
			Self::set_reserved_balance(currency_id, who, account.reserved - reserved_slashed_amount);
		}
		// Cannot underflow because the slashed value cannot be greater than total
		// issuance
		<TotalIssuance<T>>::mutate(currency_id, |v| *v -= amount - remaining_slash);
		remaining_slash
	}
}
impl<T: Config> MultiCurrencyExtended<T::AccountId> for Pallet<T> {
type Amount = T::Amount;
fn update_balance(currency_id: Self::CurrencyId, who: &T::AccountId, by_amount: Self::Amount) -> DispatchResult {
if by_amount.is_zero() {
return Ok(());
}
// Ensure this doesn't overflow. There isn't any traits that exposes
// `saturating_abs` so we need to do it manually.
let by_amount_abs = if by_amount == Self::Amount::min_value() {
Self::Amount::max_value()
} else {
by_amount.abs()
};
let by_balance =
TryInto::<Self::Balance>::try_into(by_amount_abs).map_err(|_| Error::<T>::AmountIntoBalanceFailed)?;
if by_amount.is_positive() {
Self::deposit(currency_id, who, by_balance)
} else {
Self::withdraw(currency_id, who, by_balance).map(|_| ())
}
}
}
impl<T: Config> MultiLockableCurrency<T::AccountId> for Pallet<T> {
	type Moment = T::BlockNumber;
	// Set a lock on the balance of `who` under `currency_id`.
	// Is a no-op if lock amount is zero.
	// If a lock with the same `lock_id` already exists, its amount is replaced.
	fn set_lock(
		lock_id: LockIdentifier,
		currency_id: Self::CurrencyId,
		who: &T::AccountId,
		amount: Self::Balance,
	) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		let mut new_lock = Some(BalanceLock { id: lock_id, amount });
		// Replace an existing lock with the same id in place: `new_lock.take()`
		// yields `Some` only for the first matching id, so the new lock keeps
		// the original position and any (unexpected) duplicates collapse.
		let mut locks = Self::locks(who, currency_id)
			.into_iter()
			.filter_map(|lock| {
				if lock.id == lock_id {
					new_lock.take()
				} else {
					Some(lock)
				}
			})
			.collect::<Vec<_>>();
		// No existing lock had this id — append it as a new lock.
		if let Some(lock) = new_lock {
			locks.push(lock)
		}
		Self::update_locks(currency_id, who, &locks[..])
	}
	// Extend a lock on the balance of `who` under `currency_id`.
	// Is a no-op if lock amount is zero
	// An existing lock with the same id is raised to max(existing, amount);
	// if none exists, a new lock is created with `amount`.
	fn extend_lock(
		lock_id: LockIdentifier,
		currency_id: Self::CurrencyId,
		who: &T::AccountId,
		amount: Self::Balance,
	) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		let mut new_lock = Some(BalanceLock { id: lock_id, amount });
		let mut locks = Self::locks(who, currency_id)
			.into_iter()
			.filter_map(|lock| {
				if lock.id == lock_id {
					// Keep the larger of the existing and requested amounts.
					new_lock.take().map(|nl| BalanceLock {
						id: lock.id,
						amount: lock.amount.max(nl.amount),
					})
				} else {
					Some(lock)
				}
			})
			.collect::<Vec<_>>();
		if let Some(lock) = new_lock {
			locks.push(lock)
		}
		Self::update_locks(currency_id, who, &locks[..])
	}
	// Remove the lock identified by `lock_id`, if present, and re-derive the
	// frozen amount from the remaining locks.
	fn remove_lock(lock_id: LockIdentifier, currency_id: Self::CurrencyId, who: &T::AccountId) -> DispatchResult {
		let mut locks = Self::locks(who, currency_id);
		locks.retain(|lock| lock.id != lock_id);
		let locks_vec = locks.to_vec();
		Self::update_locks(currency_id, who, &locks_vec[..])
	}
}
impl<T: Config> MultiReservableCurrency<T::AccountId> for Pallet<T> {
	/// Check if `who` can reserve `value` from their free balance.
	///
	/// Always `true` if value to be reserved is zero.
	fn can_reserve(currency_id: Self::CurrencyId, who: &T::AccountId, value: Self::Balance) -> bool {
		if value.is_zero() {
			return true;
		}
		Self::ensure_can_withdraw(currency_id, who, value).is_ok()
	}
	/// Slash from reserved balance, returning any amount that was unable to
	/// be slashed.
	///
	/// Is a no-op if the value to be slashed is zero.
	fn slash_reserved(currency_id: Self::CurrencyId, who: &T::AccountId, value: Self::Balance) -> Self::Balance {
		if value.is_zero() {
			return value;
		}
		let reserved_balance = Self::reserved_balance(currency_id, who);
		let actual = reserved_balance.min(value);
		Self::set_reserved_balance(currency_id, who, reserved_balance - actual);
		// Slashed funds leave circulation, so total issuance shrinks too.
		<TotalIssuance<T>>::mutate(currency_id, |v| *v -= actual);
		value - actual
	}
	/// The reserved balance of `who` under `currency_id`.
	fn reserved_balance(currency_id: Self::CurrencyId, who: &T::AccountId) -> Self::Balance {
		Self::accounts(who, currency_id).reserved
	}
	/// Move `value` from the free balance from `who` to their reserved
	/// balance.
	///
	/// Is a no-op if value to be reserved is zero.
	fn reserve(currency_id: Self::CurrencyId, who: &T::AccountId, value: Self::Balance) -> DispatchResult {
		if value.is_zero() {
			return Ok(());
		}
		Self::ensure_can_withdraw(currency_id, who, value)?;
		let account = Self::accounts(who, currency_id);
		Self::set_free_balance(currency_id, who, account.free - value);
		// Cannot overflow because total issuance is using the same balance type and
		// this doesn't increase total issuance
		Self::set_reserved_balance(currency_id, who, account.reserved + value);
		Self::deposit_event(Event::Reserved(currency_id, who.clone(), value));
		Ok(())
	}
	/// Unreserve some funds, returning any amount that was unable to be
	/// unreserved.
	///
	/// Is a no-op if the value to be unreserved is zero.
	fn unreserve(currency_id: Self::CurrencyId, who: &T::AccountId, value: Self::Balance) -> Self::Balance {
		if value.is_zero() {
			return value;
		}
		let account = Self::accounts(who, currency_id);
		let actual = account.reserved.min(value);
		Self::set_reserved_balance(currency_id, who, account.reserved - actual);
		Self::set_free_balance(currency_id, who, account.free + actual);
		Self::deposit_event(Event::Unreserved(currency_id, who.clone(), actual));
		value - actual
	}
	/// Move the reserved balance of one account into the balance of
	/// another, according to `status`.
	///
	/// Is a no-op if:
	/// - the value to be moved is zero; or
	/// - the `slashed` id equal to `beneficiary` and the `status` is
	/// `Reserved`.
	fn repatriate_reserved(
		currency_id: Self::CurrencyId,
		slashed: &T::AccountId,
		beneficiary: &T::AccountId,
		value: Self::Balance,
		status: BalanceStatus,
	) -> sp_std::result::Result<Self::Balance, DispatchError> {
		if value.is_zero() {
			return Ok(value);
		}
		if slashed == beneficiary {
			return match status {
				// Moving to free on oneself is just an unreserve.
				BalanceStatus::Free => Ok(Self::unreserve(currency_id, slashed, value)),
				// Reserved-to-reserved on oneself changes nothing; only report
				// the shortfall (part of `value` not actually reserved).
				BalanceStatus::Reserved => Ok(value.saturating_sub(Self::reserved_balance(currency_id, slashed))),
			};
		}
		let from_account = Self::accounts(slashed, currency_id);
		let to_account = Self::accounts(beneficiary, currency_id);
		// Move at most what `slashed` actually has reserved.
		let actual = from_account.reserved.min(value);
		match status {
			BalanceStatus::Free => {
				Self::set_free_balance(currency_id, beneficiary, to_account.free + actual);
			}
			BalanceStatus::Reserved => {
				Self::set_reserved_balance(currency_id, beneficiary, to_account.reserved + actual);
			}
		}
		Self::set_reserved_balance(currency_id, slashed, from_account.reserved - actual);
		Ok(value - actual)
	}
}
impl<T: Config> fungibles::Inspect<T::AccountId> for Pallet<T> {
	type AssetId = T::CurrencyId;
	type Balance = T::Balance;
	/// Total issuance of `asset_id`.
	fn total_issuance(asset_id: Self::AssetId) -> Self::Balance {
		Pallet::<T>::total_issuance(asset_id)
	}
	/// The existential deposit of `asset_id`.
	fn minimum_balance(asset_id: Self::AssetId) -> Self::Balance {
		<Self as MultiCurrency<_>>::minimum_balance(asset_id)
	}
	/// Total (free + reserved) balance of `who` for `asset_id`.
	fn balance(asset_id: Self::AssetId, who: &T::AccountId) -> Self::Balance {
		Pallet::<T>::total_balance(asset_id, who)
	}
	/// The balance `who` could actually withdraw, accounting for frozen
	/// funds and — unless the account may die — the existential deposit.
	fn reducible_balance(asset_id: Self::AssetId, who: &T::AccountId, keep_alive: bool) -> Self::Balance {
		let a = Pallet::<T>::accounts(who, asset_id);
		// Liquid balance is what is neither reserved nor locked/frozen.
		let liquid = a.free.saturating_sub(a.frozen);
		if frame_system::Pallet::<T>::can_dec_provider(who) && !keep_alive {
			liquid
		} else {
			// `must_remain_to_exist` is the part of liquid balance which must remain to
			// keep total over ED.
			let must_remain_to_exist = T::ExistentialDeposits::get(&asset_id).saturating_sub(a.total() - liquid);
			liquid.saturating_sub(must_remain_to_exist)
		}
	}
	/// Dry-run a deposit of `amount` into `who`; no state is changed.
	fn can_deposit(asset_id: Self::AssetId, who: &T::AccountId, amount: Self::Balance) -> DepositConsequence {
		Pallet::<T>::deposit_consequence(who, asset_id, amount, &Pallet::<T>::accounts(who, asset_id))
	}
	/// Dry-run a withdrawal of `amount` from `who`; no state is changed.
	fn can_withdraw(
		asset_id: Self::AssetId,
		who: &T::AccountId,
		amount: Self::Balance,
	) -> WithdrawConsequence<Self::Balance> {
		Pallet::<T>::withdraw_consequence(who, asset_id, amount, &Pallet::<T>::accounts(who, asset_id))
	}
}
impl<T: Config> fungibles::Mutate<T::AccountId> for Pallet<T> {
	/// Increase the free balance of `who` by `amount`, growing total
	/// issuance accordingly. No-op for a zero amount.
	fn mint_into(asset_id: Self::AssetId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		Pallet::<T>::try_mutate_account(who, asset_id, |account, _is_new| -> DispatchResult {
			Pallet::<T>::deposit_consequence(who, asset_id, amount, &account).into_result()?;
			// deposit_consequence already did overflow checking
			account.free += amount;
			Ok(())
		})?;
		// deposit_consequence already did overflow checking
		<TotalIssuance<T>>::mutate(asset_id, |t| *t += amount);
		Ok(())
	}
	/// Decrease the free balance of `who`, shrinking total issuance.
	/// Returns the amount actually burned — `amount` plus any `extra`
	/// reported by `withdraw_consequence` (the remainder swept when the
	/// account is reduced to zero). No-op for a zero amount.
	fn burn_from(
		asset_id: Self::AssetId,
		who: &T::AccountId,
		amount: Self::Balance,
	) -> Result<Self::Balance, DispatchError> {
		if amount.is_zero() {
			return Ok(Self::Balance::zero());
		}
		let actual =
			Pallet::<T>::try_mutate_account(who, asset_id, |account, _is_new| -> Result<T::Balance, DispatchError> {
				let extra = Pallet::<T>::withdraw_consequence(who, asset_id, amount, &account).into_result()?;
				// withdraw_consequence already did underflow checking
				let actual = amount + extra;
				account.free -= actual;
				Ok(actual)
			})?;
		// withdraw_consequence already did underflow checking
		<TotalIssuance<T>>::mutate(asset_id, |t| *t -= actual);
		Ok(actual)
	}
}
impl<T: Config> fungibles::Transfer<T::AccountId> for Pallet<T> {
	/// Transfer `amount` of `asset_id` from `source` to `dest`, mapping the
	/// `keep_alive` flag onto the corresponding `ExistenceRequirement`.
	fn transfer(
		asset_id: Self::AssetId,
		source: &T::AccountId,
		dest: &T::AccountId,
		amount: T::Balance,
		keep_alive: bool,
	) -> Result<T::Balance, DispatchError> {
		let requirement = match keep_alive {
			true => ExistenceRequirement::KeepAlive,
			false => ExistenceRequirement::AllowDeath,
		};
		Self::do_transfer(asset_id, source, dest, amount, requirement)?;
		Ok(amount)
	}
}
impl<T: Config> fungibles::Unbalanced<T::AccountId> for Pallet<T> {
fn set_balance(asset_id: Self::AssetId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
// Balance is the same type and will not overflow
Pallet::<T>::mutate_account(who, asset_id, |account, _| account.free = amount);
Ok(())
}
fn set_total_issuance(asset_id: Self::AssetId, amount: Self::Balance) {
// Balance is the same type and will not overflow
<TotalIssuance<T>>::mutate(asset_id, |t| *t = amount);
}
}
impl<T: Config> fungibles::InspectHold<T::AccountId> for Pallet<T> {
	/// The reserved (held) balance of `who` for `asset_id`.
	fn balance_on_hold(asset_id: Self::AssetId, who: &T::AccountId) -> T::Balance {
		Pallet::<T>::accounts(who, asset_id).reserved
	}
	/// Whether `amount` of free balance can be placed on hold without
	/// overflowing the reserve or dropping free below the ED/frozen floor.
	fn can_hold(asset_id: Self::AssetId, who: &T::AccountId, amount: T::Balance) -> bool {
		let account = Pallet::<T>::accounts(who, asset_id);
		let min_balance = T::ExistentialDeposits::get(&asset_id).max(account.frozen);
		// The hold must not overflow the reserved balance.
		if account.reserved.checked_add(&amount).is_none() {
			return false;
		}
		// We require it to be min_balance + amount to ensure that the full reserved
		// funds may be slashed without compromising locked funds or destroying the
		// account.
		min_balance
			.checked_add(&amount)
			.map_or(false, |required_free| account.free >= required_free)
	}
}
impl<T: Config> fungibles::MutateHold<T::AccountId> for Pallet<T> {
	/// Move `amount` from the free balance of `who` into hold (reserved).
	/// No-op for a zero amount; errors with `BalanceTooLow` if the funds
	/// cannot be reserved.
	fn hold(asset_id: Self::AssetId, who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
		if amount.is_zero() {
			return Ok(());
		}
		ensure!(
			Pallet::<T>::can_reserve(asset_id, who, amount),
			Error::<T>::BalanceTooLow
		);
		Pallet::<T>::mutate_account(who, asset_id, |a, _| {
			// `can_reserve` already did the underflow checking
			a.free -= amount;
			// Cannot overflow as `amount` is from `a.free`
			a.reserved += amount;
		});
		Ok(())
	}
	/// Release up to `amount` of held balance back into free balance,
	/// returning the amount actually released. Errors with `BalanceTooLow`
	/// when `best_effort` is false and less than `amount` is on hold.
	fn release(
		asset_id: Self::AssetId,
		who: &T::AccountId,
		amount: Self::Balance,
		best_effort: bool,
	) -> Result<T::Balance, DispatchError> {
		if amount.is_zero() {
			return Ok(amount);
		}
		// Done on a best-effort basis.
		Pallet::<T>::try_mutate_account(who, asset_id, |a, _| {
			// Release at most what is actually reserved.
			let new_free = a.free.saturating_add(amount.min(a.reserved));
			let actual = new_free - a.free;
			// Guaranteed to be <= amount and <= a.reserved
			ensure!(best_effort || actual == amount, Error::<T>::BalanceTooLow);
			a.free = new_free;
			a.reserved = a.reserved.saturating_sub(actual);
			Ok(actual)
		})
	}
	/// Move held balance from `source` to `dest`; `on_hold` chooses whether
	/// it arrives in `dest`'s reserved or free balance. Delegates to
	/// `repatriate_reserved`.
	fn transfer_held(
		asset_id: Self::AssetId,
		source: &T::AccountId,
		dest: &T::AccountId,
		amount: Self::Balance,
		_best_effort: bool,
		on_hold: bool,
	) -> Result<Self::Balance, DispatchError> {
		let status = if on_hold { Status::Reserved } else { Status::Free };
		Pallet::<T>::repatriate_reserved(asset_id, source, dest, amount, status)
	}
}
pub struct CurrencyAdapter<T, GetCurrencyId>(marker::PhantomData<(T, GetCurrencyId)>);
impl<T, GetCurrencyId> PalletCurrency<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
T: Config,
GetCurrencyId: Get<T::CurrencyId>,
{
type Balance = T::Balance;
type PositiveImbalance = PositiveImbalance<T, GetCurrencyId>;
type NegativeImbalance = NegativeImbalance<T, GetCurrencyId>;
	/// Total (free + reserved) balance of `who` in the fixed currency.
	fn total_balance(who: &T::AccountId) -> Self::Balance {
		Pallet::<T>::total_balance(GetCurrencyId::get(), who)
	}
	/// Whether `value` can be slashed from `who`'s free balance.
	fn can_slash(who: &T::AccountId, value: Self::Balance) -> bool {
		Pallet::<T>::can_slash(GetCurrencyId::get(), who, value)
	}
	/// Total issuance of the fixed currency.
	fn total_issuance() -> Self::Balance {
		Pallet::<T>::total_issuance(GetCurrencyId::get())
	}
	/// The existential deposit of the fixed currency.
	fn minimum_balance() -> Self::Balance {
		Pallet::<T>::minimum_balance(GetCurrencyId::get())
	}
fn burn(mut amount: Self::Balance) -> Self::PositiveImbalance {
if amount.is_zero() {
return PositiveImbalance::zero();
}
<TotalIssuance<T>>::mutate(GetCurrencyId::get(), |issued| {
*issued = issued.checked_sub(&amount).unwrap_or_else(|| {
amount = *issued;
Zero::zero()
});
});
PositiveImbalance::new(amount)
}
fn issue(mut amount: Self::Balance) -> Self::NegativeImbalance {
if amount.is_zero() {
return NegativeImbalance::zero();
}
<TotalIssuance<T>>::mutate(GetCurrencyId::get(), |issued| {
*issued = issued.checked_add(&amount).unwrap_or_else(|| {
amount = Self::Balance::max_value() - *issued;
Self::Balance::max_value()
})
});
NegativeImbalance::new(amount)
}
fn free_balance(who: &T::AccountId) -> Self::Balance {
Pallet::<T>::free_balance(GetCurrencyId::get(), who)
}
fn ensure_can_withdraw(
who: &T::AccountId,
amount: Self::Balance,
_reasons: WithdrawReasons,
_new_balance: Self::Balance,
) -> DispatchResult {
Pallet::<T>::ensure_can_withdraw(GetCurrencyId::get(), who, amount)
}
fn transfer(
source: &T::AccountId,
dest: &T::AccountId,
value: Self::Balance,
existence_requirement: ExistenceRequirement,
) -> DispatchResult {
Pallet::<T>::do_transfer(GetCurrencyId::get(), &source, &dest, value, existence_requirement)
}
fn slash(who: &T::AccountId, value: Self::Balance) -> (Self::NegativeImbalance, Self::Balance) {
if value.is_zero() {
return (Self::NegativeImbalance::zero(), value);
}
let currency_id = GetCurrencyId::get();
let account = Pallet::<T>::accounts(who, currency_id);
let free_slashed_amount = account.free.min(value);
let mut remaining_slash = value - free_slashed_amount;
// slash free balance
if !free_slashed_amount.is_zero() {
Pallet::<T>::set_free_balance(currency_id, who, account.free - free_slashed_amount);
}
// slash reserved balance
if !remaining_slash.is_zero() {
let reserved_slashed_amount = account.reserved.min(remaining_slash);
remaining_slash -= reserved_slashed_amount;
Pallet::<T>::set_reserved_balance(currency_id, who, account.reserved - reserved_slashed_amount);
(
Self::NegativeImbalance::new(free_slashed_amount + reserved_slashed_amount),
remaining_slash,
)
} else {
(Self::NegativeImbalance::new(value), remaining_slash)
}
}
fn deposit_into_existing(
who: &T::AccountId,
value: Self::Balance,
) -> sp_std::result::Result<Self::PositiveImbalance, DispatchError> {
if value.is_zero() {
return Ok(Self::PositiveImbalance::zero());
}
let currency_id = GetCurrencyId::get();
let new_total = Pallet::<T>::free_balance(currency_id, who)
.checked_add(&value)
.ok_or(ArithmeticError::Overflow)?;
Pallet::<T>::set_free_balance(currency_id, who, new_total);
Ok(Self::PositiveImbalance::new(value))
}
fn deposit_creating(who: &T::AccountId, value: Self::Balance) -> Self::PositiveImbalance {
Self::deposit_into_existing(who, value).unwrap_or_else(|_| Self::PositiveImbalance::zero())
}
fn withdraw(
who: &T::AccountId,
value: Self::Balance,
_reasons: WithdrawReasons,
liveness: ExistenceRequirement,
) -> sp_std::result::Result<Self::NegativeImbalance, DispatchError> {
if value.is_zero() {
return Ok(Self::NegativeImbalance::zero());
}
let currency_id = GetCurrencyId::get();
Pallet::<T>::try_mutate_account(who, currency_id, |account, _is_new| -> DispatchResult {
account.free = account.free.checked_sub(&value).ok_or(Error::<T>::BalanceTooLow)?;
Pallet::<T>::ensure_can_withdraw(currency_id, who, value)?;
let ed = T::ExistentialDeposits::get(¤cy_id);
let allow_death = liveness == ExistenceRequirement::AllowDeath;
let allow_death = allow_death && !frame_system::Pallet::<T>::is_provider_required(who);
ensure!(allow_death || account.total() >= ed, Error::<T>::KeepAlive);
Ok(())
})?;
Ok(Self::NegativeImbalance::new(value))
}
fn make_free_balance_be(
who: &T::AccountId,
value: Self::Balance,
) -> SignedImbalance<Self::Balance, Self::PositiveImbalance> {
let currency_id = GetCurrencyId::get();
Pallet::<T>::try_mutate_account(
who,
currency_id,
|account, existed| -> Result<SignedImbalance<Self::Balance, Self::PositiveImbalance>, ()> {
// If we're attempting to set an existing account to less than ED, then
// bypass the entire operation. It's a no-op if you follow it through, but
// since this is an instance where we might account for a negative imbalance
// (in the dust cleaner of set_account) before we account for its actual
// equal and opposite cause (returned as an Imbalance), then in the
// instance that there's no other accounts on the system at all, we might
// underflow the issuance and our arithmetic will be off.
let ed = T::ExistentialDeposits::get(¤cy_id);
ensure!(value.saturating_add(account.reserved) >= ed || existed, ());
let imbalance = if account.free <= value {
SignedImbalance::Positive(PositiveImbalance::new(value - account.free))
} else {
SignedImbalance::Negative(NegativeImbalance::new(account.free - value))
};
account.free = value;
Ok(imbalance)
},
)
.unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero()))
}
}
/// Reservable-currency adapter: forwards every operation to the pallet for
/// the currency selected by `GetCurrencyId`.
impl<T, GetCurrencyId> PalletReservableCurrency<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    fn can_reserve(who: &T::AccountId, value: Self::Balance) -> bool {
        Pallet::<T>::can_reserve(GetCurrencyId::get(), who, value)
    }
    // NOTE(review): the imbalance half of the tuple is always zero here; the
    // pallet's `slash_reserved` return value is passed through as the second
    // element. Confirm callers expect this shape (they cannot account for the
    // slashed funds via the imbalance).
    fn slash_reserved(who: &T::AccountId, value: Self::Balance) -> (Self::NegativeImbalance, Self::Balance) {
        let actual = Pallet::<T>::slash_reserved(GetCurrencyId::get(), who, value);
        (Self::NegativeImbalance::zero(), actual)
    }
    fn reserved_balance(who: &T::AccountId) -> Self::Balance {
        Pallet::<T>::reserved_balance(GetCurrencyId::get(), who)
    }
    fn reserve(who: &T::AccountId, value: Self::Balance) -> DispatchResult {
        Pallet::<T>::reserve(GetCurrencyId::get(), who, value)
    }
    fn unreserve(who: &T::AccountId, value: Self::Balance) -> Self::Balance {
        Pallet::<T>::unreserve(GetCurrencyId::get(), who, value)
    }
    /// Moves reserved funds from `slashed` to `beneficiary`; `status`
    /// determines whether they land free or reserved on the beneficiary.
    fn repatriate_reserved(
        slashed: &T::AccountId,
        beneficiary: &T::AccountId,
        value: Self::Balance,
        status: Status,
    ) -> sp_std::result::Result<Self::Balance, DispatchError> {
        Pallet::<T>::repatriate_reserved(GetCurrencyId::get(), slashed, beneficiary, value, status)
    }
}
/// Lockable-currency adapter for the currency selected by `GetCurrencyId`.
impl<T, GetCurrencyId> PalletLockableCurrency<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    type Moment = T::BlockNumber;
    type MaxLocks = ();
    // The pallet's lock operations are fallible, but this trait's methods
    // return `()`; errors are deliberately discarded with `let _ =`.
    fn set_lock(id: LockIdentifier, who: &T::AccountId, amount: Self::Balance, _reasons: WithdrawReasons) {
        let _ = Pallet::<T>::set_lock(id, GetCurrencyId::get(), who, amount);
    }
    fn extend_lock(id: LockIdentifier, who: &T::AccountId, amount: Self::Balance, _reasons: WithdrawReasons) {
        let _ = Pallet::<T>::extend_lock(id, GetCurrencyId::get(), who, amount);
    }
    fn remove_lock(id: LockIdentifier, who: &T::AccountId) {
        let _ = Pallet::<T>::remove_lock(id, GetCurrencyId::get(), who);
    }
}
impl<T: Config> TransferAll<T::AccountId> for Pallet<T> {
    /// Transfers the free balance of every currency held by `source` over to
    /// `dest`. Runs transactionally: if any single transfer fails, all
    /// transfers already performed in this call are rolled back.
    #[transactional]
    fn transfer_all(source: &T::AccountId, dest: &T::AccountId) -> DispatchResult {
        for (currency_id, account_data) in Accounts::<T>::iter_prefix(source) {
            <Self as MultiCurrency<T::AccountId>>::transfer(currency_id, source, dest, account_data.free)?;
        }
        Ok(())
    }
}
/// `fungible::Inspect` adapter: every query is answered by the pallet's
/// `fungibles::Inspect` implementation for the configured currency.
impl<T, GetCurrencyId> fungible::Inspect<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    type Balance = T::Balance;
    /// Total issuance of the adapted currency.
    fn total_issuance() -> Self::Balance {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::total_issuance(currency_id)
    }
    /// Minimum balance of the adapted currency.
    fn minimum_balance() -> Self::Balance {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::minimum_balance(currency_id)
    }
    /// Balance of `who` in the adapted currency.
    fn balance(who: &T::AccountId) -> Self::Balance {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::balance(currency_id, who)
    }
    /// Portion of `who`'s balance that may actually be withdrawn.
    fn reducible_balance(who: &T::AccountId, keep_alive: bool) -> Self::Balance {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::reducible_balance(currency_id, who, keep_alive)
    }
    /// Consequence of depositing `amount` into `who`'s account.
    fn can_deposit(who: &T::AccountId, amount: Self::Balance) -> DepositConsequence {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::can_deposit(currency_id, who, amount)
    }
    /// Consequence of withdrawing `amount` from `who`'s account.
    fn can_withdraw(who: &T::AccountId, amount: Self::Balance) -> WithdrawConsequence<Self::Balance> {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Inspect<_>>::can_withdraw(currency_id, who, amount)
    }
}
/// `fungible::Mutate` adapter delegating to the pallet's `fungibles::Mutate`.
impl<T, GetCurrencyId> fungible::Mutate<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    /// Mints `amount` into `who`'s account in the adapted currency.
    fn mint_into(who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Mutate<_>>::mint_into(currency_id, who, amount)
    }
    /// Burns `amount` from `who`'s account, returning the amount burned.
    fn burn_from(who: &T::AccountId, amount: Self::Balance) -> Result<Self::Balance, DispatchError> {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Mutate<_>>::burn_from(currency_id, who, amount)
    }
}
/// `fungible::Transfer` adapter delegating to `fungibles::Transfer`.
impl<T, GetCurrencyId> fungible::Transfer<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    /// Transfers `amount` from `source` to `dest`; `keep_alive` prevents the
    /// source account from being reaped by the transfer.
    fn transfer(
        source: &T::AccountId,
        dest: &T::AccountId,
        amount: T::Balance,
        keep_alive: bool,
    ) -> Result<T::Balance, DispatchError> {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Transfer<_>>::transfer(currency_id, source, dest, amount, keep_alive)
    }
}
/// `fungible::Unbalanced` adapter delegating to `fungibles::Unbalanced`.
impl<T, GetCurrencyId> fungible::Unbalanced<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    /// Directly sets `who`'s balance without creating an imbalance.
    fn set_balance(who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Unbalanced<_>>::set_balance(currency_id, who, amount)
    }
    /// Directly sets the currency's total issuance.
    fn set_total_issuance(amount: Self::Balance) {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::Unbalanced<_>>::set_total_issuance(currency_id, amount)
    }
}
/// `fungible::InspectHold` adapter delegating to `fungibles::InspectHold`.
impl<T, GetCurrencyId> fungible::InspectHold<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    /// Amount of `who`'s balance currently on hold.
    fn balance_on_hold(who: &T::AccountId) -> T::Balance {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::InspectHold<_>>::balance_on_hold(currency_id, who)
    }
    /// Whether an additional `amount` can be placed on hold for `who`.
    fn can_hold(who: &T::AccountId, amount: T::Balance) -> bool {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::InspectHold<_>>::can_hold(currency_id, who, amount)
    }
}
/// `fungible::MutateHold` adapter delegating to `fungibles::MutateHold`.
impl<T, GetCurrencyId> fungible::MutateHold<T::AccountId> for CurrencyAdapter<T, GetCurrencyId>
where
    T: Config,
    GetCurrencyId: Get<T::CurrencyId>,
{
    /// Places `amount` of `who`'s free balance on hold.
    fn hold(who: &T::AccountId, amount: Self::Balance) -> DispatchResult {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::MutateHold<_>>::hold(currency_id, who, amount)
    }
    /// Releases up to `amount` of held balance; see `fungibles::MutateHold`
    /// for `best_effort` semantics.
    fn release(who: &T::AccountId, amount: Self::Balance, best_effort: bool) -> Result<T::Balance, DispatchError> {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::MutateHold<_>>::release(currency_id, who, amount, best_effort)
    }
    /// Moves held balance from `source` to `dest`; `on_hold` controls whether
    /// it remains held on the destination.
    fn transfer_held(
        source: &T::AccountId,
        dest: &T::AccountId,
        amount: Self::Balance,
        best_effort: bool,
        on_hold: bool,
    ) -> Result<Self::Balance, DispatchError> {
        let currency_id = GetCurrencyId::get();
        <Pallet<T> as fungibles::MutateHold<_>>::transfer_held(
            currency_id,
            source,
            dest,
            amount,
            best_effort,
            on_hold,
        )
    }
}
| 32.956581 | 117 | 0.69375 |
b9a1a376737bc2c780dc761c2569767e5ce9b2b8 | 10,009 | /*
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ------------------------------------------------------------------------------
*/
use std::cell::RefCell;
use std::collections::HashMap;
use std::iter::repeat;
use std::num::ParseIntError;
use hashlib::sha256_digest_str;
use protobuf;
use state::StateDatabaseError;
use state::StateReader;
use proto::setting::Setting;
/// Merkle-trie namespace prefix (first 6 hex chars) under which settings live.
const CONFIG_STATE_NAMESPACE: &str = "000000";
/// Setting keys are split on '.' into at most this many parts when hashed.
const MAX_KEY_PARTS: usize = 4;
/// Number of hex characters taken from each part's SHA-256 digest.
const ADDRESS_PART_SIZE: usize = 16;
/// Errors raised while reading and parsing settings from state.
#[derive(Debug)]
pub enum SettingsViewError {
    /// The underlying state database failed.
    StateDatabaseError(StateDatabaseError),
    /// The stored setting bytes could not be decoded as protobuf.
    EncodingError(protobuf::ProtobufError),
    /// A value failed application-level parsing.
    ParseError(String),
    /// A value could not be parsed as an integer.
    ParseIntError(ParseIntError),
    /// Catch-all for otherwise unclassified failures.
    UnknownError,
}
// `From` conversions so `?` can lift underlying errors into
// `SettingsViewError` without explicit mapping at call sites.
impl From<StateDatabaseError> for SettingsViewError {
    fn from(err: StateDatabaseError) -> Self {
        SettingsViewError::StateDatabaseError(err)
    }
}
impl From<protobuf::ProtobufError> for SettingsViewError {
    fn from(err: protobuf::ProtobufError) -> Self {
        SettingsViewError::EncodingError(err)
    }
}
impl From<ParseIntError> for SettingsViewError {
    fn from(err: ParseIntError) -> Self {
        SettingsViewError::ParseIntError(err)
    }
}
/// Read-through view of on-chain settings with per-key memoization.
pub struct SettingsView {
    state_reader: Box<dyn StateReader>,
    // Memoizes lookups; a cached `None` records that a key is known absent.
    cache: RefCell<HashMap<String, Option<String>>>,
}
// Given that this is not threadsafe, but can be members of objects that can
// be moved between threads (themselves guarded by mutexes/rwlocks), we can
// safely implement sync.
// NOTE(review): `RefCell` is not `Sync`, so this is only sound if every
// shared `&SettingsView` access really is externally synchronized as the
// comment above claims — confirm all users uphold this invariant.
unsafe impl Sync for SettingsView {}
impl SettingsView {
    /// Creates a new SettingsView with a given StateReader
    pub fn new(state_reader: Box<dyn StateReader>) -> Self {
        SettingsView {
            state_reader,
            cache: RefCell::new(HashMap::new()),
        }
    }
    /// Returns the raw string value of setting `key`, or `default_value`
    /// when the setting is absent from state.
    pub fn get_setting_str(
        &self,
        key: &str,
        default_value: Option<String>,
    ) -> Result<Option<String>, SettingsViewError> {
        self.get_setting(key, default_value, |s: &str| Ok(s.to_string()))
    }
    /// Returns setting `key` parsed as a `u32`, or `default_value` when the
    /// setting is absent from state.
    pub fn get_setting_u32(
        &self,
        key: &str,
        default_value: Option<u32>,
    ) -> Result<Option<u32>, SettingsViewError> {
        self.get_setting(key, default_value, |value| {
            value.parse().map_err(SettingsViewError::ParseIntError)
        })
    }
    /// Looks up `key` in state (with an in-memory cache), converting the raw
    /// string value with `value_parser`. Returns `default_value` when the
    /// setting does not exist; parse errors from `value_parser` propagate.
    pub fn get_setting<T, F>(
        &self,
        key: &str,
        default_value: Option<T>,
        value_parser: F,
    ) -> Result<Option<T>, SettingsViewError>
    where
        F: FnOnce(&str) -> Result<T, SettingsViewError>,
    {
        // Fast path: a single map lookup replaces the previous
        // `contains_key` + `get(key).unwrap()` double lookup. A cached `None`
        // means the key was previously found to be absent.
        if let Some(cached) = self.cache.borrow().get(key) {
            return match cached {
                Some(str_value) => Ok(Some(value_parser(str_value.as_str())?)),
                None => Ok(default_value),
            };
        }
        let bytes_opt = match self.state_reader.get(&setting_address(key)) {
            Ok(opt) => opt,
            Err(StateDatabaseError::NotFound(_)) => return Ok(default_value),
            Err(err) => return Err(SettingsViewError::from(err)),
        };
        let setting_opt = if let Some(bytes) = bytes_opt {
            Some(protobuf::parse_from_bytes::<Setting>(&bytes)?)
        } else {
            None
        };
        // A Setting message holds multiple entries; pick the one whose key
        // matches exactly (several keys can hash to the same address).
        let optional_str_value: Option<String> = setting_opt.and_then(|setting| {
            setting
                .get_entries()
                .iter()
                .find(|entry| entry.key == key)
                .map(|entry| entry.get_value().to_string())
        });
        // Record the result (including absence) so subsequent lookups skip
        // the state read; the borrow_mut is dropped at end of statement.
        self.cache
            .borrow_mut()
            .insert(key.to_string(), optional_str_value.clone());
        if let Some(str_value) = optional_str_value.as_ref() {
            Ok(Some(value_parser(str_value)?))
        } else {
            Ok(default_value)
        }
    }
}
impl From<Box<dyn StateReader>> for SettingsView {
    /// Convenience conversion: wraps a state reader in a fresh view.
    fn from(state_reader: Box<dyn StateReader>) -> Self {
        SettingsView::new(state_reader)
    }
}
/// Computes the 70-character state address for a setting key: the settings
/// namespace prefix followed by four 16-hex-char segments, one per
/// dot-separated key part (absent parts hash the empty string).
fn setting_address(key: &str) -> String {
    let hashed_parts = key
        .splitn(MAX_KEY_PARTS, '.')
        .chain(repeat(""))
        .take(MAX_KEY_PARTS)
        .map(short_hash)
        .collect::<Vec<_>>();
    format!("{}{}", CONFIG_STATE_NAMESPACE, hashed_parts.join(""))
}
/// Returns the first `ADDRESS_PART_SIZE` hex characters of the SHA-256
/// digest of `s`.
fn short_hash(s: &str) -> String {
    sha256_digest_str(s)[..ADDRESS_PART_SIZE].to_string()
}
#[cfg(test)]
mod tests {
    use super::*;
    use proto::setting::Setting;
    use proto::setting::Setting_Entry;
    use protobuf;
    use protobuf::Message;
    use state::StateDatabaseError;
    use state::StateReader;
    use std::collections::HashMap;
    // Address construction must stay byte-compatible with the reference
    // Python implementation; these vectors were generated from it.
    #[test]
    fn addresses() {
        // These addresses were generated via the python implementation
        assert_eq!(
            "000000ca978112ca1bbdca3e23e8160039594a2e7d2c03a9507ae2e3b0c44298fc1c14",
            setting_address("a.b.c")
        );
        assert_eq!(
            "000000ca978112ca1bbdca3e23e8160039594a2e7d2c03a9507ae2e67adc8234459dc2",
            setting_address("a.b.c.d.e")
        );
        assert_eq!(
            "000000a87cb5eafdcca6a8c983c585ac3c40d9b1eb2ec8ac9f31ffe3b0c44298fc1c14",
            setting_address("sawtooth.consensus.algorithm")
        );
    }
    // End-to-end checks of get_setting_str/get_setting_u32/get_setting
    // against an in-memory state reader: missing keys, defaults, plain
    // values, and custom parsers.
    #[test]
    fn basics() {
        let mock_reader = MockStateReader::new(vec![
            setting_entry("my.setting", "10"),
            setting_entry("my.setting.list", "10,11,12"),
            setting_entry("my.other.list", "13;14;15"),
        ]);
        let settings_view = SettingsView::new(Box::new(mock_reader));
        // Test not founds
        assert_eq!(
            None,
            settings_view
                .get_setting_str("some.nonexistent.setting", None)
                .unwrap()
        );
        assert_eq!(
            Some("default".to_string()),
            settings_view
                .get_setting_str("some.nonexistent.setting", Some("default".to_string()))
                .unwrap()
        );
        // return setting values
        assert_eq!(
            Some("10".to_string()),
            settings_view.get_setting_str("my.setting", None).unwrap()
        );
        assert_eq!(
            Some(10),
            settings_view.get_setting_u32("my.setting", None).unwrap()
        );
        // Test with advanced parsing
        assert_eq!(
            Some(vec![10, 11, 12]),
            settings_view
                .get_setting("my.setting.list", None, |value| value
                    .split(',')
                    .map(|s| s.parse().map_err(SettingsViewError::ParseIntError))
                    .collect::<Result<Vec<u32>, SettingsViewError>>())
                .unwrap()
        );
        assert_eq!(
            Some(vec![13, 14, 15]),
            settings_view
                .get_setting("my.other.list", None, |value| value
                    .split(';')
                    .map(|s| s.parse().map_err(SettingsViewError::ParseIntError))
                    .collect::<Result<Vec<u32>, SettingsViewError>>())
                .unwrap()
        );
        // Verify that we still return the default
        assert_eq!(
            Some(vec![]),
            settings_view
                .get_setting("some.nonexistent.setting", Some(vec![]), |value| value
                    .split(',')
                    .map(|s| s.parse().map_err(SettingsViewError::ParseIntError))
                    .collect::<Result<Vec<u32>, SettingsViewError>>())
                .unwrap()
        );
    }
    /// Builds an `(address, serialized Setting)` pair for one key/value.
    fn setting_entry(key: &str, value: &str) -> (String, Vec<u8>) {
        let mut setting = Setting::new();
        let mut setting_entry = Setting_Entry::new();
        setting_entry.set_key(key.into());
        setting_entry.set_value(value.into());
        setting.set_entries(protobuf::RepeatedField::from_vec(vec![setting_entry]));
        (
            setting_address(key),
            setting
                .write_to_bytes()
                .expect("Unable to serialize setting"),
        )
    }
    /// In-memory `StateReader` over a fixed address -> bytes map.
    struct MockStateReader {
        state: HashMap<String, Vec<u8>>,
    }
    impl MockStateReader {
        fn new(values: Vec<(String, Vec<u8>)>) -> Self {
            MockStateReader {
                state: values.into_iter().collect(),
            }
        }
    }
    impl StateReader for MockStateReader {
        // Mirrors the real reader: a missing address is a NotFound error,
        // not Ok(None).
        fn get(&self, address: &str) -> Result<Option<Vec<u8>>, StateDatabaseError> {
            match self.state.get(address).cloned() {
                Some(s) => Ok(Some(s)),
                None => Err(StateDatabaseError::NotFound(address.into())),
            }
        }
        fn contains(&self, address: &str) -> Result<bool, StateDatabaseError> {
            Ok(self.state.contains_key(address))
        }
        fn leaves(
            &self,
            prefix: Option<&str>,
        ) -> Result<
            Box<dyn Iterator<Item = Result<(String, Vec<u8>), StateDatabaseError>>>,
            StateDatabaseError,
        > {
            let iterable: Vec<_> = self
                .state
                .iter()
                .filter(|(key, _)| key.starts_with(prefix.unwrap_or("")))
                .map(|(key, value)| Ok((key.clone().to_string(), value.clone())))
                .collect();
            Ok(Box::new(iterable.into_iter()))
        }
    }
}
| 30.422492 | 89 | 0.563193 |
de5a9e3ee04d150b8a83f5f74ce729e5d5c0c899 | 4,178 | pub mod sbt;
pub mod storage;
pub mod nodegraph;
pub mod linear;
pub mod search;
use std::path::Path;
use std::rc::Rc;
use serde_derive::Deserialize;
use derive_builder::Builder;
use failure::Error;
use lazy_init::Lazy;
use crate::index::storage::{ReadData, Storage};
use crate::Signature;
/// A searchable collection of signature-like items.
pub trait Index {
    /// The type of item stored in (and returned from) the index.
    type Item;
    /// Returns references to every item matched by `search_fn` when compared
    /// against `sig` at the given `threshold`.
    fn find<F>(
        &self,
        search_fn: F,
        sig: &Self::Item,
        threshold: f64,
    ) -> Result<Vec<&Self::Item>, Error>
    where
        F: Fn(&dyn Comparable<Self::Item>, &Self::Item, f64) -> bool;
    /// Adds `node` to the index.
    fn insert(&mut self, node: &Self::Item);
    /// Persists the index to `path`.
    fn save<P: AsRef<Path>>(&self, path: P) -> Result<(), Error>;
    /// Loads an index from `path`.
    // NOTE(review): returns `Result<(), Error>` rather than `Result<Self, _>`;
    // looks like a placeholder signature — confirm intended.
    fn load<P: AsRef<Path>>(path: P) -> Result<(), Error>;
}
// TODO: split into two traits, Similarity and Containment?
/// Pairwise comparison operations against values of type `O`.
pub trait Comparable<O> {
    /// Similarity score between `self` and `other`.
    fn similarity(&self, other: &O) -> f64;
    /// Containment score between `self` and `other`.
    // NOTE(review): per the `Leaf<Signature>` impl in this module this is
    // common / |self| — confirm the intended direction at the trait level.
    fn containment(&self, other: &O) -> f64;
}
/// Blanket forwarding impl: a reference to a comparable value is itself
/// comparable, delegating to the referenced value.
impl<'a, N, L> Comparable<L> for &'a N
where
    N: Comparable<L>,
{
    fn similarity(&self, other: &L) -> f64 {
        (*self).similarity(&other)
    }
    fn containment(&self, other: &L) -> f64 {
        (*self).containment(&other)
    }
}
/// Deserialized description of a leaf node as stored in an index manifest.
#[derive(Deserialize)]
pub struct LeafInfo {
    pub filename: String,
    pub name: String,
    pub metadata: String,
}
/// A lazily-loaded leaf node of an index: naming metadata plus an optional
/// storage backend from which the payload `T` is read on first access.
#[derive(Builder, Default, Clone)]
pub struct Leaf<T>
where
    T: std::marker::Sync,
{
    pub(crate) filename: String,
    pub(crate) name: String,
    pub(crate) metadata: String,
    // Backend used to load `filename`; when `None`, data cannot be loaded.
    pub(crate) storage: Option<Rc<dyn Storage>>,
    // Cached payload, populated on first call to `data()`.
    pub(crate) data: Rc<Lazy<T>>,
}
// Manual Debug impl: prints only the naming metadata (presumably because the
// `storage`/`data` fields do not implement Debug — confirm).
impl<T> std::fmt::Debug for Leaf<T>
where
    T: std::marker::Sync,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Leaf [filename: {}, name: {}, metadata: {}]",
            self.filename, self.name, self.metadata
        )
    }
}
impl<S: Storage + ?Sized> ReadData<Signature, S> for Leaf<Signature> {
    /// Returns the signature backing this leaf, loading it from `storage` on
    /// first access and caching it in `self.data` thereafter.
    fn data(&self, storage: &S) -> Result<&Signature, Error> {
        let sig = self.data.get_or_create(|| {
            // NOTE(review): load/parse failures panic via `unwrap` here
            // instead of surfacing through the returned Result — confirm
            // whether that is acceptable for callers.
            let raw = storage.load(&self.filename).unwrap();
            let sigs: Vec<Signature> = serde_json::from_reader(&mut &raw[..]).unwrap();
            // TODO: select the right sig?
            sigs[0].to_owned()
        });
        Ok(sig)
    }
}
impl Leaf<Signature> {
    /// Number of mins shared between this leaf's first signature and
    /// `other`'s. Returns 0 when no storage backend is configured.
    pub fn count_common(&self, other: &Leaf<Signature>) -> u64 {
        if let Some(storage) = &self.storage {
            let ng: &Signature = self.data(&**storage).unwrap();
            let ong: &Signature = other.data(&**storage).unwrap();
            // TODO: select the right signatures...
            ng.signatures[0].count_common(&ong.signatures[0]).unwrap() as u64
        } else {
            0
        }
    }
    /// The mins of this leaf's first signature, or an empty vector when no
    /// storage backend is configured.
    pub fn mins(&self) -> Vec<u64> {
        if let Some(storage) = &self.storage {
            let ng: &Signature = self.data(&**storage).unwrap();
            ng.signatures[0].mins.to_vec()
        } else {
            Vec::new()
        }
    }
}
impl Comparable<Leaf<Signature>> for Leaf<Signature> {
    /// Similarity of the two leaves' first signatures; 0.0 when no storage
    /// backend is configured.
    fn similarity(&self, other: &Leaf<Signature>) -> f64 {
        if let Some(storage) = &self.storage {
            let ng: &Signature = self.data(&**storage).unwrap();
            let ong: &Signature = other.data(&**storage).unwrap();
            // TODO: select the right signatures...
            ng.signatures[0].compare(&ong.signatures[0]).unwrap()
        } else {
            // TODO: in this case storage is not set up,
            // so we should throw an error?
            0.0
        }
    }
    /// Fraction of this leaf's mins shared with `other`
    /// (common / |self|); 0.0 when no storage backend is configured.
    fn containment(&self, other: &Leaf<Signature>) -> f64 {
        if let Some(storage) = &self.storage {
            let ng: &Signature = self.data(&**storage).unwrap();
            let ong: &Signature = other.data(&**storage).unwrap();
            // TODO: select the right signatures...
            let common = ng.signatures[0].count_common(&ong.signatures[0]).unwrap();
            let size = ng.signatures[0].mins.len();
            common as f64 / size as f64
        } else {
            // TODO: in this case storage is not set up,
            // so we should throw an error?
            0.0
        }
    }
}
| 25.790123 | 87 | 0.554572 |
cc497c89b99a6e22bf50e699a139358a45dc1e0f | 936 | use std::path::{Path, PathBuf};
/// Expands to the path of `$path` inside the workspace `target/` directory
/// (relative to this crate's manifest), for test output artifacts.
#[macro_export]
macro_rules! out_ {
    ($path:expr) => {
        &[env!("CARGO_MANIFEST_DIR"), "/../target/", $path].concat()
    };
}
/// Expands to the path of `$path` inside the workspace `resources/` directory
/// (relative to this crate's manifest), for test input fixtures.
#[macro_export]
macro_rules! in_ {
    ($path:expr) => {
        &[env!("CARGO_MANIFEST_DIR"), "/../resources/", $path].concat()
    };
}
/// TODO{issue#128}: rework to provide flexibility and consistency, so all modules can use this;
pub fn setup_test_image(test_image_path: &str) -> PathBuf {
Path::new("").join(in_!(test_image_path))
}
pub fn setup_output_path(test_output_path: &str) -> PathBuf {
Path::new("").join(out_!(test_output_path))
}
/// Removes a test output file created under `target/`; panics if the file
/// cannot be removed.
pub fn clean_up_output_path(test_output_path: &str) {
    let target = setup_output_path(test_output_path);
    std::fs::remove_file(target).expect("Unable to remove file after test.");
}
/// Opens an image fixture, panicking on any decode or I/O failure.
pub fn open_test_image<P: AsRef<Path>>(path: P) -> sic_core::image::DynamicImage {
    let image_path = path.as_ref();
    sic_core::image::open(image_path).unwrap()
}
| 27.529412 | 96 | 0.655983 |
26314ba9992d6ee48e3f3dd0fdc19c85a087d305 | 747 | //! Error types
use num_derive::FromPrimitive;
use paychains_program::{decode_error::DecodeError, program_error::ProgramError};
use thiserror::Error;
/// Errors that may be returned by the program.
// Variant order matters: each variant's discriminant is surfaced as the
// `ProgramError::Custom` code (see the `From` impl below), and
// `FromPrimitive` converts codes back into variants.
#[derive(Clone, Debug, Eq, Error, FromPrimitive, PartialEq)]
pub enum RecordError {
    /// Incorrect authority provided on update or delete
    #[error("Incorrect authority provided on update or delete")]
    IncorrectAuthority,
    /// Calculation overflow
    #[error("Calculation overflow")]
    Overflow,
}
impl From<RecordError> for ProgramError {
    /// Maps a `RecordError` onto a custom program error code using the enum
    /// discriminant.
    fn from(e: RecordError) -> Self {
        ProgramError::Custom(e as u32)
    }
}
impl<T> DecodeError<T> for RecordError {
    /// Human-readable label for this error type, used when decoding custom
    /// program error codes.
    fn type_of() -> &'static str {
        "Record Error"
    }
}
| 26.678571 | 80 | 0.692102 |
79fcb6af74099982cab8d38d11b84cc628a4b75e | 17,610 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Align;
use crate::BaselinePosition;
use crate::Box;
use crate::Buildable;
use crate::Container;
use crate::FontChooser;
#[cfg(any(feature = "v3_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v3_24")))]
use crate::FontChooserLevel;
use crate::Orientable;
use crate::Orientation;
use crate::ResizeMode;
use crate::Widget;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
// gir-generated GObject wrapper: a newtype over the C `GtkFontChooserWidget`
// with its class struct, parent-type chain, and implemented interfaces;
// `type_` wires up runtime GType lookup. (File is generated — prefer changes
// in the gir configuration rather than here.)
glib::wrapper! {
    #[doc(alias = "GtkFontChooserWidget")]
    pub struct FontChooserWidget(Object<ffi::GtkFontChooserWidget, ffi::GtkFontChooserWidgetClass>) @extends Box, Container, Widget, @implements Buildable, Orientable, FontChooser;

    match fn {
        type_ => || ffi::gtk_font_chooser_widget_get_type(),
    }
}
impl FontChooserWidget {
    pub const NONE: Option<&'static FontChooserWidget> = None;
    #[doc(alias = "gtk_font_chooser_widget_new")]
    pub fn new() -> FontChooserWidget {
        // GTK must be initialized and we must be on the main thread before
        // constructing any widget.
        assert_initialized_main_thread!();
        unsafe { Widget::from_glib_none(ffi::gtk_font_chooser_widget_new()).unsafe_cast() }
    }
    // rustdoc-stripper-ignore-next
    /// Creates a new builder-pattern struct instance to construct [`FontChooserWidget`] objects.
    ///
    /// This method returns an instance of [`FontChooserWidgetBuilder`](crate::builders::FontChooserWidgetBuilder) which can be used to create [`FontChooserWidget`] objects.
    pub fn builder() -> FontChooserWidgetBuilder {
        FontChooserWidgetBuilder::default()
    }
}
impl Default for FontChooserWidget {
    // A default FontChooserWidget is simply a newly constructed one.
    fn default() -> Self {
        Self::new()
    }
}
#[derive(Clone, Default)]
// rustdoc-stripper-ignore-next
/// A [builder-pattern] type to construct [`FontChooserWidget`] objects.
///
/// [builder-pattern]: https://doc.rust-lang.org/1.0.0/style/ownership/builders.html
#[must_use = "The builder must be built to be used"]
pub struct FontChooserWidgetBuilder {
    // gir-generated: one optional field per GObject construct property;
    // `build()` forwards only the fields that were explicitly set.
    baseline_position: Option<BaselinePosition>,
    homogeneous: Option<bool>,
    spacing: Option<i32>,
    border_width: Option<u32>,
    child: Option<Widget>,
    resize_mode: Option<ResizeMode>,
    app_paintable: Option<bool>,
    can_default: Option<bool>,
    can_focus: Option<bool>,
    events: Option<gdk::EventMask>,
    expand: Option<bool>,
    focus_on_click: Option<bool>,
    halign: Option<Align>,
    has_default: Option<bool>,
    has_focus: Option<bool>,
    has_tooltip: Option<bool>,
    height_request: Option<i32>,
    hexpand: Option<bool>,
    hexpand_set: Option<bool>,
    is_focus: Option<bool>,
    margin: Option<i32>,
    margin_bottom: Option<i32>,
    margin_end: Option<i32>,
    margin_start: Option<i32>,
    margin_top: Option<i32>,
    name: Option<String>,
    no_show_all: Option<bool>,
    opacity: Option<f64>,
    parent: Option<Container>,
    receives_default: Option<bool>,
    sensitive: Option<bool>,
    tooltip_markup: Option<String>,
    tooltip_text: Option<String>,
    valign: Option<Align>,
    vexpand: Option<bool>,
    vexpand_set: Option<bool>,
    visible: Option<bool>,
    width_request: Option<i32>,
    orientation: Option<Orientation>,
    font: Option<String>,
    font_desc: Option<pango::FontDescription>,
    // These properties only exist from GTK 3.24 on (or in doc builds).
    #[cfg(any(feature = "v3_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v3_24")))]
    language: Option<String>,
    #[cfg(any(feature = "v3_24", feature = "dox"))]
    #[cfg_attr(feature = "dox", doc(cfg(feature = "v3_24")))]
    level: Option<FontChooserLevel>,
    preview_text: Option<String>,
    show_preview_entry: Option<bool>,
}
impl FontChooserWidgetBuilder {
    // Equivalent to `Default::default()`; provided for builder-pattern symmetry.
    // rustdoc-stripper-ignore-next
    /// Create a new [`FontChooserWidgetBuilder`].
    pub fn new() -> Self {
        Self::default()
    }
// rustdoc-stripper-ignore-next
/// Build the [`FontChooserWidget`].
#[must_use = "Building the object from the builder is usually expensive and is not expected to have side effects"]
pub fn build(self) -> FontChooserWidget {
let mut properties: Vec<(&str, &dyn ToValue)> = vec![];
if let Some(ref baseline_position) = self.baseline_position {
properties.push(("baseline-position", baseline_position));
}
if let Some(ref homogeneous) = self.homogeneous {
properties.push(("homogeneous", homogeneous));
}
if let Some(ref spacing) = self.spacing {
properties.push(("spacing", spacing));
}
if let Some(ref border_width) = self.border_width {
properties.push(("border-width", border_width));
}
if let Some(ref child) = self.child {
properties.push(("child", child));
}
if let Some(ref resize_mode) = self.resize_mode {
properties.push(("resize-mode", resize_mode));
}
if let Some(ref app_paintable) = self.app_paintable {
properties.push(("app-paintable", app_paintable));
}
if let Some(ref can_default) = self.can_default {
properties.push(("can-default", can_default));
}
if let Some(ref can_focus) = self.can_focus {
properties.push(("can-focus", can_focus));
}
if let Some(ref events) = self.events {
properties.push(("events", events));
}
if let Some(ref expand) = self.expand {
properties.push(("expand", expand));
}
if let Some(ref focus_on_click) = self.focus_on_click {
properties.push(("focus-on-click", focus_on_click));
}
if let Some(ref halign) = self.halign {
properties.push(("halign", halign));
}
if let Some(ref has_default) = self.has_default {
properties.push(("has-default", has_default));
}
if let Some(ref has_focus) = self.has_focus {
properties.push(("has-focus", has_focus));
}
if let Some(ref has_tooltip) = self.has_tooltip {
properties.push(("has-tooltip", has_tooltip));
}
if let Some(ref height_request) = self.height_request {
properties.push(("height-request", height_request));
}
if let Some(ref hexpand) = self.hexpand {
properties.push(("hexpand", hexpand));
}
if let Some(ref hexpand_set) = self.hexpand_set {
properties.push(("hexpand-set", hexpand_set));
}
if let Some(ref is_focus) = self.is_focus {
properties.push(("is-focus", is_focus));
}
if let Some(ref margin) = self.margin {
properties.push(("margin", margin));
}
if let Some(ref margin_bottom) = self.margin_bottom {
properties.push(("margin-bottom", margin_bottom));
}
if let Some(ref margin_end) = self.margin_end {
properties.push(("margin-end", margin_end));
}
if let Some(ref margin_start) = self.margin_start {
properties.push(("margin-start", margin_start));
}
if let Some(ref margin_top) = self.margin_top {
properties.push(("margin-top", margin_top));
}
if let Some(ref name) = self.name {
properties.push(("name", name));
}
if let Some(ref no_show_all) = self.no_show_all {
properties.push(("no-show-all", no_show_all));
}
if let Some(ref opacity) = self.opacity {
properties.push(("opacity", opacity));
}
if let Some(ref parent) = self.parent {
properties.push(("parent", parent));
}
if let Some(ref receives_default) = self.receives_default {
properties.push(("receives-default", receives_default));
}
if let Some(ref sensitive) = self.sensitive {
properties.push(("sensitive", sensitive));
}
if let Some(ref tooltip_markup) = self.tooltip_markup {
properties.push(("tooltip-markup", tooltip_markup));
}
if let Some(ref tooltip_text) = self.tooltip_text {
properties.push(("tooltip-text", tooltip_text));
}
if let Some(ref valign) = self.valign {
properties.push(("valign", valign));
}
if let Some(ref vexpand) = self.vexpand {
properties.push(("vexpand", vexpand));
}
if let Some(ref vexpand_set) = self.vexpand_set {
properties.push(("vexpand-set", vexpand_set));
}
if let Some(ref visible) = self.visible {
properties.push(("visible", visible));
}
if let Some(ref width_request) = self.width_request {
properties.push(("width-request", width_request));
}
if let Some(ref orientation) = self.orientation {
properties.push(("orientation", orientation));
}
if let Some(ref font) = self.font {
properties.push(("font", font));
}
if let Some(ref font_desc) = self.font_desc {
properties.push(("font-desc", font_desc));
}
#[cfg(any(feature = "v3_24", feature = "dox"))]
if let Some(ref language) = self.language {
properties.push(("language", language));
}
#[cfg(any(feature = "v3_24", feature = "dox"))]
if let Some(ref level) = self.level {
properties.push(("level", level));
}
if let Some(ref preview_text) = self.preview_text {
properties.push(("preview-text", preview_text));
}
if let Some(ref show_preview_entry) = self.show_preview_entry {
properties.push(("show-preview-entry", show_preview_entry));
}
glib::Object::new::<FontChooserWidget>(&properties)
.expect("Failed to create an instance of FontChooserWidget")
}
pub fn baseline_position(mut self, baseline_position: BaselinePosition) -> Self {
self.baseline_position = Some(baseline_position);
self
}
pub fn homogeneous(mut self, homogeneous: bool) -> Self {
self.homogeneous = Some(homogeneous);
self
}
pub fn spacing(mut self, spacing: i32) -> Self {
self.spacing = Some(spacing);
self
}
pub fn border_width(mut self, border_width: u32) -> Self {
self.border_width = Some(border_width);
self
}
pub fn child(mut self, child: &impl IsA<Widget>) -> Self {
self.child = Some(child.clone().upcast());
self
}
pub fn resize_mode(mut self, resize_mode: ResizeMode) -> Self {
self.resize_mode = Some(resize_mode);
self
}
pub fn app_paintable(mut self, app_paintable: bool) -> Self {
self.app_paintable = Some(app_paintable);
self
}
pub fn can_default(mut self, can_default: bool) -> Self {
self.can_default = Some(can_default);
self
}
pub fn can_focus(mut self, can_focus: bool) -> Self {
self.can_focus = Some(can_focus);
self
}
pub fn events(mut self, events: gdk::EventMask) -> Self {
self.events = Some(events);
self
}
pub fn expand(mut self, expand: bool) -> Self {
self.expand = Some(expand);
self
}
pub fn focus_on_click(mut self, focus_on_click: bool) -> Self {
self.focus_on_click = Some(focus_on_click);
self
}
pub fn halign(mut self, halign: Align) -> Self {
self.halign = Some(halign);
self
}
pub fn has_default(mut self, has_default: bool) -> Self {
self.has_default = Some(has_default);
self
}
pub fn has_focus(mut self, has_focus: bool) -> Self {
self.has_focus = Some(has_focus);
self
}
pub fn has_tooltip(mut self, has_tooltip: bool) -> Self {
self.has_tooltip = Some(has_tooltip);
self
}
pub fn height_request(mut self, height_request: i32) -> Self {
self.height_request = Some(height_request);
self
}
pub fn hexpand(mut self, hexpand: bool) -> Self {
self.hexpand = Some(hexpand);
self
}
pub fn hexpand_set(mut self, hexpand_set: bool) -> Self {
self.hexpand_set = Some(hexpand_set);
self
}
pub fn is_focus(mut self, is_focus: bool) -> Self {
self.is_focus = Some(is_focus);
self
}
pub fn margin(mut self, margin: i32) -> Self {
self.margin = Some(margin);
self
}
pub fn margin_bottom(mut self, margin_bottom: i32) -> Self {
self.margin_bottom = Some(margin_bottom);
self
}
pub fn margin_end(mut self, margin_end: i32) -> Self {
self.margin_end = Some(margin_end);
self
}
pub fn margin_start(mut self, margin_start: i32) -> Self {
self.margin_start = Some(margin_start);
self
}
pub fn margin_top(mut self, margin_top: i32) -> Self {
self.margin_top = Some(margin_top);
self
}
pub fn name(mut self, name: &str) -> Self {
self.name = Some(name.to_string());
self
}
pub fn no_show_all(mut self, no_show_all: bool) -> Self {
self.no_show_all = Some(no_show_all);
self
}
pub fn opacity(mut self, opacity: f64) -> Self {
self.opacity = Some(opacity);
self
}
pub fn parent(mut self, parent: &impl IsA<Container>) -> Self {
self.parent = Some(parent.clone().upcast());
self
}
pub fn receives_default(mut self, receives_default: bool) -> Self {
self.receives_default = Some(receives_default);
self
}
pub fn sensitive(mut self, sensitive: bool) -> Self {
self.sensitive = Some(sensitive);
self
}
pub fn tooltip_markup(mut self, tooltip_markup: &str) -> Self {
self.tooltip_markup = Some(tooltip_markup.to_string());
self
}
pub fn tooltip_text(mut self, tooltip_text: &str) -> Self {
self.tooltip_text = Some(tooltip_text.to_string());
self
}
pub fn valign(mut self, valign: Align) -> Self {
self.valign = Some(valign);
self
}
pub fn vexpand(mut self, vexpand: bool) -> Self {
self.vexpand = Some(vexpand);
self
}
pub fn vexpand_set(mut self, vexpand_set: bool) -> Self {
self.vexpand_set = Some(vexpand_set);
self
}
pub fn visible(mut self, visible: bool) -> Self {
self.visible = Some(visible);
self
}
pub fn width_request(mut self, width_request: i32) -> Self {
self.width_request = Some(width_request);
self
}
pub fn orientation(mut self, orientation: Orientation) -> Self {
self.orientation = Some(orientation);
self
}
pub fn font(mut self, font: &str) -> Self {
self.font = Some(font.to_string());
self
}
pub fn font_desc(mut self, font_desc: &pango::FontDescription) -> Self {
self.font_desc = Some(font_desc.clone());
self
}
#[cfg(any(feature = "v3_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v3_24")))]
pub fn language(mut self, language: &str) -> Self {
self.language = Some(language.to_string());
self
}
#[cfg(any(feature = "v3_24", feature = "dox"))]
#[cfg_attr(feature = "dox", doc(cfg(feature = "v3_24")))]
pub fn level(mut self, level: FontChooserLevel) -> Self {
self.level = Some(level);
self
}
pub fn preview_text(mut self, preview_text: &str) -> Self {
self.preview_text = Some(preview_text.to_string());
self
}
pub fn show_preview_entry(mut self, show_preview_entry: bool) -> Self {
self.show_preview_entry = Some(show_preview_entry);
self
}
}
/// Trait containing the generated `FontChooserWidget` methods, implemented
/// for every type that can be upcast to a `FontChooserWidget`.
pub trait FontChooserWidgetExt: 'static {
    //#[doc(alias = "tweak-action")]
    //fn tweak_action(&self) -> /*Ignored*/Option<gio::Action>;
    /// Connects `f` to be called whenever the `tweak-action` property
    /// emits a `notify` signal; returns the handler id for disconnecting.
    #[doc(alias = "tweak-action")]
    fn connect_tweak_action_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<FontChooserWidget>> FontChooserWidgetExt for O {
    //fn tweak_action(&self) -> /*Ignored*/Option<gio::Action> {
    //    glib::ObjectExt::property(self.as_ref(), "tweak-action")
    //}
    fn connect_tweak_action_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        // C-ABI trampoline invoked by GObject on `notify::tweak-action`;
        // it recovers the boxed Rust closure from the user-data pointer
        // and forwards the emitting widget to it.
        unsafe extern "C" fn notify_tweak_action_trampoline<
            P: IsA<FontChooserWidget>,
            F: Fn(&P) + 'static,
        >(
            this: *mut ffi::GtkFontChooserWidget,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            // SAFETY: `f` is the Box<F> leaked via Box_::into_raw below,
            // so casting back to &F is valid while the handler is connected.
            let f: &F = &*(f as *const F);
            f(FontChooserWidget::from_glib_borrow(this).unsafe_cast_ref())
        }
        unsafe {
            // Ownership of the closure is transferred to the GObject signal
            // machinery (standard gtk-rs pattern for connect_raw).
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::tweak-action\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(
                    notify_tweak_action_trampoline::<Self, F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}
impl fmt::Display for FontChooserWidget {
    /// Formats the widget as its bare type name.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "FontChooserWidget")
    }
}
| 32.193784 | 180 | 0.597729 |
64b97756e8dce6453fb833b8e821a7d3c33ed3ec | 1,997 | #![no_main]
#![no_std]
use rp_hal::target_device as rp2040;
extern crate rp2040_panic_usb_boot;
use cortex_m_rt::entry;
extern crate rp_pico;
#[entry]
fn main() -> ! {
    // GPIO 25 — presumably the Pico's on-board LED (board crate is rp_pico);
    // TODO confirm against the board schematic.
    let pin = 25;
    // gpio_init
    let p = rp2040::Peripherals::take().unwrap();
    // Deassert reset for the pad and IO banks, preserving all other
    // reset bits (read-modify-write of RESETS.reset).
    p.RESETS.reset.modify(|r, w| {
        unsafe { w.bits(r.bits()) }
            .pads_bank0()
            .clear_bit()
            .io_bank0()
            .clear_bit()
    });
    // Spin until both peripherals report reset-done.
    loop {
        let r = p.RESETS.reset_done.read();
        if r.pads_bank0().bit() && r.io_bank0().bit() {
            break;
        }
    }
    // Start with the pin as a low-driven input (output disabled, level 0).
    p.SIO.gpio_oe_clr.write(|w| unsafe { w.bits(1 << pin) });
    p.SIO.gpio_out_clr.write(|w| unsafe { w.bits(1 << pin) });
    // gpio_set_function(gpio, GPIO_FUNC_SIO);
    //invalid_params_if(GPIO, gpio >= N_GPIOS);
    //invalid_params_if(GPIO, fn << IO_BANK0_GPIO0_CTRL_FUNCSEL_LSB & ~IO_BANK0_GPIO0_CTRL_FUNCSEL_BITS);
    // Set input enable on, output disable off
    // hw_write_masked(&padsbank0_hw->io[gpio],
    //                PADS_BANK0_GPIO0_IE_BITS,
    //                PADS_BANK0_GPIO0_IE_BITS | PADS_BANK0_GPIO0_OD_BITS
    // );
    p.PADS_BANK0
        .gpio25
        .write(|w| w.ie().bit(true).od().bit(false));
    // Zero all fields apart from fsel; we want this IO to do what the peripheral tells it.
    // This doesn't affect e.g. pullup/pulldown, as these are in pad controls.
    //iobank0_hw->io[gpio].ctrl = fn << IO_BANK0_GPIO0_CTRL_FUNCSEL_LSB;
    // is this possible using parameter 'pin'?
    // NOTE(review): the pad/ctrl registers below are hard-coded to gpio25
    // while `pin` is a variable — they will silently disagree if `pin`
    // is ever changed.
    p.IO_BANK0.gpio25_ctrl.write(|w| w.funcsel().sio_25());
    // gpio_set_dir(LED_PIN, GPIO_OUT);
    p.SIO.gpio_oe_set.write(|w| unsafe { w.bits(1 << pin) });
    p.SIO.gpio_out_set.write(|w| unsafe { w.bits(1 << pin) });
    // Blink nine times: toggle the pin with busy-wait delays.
    for _ in 1..10 {
        p.SIO.gpio_out_set.write(|w| unsafe { w.bits(1 << pin) });
        cortex_m::asm::delay(2000000);
        p.SIO.gpio_out_clr.write(|w| unsafe { w.bits(1 << pin) });
        cortex_m::asm::delay(2000000);
    }
    // Deliberate panic after blinking; the panic handler crate
    // (rp2040_panic_usb_boot) takes over from here.
    panic!("Something went wrong!");
}
| 28.942029 | 105 | 0.604407 |
719cc976a4292fcd96ac6ff9e0e5c3664cc05213 | 696 | use super::expression::Expression;
use crate::types;
use std::sync::Arc;
#[derive(Clone, Debug, PartialEq)]
pub struct UnionAddress {
    // The union type whose member is being addressed.
    type_: types::Union,
    pointer: Arc<Expression>, // pointer to union
    // Zero-based index of the addressed member within the union.
    member_index: usize,
}
impl UnionAddress {
pub fn new(type_: types::Union, pointer: impl Into<Expression>, member_index: usize) -> Self {
Self {
type_,
pointer: pointer.into().into(),
member_index,
}
}
pub fn type_(&self) -> &types::Union {
&self.type_
}
pub fn pointer(&self) -> &Expression {
&self.pointer
}
pub fn member_index(&self) -> usize {
self.member_index
}
}
| 21.090909 | 98 | 0.583333 |
72c59e3f982c2c256ab8af88178896685de1eb24 | 16,976 | use crate::{
buffer::Buffer,
layout::{Constraint, Rect},
style::{Color, Style},
symbols,
text::{Span, Spans},
widgets::{
canvas::{Canvas, Line, Points},
Block, Borders, Widget,
},
};
use std::{borrow::Cow, cmp::max};
use unicode_width::UnicodeWidthStr;
/// An X or Y axis for the chart widget
#[derive(Debug, Clone)]
pub struct Axis<'a> {
    /// Title displayed next to axis end
    title: Option<Spans<'a>>,
    /// Bounds for the axis (all data points outside these limits will not be represented)
    bounds: [f64; 2],
    /// A list of labels to put to the left or below the axis, evenly spread
    /// from the start to the end of the axis
    labels: Option<Vec<Span<'a>>>,
    /// The style used to draw the axis itself
    style: Style,
}
impl<'a> Default for Axis<'a> {
fn default() -> Axis<'a> {
Axis {
title: None,
bounds: [0.0, 0.0],
labels: None,
style: Default::default(),
}
}
}
impl<'a> Axis<'a> {
pub fn title<T>(mut self, title: T) -> Axis<'a>
where
T: Into<Spans<'a>>,
{
self.title = Some(title.into());
self
}
#[deprecated(
since = "0.10.0",
note = "You should use styling capabilities of `text::Spans` given as argument of the `title` method to apply styling to the title."
)]
pub fn title_style(mut self, style: Style) -> Axis<'a> {
if let Some(t) = self.title {
let title = String::from(t);
self.title = Some(Spans::from(Span::styled(title, style)));
}
self
}
pub fn bounds(mut self, bounds: [f64; 2]) -> Axis<'a> {
self.bounds = bounds;
self
}
pub fn labels(mut self, labels: Vec<Span<'a>>) -> Axis<'a> {
self.labels = Some(labels);
self
}
pub fn style(mut self, style: Style) -> Axis<'a> {
self.style = style;
self
}
}
/// Used to determine which style of graphing to use
#[derive(Debug, Clone, Copy)]
pub enum GraphType {
    /// Draw each data point individually
    Scatter,
    /// Draw each point and lines between each point using the same marker
    Line,
}
/// A group of data points
#[derive(Debug, Clone)]
pub struct Dataset<'a> {
    /// Name of the dataset (used in the legend if shown)
    name: Cow<'a, str>,
    /// A reference to the actual data as `(x, y)` pairs
    data: &'a [(f64, f64)],
    /// Symbol used for each points of this dataset
    marker: symbols::Marker,
    /// Determines graph type used for drawing points
    graph_type: GraphType,
    /// Style used to plot this dataset
    style: Style,
}
impl<'a> Default for Dataset<'a> {
    /// An unnamed, empty scatter dataset drawn with dot markers and the
    /// default style.
    fn default() -> Dataset<'a> {
        Dataset {
            style: Style::default(),
            graph_type: GraphType::Scatter,
            marker: symbols::Marker::Dot,
            data: &[],
            name: Cow::Borrowed(""),
        }
    }
}
impl<'a> Dataset<'a> {
pub fn name<S>(mut self, name: S) -> Dataset<'a>
where
S: Into<Cow<'a, str>>,
{
self.name = name.into();
self
}
pub fn data(mut self, data: &'a [(f64, f64)]) -> Dataset<'a> {
self.data = data;
self
}
pub fn marker(mut self, marker: symbols::Marker) -> Dataset<'a> {
self.marker = marker;
self
}
pub fn graph_type(mut self, graph_type: GraphType) -> Dataset<'a> {
self.graph_type = graph_type;
self
}
pub fn style(mut self, style: Style) -> Dataset<'a> {
self.style = style;
self
}
}
/// A container that holds all the infos about where to display each elements of the chart (axis,
/// labels, legend, ...).
///
/// Computed by `Chart::layout`; every element is `None` when there is not
/// enough room to display it.
#[derive(Debug, Clone, PartialEq)]
struct ChartLayout {
    /// Location of the title of the x axis
    title_x: Option<(u16, u16)>,
    /// Location of the title of the y axis
    title_y: Option<(u16, u16)>,
    /// Location of the first label of the x axis
    label_x: Option<u16>,
    /// Location of the first label of the y axis
    label_y: Option<u16>,
    /// Y coordinate of the horizontal axis
    axis_x: Option<u16>,
    /// X coordinate of the vertical axis
    axis_y: Option<u16>,
    /// Area of the legend
    legend_area: Option<Rect>,
    /// Area of the graph
    graph_area: Rect,
}
impl Default for ChartLayout {
    /// A layout with every element hidden and an empty graph area.
    fn default() -> ChartLayout {
        ChartLayout {
            graph_area: Rect::default(),
            legend_area: None,
            axis_x: None,
            axis_y: None,
            label_x: None,
            label_y: None,
            title_x: None,
            title_y: None,
        }
    }
}
/// A widget to plot one or more dataset in a cartesian coordinate system
///
/// # Examples
///
/// ```
/// # use tui::symbols;
/// # use tui::widgets::{Block, Borders, Chart, Axis, Dataset, GraphType};
/// # use tui::style::{Style, Color};
/// # use tui::text::Span;
/// let datasets = vec![
/// Dataset::default()
/// .name("data1")
/// .marker(symbols::Marker::Dot)
/// .graph_type(GraphType::Scatter)
/// .style(Style::default().fg(Color::Cyan))
/// .data(&[(0.0, 5.0), (1.0, 6.0), (1.5, 6.434)]),
/// Dataset::default()
/// .name("data2")
/// .marker(symbols::Marker::Braille)
/// .graph_type(GraphType::Line)
/// .style(Style::default().fg(Color::Magenta))
/// .data(&[(4.0, 5.0), (5.0, 8.0), (7.66, 13.5)]),
/// ];
/// Chart::new(datasets)
/// .block(Block::default().title("Chart"))
/// .x_axis(Axis::default()
/// .title(Span::styled("X Axis", Style::default().fg(Color::Red)))
/// .style(Style::default().fg(Color::White))
/// .bounds([0.0, 10.0])
/// .labels(["0.0", "5.0", "10.0"].iter().cloned().map(Span::from).collect()))
/// .y_axis(Axis::default()
/// .title(Span::styled("Y Axis", Style::default().fg(Color::Red)))
/// .style(Style::default().fg(Color::White))
/// .bounds([0.0, 10.0])
/// .labels(["0.0", "5.0", "10.0"].iter().cloned().map(Span::from).collect()));
/// ```
#[derive(Debug, Clone)]
pub struct Chart<'a> {
    /// A block to display around the widget eventually
    block: Option<Block<'a>>,
    /// The horizontal axis
    x_axis: Axis<'a>,
    /// The vertical axis
    y_axis: Axis<'a>,
    /// A reference to the datasets
    datasets: Vec<Dataset<'a>>,
    /// The widget base style
    style: Style,
    /// Constraints used to determine whether the legend should be shown or not
    /// (maximum width, maximum height)
    hidden_legend_constraints: (Constraint, Constraint),
}
impl<'a> Chart<'a> {
    /// Creates a chart plotting `datasets` with default axes; the legend is
    /// hidden once it would exceed a quarter of the chart in either
    /// dimension.
    pub fn new(datasets: Vec<Dataset<'a>>) -> Chart<'a> {
        Chart {
            block: None,
            x_axis: Axis::default(),
            y_axis: Axis::default(),
            style: Default::default(),
            datasets,
            hidden_legend_constraints: (Constraint::Ratio(1, 4), Constraint::Ratio(1, 4)),
        }
    }
    /// Wraps the chart in `block` (e.g. for borders and a title).
    pub fn block(mut self, block: Block<'a>) -> Chart<'a> {
        self.block = Some(block);
        self
    }
    /// Sets the base style applied to the whole widget area.
    pub fn style(mut self, style: Style) -> Chart<'a> {
        self.style = style;
        self
    }
    /// Sets the horizontal axis.
    pub fn x_axis(mut self, axis: Axis<'a>) -> Chart<'a> {
        self.x_axis = axis;
        self
    }
    /// Sets the vertical axis.
    pub fn y_axis(mut self, axis: Axis<'a>) -> Chart<'a> {
        self.y_axis = axis;
        self
    }
    /// Set the constraints used to determine whether the legend should be shown or not.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tui::widgets::Chart;
    /// # use tui::layout::Constraint;
    /// let constraints = (
    ///     Constraint::Ratio(1, 3),
    ///     Constraint::Ratio(1, 4)
    /// );
    /// // Hide the legend when either its width is greater than 33% of the total widget width
    /// // or if its height is greater than 25% of the total widget height.
    /// let _chart: Chart = Chart::new(vec![])
    ///     .hidden_legend_constraints(constraints);
    /// ```
    pub fn hidden_legend_constraints(mut self, constraints: (Constraint, Constraint)) -> Chart<'a> {
        self.hidden_legend_constraints = constraints;
        self
    }
    /// Compute the internal layout of the chart given the area. If the area is too small some
    /// elements may be automatically hidden
    fn layout(&self, area: Rect) -> ChartLayout {
        let mut layout = ChartLayout::default();
        if area.height == 0 || area.width == 0 {
            return layout;
        }
        // Consume rows from the bottom (x labels, then the x axis line) and
        // columns from the left (y labels, then the y axis line); whatever
        // remains becomes the graph area.
        let mut x = area.left();
        let mut y = area.bottom() - 1;
        if self.x_axis.labels.is_some() && y > area.top() {
            layout.label_x = Some(y);
            y -= 1;
        }
        if let Some(ref y_labels) = self.y_axis.labels {
            // Reserve a column wide enough for the widest y label; the first
            // x label also sits in this column, so account for it too.
            let mut max_width = y_labels.iter().map(Span::width).max().unwrap_or_default() as u16;
            if let Some(ref x_labels) = self.x_axis.labels {
                if !x_labels.is_empty() {
                    max_width = max(max_width, x_labels[0].content.width() as u16);
                }
            }
            if x + max_width < area.right() {
                layout.label_y = Some(x);
                x += max_width;
            }
        }
        if self.x_axis.labels.is_some() && y > area.top() {
            layout.axis_x = Some(y);
            y -= 1;
        }
        if self.y_axis.labels.is_some() && x + 1 < area.right() {
            layout.axis_y = Some(x);
            x += 1;
        }
        if x < area.right() && y > 1 {
            layout.graph_area = Rect::new(x, area.top(), area.right() - x, y - area.top() + 1);
        }
        // Axis titles are only shown if they fit inside the graph area:
        // x title at the bottom-right, y title at the top-left.
        if let Some(ref title) = self.x_axis.title {
            let w = title.width() as u16;
            if w < layout.graph_area.width && layout.graph_area.height > 2 {
                layout.title_x = Some((x + layout.graph_area.width - w, y));
            }
        }
        if let Some(ref title) = self.y_axis.title {
            let w = title.width() as u16;
            if w + 1 < layout.graph_area.width && layout.graph_area.height > 2 {
                layout.title_y = Some((x + 1, area.top()));
            }
        }
        // Legend is sized from the widest dataset name (+2 for the border)
        // and hidden once it exceeds the configured constraints.
        if let Some(inner_width) = self.datasets.iter().map(|d| d.name.width() as u16).max() {
            let legend_width = inner_width + 2;
            let legend_height = self.datasets.len() as u16 + 2;
            let max_legend_width = self
                .hidden_legend_constraints
                .0
                .apply(layout.graph_area.width);
            let max_legend_height = self
                .hidden_legend_constraints
                .1
                .apply(layout.graph_area.height);
            if inner_width > 0
                && legend_width < max_legend_width
                && legend_height < max_legend_height
            {
                layout.legend_area = Some(Rect::new(
                    layout.graph_area.right() - legend_width,
                    layout.graph_area.top(),
                    legend_width,
                    legend_height,
                ));
            }
        }
        layout
    }
}
impl<'a> Widget for Chart<'a> {
    /// Renders the chart: base style, optional block, titles, labels,
    /// axis lines, dataset canvases, and finally the legend on top.
    fn render(mut self, area: Rect, buf: &mut Buffer) {
        buf.set_style(area, self.style);
        // Render the surrounding block first and draw inside its inner area.
        let chart_area = match self.block.take() {
            Some(b) => {
                let inner_area = b.inner(area);
                b.render(area, buf);
                inner_area
            }
            None => area,
        };
        let layout = self.layout(chart_area);
        let graph_area = layout.graph_area;
        if graph_area.width < 1 || graph_area.height < 1 {
            return;
        }
        // Axis titles (positions were validated by layout(), so the
        // unwraps below cannot fail when the coordinates are Some).
        if let Some((x, y)) = layout.title_x {
            let title = self.x_axis.title.unwrap();
            buf.set_spans(x, y, &title, graph_area.right().saturating_sub(x));
        }
        if let Some((x, y)) = layout.title_y {
            let title = self.y_axis.title.unwrap();
            buf.set_spans(x, y, &title, graph_area.right().saturating_sub(x));
        }
        // Spread x labels evenly along the bottom row, right-aligned to
        // their tick position.
        if let Some(y) = layout.label_x {
            let labels = self.x_axis.labels.unwrap();
            let total_width = labels.iter().map(Span::width).sum::<usize>() as u16;
            let labels_len = labels.len() as u16;
            if total_width < graph_area.width && labels_len > 1 {
                for (i, label) in labels.iter().enumerate() {
                    buf.set_span(
                        graph_area.left() + i as u16 * (graph_area.width - 1) / (labels_len - 1)
                            - label.content.width() as u16,
                        y,
                        label,
                        label.width() as u16,
                    );
                }
            }
        }
        // Spread y labels evenly along the left column, bottom to top.
        if let Some(x) = layout.label_y {
            let labels = self.y_axis.labels.unwrap();
            let labels_len = labels.len() as u16;
            for (i, label) in labels.iter().enumerate() {
                let dy = i as u16 * (graph_area.height - 1) / (labels_len - 1);
                if dy < graph_area.bottom() {
                    buf.set_span(x, graph_area.bottom() - 1 - dy, label, label.width() as u16);
                }
            }
        }
        // Axis lines, then the corner glyph where they meet.
        if let Some(y) = layout.axis_x {
            for x in graph_area.left()..graph_area.right() {
                buf.get_mut(x, y)
                    .set_symbol(symbols::line::HORIZONTAL)
                    .set_style(self.x_axis.style);
            }
        }
        if let Some(x) = layout.axis_y {
            for y in graph_area.top()..graph_area.bottom() {
                buf.get_mut(x, y)
                    .set_symbol(symbols::line::VERTICAL)
                    .set_style(self.y_axis.style);
            }
        }
        if let Some(y) = layout.axis_x {
            if let Some(x) = layout.axis_y {
                buf.get_mut(x, y)
                    .set_symbol(symbols::line::BOTTOM_LEFT)
                    .set_style(self.x_axis.style);
            }
        }
        // Plot each dataset on its own canvas pass so every dataset can use
        // its own marker.
        for dataset in &self.datasets {
            Canvas::default()
                .background_color(self.style.bg.unwrap_or(Color::Reset))
                .x_bounds(self.x_axis.bounds)
                .y_bounds(self.y_axis.bounds)
                .marker(dataset.marker)
                .paint(|ctx| {
                    ctx.draw(&Points {
                        coords: dataset.data,
                        color: dataset.style.fg.unwrap_or(Color::Reset),
                    });
                    if let GraphType::Line = dataset.graph_type {
                        // Connect consecutive points with line segments.
                        for data in dataset.data.windows(2) {
                            ctx.draw(&Line {
                                x1: data[0].0,
                                y1: data[0].1,
                                x2: data[1].0,
                                y2: data[1].1,
                                color: dataset.style.fg.unwrap_or(Color::Reset),
                            })
                        }
                    }
                })
                .render(graph_area, buf);
        }
        // Legend is drawn last so it overlays the plotted data.
        if let Some(legend_area) = layout.legend_area {
            Block::default()
                .borders(Borders::ALL)
                .render(legend_area, buf);
            for (i, dataset) in self.datasets.iter().enumerate() {
                buf.set_string(
                    legend_area.x + 1,
                    legend_area.y + 1 + i as u16,
                    &dataset.name,
                    dataset.style,
                );
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// One scenario for the legend-visibility logic: an area, the
    /// constraints under test, and the expected legend placement.
    struct LegendTestCase {
        chart_area: Rect,
        hidden_legend_constraints: (Constraint, Constraint),
        legend_area: Option<Rect>,
    }
    #[test]
    fn it_should_hide_the_legend() {
        let data = [(0.0, 5.0), (1.0, 6.0), (3.0, 7.0)];
        let cases = [
            // Default-like constraints: the 12x12 legend fits within 1/4.
            LegendTestCase {
                chart_area: Rect::new(0, 0, 100, 100),
                hidden_legend_constraints: (Constraint::Ratio(1, 4), Constraint::Ratio(1, 4)),
                legend_area: Some(Rect::new(88, 0, 12, 12)),
            },
            // Width capped at 1/10 of the area: the legend must be hidden.
            LegendTestCase {
                chart_area: Rect::new(0, 0, 100, 100),
                hidden_legend_constraints: (Constraint::Ratio(1, 10), Constraint::Ratio(1, 4)),
                legend_area: None,
            },
        ];
        for case in &cases {
            // Ten datasets named "Dataset #0".."Dataset #9" (10 chars wide).
            let datasets = (0..10)
                .map(|i| {
                    let name = format!("Dataset #{}", i);
                    Dataset::default().name(name).data(&data)
                })
                .collect::<Vec<_>>();
            let chart = Chart::new(datasets)
                .x_axis(Axis::default().title("X axis"))
                .y_axis(Axis::default().title("Y axis"))
                .hidden_legend_constraints(case.hidden_legend_constraints);
            let layout = chart.layout(case.chart_area);
            assert_eq!(layout.legend_area, case.legend_area);
        }
    }
}
| 32.030189 | 140 | 0.502533 |
890654bffa30ea8e7013b6bb561f3903a984cb3c | 1,332 | // crypto_stream_xchacha20.h
pub const crypto_stream_xchacha20_KEYBYTES: usize = 32;
pub const crypto_stream_xchacha20_NONCEBYTES: usize = 24;
extern {
pub fn crypto_stream_xchacha20(
c: *mut u8,
clen: c_ulonglong,
n: *const [u8; crypto_stream_xchacha20_NONCEBYTES],
k: *const [u8; crypto_stream_xchacha20_KEYBYTES]) -> c_int;
pub fn crypto_stream_xchacha20_xor(
c: *mut u8,
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_stream_xchacha20_NONCEBYTES],
k: *const [u8; crypto_stream_xchacha20_KEYBYTES]) -> c_int;
pub fn crypto_stream_xchacha20_xor_ic(
c: *mut u8,
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_stream_xchacha20_NONCEBYTES],
ic: uint64_t,
k: *const [u8; crypto_stream_xchacha20_KEYBYTES]) -> c_int;
pub fn crypto_stream_xchacha20_keybytes() -> size_t;
pub fn crypto_stream_xchacha20_noncebytes() -> size_t;
}
#[test]
fn test_crypto_stream_xchacha20_keybytes() {
    // Guards against drift between the Rust constant and the value the
    // linked libsodium reports at runtime. `assert_eq!` (instead of
    // `assert!(a == b)`) prints both values on failure.
    assert_eq!(
        unsafe { crypto_stream_xchacha20_keybytes() as usize },
        crypto_stream_xchacha20_KEYBYTES
    )
}
#[test]
fn test_crypto_stream_xchacha20_noncebytes() {
    // Same consistency check for the nonce size; `assert_eq!` reports
    // both sides when the values diverge.
    assert_eq!(
        unsafe { crypto_stream_xchacha20_noncebytes() as usize },
        crypto_stream_xchacha20_NONCEBYTES
    )
}
| 33.3 | 71 | 0.688438 |
48eb4ac9fa06bfd25fd470ffab9d1f7fd03f5bca | 7,595 | // Copyright 2014 Tyler Neely
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Rust wrapper for RocksDB.
//!
//! # Examples
//!
//! ```
//! use rocksdb::{DB, Options};
//! // NB: db is automatically closed at end of lifetime
//! let path = "_path_for_rocksdb_storage";
//! {
//! let db = DB::open_default(path).unwrap();
//! db.put(b"my key", b"my value").unwrap();
//! match db.get(b"my key") {
//! Ok(Some(value)) => println!("retrieved value {}", value.to_utf8().unwrap()),
//! Ok(None) => println!("value not found"),
//! Err(e) => println!("operational problem encountered: {}", e),
//! }
//! db.delete(b"my key").unwrap();
//! }
//! let _ = DB::destroy(&Options::default(), path);
//! ```
//!
//! Opening a database and a single column family with custom options:
//!
//! ```
//! use rocksdb::{DB, ColumnFamilyDescriptor, Options};
//!
//! let path = "_path_for_rocksdb_storage_with_cfs";
//! let mut cf_opts = Options::default();
//! cf_opts.set_max_write_buffer_number(16);
//! let cf = ColumnFamilyDescriptor::new("cf1", cf_opts);
//!
//! let mut db_opts = Options::default();
//! db_opts.create_missing_column_families(true);
//! db_opts.create_if_missing(true);
//! {
//! let db = DB::open_cf_descriptors(&db_opts, path, vec![cf]).unwrap();
//! }
//! let _ = DB::destroy(&db_opts, path);
//! ```
//!
extern crate libc;
extern crate librocksdb_sys as ffi;
#[macro_use]
mod ffi_util;
pub mod backup;
pub mod checkpoint;
pub mod compaction_filter;
mod comparator;
mod db;
mod db_options;
pub mod merge_operator;
mod slice_transform;
pub use compaction_filter::Decision as CompactionDecision;
pub use db::{
DBCompactionStyle, DBCompressionType, DBIterator, DBPinnableSlice, DBRawIterator,
DBRecoveryMode, DBVector, Direction, IteratorMode, ReadOptions, Snapshot, WriteBatch,
};
pub use slice_transform::SliceTransform;
pub use merge_operator::MergeOperands;
use std::collections::BTreeMap;
use std::error;
use std::fmt;
use std::marker::PhantomData;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};
/// A RocksDB database.
///
/// See crate level documentation for a simple usage example.
pub struct DB {
    // Raw handle to the underlying C database object.
    inner: *mut ffi::rocksdb_t,
    // Open column family handles, keyed by name.
    cfs: Arc<RwLock<BTreeMap<String, *mut ffi::rocksdb_column_family_handle_t>>>,
    // Filesystem path the database was opened at.
    path: PathBuf,
}
/// A descriptor for a RocksDB column family.
///
/// A description of the column family, containing the name and `Options`.
pub struct ColumnFamilyDescriptor {
    // Column family name as stored in the database.
    name: String,
    // Options applied to this column family when opened.
    options: Options,
}
/// A simple wrapper round a string, used for errors reported from
/// ffi calls.
#[derive(Debug, Clone, PartialEq)]
pub struct Error {
    // Human-readable message produced by the RocksDB C API.
    message: String,
}
impl Error {
fn new(message: String) -> Error {
Error { message }
}
pub fn into_string(self) -> String {
self.into()
}
}
impl AsRef<str> for Error {
    /// Borrows the error message as a string slice.
    fn as_ref(&self) -> &str {
        self.message.as_str()
    }
}
impl From<Error> for String {
fn from(e: Error) -> String {
e.message
}
}
impl error::Error for Error {
    // NOTE(review): `description` has been soft-deprecated in favor of
    // `Display` since Rust 1.42; kept because external callers may still
    // invoke it directly.
    fn description(&self) -> &str {
        &self.message
    }
}
impl fmt::Display for Error {
    /// Formats the error as its plain message.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(formatter, "{}", self.message)
    }
}
/// For configuring block-based file storage.
pub struct BlockBasedOptions {
    // Raw handle to the underlying C options object.
    inner: *mut ffi::rocksdb_block_based_table_options_t,
}
/// Used by BlockBasedOptions::set_index_type.
pub enum BlockBasedIndexType {
    /// A space efficient index block that is optimized for
    /// binary-search-based index.
    BinarySearch,
    /// The hash index, if enabled, will perform a hash lookup if
    /// a prefix extractor has been provided through Options::set_prefix_extractor.
    HashSearch,
    /// A two-level index implementation. Both levels are binary search indexes.
    TwoLevelIndexSearch,
}
/// Defines the underlying memtable implementation.
/// See https://github.com/facebook/rocksdb/wiki/MemTable for more information.
pub enum MemtableFactory {
    /// Append-only vector-backed memtable.
    Vector,
    /// Skip-list buckets addressed by key-prefix hash.
    HashSkipList {
        bucket_count: usize,
        height: i32,
        branching_factor: i32,
    },
    /// Linked-list buckets addressed by key-prefix hash.
    HashLinkList {
        bucket_count: usize,
    },
}
/// Used with DBOptions::set_plain_table_factory.
/// See https://github.com/facebook/rocksdb/wiki/PlainTable-Format.
///
/// Defaults:
///  user_key_length: 0 (variable length)
///  bloom_bits_per_key: 10
///  hash_table_ratio: 0.75
///  index_sparseness: 16
pub struct PlainTableFactoryOptions {
    // Fixed user key length in bytes; 0 means variable-length keys.
    pub user_key_length: u32,
    pub bloom_bits_per_key: i32,
    pub hash_table_ratio: f64,
    pub index_sparseness: usize,
}
/// Database-wide options around performance and behavior.
///
/// Please read [the official tuning guide](https://github.com/facebook/rocksdb/wiki/RocksDB-Tuning-Guide), and most importantly, measure performance under realistic workloads with realistic hardware.
///
/// # Examples
///
/// ```
/// use rocksdb::{Options, DB};
/// use rocksdb::DBCompactionStyle;
///
/// fn badly_tuned_for_somebody_elses_disk() -> DB {
///    let path = "path/for/rocksdb/storageX";
///    let mut opts = Options::default();
///    opts.create_if_missing(true);
///    opts.set_max_open_files(10000);
///    opts.set_use_fsync(false);
///    opts.set_bytes_per_sync(8388608);
///    opts.optimize_for_point_lookup(1024);
///    opts.set_table_cache_num_shard_bits(6);
///    opts.set_max_write_buffer_number(32);
///    opts.set_write_buffer_size(536870912);
///    opts.set_target_file_size_base(1073741824);
///    opts.set_min_write_buffer_number_to_merge(4);
///    opts.set_level_zero_stop_writes_trigger(2000);
///    opts.set_level_zero_slowdown_writes_trigger(0);
///    opts.set_compaction_style(DBCompactionStyle::Universal);
///    opts.set_max_background_compactions(4);
///    opts.set_max_background_flushes(4);
///    opts.set_disable_auto_compactions(true);
///
///    DB::open(&opts, path).unwrap()
/// }
/// ```
pub struct Options {
    // Raw pointer to the underlying C `rocksdb_options_t`.
    inner: *mut ffi::rocksdb_options_t,
}
/// Optionally disable WAL or sync for this write.
///
/// # Examples
///
/// Making an unsafe write of a batch:
///
/// ```
/// use rocksdb::{DB, Options, WriteBatch, WriteOptions};
///
/// let path = "_path_for_rocksdb_storageY";
/// {
///     let db = DB::open_default(path).unwrap();
///     let mut batch = WriteBatch::default();
///     batch.put(b"my key", b"my value");
///     batch.put(b"key2", b"value2");
///     batch.put(b"key3", b"value3");
///
///     let mut write_options = WriteOptions::default();
///     write_options.set_sync(false);
///     write_options.disable_wal(true);
///
///     db.write_opt(batch, &write_options);
/// }
/// let _ = DB::destroy(&Options::default(), path);
/// ```
pub struct WriteOptions {
    // Raw pointer to the underlying C `rocksdb_writeoptions_t`.
    inner: *mut ffi::rocksdb_writeoptions_t,
}
/// An opaque type used to represent a column family. Returned from some functions, and used
/// in others
#[derive(Copy, Clone)]
pub struct ColumnFamily<'a> {
    // Raw handle to the C column-family object.
    inner: *mut ffi::rocksdb_column_family_handle_t,
    // Ties this handle's lifetime to the `DB` that produced it, without
    // storing a real reference.
    db: PhantomData<&'a DB>,
}
unsafe impl<'a> Send for ColumnFamily<'a> {}
| 28.234201 | 200 | 0.675839 |
4b51fa7ba9ebc9ef325bbdb4bca4487dfeaf3321 | 20,322 | //! The implementation for CSI node service
use grpcio::{RpcContext, UnarySink};
use log::{debug, error, info, warn};
use nix::sys::stat::{self, SFlag};
use protobuf::RepeatedField;
use std::sync::Arc;
use super::csi::{
NodeExpandVolumeRequest, NodeExpandVolumeResponse, NodeGetCapabilitiesRequest,
NodeGetCapabilitiesResponse, NodeGetInfoRequest, NodeGetInfoResponse,
NodeGetVolumeStatsRequest, NodeGetVolumeStatsResponse, NodePublishVolumeRequest,
NodePublishVolumeResponse, NodeServiceCapability, NodeServiceCapability_RPC_Type,
NodeStageVolumeRequest, NodeStageVolumeResponse, NodeUnpublishVolumeRequest,
NodeUnpublishVolumeResponse, NodeUnstageVolumeRequest, NodeUnstageVolumeResponse, Topology,
VolumeCapability_oneof_access_type,
};
use super::csi_grpc::Node;
use super::error::{
Context,
DatenLordError::{ArgumentInvalid, Unimplemented},
DatenLordResult,
};
use super::meta_data::{DatenLordVolume, MetaData};
use super::util;
/// for `NodeService` implementation
///
/// Cloning is cheap: only the `Arc` to the shared inner state is cloned.
#[derive(Clone)]
pub struct NodeImpl {
    /// Inner data
    inner: Arc<NodeImplInner>,
}
/// Holding `NodeImpl` inner data
///
/// Shared between all in-flight gRPC handler tasks via `Arc`.
struct NodeImplInner {
    /// Node capabilities
    caps: Vec<NodeServiceCapability>,
    /// Volume meta data for this node
    meta_data: Arc<MetaData>,
}
impl NodeImpl {
    /// Create `NodeImpl`
    ///
    /// Wraps the inner state in an `Arc` so the service handle can be cloned
    /// once per gRPC call.
    pub fn new(meta_data: Arc<MetaData>) -> Self {
        Self {
            inner: Arc::new(NodeImplInner::new(meta_data)),
        }
    }
}
impl NodeImplInner {
/// Create `NodeImpl`
fn new(meta_data: Arc<MetaData>) -> Self {
let cap_vec = vec![NodeServiceCapability_RPC_Type::EXPAND_VOLUME];
let caps = cap_vec
.into_iter()
.map(|rpc_type| {
let mut csc = NodeServiceCapability::new();
csc.mut_rpc().set_field_type(rpc_type);
csc
})
.collect();
Self { caps, meta_data }
}
/// Validate request with controller capabilities
fn validate_request_capability(&self, rpc_type: NodeServiceCapability_RPC_Type) -> bool {
rpc_type == NodeServiceCapability_RPC_Type::UNKNOWN
|| self
.caps
.iter()
.any(|cap| cap.get_rpc().get_field_type() == rpc_type)
}
/// Create ephemeral volume
async fn create_ephemeral_volume(&self, vol_id: &str) -> DatenLordResult<()> {
let vol_name = format!("ephemeral-{}", vol_id);
let volume = DatenLordVolume::build_ephemeral_volume(
vol_id,
&vol_name,
self.meta_data.get_node_id(),
&self.meta_data.get_volume_path(vol_id),
)
.with_context(|| {
format!(
"failed to create ephemeral volume ID={} and name={}",
vol_id, vol_name,
)
})?;
info!(
"ephemeral mode: created volume ID={} and name={}",
volume.vol_id, volume.vol_name,
);
let add_ephemeral_res = self.meta_data.add_volume_meta_data(vol_id, &volume).await;
debug_assert!(
add_ephemeral_res.is_ok(),
format!(
"ephemeral volume ID={} and name={} is duplicated",
vol_id, vol_name,
)
);
Ok(())
}
/// Delete ephemeral volume
/// `tolerant_error` means whether to ignore umount error or not
async fn delete_ephemeral_volume(&self, volume: &DatenLordVolume, tolerant_error: bool) {
let delete_ephemeral_res = self.meta_data.delete_volume_meta_data(&volume.vol_id).await;
if let Err(e) = delete_ephemeral_res {
if tolerant_error {
error!(
"failed to delete ephemeral volume ID={} and name={}, \
the error is: {}",
volume.vol_id, volume.vol_name, e,
);
} else {
panic!(
"failed to delete ephemeral volume ID={} and name={}, \
the error is: {}",
volume.vol_id, volume.vol_name, e,
);
}
}
let delete_dir_res = volume.delete_directory();
if let Err(e) = delete_dir_res {
if tolerant_error {
error!(
"failed to delete the directory of ephemerial volume ID={}, \
the error is: {}",
volume.vol_id, e,
);
} else {
panic!(
"failed to delete the directory of ephemerial volume ID={}, \
the error is: {}",
volume.vol_id, e,
);
}
}
}
/// The pre-check helper function for `node_publish_volume`
fn node_publish_volume_pre_check(req: &NodePublishVolumeRequest) -> DatenLordResult<()> {
if !req.has_volume_capability() {
return Err(ArgumentInvalid {
context: vec!["volume capability missing in request".to_string()],
});
}
let vol_id = req.get_volume_id();
if vol_id.is_empty() {
return Err(ArgumentInvalid {
context: vec!["volume ID missing in request".to_string()],
});
}
let target_dir = req.get_target_path();
if target_dir.is_empty() {
return Err(ArgumentInvalid {
context: vec!["target path missing in request".to_string()],
});
}
Ok(())
}
}
// gRPC `Node` service implementation. Every handler clones the shared inner
// state into an async task and hands the result to the sink via
// `util::spawn_grpc_task`.
impl Node for NodeImpl {
    /// `NodeStageVolume`: validates capability and arguments, then returns an
    /// empty success response — no actual staging work is performed here.
    fn node_stage_volume(
        &mut self,
        _ctx: RpcContext,
        req: NodeStageVolumeRequest,
        sink: UnarySink<NodeStageVolumeResponse>,
    ) {
        debug!("node_stage_volume request: {:?}", req);
        let self_inner = Arc::<NodeImplInner>::clone(&self.inner);
        let task = async move {
            let rpc_type = NodeServiceCapability_RPC_Type::STAGE_UNSTAGE_VOLUME;
            if !self_inner.validate_request_capability(rpc_type) {
                return Err(ArgumentInvalid {
                    context: vec![format!("unsupported capability {:?}", rpc_type)],
                });
            }
            // Check arguments
            let vol_id = req.get_volume_id();
            if vol_id.is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["volume ID missing in request".to_string()],
                });
            }
            if req.get_staging_target_path().is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["target path missing in request".to_string()],
                });
            }
            if !req.has_volume_capability() {
                return Err(ArgumentInvalid {
                    context: vec!["volume capability missing in request".to_string()],
                });
            }
            let r = NodeStageVolumeResponse::new();
            Ok(r)
        };
        util::spawn_grpc_task(sink, task);
    }
    /// `NodeUnstageVolume`: argument/capability validation only; returns an
    /// empty success response.
    fn node_unstage_volume(
        &mut self,
        _ctx: RpcContext,
        req: NodeUnstageVolumeRequest,
        sink: UnarySink<NodeUnstageVolumeResponse>,
    ) {
        debug!("node_unstage_volume request: {:?}", req);
        let self_inner = Arc::<NodeImplInner>::clone(&self.inner);
        let task = async move {
            let rpc_type = NodeServiceCapability_RPC_Type::STAGE_UNSTAGE_VOLUME;
            if !self_inner.validate_request_capability(rpc_type) {
                return Err(ArgumentInvalid {
                    context: vec![format!("unsupported capability {:?}", rpc_type)],
                });
            }
            // Check arguments
            if req.get_volume_id().is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["volume ID missing in request".to_string()],
                });
            }
            if req.get_staging_target_path().is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["target path missing in request".to_string()],
                });
            }
            let r = NodeUnstageVolumeResponse::new();
            Ok(r)
        };
        util::spawn_grpc_task(sink, task);
    }
    /// `NodePublishVolume`: creates the ephemeral volume if needed and bind
    /// mounts the volume path onto the requested target directory. Only the
    /// `mount` access type is supported; `block` is rejected.
    fn node_publish_volume(
        &mut self,
        _ctx: RpcContext,
        req: NodePublishVolumeRequest,
        sink: UnarySink<NodePublishVolumeResponse>,
    ) {
        debug!("node_publish_volume request: {:?}", req);
        let self_inner = Arc::<NodeImplInner>::clone(&self.inner);
        let task = async move {
            NodeImplInner::node_publish_volume_pre_check(&req)?;
            let read_only = req.get_readonly();
            let volume_context = req.get_volume_context();
            let device_id = match volume_context.get("deviceID") {
                Some(did) => did,
                None => "",
            };
            // Kubernetes 1.15 doesn't have csi.storage.k8s.io/ephemeral
            // Fall back to the node-level ephemeral setting when the context
            // key is absent or is neither "true" nor "false".
            let context_ephemeral_res = volume_context.get(util::EPHEMERAL_KEY_CONTEXT);
            let ephemeral = context_ephemeral_res.map_or(
                self_inner.meta_data.is_ephemeral(),
                |context_ephemeral_val| {
                    if context_ephemeral_val == "true" {
                        true
                    } else if context_ephemeral_val == "false" {
                        false
                    } else {
                        self_inner.meta_data.is_ephemeral()
                    }
                },
            );
            let vol_id = req.get_volume_id();
            // If ephemeral is true, create volume here to avoid errors if not exists
            let volume_exist = self_inner.meta_data.find_volume_by_id(vol_id).await?;
            if ephemeral && !volume_exist {
                if let Err(e) = self_inner.create_ephemeral_volume(vol_id).await {
                    warn!(
                        "failed to create ephemeral volume ID={}, the error is:{}",
                        vol_id, e,
                    );
                    return Err(e);
                };
            }
            let target_dir = req.get_target_path();
            match req.get_volume_capability().access_type {
                None => {
                    return Err(ArgumentInvalid {
                        context: vec!["access_type missing in request".to_string()],
                    });
                }
                Some(ref access_type) => {
                    if let VolumeCapability_oneof_access_type::mount(ref volume_mount_option) =
                        *access_type
                    {
                        let fs_type = volume_mount_option.get_fs_type();
                        let mount_flags = volume_mount_option.get_mount_flags();
                        let mount_options = mount_flags.join(",");
                        info!(
                            "target={}\nfstype={}\ndevice={}\nreadonly={}\n\
                                volume ID={}\nattributes={:?}\nmountflags={}\n",
                            target_dir,
                            fs_type,
                            device_id,
                            read_only,
                            vol_id,
                            volume_context,
                            mount_options,
                        );
                        // Bind mount from target_dir to vol_path
                        self_inner
                            .meta_data
                            .bind_mount(
                                target_dir,
                                fs_type,
                                read_only,
                                vol_id,
                                &mount_options,
                                ephemeral,
                            )
                            .await?;
                    } else {
                        // VolumeCapability_oneof_access_type::block(..) not supported
                        return Err(ArgumentInvalid {
                            context: vec![format!("unsupported access type {:?}", access_type)],
                        });
                    }
                }
            }
            let r = NodePublishVolumeResponse::new();
            Ok(r)
        };
        util::spawn_grpc_task(sink, task);
    }
    /// `NodeUnpublishVolume`: removes the bind mount for `target_path`,
    /// un-mounts it, and finally deletes the ephemeral volume when its last
    /// bind mount is gone. Designed to be idempotent: a target path that is
    /// already gone is tolerated.
    fn node_unpublish_volume(
        &mut self,
        _ctx: RpcContext,
        req: NodeUnpublishVolumeRequest,
        sink: UnarySink<NodeUnpublishVolumeResponse>,
    ) {
        debug!("node_unpublish_volume request: {:?}", req);
        let self_inner = Arc::<NodeImplInner>::clone(&self.inner);
        let task = async move {
            // Check arguments
            let vol_id = req.get_volume_id();
            if vol_id.is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["volume ID missing in request".to_string()],
                });
            }
            let target_path = req.get_target_path();
            if target_path.is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["target path missing in request".to_string()],
                });
            }
            let volume = self_inner.meta_data.get_volume_by_id(vol_id).await?;
            let r = NodeUnpublishVolumeResponse::new();
            // Do not return error for non-existent path, repeated calls OK for idempotency
            // if unistd::geteuid().is_root() {
            let delete_res = self_inner
                .meta_data
                .delete_volume_one_bind_mount_path(vol_id, target_path)
                .await;
            let mut pre_mount_path_set = match delete_res {
                Ok(s) => s,
                Err(e) => {
                    warn!(
                        "failed to delete mount path={} of volume ID={} from etcd, \
                            the error is: {}",
                        target_path, vol_id, e,
                    );
                    return Ok(r);
                }
            };
            // Whether the path was actually tracked in etcd decides how strict
            // the following umount error handling is.
            let remove_res = pre_mount_path_set.remove(target_path);
            let tolerant_error = if remove_res {
                debug!("the target path to un-mount found in etcd");
                // Do not tolerant umount error,
                // since the target path is one of the mount paths of this volume
                false
            } else {
                warn!(
                    "the target path={} to un-mount not found in etcd",
                    target_path
                );
                // Tolerant umount error,
                // since the target path is not one of the mount paths of this volume
                true
            };
            // umount is blocking, so run it on a blocking-friendly thread
            let target_path_owned = target_path.to_owned();
            if let Err(e) =
                smol::unblock(move || util::umount_volume_bind_path(&target_path_owned)).await
            {
                if tolerant_error {
                    // Try to un-mount the path not stored in etcd, if error just log it
                    warn!(
                        "failed to un-mount volume ID={} bind path={}, the error is: {}",
                        vol_id, target_path, e,
                    );
                } else {
                    // Un-mount the path stored in etcd, if error then panic
                    panic!(
                        "failed to un-mount volume ID={} bind path={}, the error is: {}",
                        vol_id, target_path, e,
                    );
                }
            } else {
                // NOTE(review): "voluem" is a typo for "volume" in this log
                // message; left unchanged in this documentation-only pass.
                debug!(
                    "successfully un-mount voluem ID={} bind path={}",
                    vol_id, target_path,
                );
            }
            info!(
                "volume ID={} and name={} with target path={} has been unpublished.",
                vol_id, volume.vol_name, target_path
            );
            // Delete ephemeral volume if no more bind mount
            // Does not return error when delete failure, repeated calls OK for idempotency
            if volume.ephemeral && pre_mount_path_set.is_empty() {
                self_inner
                    .delete_ephemeral_volume(&volume, tolerant_error)
                    .await;
            }
            Ok(r)
        };
        util::spawn_grpc_task(sink, task);
    }
    /// `NodeGetVolumeStats`: not supported; always returns `Unimplemented`.
    fn node_get_volume_stats(
        &mut self,
        _ctx: RpcContext,
        req: NodeGetVolumeStatsRequest,
        sink: UnarySink<NodeGetVolumeStatsResponse>,
    ) {
        debug!("node_get_volume_stats request: {:?}", req);
        util::spawn_grpc_task(sink, async {
            Err(Unimplemented {
                context: vec!["unimplemented".to_string()],
            })
        });
    }
    // node_expand_volume is only implemented so the driver can be used for e2e testing
    // no actual volume expansion operation
    fn node_expand_volume(
        &mut self,
        _ctx: RpcContext,
        req: NodeExpandVolumeRequest,
        sink: UnarySink<NodeExpandVolumeResponse>,
    ) {
        debug!("node_expand_volume request: {:?}", req);
        let self_inner = Arc::<NodeImplInner>::clone(&self.inner);
        let task = async move {
            // Check arguments
            let vol_id = req.get_volume_id();
            if vol_id.is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["volume ID missing in request".to_string()],
                });
            }
            let vol_path = req.get_volume_path();
            if vol_path.is_empty() {
                return Err(ArgumentInvalid {
                    context: vec!["volume path missing in request".to_string()],
                });
            }
            self_inner
                .meta_data
                .get_volume_by_id(vol_id)
                .await
                .with_context(|| format!("failed to find volume ID={}", vol_id))?;
            if !req.has_capacity_range() {
                return Err(ArgumentInvalid {
                    context: vec!["volume expand capacity missing in request".to_string()],
                });
            }
            // Only directory-backed (mount) volumes are accepted.
            let file_stat = stat::stat(vol_path)
                .with_context(|| format!("failed to get file stat of {}", vol_path))?;
            let sflag = SFlag::from_bits_truncate(file_stat.st_mode);
            if let SFlag::S_IFDIR = sflag {
                // SFlag::S_IFBLK and other type not supported
                // TODO: implement volume expansion here
                debug!("volume access type mount requires volume file type directory");
            } else {
                return Err(ArgumentInvalid {
                    context: vec![format!(
                        "volume ID={} has unsupported file type={:?}",
                        vol_id, sflag
                    )],
                });
            }
            // Echo the requested capacity back without resizing anything.
            let mut r = NodeExpandVolumeResponse::new();
            r.set_capacity_bytes(req.get_capacity_range().get_required_bytes());
            Ok(r)
        };
        util::spawn_grpc_task(sink, task);
    }
    /// `NodeGetCapabilities`: reports the capabilities configured in
    /// `NodeImplInner::new`.
    fn node_get_capabilities(
        &mut self,
        _ctx: RpcContext,
        req: NodeGetCapabilitiesRequest,
        sink: UnarySink<NodeGetCapabilitiesResponse>,
    ) {
        debug!("node_get_capabilities request: {:?}", req);
        let mut r = NodeGetCapabilitiesResponse::new();
        r.set_capabilities(RepeatedField::from_vec(self.inner.caps.clone()));
        util::spawn_grpc_task(sink, async move { Ok(r) });
    }
    /// `NodeGetInfo`: returns this node's ID, max volumes per node, and a
    /// single-segment topology keyed by `util::TOPOLOGY_KEY_NODE`.
    fn node_get_info(
        &mut self,
        _ctx: RpcContext,
        req: NodeGetInfoRequest,
        sink: UnarySink<NodeGetInfoResponse>,
    ) {
        debug!("node_get_info request: {:?}", req);
        let mut topology = Topology::new();
        topology.mut_segments().insert(
            util::TOPOLOGY_KEY_NODE.to_owned(),
            self.inner.meta_data.get_node_id().to_owned(),
        );
        let mut r = NodeGetInfoResponse::new();
        r.set_node_id(self.inner.meta_data.get_node_id().to_owned());
        r.set_max_volumes_per_node(self.inner.meta_data.get_max_volumes_per_node().into());
        r.set_accessible_topology(topology);
        util::spawn_grpc_task(sink, async move { Ok(r) });
    }
}
| 36.882033 | 96 | 0.512597 |
1c66089fad6e63764abb5e48d71b3195bb2259d4 | 78,125 | //! Utilities for formatting and printing strings.
#![stable(feature = "rust1", since = "1.0.0")]
use crate::cell::{Cell, Ref, RefCell, RefMut, SyncUnsafeCell, UnsafeCell};
use crate::char::EscapeDebugExtArgs;
use crate::marker::PhantomData;
use crate::mem;
use crate::num::fmt as numfmt;
use crate::ops::Deref;
use crate::result;
use crate::str;
mod builders;
#[cfg(not(no_fp_fmt_parse))]
mod float;
#[cfg(no_fp_fmt_parse)]
mod nofloat;
mod num;
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Alignment")]
/// Possible alignments returned by `Formatter::align`
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum Alignment {
    #[stable(feature = "fmt_flags_align", since = "1.28.0")]
    /// Indication that contents should be left-aligned.
    Left,
    #[stable(feature = "fmt_flags_align", since = "1.28.0")]
    /// Indication that contents should be right-aligned.
    Right,
    #[stable(feature = "fmt_flags_align", since = "1.28.0")]
    /// Indication that contents should be center-aligned.
    Center,
}
#[stable(feature = "debug_builders", since = "1.2.0")]
pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
#[doc(hidden)]
pub mod rt {
pub mod v1;
}
/// The type returned by formatter methods.
///
/// The success value carries no data; the error type is this module's
/// [`Error`].
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// #[derive(Debug)]
/// struct Triangle {
///     a: f32,
///     b: f32,
///     c: f32
/// }
///
/// impl fmt::Display for Triangle {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         write!(f, "({}, {}, {})", self.a, self.b, self.c)
///     }
/// }
///
/// let pythagorean_triple = Triangle { a: 3.0, b: 4.0, c: 5.0 };
///
/// assert_eq!(format!("{pythagorean_triple}"), "(3, 4, 5)");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub type Result = result::Result<(), Error>;
/// The error type which is returned from formatting a message into a stream.
///
/// This type does not support transmission of an error other than that an error
/// occurred. Any extra information must be arranged to be transmitted through
/// some other means.
///
/// An important thing to remember is that the type `fmt::Error` should not be
/// confused with [`std::io::Error`] or [`std::error::Error`], which you may also
/// have in scope.
///
/// [`std::io::Error`]: ../../std/io/struct.Error.html
/// [`std::error::Error`]: ../../std/error/trait.Error.html
///
/// # Examples
///
/// ```rust
/// use std::fmt::{self, write};
///
/// let mut output = String::new();
/// if let Err(fmt::Error) = write(&mut output, format_args!("Hello {}!", "world")) {
///     panic!("An error occurred");
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
// Zero-sized: an `Error` conveys only the fact that formatting failed.
pub struct Error;
/// A trait for writing or formatting into Unicode-accepting buffers or streams.
///
/// This trait only accepts UTF-8–encoded data and is not [flushable]. If you only
/// want to accept Unicode and you don't need flushing, you should implement this trait;
/// otherwise you should implement [`std::io::Write`].
///
/// [`std::io::Write`]: ../../std/io/trait.Write.html
/// [flushable]: ../../std/io/trait.Write.html#tymethod.flush
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Write {
    /// Writes a string slice into this writer, returning whether the write
    /// succeeded.
    ///
    /// This method can only succeed if the entire string slice was successfully
    /// written, and this method will not return until all data has been
    /// written or an error occurs.
    ///
    /// # Errors
    ///
    /// This function will return an instance of [`Error`] on error.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt::{Error, Write};
    ///
    /// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
    ///     f.write_str(s)
    /// }
    ///
    /// let mut buf = String::new();
    /// writer(&mut buf, "hola").unwrap();
    /// assert_eq!(&buf, "hola");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    fn write_str(&mut self, s: &str) -> Result;

    /// Writes a [`char`] into this writer, returning whether the write succeeded.
    ///
    /// A single [`char`] may be encoded as more than one byte.
    /// This method can only succeed if the entire byte sequence was successfully
    /// written, and this method will not return until all data has been
    /// written or an error occurs.
    ///
    /// # Errors
    ///
    /// This function will return an instance of [`Error`] on error.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt::{Error, Write};
    ///
    /// fn writer<W: Write>(f: &mut W, c: char) -> Result<(), Error> {
    ///     f.write_char(c)
    /// }
    ///
    /// let mut buf = String::new();
    /// writer(&mut buf, 'a').unwrap();
    /// writer(&mut buf, 'b').unwrap();
    /// assert_eq!(&buf, "ab");
    /// ```
    #[stable(feature = "fmt_write_char", since = "1.1.0")]
    fn write_char(&mut self, c: char) -> Result {
        // Default: encode into a 4-byte stack buffer and forward to write_str.
        self.write_str(c.encode_utf8(&mut [0; 4]))
    }

    /// Glue for usage of the [`write!`] macro with implementors of this trait.
    ///
    /// This method should generally not be invoked manually, but rather through
    /// the [`write!`] macro itself.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt::{Error, Write};
    ///
    /// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
    ///     f.write_fmt(format_args!("{s}"))
    /// }
    ///
    /// let mut buf = String::new();
    /// writer(&mut buf, "world").unwrap();
    /// assert_eq!(&buf, "world");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    fn write_fmt(mut self: &mut Self, args: Arguments<'_>) -> Result {
        write(&mut self, args)
    }
}
// Blanket impl: a mutable reference to any writer is itself a writer.
// All three methods simply delegate to the referenced writer.
#[stable(feature = "fmt_write_blanket_impl", since = "1.4.0")]
impl<W: Write + ?Sized> Write for &mut W {
    fn write_str(&mut self, s: &str) -> Result {
        (**self).write_str(s)
    }

    fn write_char(&mut self, c: char) -> Result {
        (**self).write_char(c)
    }

    fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
        (**self).write_fmt(args)
    }
}
/// Configuration for formatting.
///
/// A `Formatter` represents various options related to formatting. Users do not
/// construct `Formatter`s directly; a mutable reference to one is passed to
/// the `fmt` method of all formatting traits, like [`Debug`] and [`Display`].
///
/// To interact with a `Formatter`, you'll call various methods to change the
/// various options related to formatting. For examples, please see the
/// documentation of the methods defined on `Formatter` below.
#[allow(missing_debug_implementations)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Formatter<'a> {
    // Bit flags from the format spec — presumably indexed by `FlagV1`
    // discriminants; confirm against the flag accessors.
    flags: u32,
    // Fill character used for padding.
    fill: char,
    // Requested alignment (Unknown when none was specified).
    align: rt::v1::Alignment,
    // Minimum width, if specified.
    width: Option<usize>,
    // Precision, if specified.
    precision: Option<usize>,
    // The buffer that formatted output is written into.
    buf: &'a mut (dyn Write + 'a),
}
impl<'a> Formatter<'a> {
    /// Creates a new formatter with default settings.
    ///
    /// Defaults correspond to an empty format spec: no flags, space fill,
    /// unknown alignment, and no width or precision.
    ///
    /// This can be used as a micro-optimization in cases where a full `Arguments`
    /// structure (as created by `format_args!`) is not necessary; `Arguments`
    /// is a little more expensive to use in simple formatting scenarios.
    ///
    /// Currently not intended for use outside of the standard library.
    #[unstable(feature = "fmt_internals", reason = "internal to standard library", issue = "none")]
    #[doc(hidden)]
    pub fn new(buf: &'a mut (dyn Write + 'a)) -> Formatter<'a> {
        Formatter {
            flags: 0,
            fill: ' ',
            align: rt::v1::Alignment::Unknown,
            width: None,
            precision: None,
            buf,
        }
    }
}
// NB. Argument is essentially an optimized partially applied formatting function,
// equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result`.
extern "C" {
    /// Type-erased payload: an `ArgumentV1` stores a `&Opaque` that is really
    /// a `&T` for some formattable `T` (see `ArgumentV1::new`).
    type Opaque;
}
/// This struct represents the generic "argument" which is taken by the Xprintf
/// family of functions. It contains a function to format the given value. At
/// compile time it is ensured that the function and the value have the correct
/// types, and then this struct is used to canonicalize arguments to one type.
#[derive(Copy, Clone)]
#[allow(missing_debug_implementations)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
#[doc(hidden)]
pub struct ArgumentV1<'a> {
    // Type-erased reference to the value being formatted.
    value: &'a Opaque,
    // The matching formatting function (e.g. `Display::fmt` for the real `T`).
    formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
}
/// This struct represents the unsafety of constructing an `Arguments`.
/// It exists, rather than an unsafe function, in order to simplify the expansion
/// of `format_args!(..)` and reduce the scope of the `unsafe` block.
#[allow(missing_debug_implementations)]
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub struct UnsafeArg {
    // Private field prevents construction outside of `UnsafeArg::new`.
    _private: (),
}
impl UnsafeArg {
    /// See documentation where `UnsafeArg` is required to know when it is safe to
    /// create and use `UnsafeArg`.
    ///
    /// Marked `unsafe`: the caller vouches for the invariants listed on
    /// `Arguments::new_v1_formatted`.
    #[doc(hidden)]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    #[inline(always)]
    pub unsafe fn new() -> Self {
        Self { _private: () }
    }
}
// This guarantees a single stable value for the function pointer associated with
// indices/counts in the formatting infrastructure.
//
// Note that a function defined as such would not be correct as functions are
// always tagged unnamed_addr with the current lowering to LLVM IR, so their
// address is not considered important to LLVM and as such the as_usize cast
// could have been miscompiled. In practice, we never call as_usize on non-usize
// containing data (as a matter of static generation of the formatting
// arguments), so this is merely an additional check.
//
// We primarily want to ensure that the function pointer at `USIZE_MARKER` has
// an address corresponding *only* to functions that also take `&usize` as their
// first argument. The read_volatile here ensures that we can safely ready out a
// usize from the passed reference and that this address does not point at a
// non-usize taking function.
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
static USIZE_MARKER: fn(&usize, &mut Formatter<'_>) -> Result = |ptr, _| {
    // SAFETY: ptr is a reference
    let _v: usize = unsafe { crate::ptr::read_volatile(ptr) };
    // Never returns: this closure is only an address marker and is not meant
    // to be invoked as a real formatter (see `ArgumentV1::as_usize`).
    loop {}
};
// Generates an `ArgumentV1` constructor `$f` that pairs a value with the
// `fmt` method of the formatting trait `$t` (Display, Debug, Octal, ...).
macro_rules! arg_new {
    ($f: ident, $t: ident) => {
        #[doc(hidden)]
        #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
        #[inline]
        pub fn $f<'b, T: $t>(x: &'b T) -> ArgumentV1<'_> {
            Self::new(x, $t::fmt)
        }
    };
}
impl<'a> ArgumentV1<'a> {
    /// Pairs a value with its formatting function, erasing both types.
    #[doc(hidden)]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    #[inline]
    pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
        // SAFETY: `mem::transmute(x)` is safe because
        //     1. `&'b T` keeps the lifetime it originated with `'b`
        //              (so as to not have an unbounded lifetime)
        //     2. `&'b T` and `&'b Opaque` have the same memory layout
        //              (when `T` is `Sized`, as it is here)
        // `mem::transmute(f)` is safe since `fn(&T, &mut Formatter<'_>) -> Result`
        // and `fn(&Opaque, &mut Formatter<'_>) -> Result` have the same ABI
        // (as long as `T` is `Sized`)
        unsafe { ArgumentV1 { formatter: mem::transmute(f), value: mem::transmute(x) } }
    }

    // One constructor per formatting trait (see `arg_new!` above).
    arg_new!(new_display, Display);
    arg_new!(new_debug, Debug);
    arg_new!(new_octal, Octal);
    arg_new!(new_lower_hex, LowerHex);
    arg_new!(new_upper_hex, UpperHex);
    arg_new!(new_pointer, Pointer);
    arg_new!(new_binary, Binary);
    arg_new!(new_lower_exp, LowerExp);
    arg_new!(new_upper_exp, UpperExp);

    /// Wraps a `usize` count/width argument, tagged with `USIZE_MARKER` so it
    /// can later be recovered by `as_usize`.
    #[doc(hidden)]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
        ArgumentV1::new(x, USIZE_MARKER)
    }

    /// Returns the wrapped `usize` if this argument was built via
    /// `from_usize`, `None` otherwise.
    fn as_usize(&self) -> Option<usize> {
        // We are type punning a bit here: USIZE_MARKER only takes an &usize but
        // formatter takes an &Opaque. Rust understandably doesn't think we should compare
        // the function pointers if they don't have the same signature, so we cast to
        // usizes to tell it that we just want to compare addresses.
        if self.formatter as usize == USIZE_MARKER as usize {
            // SAFETY: The `formatter` field is only set to USIZE_MARKER if
            // the value is a usize, so this is safe
            Some(unsafe { *(self.value as *const _ as *const usize) })
        } else {
            None
        }
    }
}
// flags available in the v1 format of format_args
//
// Presumably each variant's discriminant is a bit index into
// `Formatter::flags` — TODO confirm against the flag accessor methods.
#[derive(Copy, Clone)]
enum FlagV1 {
    SignPlus,
    SignMinus,
    Alternate,
    SignAwareZeroPad,
    DebugLowerHex,
    DebugUpperHex,
}
impl<'a> Arguments<'a> {
    /// When using the format_args!() macro, this function is used to generate the
    /// Arguments structure.
    ///
    /// `pieces` and `args` must interleave: each argument is preceded by a
    /// string piece, hence the length check below.
    #[doc(hidden)]
    #[inline]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    #[rustc_const_unstable(feature = "const_fmt_arguments_new", issue = "none")]
    pub const fn new_v1(pieces: &'a [&'static str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
        if pieces.len() < args.len() || pieces.len() > args.len() + 1 {
            panic!("invalid args");
        }
        Arguments { pieces, fmt: None, args }
    }

    /// This function is used to specify nonstandard formatting parameters.
    ///
    /// An `UnsafeArg` is required because the following invariants must be held
    /// in order for this function to be safe:
    /// 1. The `pieces` slice must be at least as long as `fmt`.
    /// 2. Every [`rt::v1::Argument::position`] value within `fmt` must be a
    ///    valid index of `args`.
    /// 3. Every [`Count::Param`] within `fmt` must contain a valid index of
    ///    `args`.
    #[doc(hidden)]
    #[inline]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    #[rustc_const_unstable(feature = "const_fmt_arguments_new", issue = "none")]
    pub const fn new_v1_formatted(
        pieces: &'a [&'static str],
        args: &'a [ArgumentV1<'a>],
        fmt: &'a [rt::v1::Argument],
        _unsafe_arg: UnsafeArg,
    ) -> Arguments<'a> {
        Arguments { pieces, fmt: Some(fmt), args }
    }

    /// Estimates the length of the formatted text.
    ///
    /// This is intended to be used for setting initial `String` capacity
    /// when using `format!`. Note: this is neither the lower nor upper bound.
    #[doc(hidden)]
    #[inline]
    #[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
    pub fn estimated_capacity(&self) -> usize {
        let pieces_length: usize = self.pieces.iter().map(|x| x.len()).sum();

        if self.args.is_empty() {
            pieces_length
        } else if !self.pieces.is_empty() && self.pieces[0].is_empty() && pieces_length < 16 {
            // If the format string starts with an argument,
            // don't preallocate anything, unless length
            // of pieces is significant.
            0
        } else {
            // There are some arguments, so any additional push
            // will reallocate the string. To avoid that,
            // we're "pre-doubling" the capacity here.
            pieces_length.checked_mul(2).unwrap_or(0)
        }
    }
}
/// This structure represents a safely precompiled version of a format string
/// and its arguments. This cannot be generated at runtime because it cannot
/// safely be done, so no constructors are given and the fields are private
/// to prevent modification.
///
/// The [`format_args!`] macro will safely create an instance of this structure.
/// The macro validates the format string at compile-time so usage of the
/// [`write()`] and [`format()`] functions can be safely performed.
///
/// You can use the `Arguments<'a>` that [`format_args!`] returns in `Debug`
/// and `Display` contexts as seen below. The example also shows that `Debug`
/// and `Display` format to the same thing: the interpolated format string
/// in `format_args!`.
///
/// ```rust
/// let debug = format!("{:?}", format_args!("{} foo {:?}", 1, 2));
/// let display = format!("{}", format_args!("{} foo {:?}", 1, 2));
/// assert_eq!("1 foo 2", display);
/// assert_eq!(display, debug);
/// ```
///
/// [`format()`]: ../../std/fmt/fn.format.html
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Arguments")]
#[derive(Copy, Clone)]
pub struct Arguments<'a> {
    // Format string pieces to print.
    pieces: &'a [&'static str],

    // Placeholder specs, or `None` if all specs are default (as in "{}{}").
    fmt: Option<&'a [rt::v1::Argument]>,

    // Dynamic arguments for interpolation, to be interleaved with string
    // pieces. (Every argument is preceded by a string piece.)
    args: &'a [ArgumentV1<'a>],
}
impl<'a> Arguments<'a> {
    /// Get the formatted string, if it has no arguments to be formatted.
    ///
    /// This can be used to avoid allocations in the most trivial case.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::fmt::Arguments;
    ///
    /// fn write_str(_: &str) { /* ... */ }
    ///
    /// fn write_fmt(args: &Arguments) {
    ///     if let Some(s) = args.as_str() {
    ///         write_str(s)
    ///     } else {
    ///         write_str(&args.to_string());
    ///     }
    /// }
    /// ```
    ///
    /// ```rust
    /// assert_eq!(format_args!("hello").as_str(), Some("hello"));
    /// assert_eq!(format_args!("").as_str(), Some(""));
    /// assert_eq!(format_args!("{}", 1).as_str(), None);
    /// ```
    #[stable(feature = "fmt_as_str", since = "1.52.0")]
    #[rustc_const_unstable(feature = "const_arguments_as_str", issue = "none")]
    #[must_use]
    #[inline]
    pub const fn as_str(&self) -> Option<&'static str> {
        // Only an argument-free format string is available as a plain
        // `&'static str`: either the empty string or a single literal piece.
        match (self.args, self.pieces) {
            ([], []) => Some(""),
            ([], [piece]) => Some(piece),
            _ => None,
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for Arguments<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // `Arguments` renders identically in `Debug` and `Display` contexts,
        // so simply delegate to the `Display` implementation.
        Display::fmt(self, f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Arguments<'_> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // Displaying precompiled `Arguments` is just running the `write`
        // interpreter over them against this formatter's buffer.
        write(f.buf, *self)
    }
}
/// `?` formatting.
///
/// `Debug` should format the output in a programmer-facing, debugging context.
///
/// Generally speaking, you should just `derive` a `Debug` implementation.
///
/// When used with the alternate format specifier `#?`, the output is pretty-printed.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// This trait can be used with `#[derive]` if all fields implement `Debug`. When
/// `derive`d for structs, it will use the name of the `struct`, then `{`, then a
/// comma-separated list of each field's name and `Debug` value, then `}`. For
/// `enum`s, it will use the name of the variant and, if applicable, `(`, then the
/// `Debug` values of the fields, then `)`.
///
/// # Stability
///
/// Derived `Debug` formats are not stable, and so may change with future Rust
/// versions. Additionally, `Debug` implementations of types provided by the
/// standard library (`libstd`, `libcore`, `liballoc`, etc.) are not stable, and
/// may also change with future Rust versions.
///
/// # Examples
///
/// Deriving an implementation:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
///     x: i32,
///     y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {origin:?}"), "The origin is: Point { x: 0, y: 0 }");
/// ```
///
/// Manually implementing:
///
/// ```
/// use std::fmt;
///
/// struct Point {
///     x: i32,
///     y: i32,
/// }
///
/// impl fmt::Debug for Point {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         f.debug_struct("Point")
///          .field("x", &self.x)
///          .field("y", &self.y)
///          .finish()
///     }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {origin:?}"), "The origin is: Point { x: 0, y: 0 }");
/// ```
///
/// There are a number of helper methods on the [`Formatter`] struct to help you with manual
/// implementations, such as [`debug_struct`].
///
/// [`debug_struct`]: Formatter::debug_struct
///
/// Types that do not wish to use the standard suite of debug representations
/// provided by the `Formatter` trait (`debug_struct`, `debug_tuple`,
/// `debug_list`, `debug_set`, `debug_map`) can do something totally custom by
/// manually writing an arbitrary representation to the `Formatter`.
///
/// ```
/// # use std::fmt;
/// # struct Point {
/// #     x: i32,
/// #     y: i32,
/// # }
/// #
/// impl fmt::Debug for Point {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         write!(f, "Point [{} {}]", self.x, self.y)
///     }
/// }
/// ```
///
/// `Debug` implementations using either `derive` or the debug builder API
/// on [`Formatter`] support pretty-printing using the alternate flag: `{:#?}`.
///
/// Pretty-printing with `#?`:
///
/// ```
/// #[derive(Debug)]
/// struct Point {
///     x: i32,
///     y: i32,
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {origin:#?}"),
/// "The origin is: Point {
///     x: 0,
///     y: 0,
/// }");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
    on(
        crate_local,
        label = "`{Self}` cannot be formatted using `{{:?}}`",
        note = "add `#[derive(Debug)]` to `{Self}` or manually `impl {Debug} for {Self}`"
    ),
    message = "`{Self}` doesn't implement `{Debug}`",
    label = "`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`"
)]
#[doc(alias = "{:?}")]
#[rustc_diagnostic_item = "Debug"]
#[rustc_trivial_field_reads]
pub trait Debug {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Position {
    ///     longitude: f32,
    ///     latitude: f32,
    /// }
    ///
    /// impl fmt::Debug for Position {
    ///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    ///         f.debug_tuple("")
    ///          .field(&self.longitude)
    ///          .field(&self.latitude)
    ///          .finish()
    ///     }
    /// }
    ///
    /// let position = Position { longitude: 1.987, latitude: 2.983 };
    /// assert_eq!(format!("{position:?}"), "(1.987, 2.983)");
    ///
    /// assert_eq!(format!("{position:#?}"), "(
    ///     1.987,
    ///     2.983,
    /// )");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
// Separate module to reexport the macro `Debug` from prelude without the trait `Debug`.
pub(crate) mod macros {
    /// Derive macro generating an impl of the trait `Debug`.
    ///
    /// The expansion is produced by the compiler (see `#[rustc_builtin_macro]`);
    /// this item only carries the name and stability attributes.
    #[rustc_builtin_macro]
    #[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
    #[allow_internal_unstable(core_intrinsics)]
    pub macro Debug($item:item) {
        /* compiler built-in */
    }
}
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
#[doc(inline)]
pub use macros::Debug;
/// Format trait for an empty format, `{}`.
///
/// `Display` is similar to [`Debug`], but `Display` is for user-facing
/// output, and so cannot be derived.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Implementing `Display` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Point {
///     x: i32,
///     y: i32,
/// }
///
/// impl fmt::Display for Point {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         write!(f, "({}, {})", self.x, self.y)
///     }
/// }
///
/// let origin = Point { x: 0, y: 0 };
///
/// assert_eq!(format!("The origin is: {origin}"), "The origin is: (0, 0)");
/// ```
#[rustc_on_unimplemented(
    on(
        any(_Self = "std::path::Path", _Self = "std::path::PathBuf"),
        label = "`{Self}` cannot be formatted with the default formatter; call `.display()` on it",
        note = "call `.display()` or `.to_string_lossy()` to safely print paths, \
                as they may contain non-Unicode data"
    ),
    message = "`{Self}` doesn't implement `{Display}`",
    label = "`{Self}` cannot be formatted with the default formatter",
    note = "in format strings you may be able to use `{{:?}}` (or {{:#?}} for pretty-print) instead"
)]
#[doc(alias = "{}")]
#[rustc_diagnostic_item = "Display"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Position {
    ///     longitude: f32,
    ///     latitude: f32,
    /// }
    ///
    /// impl fmt::Display for Position {
    ///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    ///         write!(f, "({}, {})", self.longitude, self.latitude)
    ///     }
    /// }
    ///
    /// assert_eq!("(1.987, 2.983)",
    ///            format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `o` formatting.
///
/// The `Octal` trait should format its output as a number in base-8.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two's complement representation.
///
/// The alternate flag, `#`, adds a `0o` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '52' in octal
///
/// assert_eq!(format!("{x:o}"), "52");
/// assert_eq!(format!("{x:#o}"), "0o52");
///
/// assert_eq!(format!("{:o}", -16), "37777777760");
/// ```
///
/// Implementing `Octal` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Octal for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = self.0;
///
///         fmt::Octal::fmt(&val, f) // delegate to i32's implementation
///     }
/// }
///
/// let l = Length(9);
///
/// assert_eq!(format!("l as octal is: {l:o}"), "l as octal is: 11");
///
/// assert_eq!(format!("l as octal is: {l:#06o}"), "l as octal is: 0o0011");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Octal {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `b` formatting.
///
/// The `Binary` trait should format its output as a number in binary.
///
/// For primitive signed integers ([`i8`] to [`i128`], and [`isize`]),
/// negative values are formatted as the two's complement representation.
///
/// The alternate flag, `#`, adds a `0b` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with [`i32`]:
///
/// ```
/// let x = 42; // 42 is '101010' in binary
///
/// assert_eq!(format!("{x:b}"), "101010");
/// assert_eq!(format!("{x:#b}"), "0b101010");
///
/// assert_eq!(format!("{:b}", -16), "11111111111111111111111111110000");
/// ```
///
/// Implementing `Binary` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Binary for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = self.0;
///
///         fmt::Binary::fmt(&val, f) // delegate to i32's implementation
///     }
/// }
///
/// let l = Length(107);
///
/// assert_eq!(format!("l as binary is: {l:b}"), "l as binary is: 1101011");
///
/// assert_eq!(
///     format!("l as binary is: {l:#032b}"),
///     "l as binary is: 0b000000000000000000000001101011"
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Binary {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `x` formatting.
///
/// The `LowerHex` trait should format its output as a number in hexadecimal, with `a` through `f`
/// in lower case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two's complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2a' in hex
///
/// assert_eq!(format!("{x:x}"), "2a");
/// assert_eq!(format!("{x:#x}"), "0x2a");
///
/// assert_eq!(format!("{:x}", -16), "fffffff0");
/// ```
///
/// Implementing `LowerHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerHex for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = self.0;
///
///         fmt::LowerHex::fmt(&val, f) // delegate to i32's implementation
///     }
/// }
///
/// let l = Length(9);
///
/// assert_eq!(format!("l as hex is: {l:x}"), "l as hex is: 9");
///
/// assert_eq!(format!("l as hex is: {l:#010x}"), "l as hex is: 0x00000009");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerHex {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `X` formatting.
///
/// The `UpperHex` trait should format its output as a number in hexadecimal, with `A` through `F`
/// in upper case.
///
/// For primitive signed integers (`i8` to `i128`, and `isize`),
/// negative values are formatted as the two's complement representation.
///
/// The alternate flag, `#`, adds a `0x` in front of the output.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `i32`:
///
/// ```
/// let x = 42; // 42 is '2A' in hex
///
/// assert_eq!(format!("{x:X}"), "2A");
/// assert_eq!(format!("{x:#X}"), "0x2A");
///
/// assert_eq!(format!("{:X}", -16), "FFFFFFF0");
/// ```
///
/// Implementing `UpperHex` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperHex for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = self.0;
///
///         fmt::UpperHex::fmt(&val, f) // delegate to i32's implementation
///     }
/// }
///
/// let l = Length(i32::MAX);
///
/// assert_eq!(format!("l as hex is: {l:X}"), "l as hex is: 7FFFFFFF");
///
/// assert_eq!(format!("l as hex is: {l:#010X}"), "l as hex is: 0x7FFFFFFF");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperHex {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `p` formatting.
///
/// The `Pointer` trait should format its output as a memory location. This is commonly presented
/// as hexadecimal.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `&i32`:
///
/// ```
/// let x = &42;
///
/// let address = format!("{x:p}"); // this produces something like '0x7f06092ac6d0'
/// ```
///
/// Implementing `Pointer` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::Pointer for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         // use `as` to convert to a `*const T`, which implements Pointer, which we can use
///
///         let ptr = self as *const Self;
///         fmt::Pointer::fmt(&ptr, f)
///     }
/// }
///
/// let l = Length(42);
///
/// println!("l is in memory here: {l:p}");
///
/// let l_ptr = format!("{l:018p}");
/// assert_eq!(l_ptr.len(), 18);
/// assert_eq!(&l_ptr[..2], "0x");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_diagnostic_item = "Pointer"]
pub trait Pointer {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_diagnostic_item = "pointer_trait_fmt"]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `e` formatting.
///
/// The `LowerExp` trait should format its output in scientific notation with a lower-case `e`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2e1' in scientific notation
///
/// assert_eq!(format!("{x:e}"), "4.2e1");
/// ```
///
/// Implementing `LowerExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::LowerExp for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = f64::from(self.0);
///         fmt::LowerExp::fmt(&val, f) // delegate to f64's implementation
///     }
/// }
///
/// let l = Length(100);
///
/// assert_eq!(
///     format!("l in scientific notation is: {l:e}"),
///     "l in scientific notation is: 1e2"
/// );
///
/// assert_eq!(
///     format!("l in scientific notation is: {l:05e}"),
///     "l in scientific notation is: 001e2"
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait LowerExp {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// `E` formatting.
///
/// The `UpperExp` trait should format its output in scientific notation with an upper-case `E`.
///
/// For more information on formatters, see [the module-level documentation][module].
///
/// [module]: ../../std/fmt/index.html
///
/// # Examples
///
/// Basic usage with `f64`:
///
/// ```
/// let x = 42.0; // 42.0 is '4.2E1' in scientific notation
///
/// assert_eq!(format!("{x:E}"), "4.2E1");
/// ```
///
/// Implementing `UpperExp` on a type:
///
/// ```
/// use std::fmt;
///
/// struct Length(i32);
///
/// impl fmt::UpperExp for Length {
///     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
///         let val = f64::from(self.0);
///         fmt::UpperExp::fmt(&val, f) // delegate to f64's implementation
///     }
/// }
///
/// let l = Length(100);
///
/// assert_eq!(
///     format!("l in scientific notation is: {l:E}"),
///     "l in scientific notation is: 1E2"
/// );
///
/// assert_eq!(
///     format!("l in scientific notation is: {l:05E}"),
///     "l in scientific notation is: 001E2"
/// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub trait UpperExp {
    // Required method; there is no default implementation.
    /// Formats the value using the given formatter.
    #[stable(feature = "rust1", since = "1.0.0")]
    fn fmt(&self, f: &mut Formatter<'_>) -> Result;
}
/// The `write` function takes an output stream, and an `Arguments` struct
/// that can be precompiled with the `format_args!` macro.
///
/// The arguments will be formatted according to the specified format string
/// into the output stream provided.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::fmt;
///
/// let mut output = String::new();
/// fmt::write(&mut output, format_args!("Hello {}!", "world"))
///     .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// Please note that using [`write!`] might be preferable. Example:
///
/// ```
/// use std::fmt::Write;
///
/// let mut output = String::new();
/// write!(&mut output, "Hello {}!", "world")
///     .expect("Error occurred while trying to write in String");
/// assert_eq!(output, "Hello world!");
/// ```
///
/// [`write!`]: crate::write!
#[stable(feature = "rust1", since = "1.0.0")]
pub fn write(output: &mut dyn Write, args: Arguments<'_>) -> Result {
    let mut formatter = Formatter::new(output);
    // Counts the string pieces consumed; after the loop, at most one trailing
    // literal piece can remain at `args.pieces[idx]`.
    let mut idx = 0;
    match args.fmt {
        None => {
            // We can use default formatting parameters for all arguments.
            for (i, arg) in args.args.iter().enumerate() {
                // SAFETY: args.args and args.pieces come from the same Arguments,
                // which guarantees the indexes are always within bounds.
                let piece = unsafe { args.pieces.get_unchecked(i) };
                if !piece.is_empty() {
                    formatter.buf.write_str(*piece)?;
                }
                // Render the argument via its type-selected formatter function.
                (arg.formatter)(arg.value, &mut formatter)?;
                idx += 1;
            }
        }
        Some(fmt) => {
            // Every spec has a corresponding argument that is preceded by
            // a string piece.
            for (i, arg) in fmt.iter().enumerate() {
                // SAFETY: fmt and args.pieces come from the same Arguments,
                // which guarantees the indexes are always within bounds.
                let piece = unsafe { args.pieces.get_unchecked(i) };
                if !piece.is_empty() {
                    formatter.buf.write_str(*piece)?;
                }
                // SAFETY: arg and args.args come from the same Arguments,
                // which guarantees the indexes are always within bounds.
                unsafe { run(&mut formatter, arg, args.args) }?;
                idx += 1;
            }
        }
    }
    // There can be only one trailing string piece left.
    if let Some(piece) = args.pieces.get(idx) {
        formatter.buf.write_str(*piece)?;
    }
    Ok(())
}
/// Renders a single placeholder spec (`arg`) into `fmt`, resolving its
/// dynamic width/precision and the value it refers to from `args`.
///
/// # Safety
///
/// `arg` (including the `Count` indices inside its format spec) must come
/// from the same `Arguments` value as `args`, so that every index used here
/// is within bounds.
unsafe fn run(fmt: &mut Formatter<'_>, arg: &rt::v1::Argument, args: &[ArgumentV1<'_>]) -> Result {
    // Install the spec's formatting parameters on the formatter.
    fmt.fill = arg.format.fill;
    fmt.align = arg.format.align;
    fmt.flags = arg.format.flags;
    // SAFETY: arg and args come from the same Arguments,
    // which guarantees the indexes are always within bounds.
    unsafe {
        fmt.width = getcount(args, &arg.format.width);
        fmt.precision = getcount(args, &arg.format.precision);
    }
    // Extract the correct argument
    debug_assert!(arg.position < args.len());
    // SAFETY: arg and args come from the same Arguments,
    // which guarantees its index is always within bounds.
    let value = unsafe { args.get_unchecked(arg.position) };
    // Then actually do some printing
    (value.formatter)(value.value, fmt)
}
/// Resolves a width/precision `Count` to a concrete value, reading a runtime
/// argument for `Count::Param`.
///
/// # Safety
///
/// `cnt` must come from the same `Arguments` value as `args`, so that a
/// `Param` index is within bounds.
unsafe fn getcount(args: &[ArgumentV1<'_>], cnt: &rt::v1::Count) -> Option<usize> {
    match *cnt {
        rt::v1::Count::Is(n) => Some(n),
        rt::v1::Count::Implied => None,
        rt::v1::Count::Param(i) => {
            debug_assert!(i < args.len());
            // SAFETY: cnt and args come from the same Arguments,
            // which guarantees this index is always within bounds.
            unsafe { args.get_unchecked(i).as_usize() }
        }
    }
}
/// Padding after the end of something. Returned by `Formatter::padding`.
#[must_use = "don't forget to write the post padding"]
pub(crate) struct PostPadding {
    // Character to pad with.
    fill: char,
    // Number of copies of `fill` still owed.
    padding: usize,
}
impl PostPadding {
    /// Bundles the fill character with the number of copies still to write.
    fn new(fill: char, padding: usize) -> PostPadding {
        PostPadding { fill, padding }
    }
    /// Write this post padding.
    pub(crate) fn write(self, f: &mut Formatter<'_>) -> Result {
        // Emit `self.padding` copies of the fill character, stopping at the
        // first write error.
        (0..self.padding).try_for_each(|_| f.buf.write_char(self.fill))
    }
}
impl<'a> Formatter<'a> {
    /// Creates a new formatter whose buffer is `wrap(self.buf)` while all of
    /// the current formatting parameters (flags, fill, align, width,
    /// precision) are carried over unchanged.
    fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
    where
        'b: 'c,
        F: FnOnce(&'b mut (dyn Write + 'b)) -> &'c mut (dyn Write + 'c),
    {
        Formatter {
            // We want to change this
            buf: wrap(self.buf),
            // And preserve these
            flags: self.flags,
            fill: self.fill,
            align: self.align,
            width: self.width,
            precision: self.precision,
        }
    }
// Helper methods used for padding and processing formatting arguments that
// all formatting traits can use.
    /// Performs the correct padding for an integer which has already been
    /// emitted into a str. The str should *not* contain the sign for the
    /// integer, that will be added by this method.
    ///
    /// # Arguments
    ///
    /// * is_nonnegative - whether the original integer was either positive or zero.
    /// * prefix - if the '#' character (Alternate) is provided, this
    ///   is the prefix to put in front of the number.
    /// * buf - the byte array that the number has been formatted into
    ///
    /// This function will correctly account for the flags provided as well as
    /// the minimum width. It will not take precision into account.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo { nb: i32 }
    ///
    /// impl Foo {
    ///     fn new(nb: i32) -> Foo {
    ///         Foo {
    ///             nb,
    ///         }
    ///     }
    /// }
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         // We need to remove "-" from the number output.
    ///         let tmp = self.nb.abs().to_string();
    ///
    ///         formatter.pad_integral(self.nb >= 0, "Foo ", &tmp)
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{}", Foo::new(2)), "2");
    /// assert_eq!(&format!("{}", Foo::new(-1)), "-1");
    /// assert_eq!(&format!("{}", Foo::new(0)), "0");
    /// assert_eq!(&format!("{:#}", Foo::new(-1)), "-Foo 1");
    /// assert_eq!(&format!("{:0>#8}", Foo::new(-1)), "00-Foo 1");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pad_integral(&mut self, is_nonnegative: bool, prefix: &str, buf: &str) -> Result {
        // `width` accumulates the displayed width: the digits in `buf` plus
        // any sign and (alternate-form) prefix added below.
        let mut width = buf.len();
        let mut sign = None;
        if !is_nonnegative {
            sign = Some('-');
            width += 1;
        } else if self.sign_plus() {
            sign = Some('+');
            width += 1;
        }
        // The prefix is only emitted when the `#` (alternate) flag was given.
        let prefix = if self.alternate() {
            width += prefix.chars().count();
            Some(prefix)
        } else {
            None
        };
        // Writes the sign if it exists, and then the prefix if it was requested
        #[inline(never)]
        fn write_prefix(f: &mut Formatter<'_>, sign: Option<char>, prefix: Option<&str>) -> Result {
            if let Some(c) = sign {
                f.buf.write_char(c)?;
            }
            if let Some(prefix) = prefix { f.buf.write_str(prefix) } else { Ok(()) }
        }
        // The `width` field is more of a `min-width` parameter at this point.
        match self.width {
            // If there's no minimum length requirements then we can just
            // write the bytes.
            None => {
                write_prefix(self, sign, prefix)?;
                self.buf.write_str(buf)
            }
            // Check if we're over the minimum width, if so then we can also
            // just write the bytes.
            Some(min) if width >= min => {
                write_prefix(self, sign, prefix)?;
                self.buf.write_str(buf)
            }
            // The sign and prefix goes before the padding if the fill character
            // is zero
            Some(min) if self.sign_aware_zero_pad() => {
                // Temporarily force zero-fill and right alignment; both are
                // restored after the digits are written.
                let old_fill = crate::mem::replace(&mut self.fill, '0');
                let old_align = crate::mem::replace(&mut self.align, rt::v1::Alignment::Right);
                write_prefix(self, sign, prefix)?;
                let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
                self.buf.write_str(buf)?;
                post_padding.write(self)?;
                self.fill = old_fill;
                self.align = old_align;
                Ok(())
            }
            // Otherwise, the sign and prefix goes after the padding
            Some(min) => {
                let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
                write_prefix(self, sign, prefix)?;
                self.buf.write_str(buf)?;
                post_padding.write(self)
            }
        }
    }
    /// This function takes a string slice and emits it to the internal buffer
    /// after applying the relevant formatting flags specified. The flags
    /// recognized for generic strings are:
    ///
    /// * width - the minimum width of what to emit
    /// * fill/align - what to emit and where to emit it if the string
    ///   provided needs to be padded
    /// * precision - the maximum length to emit, the string is truncated if it
    ///   is longer than this length
    ///
    /// Notably this function ignores the `flag` parameters.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo;
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         formatter.pad("Foo")
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{Foo:<4}"), "Foo ");
    /// assert_eq!(&format!("{Foo:0>4}"), "0Foo");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pad(&mut self, s: &str) -> Result {
        // Make sure there's a fast path up front
        if self.width.is_none() && self.precision.is_none() {
            return self.buf.write_str(s);
        }
        // The `precision` field can be interpreted as a `max-width` for the
        // string being formatted.
        let s = if let Some(max) = self.precision {
            // If our string is longer than the precision, then we must have
            // truncation. However other flags like `fill`, `width` and `align`
            // must act as always.
            if let Some((i, _)) = s.char_indices().nth(max) {
                // LLVM here can't prove that `..i` won't panic `&s[..i]`, but
                // we know that it can't panic. Use `get` + `unwrap_or` to avoid
                // `unsafe` and otherwise don't emit any panic-related code
                // here.
                s.get(..i).unwrap_or(s)
            } else {
                &s
            }
        } else {
            &s
        };
        // The `width` field is more of a `min-width` parameter at this point.
        match self.width {
            // If we're under the maximum length, and there's no minimum length
            // requirements, then we can just emit the string
            None => self.buf.write_str(s),
            Some(width) => {
                // Width is measured in `char`s, not bytes.
                let chars_count = s.chars().count();
                // If we're under the maximum width, check if we're over the minimum
                // width, if so it's as easy as just emitting the string.
                if chars_count >= width {
                    self.buf.write_str(s)
                }
                // If we're under both the maximum and the minimum width, then fill
                // up the minimum width with the specified string + some alignment.
                else {
                    let align = rt::v1::Alignment::Left;
                    let post_padding = self.padding(width - chars_count, align)?;
                    self.buf.write_str(s)?;
                    post_padding.write(self)
                }
            }
        }
    }
/// Write the pre-padding and return the unwritten post-padding. Callers are
/// responsible for ensuring post-padding is written after the thing that is
/// being padded.
pub(crate) fn padding(
&mut self,
padding: usize,
default: rt::v1::Alignment,
) -> result::Result<PostPadding, Error> {
let align = match self.align {
rt::v1::Alignment::Unknown => default,
_ => self.align,
};
let (pre_pad, post_pad) = match align {
rt::v1::Alignment::Left => (0, padding),
rt::v1::Alignment::Right | rt::v1::Alignment::Unknown => (padding, 0),
rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
};
for _ in 0..pre_pad {
self.buf.write_char(self.fill)?;
}
Ok(PostPadding::new(self.fill, post_pad))
}
    /// Takes the formatted parts and applies the padding.
    /// Assumes that the caller already has rendered the parts with required precision,
    /// so that `self.precision` can be ignored.
    fn pad_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
        if let Some(mut width) = self.width {
            // for the sign-aware zero padding, we render the sign first and
            // behave as if we had no sign from the beginning.
            let mut formatted = formatted.clone();
            // Save fill/align so they can be restored once padding is done.
            let old_fill = self.fill;
            let old_align = self.align;
            let mut align = old_align;
            if self.sign_aware_zero_pad() {
                // a sign always goes first
                let sign = formatted.sign;
                self.buf.write_str(sign)?;
                // remove the sign from the formatted parts
                formatted.sign = "";
                // `saturating_sub` so a sign wider than the requested width
                // cannot underflow.
                width = width.saturating_sub(sign.len());
                align = rt::v1::Alignment::Right;
                self.fill = '0';
                self.align = rt::v1::Alignment::Right;
            }
            // remaining parts go through the ordinary padding process.
            let len = formatted.len();
            let ret = if width <= len {
                // no padding
                self.write_formatted_parts(&formatted)
            } else {
                let post_padding = self.padding(width - len, align)?;
                self.write_formatted_parts(&formatted)?;
                post_padding.write(self)
            };
            self.fill = old_fill;
            self.align = old_align;
            ret
        } else {
            // this is the common case and we take a shortcut
            self.write_formatted_parts(formatted)
        }
    }
    /// Writes a `numfmt::Formatted` value (sign plus parts) to the buffer,
    /// with no padding applied.
    fn write_formatted_parts(&mut self, formatted: &numfmt::Formatted<'_>) -> Result {
        fn write_bytes(buf: &mut dyn Write, s: &[u8]) -> Result {
            // SAFETY: This is used for `numfmt::Part::Num` and `numfmt::Part::Copy`.
            // It's safe to use for `numfmt::Part::Num` since every char `c` is between
            // `b'0'` and `b'9'`, which means `s` is valid UTF-8.
            // It's also probably safe in practice to use for `numfmt::Part::Copy(buf)`
            // since `buf` should be plain ASCII, but it's possible for someone to pass
            // in a bad value for `buf` into `numfmt::to_shortest_str` since it is a
            // public function.
            // FIXME: Determine whether this could result in UB.
            buf.write_str(unsafe { str::from_utf8_unchecked(s) })
        }
        if !formatted.sign.is_empty() {
            self.buf.write_str(formatted.sign)?;
        }
        for part in formatted.parts {
            match *part {
                numfmt::Part::Zero(mut nzeroes) => {
                    // Write long zero runs in chunks of this precomputed string.
                    const ZEROES: &str = // 64 zeroes
                        "0000000000000000000000000000000000000000000000000000000000000000";
                    while nzeroes > ZEROES.len() {
                        self.buf.write_str(ZEROES)?;
                        nzeroes -= ZEROES.len();
                    }
                    if nzeroes > 0 {
                        self.buf.write_str(&ZEROES[..nzeroes])?;
                    }
                }
                numfmt::Part::Num(mut v) => {
                    // Render the decimal digits right-to-left into a stack
                    // buffer; `part.len()` is the digit count (assumes it is
                    // at most 5 for a `Part::Num` — see `numfmt`).
                    let mut s = [0; 5];
                    let len = part.len();
                    for c in s[..len].iter_mut().rev() {
                        *c = b'0' + (v % 10) as u8;
                        v /= 10;
                    }
                    write_bytes(self.buf, &s[..len])?;
                }
                numfmt::Part::Copy(buf) => {
                    write_bytes(self.buf, buf)?;
                }
            }
        }
        Ok(())
    }
    /// Writes some data to the underlying buffer contained within this
    /// formatter.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo;
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         formatter.write_str("Foo")
    ///         // This is equivalent to:
    ///         // write!(formatter, "Foo")
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{Foo}"), "Foo");
    /// assert_eq!(&format!("{Foo:0>8}"), "Foo");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn write_str(&mut self, data: &str) -> Result {
        // Writes verbatim; width/fill flags are not applied here, as the
        // `{Foo:0>8}` example above demonstrates.
        self.buf.write_str(data)
    }
    /// Writes some formatted information into this instance.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::fmt;
    ///
    /// struct Foo(i32);
    ///
    /// impl fmt::Display for Foo {
    ///     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
    ///         formatter.write_fmt(format_args!("Foo {}", self.0))
    ///     }
    /// }
    ///
    /// assert_eq!(&format!("{}", Foo(-1)), "Foo -1");
    /// assert_eq!(&format!("{:0>8}", Foo(2)), "Foo 2");
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn write_fmt(&mut self, fmt: Arguments<'_>) -> Result {
        // Runs the `write` interpreter directly against our buffer; this
        // formatter's own flags are not forwarded, as the `{:0>8}` example
        // above demonstrates.
        write(self.buf, fmt)
    }
/// Flags for formatting
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(
since = "1.24.0",
note = "use the `sign_plus`, `sign_minus`, `alternate`, \
or `sign_aware_zero_pad` methods instead"
)]
pub fn flags(&self) -> u32 {
self.flags
}
/// Character used as 'fill' whenever there is alignment.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let c = formatter.fill();
/// if let Some(width) = formatter.width() {
/// for _ in 0..width {
/// write!(formatter, "{c}")?;
/// }
/// Ok(())
/// } else {
/// write!(formatter, "{c}")
/// }
/// }
/// }
///
/// // We set alignment to the right with ">".
/// assert_eq!(&format!("{Foo:G>3}"), "GGG");
/// assert_eq!(&format!("{Foo:t>6}"), "tttttt");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn fill(&self) -> char {
self.fill
}
/// Flag indicating what form of alignment was requested.
///
/// # Examples
///
/// ```
/// extern crate core;
///
/// use std::fmt::{self, Alignment};
///
/// struct Foo;
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// let s = if let Some(s) = formatter.align() {
/// match s {
/// Alignment::Left => "left",
/// Alignment::Right => "right",
/// Alignment::Center => "center",
/// }
/// } else {
/// "into the void"
/// };
/// write!(formatter, "{s}")
/// }
/// }
///
/// assert_eq!(&format!("{Foo:<}"), "left");
/// assert_eq!(&format!("{Foo:>}"), "right");
/// assert_eq!(&format!("{Foo:^}"), "center");
/// assert_eq!(&format!("{Foo}"), "into the void");
/// ```
#[must_use]
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub fn align(&self) -> Option<Alignment> {
match self.align {
rt::v1::Alignment::Left => Some(Alignment::Left),
rt::v1::Alignment::Right => Some(Alignment::Right),
rt::v1::Alignment::Center => Some(Alignment::Center),
rt::v1::Alignment::Unknown => None,
}
}
/// Optionally specified integer width that the output should be.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(width) = formatter.width() {
/// // If we received a width, we use it
/// write!(formatter, "{:width$}", &format!("Foo({})", self.0), width = width)
/// } else {
/// // Otherwise we do nothing special
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:10}", Foo(23)), "Foo(23) ");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn width(&self) -> Option<usize> {
self.width
}
/// Optionally specified precision for numeric types. Alternatively, the
/// maximum width for string types.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(f32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if let Some(precision) = formatter.precision() {
/// // If we received a precision, we use it.
/// write!(formatter, "Foo({1:.*})", precision, self.0)
/// } else {
/// // Otherwise we default to 2.
/// write!(formatter, "Foo({:.2})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:.4}", Foo(23.2)), "Foo(23.2000)");
/// assert_eq!(&format!("{}", Foo(23.2)), "Foo(23.20)");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn precision(&self) -> Option<usize> {
self.precision
}
/// Determines if the `+` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_plus() {
/// write!(formatter,
/// "Foo({}{})",
/// if self.0 < 0 { '-' } else { '+' },
/// self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:+}", Foo(23)), "Foo(+23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_plus(&self) -> bool {
self.flags & (1 << FlagV1::SignPlus as u32) != 0
}
/// Determines if the `-` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.sign_minus() {
/// // You want a minus sign? Have one!
/// write!(formatter, "-Foo({})", self.0)
/// } else {
/// write!(formatter, "Foo({})", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:-}", Foo(23)), "-Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "Foo(23)");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_minus(&self) -> bool {
self.flags & (1 << FlagV1::SignMinus as u32) != 0
}
/// Determines if the `#` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// if formatter.alternate() {
/// write!(formatter, "Foo({})", self.0)
/// } else {
/// write!(formatter, "{}", self.0)
/// }
/// }
/// }
///
/// assert_eq!(&format!("{:#}", Foo(23)), "Foo(23)");
/// assert_eq!(&format!("{}", Foo(23)), "23");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn alternate(&self) -> bool {
self.flags & (1 << FlagV1::Alternate as u32) != 0
}
/// Determines if the `0` flag was specified.
///
/// # Examples
///
/// ```
/// use std::fmt;
///
/// struct Foo(i32);
///
/// impl fmt::Display for Foo {
/// fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
/// assert!(formatter.sign_aware_zero_pad());
/// assert_eq!(formatter.width(), Some(4));
/// // We ignore the formatter's options.
/// write!(formatter, "{}", self.0)
/// }
/// }
///
/// assert_eq!(&format!("{:04}", Foo(23)), "23");
/// ```
#[must_use]
#[stable(feature = "fmt_flags", since = "1.5.0")]
pub fn sign_aware_zero_pad(&self) -> bool {
self.flags & (1 << FlagV1::SignAwareZeroPad as u32) != 0
}
    // FIXME: Decide what public API we want for these two flags.
    // https://github.com/rust-lang/rust/issues/48584
    // Internal-only: whether the lowercase-hex Debug flag bit is set
    // (presumably driven by `{:x?}` format strings — confirm at call sites).
    fn debug_lower_hex(&self) -> bool {
        self.flags & (1 << FlagV1::DebugLowerHex as u32) != 0
    }
    // Internal-only: whether the uppercase-hex Debug flag bit is set
    // (presumably driven by `{:X?}` format strings — confirm at call sites).
    fn debug_upper_hex(&self) -> bool {
        self.flags & (1 << FlagV1::DebugUpperHex as u32) != 0
    }
/// Creates a [`DebugStruct`] builder designed to assist with creation of
/// [`fmt::Debug`] implementations for structs.
///
/// [`fmt::Debug`]: self::Debug
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::net::Ipv4Addr;
///
/// struct Foo {
/// bar: i32,
/// baz: String,
/// addr: Ipv4Addr,
/// }
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_struct("Foo")
/// .field("bar", &self.bar)
/// .field("baz", &self.baz)
/// .field("addr", &format_args!("{}", self.addr))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo { bar: 10, baz: \"Hello World\", addr: 127.0.0.1 }",
/// format!("{:?}", Foo {
/// bar: 10,
/// baz: "Hello World".to_string(),
/// addr: Ipv4Addr::new(127, 0, 0, 1),
/// })
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_struct<'b>(&'b mut self, name: &str) -> DebugStruct<'b, 'a> {
builders::debug_struct_new(self, name)
}
/// Creates a `DebugTuple` builder designed to assist with creation of
/// `fmt::Debug` implementations for tuple structs.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
/// use std::marker::PhantomData;
///
/// struct Foo<T>(i32, String, PhantomData<T>);
///
/// impl<T> fmt::Debug for Foo<T> {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_tuple("Foo")
/// .field(&self.0)
/// .field(&self.1)
/// .field(&format_args!("_"))
/// .finish()
/// }
/// }
///
/// assert_eq!(
/// "Foo(10, \"Hello\", _)",
/// format!("{:?}", Foo(10, "Hello".to_string(), PhantomData::<u8>))
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_tuple<'b>(&'b mut self, name: &str) -> DebugTuple<'b, 'a> {
builders::debug_tuple_new(self, name)
}
/// Creates a `DebugList` builder designed to assist with creation of
/// `fmt::Debug` implementations for list-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_list().entries(self.0.iter()).finish()
/// }
/// }
///
/// assert_eq!(format!("{:?}", Foo(vec![10, 11])), "[10, 11]");
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_list<'b>(&'b mut self) -> DebugList<'b, 'a> {
builders::debug_list_new(self)
}
/// Creates a `DebugSet` builder designed to assist with creation of
/// `fmt::Debug` implementations for set-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<i32>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set().entries(self.0.iter()).finish()
/// }
/// }
///
/// assert_eq!(format!("{:?}", Foo(vec![10, 11])), "{10, 11}");
/// ```
///
/// [`format_args!`]: crate::format_args
///
/// In this more complex example, we use [`format_args!`] and `.debug_set()`
/// to build a list of match arms:
///
/// ```rust
/// use std::fmt;
///
/// struct Arm<'a, L: 'a, R: 'a>(&'a (L, R));
/// struct Table<'a, K: 'a, V: 'a>(&'a [(K, V)], V);
///
/// impl<'a, L, R> fmt::Debug for Arm<'a, L, R>
/// where
/// L: 'a + fmt::Debug, R: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// L::fmt(&(self.0).0, fmt)?;
/// fmt.write_str(" => ")?;
/// R::fmt(&(self.0).1, fmt)
/// }
/// }
///
/// impl<'a, K, V> fmt::Debug for Table<'a, K, V>
/// where
/// K: 'a + fmt::Debug, V: 'a + fmt::Debug
/// {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_set()
/// .entries(self.0.iter().map(Arm))
/// .entry(&Arm(&(format_args!("_"), &self.1)))
/// .finish()
/// }
/// }
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_set<'b>(&'b mut self) -> DebugSet<'b, 'a> {
builders::debug_set_new(self)
}
/// Creates a `DebugMap` builder designed to assist with creation of
/// `fmt::Debug` implementations for map-like structures.
///
/// # Examples
///
/// ```rust
/// use std::fmt;
///
/// struct Foo(Vec<(String, i32)>);
///
/// impl fmt::Debug for Foo {
/// fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
/// fmt.debug_map().entries(self.0.iter().map(|&(ref k, ref v)| (k, v))).finish()
/// }
/// }
///
/// assert_eq!(
/// format!("{:?}", Foo(vec![("A".to_string(), 10), ("B".to_string(), 11)])),
/// r#"{"A": 10, "B": 11}"#
/// );
/// ```
#[stable(feature = "debug_builders", since = "1.2.0")]
pub fn debug_map<'b>(&'b mut self) -> DebugMap<'b, 'a> {
builders::debug_map_new(self)
}
}
#[stable(since = "1.2.0", feature = "formatter_write")]
impl Write for Formatter<'_> {
    // `Formatter` is itself a `Write`: plain text goes straight to the inner
    // buffer, while `write_fmt` is routed through the free `write` function
    // so nested `format_args!` trees are expanded without building a second
    // `Formatter`.
    fn write_char(&mut self, c: char) -> Result {
        self.buf.write_char(c)
    }

    fn write_str(&mut self, s: &str) -> Result {
        self.buf.write_str(s)
    }

    fn write_fmt(&mut self, args: Arguments<'_>) -> Result {
        write(self.buf, args)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for Error {
    // `fmt::Error` carries no payload, so the message is a fixed string.
    // Going through `<str as Display>::fmt` (rather than `write_str`) keeps
    // width/fill options working.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Display::fmt("an error occurred when formatting an argument", f)
    }
}
// Implementations of the core formatting traits

// For each trait listed in the invocation below, implements the trait for
// `&T` and `&mut T` by delegating to the implementation for `T`, so that
// formatting "sees through" references.
macro_rules! fmt_refs {
    ($($tr:ident),*) => {
        $(
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<T: ?Sized + $tr> $tr for &T {
            fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
        }
        #[stable(feature = "rust1", since = "1.0.0")]
        impl<T: ?Sized + $tr> $tr for &mut T {
            fn fmt(&self, f: &mut Formatter<'_>) -> Result { $tr::fmt(&**self, f) }
        }
        )*
    }
}
fmt_refs! { Debug, Display, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperExp }
#[unstable(feature = "never_type", issue = "35121")]
impl Debug for ! {
    // No value of type `!` can exist, so this body is unreachable;
    // dereferencing `self` diverges and therefore satisfies any return type.
    fn fmt(&self, _: &mut Formatter<'_>) -> Result {
        *self
    }
}
#[unstable(feature = "never_type", issue = "35121")]
impl Display for ! {
    // Unreachable for the same reason as `Debug for !` above: `!` has no
    // inhabitants, and `*self` diverges.
    fn fmt(&self, _: &mut Formatter<'_>) -> Result {
        *self
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for bool {
    #[inline]
    // Booleans have no escaping or structural concerns, so Debug output is
    // identical to Display output.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Display::fmt(self, f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for bool {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // `<str as Display>::fmt` is exactly `Formatter::pad`, so call `pad`
        // directly; this honors width/precision/alignment options.
        f.pad(if *self { "true" } else { "false" })
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for str {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.write_char('"')?;
        // Byte index of the start of the pending run of chars that need no
        // escaping; runs are flushed in one `write_str` call.
        let mut unescaped_start = 0;
        for (idx, ch) in self.char_indices() {
            let escaped = ch.escape_debug_ext(EscapeDebugExtArgs {
                escape_grapheme_extended: true,
                escape_single_quote: false,
                escape_double_quote: true,
            });
            // A one-char escape means the char prints verbatim; only flush
            // the pending run when an actual escape sequence is required.
            if escaped.len() != 1 {
                f.write_str(&self[unescaped_start..idx])?;
                for e in escaped {
                    f.write_char(e)?;
                }
                unescaped_start = idx + ch.len_utf8();
            }
        }
        f.write_str(&self[unescaped_start..])?;
        f.write_char('"')
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for str {
    // `pad` applies fill/alignment/width, and treats precision as a maximum
    // display length for strings.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.pad(self)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for char {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // Single-quoted, with `'` (but not `"`) escaped, matching char
        // literal syntax.
        f.write_char('\'')?;
        let escaped = self.escape_debug_ext(EscapeDebugExtArgs {
            escape_grapheme_extended: true,
            escape_single_quote: true,
            escape_double_quote: false,
        });
        for ch in escaped {
            f.write_char(ch)?
        }
        f.write_char('\'')
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Display for char {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        if f.width.is_some() || f.precision.is_some() {
            // Padding/truncation requested: go through `pad`, which needs a
            // `&str`, so encode the char into a small stack buffer.
            f.pad(self.encode_utf8(&mut [0; 4]))
        } else {
            // Fast path: no formatting options, emit the char directly.
            f.write_char(*self)
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *const T {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // `.addr()` requires a thin pointer (`T: Sized`), so drop any fat
        // pointer metadata by casting to `*const ()` first.
        pointer_fmt_inner(self.cast::<()>().addr(), f)
    }
}
/// Shared, non-generic backend for all `Pointer` impls: formatting is
/// identical for every pointer type, so doing the work once here avoids
/// monomorphizing it per `T`.
///
/// Takes `ptr_addr: usize` rather than `ptr: *const ()` so function pointers
/// (`fn(...) -> ...`) can use it without [problematic] "Oxford Casts".
///
/// [problematic]: https://github.com/rust-lang/rust/issues/95489
pub(crate) fn pointer_fmt_inner(ptr_addr: usize, f: &mut Formatter<'_>) -> Result {
    // The formatter's state is mutated below, so remember what to restore.
    let saved_width = f.width;
    let saved_flags = f.flags;
    // `LowerHex` already interprets the alternate flag as "prefix with 0x".
    // Here, a user-supplied `#` additionally means "zero-extend to the full
    // pointer width"; afterwards the alternate bit is set unconditionally so
    // the 0x prefix always appears.
    if f.alternate() {
        f.flags |= 1 << (FlagV1::SignAwareZeroPad as u32);
        if f.width.is_none() {
            // Hex digits for a full usize, plus two characters for "0x".
            f.width = Some((usize::BITS / 4) as usize + 2);
        }
    }
    f.flags |= 1 << (FlagV1::Alternate as u32);
    let result = LowerHex::fmt(&ptr_addr, f);
    // Restore the caller-visible formatter state.
    f.width = saved_width;
    f.flags = saved_flags;
    result
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for *mut T {
    // Delegates to the `*const T` impl: only the address is printed, so
    // mutability does not affect the output.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Pointer::fmt(&(*self as *const T), f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &T {
    // `{:p}` on a reference prints the address it points to, via the
    // `*const T` impl.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Pointer::fmt(&(*self as *const T), f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Pointer for &mut T {
    // Reborrow (`&**self`) before the raw-pointer cast so the mutable
    // reference itself is not moved out of.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Pointer::fmt(&(&**self as *const T), f)
    }
}
// Implementation of Display/Debug for various core types

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *const T {
    // A raw pointer has no safely readable pointee, so `{:?}` prints the
    // address exactly like `{:p}`.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Pointer::fmt(self, f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for *mut T {
    // Same as `*const T`: Debug output for a raw pointer is its address.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        Pointer::fmt(self, f)
    }
}
// Drops the first identifier and re-invokes `tuple!` with the remainder,
// driving the recursion that generates impls for successively shorter tuples.
macro_rules! peel {
    ($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
}
// Generates a `Debug` impl for a tuple of each arity, from the full list in
// the invocation below down to one element (recursing via `peel!`). Only the
// last element may be unsized, hence the `last_type!` bound.
macro_rules! tuple {
    () => ();
    ( $($name:ident,)+ ) => (
        maybe_tuple_doc! {
            $($name)+ @
            #[stable(feature = "rust1", since = "1.0.0")]
            impl<$($name:Debug),+> Debug for ($($name,)+) where last_type!($($name,)+): ?Sized {
                #[allow(non_snake_case, unused_assignments)]
                fn fmt(&self, f: &mut Formatter<'_>) -> Result {
                    // Destructure by reference and feed each element to the
                    // tuple builder in order.
                    let mut builder = f.debug_tuple("");
                    let ($(ref $name,)+) = *self;
                    $(
                        builder.field(&$name);
                    )+
                    builder.finish()
                }
            }
        }
        peel! { $($name,)+ }
    )
}
// Documentation control for the tuple impls: the single-identifier arm (the
// 1-tuple) gets visible rustdoc and the tuple-variadic marker, while every
// higher-arity impl is hidden to keep the rendered docs page small.
macro_rules! maybe_tuple_doc {
    ($a:ident @ #[$meta:meta] $item:item) => {
        #[cfg_attr(not(bootstrap), doc(tuple_variadic))]
        #[doc = "This trait is implemented for tuples up to twelve items long."]
        #[$meta]
        $item
    };
    ($a:ident $($rest_a:ident)+ @ #[$meta:meta] $item:item) => {
        #[doc(hidden)]
        #[$meta]
        $item
    };
}
// Resolves to the final identifier of a comma-terminated list; used above to
// relax the `Sized` requirement on a tuple's last element only.
macro_rules! last_type {
    ($a:ident,) => { $a };
    ($a:ident, $($rest_a:ident,)+) => { last_type!($($rest_a,)+) };
}
tuple! { E, D, C, B, A, Z, Y, X, W, V, U, T, }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for [T] {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // Slices render as `[a, b, …]` via the list builder.
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for () {
    #[inline]
    // `pad` (rather than `write_str`) keeps width/alignment options working.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.pad("()")
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Debug for PhantomData<T> {
    // Intentionally omits the type parameter: printing `T` would add noise
    // and would pull in no extra bound anyway.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.debug_struct("PhantomData").finish()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Copy + Debug> Debug for Cell<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // `get` copies the contents out (hence the `Copy` bound) — the only
        // way to observe a `Cell`'s value through a shared reference.
        let value = self.get();
        f.debug_struct("Cell").field("value", &value).finish()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefCell<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        match self.try_borrow() {
            Err(_) => {
                // Already mutably borrowed: reading the value here would
                // alias the exclusive borrow, so print a placeholder instead.
                struct Placeholder;
                impl Debug for Placeholder {
                    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
                        f.write_str("<borrowed>")
                    }
                }
                f.debug_struct("RefCell").field("value", &Placeholder).finish()
            }
            Ok(value) => f.debug_struct("RefCell").field("value", &value).finish(),
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for Ref<'_, T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // The guard is transparent in Debug output: show the borrowed value.
        <T as Debug>::fmt(&**self, f)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefMut<'_, T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        // Transparent like `Ref`: print the pointed-to value itself.
        <T as Debug>::fmt(&**self, f)
    }
}
#[stable(feature = "core_impl_debug", since = "1.9.0")]
impl<T: ?Sized> Debug for UnsafeCell<T> {
    // The contents cannot be read safely through a shared reference, so only
    // the type name is shown, with a `..` marker for the omitted value.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.debug_struct("UnsafeCell").finish_non_exhaustive()
    }
}
#[unstable(feature = "sync_unsafe_cell", issue = "95439")]
impl<T: ?Sized> Debug for SyncUnsafeCell<T> {
    // Same rationale as `UnsafeCell` above: the value is not safely readable.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        f.debug_struct("SyncUnsafeCell").finish_non_exhaustive()
    }
}
// If you expected tests to be here, look instead at the core/tests/fmt.rs file,
// it's a lot easier than creating all of the rt::Piece structures here.
// There are also tests in the alloc crate, for those that need allocations.
| 32.044709 | 100 | 0.533184 |
e8fec0953d9f608b2c602c8fe3a7bf5d0bc56754 | 4,435 | use mockito::mock;
use http_client::HttpClient;
use http_types::{Body, Request, Response, Url};
use cfg_if::cfg_if;
// Select the HTTP client backend under test at compile time, based on which
// feature this crate was built with. NOTE(review): the `curl_client` feature
// maps to `IsahcClient` — looks intentional but worth confirming against the
// crate's feature definitions.
cfg_if! {
    if #[cfg(feature = "curl_client")] {
        use http_client::isahc::IsahcClient as DefaultClient;
    } else if #[cfg(feature = "wasm_client")] {
        use http_client::wasm::WasmClient as DefaultClient;
    } else if #[cfg(any(feature = "h1_client", feature = "h1_client_rustls"))] {
        use http_client::h1::H1Client as DefaultClient;
    } else if #[cfg(feature = "hyper_client")] {
        use http_client::hyper::HyperClient as DefaultClient;
    }
}
/// Round-trips a JSON body through a mocked server: the mock echoes the
/// request body and verifies that it matched the expected serialization.
#[async_std::test]
async fn post_json() -> Result<(), http_types::Error> {
    #[derive(serde::Deserialize, serde::Serialize)]
    struct Cat {
        name: String,
    }
    let cat = Cat {
        name: "Chashu".to_string(),
    };
    // Serialize once and reuse for both the match expectation and the reply.
    let payload = serde_json::to_string(&cat)?;
    let mock = mock("POST", "/")
        .with_status(200)
        .match_body(&payload[..])
        .with_body(&payload[..])
        .create();
    let url = Url::parse(&mockito::server_url()).unwrap();
    let mut req = Request::new(http_types::Method::Post, url);
    req.append_header("Accept", "application/json");
    req.set_body(Body::from_json(&cat)?);
    let res: Response = DefaultClient::new().send(req).await?;
    // `assert` checks the mock was hit exactly as declared.
    mock.assert();
    assert_eq!(res.status(), http_types::StatusCode::Ok);
    Ok(())
}
/// Fetches a fixed JSON document from a mocked server and deserializes it.
#[async_std::test]
async fn get_json() -> Result<(), http_types::Error> {
    // Expected shape of the response payload.
    #[derive(serde::Deserialize)]
    struct Message {
        message: String,
    }
    let mock = mock("GET", "/")
        .with_status(200)
        .with_body(r#"{"message": "hello, world!"}"#)
        .create();
    let url = Url::parse(&mockito::server_url()).unwrap();
    let req = Request::new(http_types::Method::Get, url);
    let mut res: Response = DefaultClient::new().send(req).await?;
    let body = res.body_string().await?;
    let msg: Message = serde_json::from_str(&body)?;
    // Verify the mock was hit before checking the parsed contents.
    mock.assert();
    assert_eq!(msg.message, "hello, world!");
    Ok(())
}
/// Smoke test against the live Google homepage.
///
/// NOTE(review): requires network access and depends on the markup Google
/// happens to serve, so this is inherently flaky in offline/CI environments.
#[async_std::test]
async fn get_google() -> Result<(), http_types::Error> {
    let url = "https://www.google.com";
    let req = Request::new(http_types::Method::Get, Url::parse(url).unwrap());
    let mut res: Response = DefaultClient::new().send(req).await?;
    assert_eq!(res.status(), http_types::StatusCode::Ok);
    let msg = res.body_bytes().await?;
    let msg = String::from_utf8_lossy(&msg);
    // Fixed typo in the diagnostic output ("recieved" -> "received").
    println!("received: '{}'", msg);
    // Sanity-check that the body looks like a complete HTML document.
    assert!(msg.contains("<!doctype html>"));
    assert!(msg.contains("<title>Google</title>"));
    assert!(msg.contains("<head>"));
    assert!(msg.contains("</head>"));
    // Previously `</script>` was asserted twice; check the opening tag too.
    assert!(msg.contains("<script"));
    assert!(msg.contains("</script>"));
    assert!(msg.contains("<body"));
    assert!(msg.contains("</body>"));
    assert!(msg.contains("</html>"));
    Ok(())
}
/// Fetches a raw file pinned to a specific commit on GitHub and checks that
/// it matches the MIT license text exactly.
///
/// NOTE(review): requires network access; the URL is pinned to a commit hash
/// so the remote content should be stable.
#[async_std::test]
async fn get_github() -> Result<(), http_types::Error> {
    let url = "https://raw.githubusercontent.com/http-rs/surf/6627d9fc15437aea3c0a69e0b620ae7769ea6765/LICENSE-MIT";
    let req = Request::new(http_types::Method::Get, Url::parse(url).unwrap());
    let mut res: Response = DefaultClient::new().send(req).await?;
    assert_eq!(res.status(), http_types::StatusCode::Ok, "{:?}", &res);
    let msg = res.body_string().await?;
    // The expected literal embeds the file's exact newlines; do not reformat.
    assert_eq!(
        msg,
        "The MIT License (MIT)
Copyright (c) 2019 Yoshua Wuyts
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the \"Software\"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"
    );
    Ok(())
}
| 34.115385 | 116 | 0.651635 |
48d91c70b711434d5722806ef57e1c15b9d9240d | 2,975 | use clap::{Arg, App};
fn main() {
let matches = App::new("A toolkit used to diagnosis services")
.version("1.0")
.author("allen <94291@sangfor.com>")
.about("Let's together make IT simple more simple")
.arg(Arg::new("service")
.about("Sets the service to use")
.value_name("ServiceName")
.required(true)
.index(1))
.arg(Arg::new("config")
.short('c')
.long("config")
.value_name("ConfigFile")
.about("Sets config file of your service")
.takes_value(true))
.arg(Arg::new("log")
.short('l')
.long("log")
.value_name("LogPath")
.about("Sets log path of your service")
.takes_value(true))
.arg(Arg::new("v")
.short('v')
.multiple_occurrences(true)
.takes_value(true)
.about("Sets the level of verbosity"))
.subcommand(App::new("test") // subcommand
.about("controls testing features")
.version("1.0.0")
.author("allen <94291@sangfor.com>")
.arg(Arg::new("debug")
.about("debug info detail")
.short('d')
.long("debug")
.about("print debug information verbosely")))
.subcommand(App::new("check")
// .short_flag('c')
.long_flag("check")
.about("check whether service is online and healthy")
.version("1.0.0")
.author("allen <94291@sangfor.com>")
.arg(Arg::new("net")
.about("check network status[net,log,ping]")
.value_name("category")
.required(true)
.index(1)))
.get_matches();
// You can check the value provided by positional arguments, or option arguments
if let Some(i) = matches.value_of("INPUT") {
println!("Value for input: {}", i);
}
if let Some(c) = matches.value_of("config") {
println!("Value for config: {}", c);
}
// You can see how many times a particular flag or argument occurred
// Note, only flags can have multiple occurrences
match matches.occurrences_of("v") {
0 => println!("Verbose mode is off 0"),
1 => println!("Verbose mode is kind of on 1"),
2 => println!("Verbose mode is on 2"),
_ => println!("Don't be crazy 3+"),
}
// You can check for the existence of subcommands, and if found use their
// matches just as you would the top level app
if let Some(ref matches) = matches.subcommand_matches("test") {
// "$ myapp test" was run
if matches.is_present("debug") {
// "$ myapp test -d" was run
println!("Printing debug info...");
} else {
println!("Printing normally...");
}
}
// Continued program logic goes here...
} | 35.843373 | 84 | 0.518655 |
0981668d8170fab6a80266b6d8598569c3296420 | 11,871 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
//! This crate provides a macro that can be used to generate code to
//! implement `OnlineConfig` trait
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{punctuated::Punctuated, token::Comma, *};
/// Entry point for `#[derive(OnlineConfig)]`.
///
/// Parses the annotated item and expands the `OnlineConfig` implementation,
/// turning any expansion error into a compile error at the derive site.
#[proc_macro_derive(OnlineConfig, attributes(online_config))]
pub fn config(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    match generate_token(parse_macro_input!(input as DeriveInput)) {
        Ok(res) => res.into(),
        Err(e) => proc_macro::TokenStream::from(e.to_compile_error()),
    }
}
/// Expands the `OnlineConfig` impl plus a companion serializable "encoder"
/// struct for the derived type.
fn generate_token(ast: DeriveInput) -> std::result::Result<TokenStream, Error> {
    let name = &ast.ident;
    // Generic structs are rejected up front (see `check_generics`).
    check_generics(&ast.generics, name.span())?;
    let crate_name = Ident::new("online_config", Span::call_site());
    let encoder_name = Ident::new(
        {
            // Avoid naming conflict
            // (hash the type name into the encoder identifier so two derives
            // in one module can never collide)
            let mut hasher = DefaultHasher::new();
            format!("{}", &name).hash(&mut hasher);
            format!("{}_encoder_{:x}", name, hasher.finish()).as_str()
        },
        Span::call_site(),
    );
    let encoder_lt = Lifetime::new("'lt", Span::call_site());
    let fields = get_struct_fields(ast.data, name.span())?;
    // Each helper renders one item of the generated impl.
    let update_fn = update(&fields, &crate_name)?;
    let diff_fn = diff(&fields, &crate_name)?;
    let get_encoder_fn = get_encoder(&encoder_name, &encoder_lt);
    let typed_fn = typed(&fields, &crate_name)?;
    let encoder_struct = encoder(
        name,
        &crate_name,
        &encoder_name,
        &encoder_lt,
        ast.attrs,
        fields,
    )?;
    Ok(quote! {
        impl<#encoder_lt> #crate_name::OnlineConfig<#encoder_lt> for #name {
            type Encoder = #encoder_name<#encoder_lt>;
            #update_fn
            #diff_fn
            #get_encoder_fn
            #typed_fn
        }
        #encoder_struct
    })
}
fn check_generics(g: &Generics, sp: Span) -> Result<()> {
if !g.params.is_empty() || g.where_clause.is_some() {
return Err(Error::new(
sp,
"can not derive OnlineConfig on struct with generics type",
));
}
Ok(())
}
fn get_struct_fields(
data: Data,
span: Span,
) -> std::result::Result<Punctuated<Field, Comma>, Error> {
if let Data::Struct(DataStruct {
fields: Fields::Named(FieldsNamed { named, .. }),
..
}) = data
{
Ok(named)
} else {
Err(Error::new(
span,
"expect derive OnlineConfig on struct with named fields!",
))
}
}
/// Generates the serializable encoder struct: a borrow-only mirror of the
/// config struct (with hidden fields removed and submodules replaced by
/// their own encoders), plus a `From<&T>` conversion into it.
fn encoder(
    name: &Ident,
    crate_name: &Ident,
    encoder_name: &Ident,
    lt: &Lifetime,
    attrs: Vec<Attribute>,
    fields: Punctuated<Field, Comma>,
) -> Result<TokenStream> {
    let from_ident = Ident::new("source", Span::call_site());
    let mut construct_fields = Vec::with_capacity(fields.len());
    let mut serialize_fields = Vec::with_capacity(fields.len());
    for mut field in fields {
        let (_, hidden, submodule) = get_config_attrs(&field.attrs)?;
        // Hidden fields are excluded from serialization entirely; unnamed
        // fields cannot be mirrored.
        if hidden || field.ident.is_none() {
            continue;
        }
        let field_name = field.ident.as_ref().unwrap();
        // Submodule fields recurse into the sub-config's own encoder;
        // everything else is stored by reference.
        construct_fields.push(if submodule {
            quote! { #field_name: #from_ident.#field_name.get_encoder() }
        } else {
            quote! { #field_name: &#from_ident.#field_name }
        });
        // Rewrite the field type to the borrowed/encoder form used in the
        // generated struct.
        field.ty = {
            let ty = &field.ty;
            if submodule {
                Type::Verbatim(quote! { <#ty as #crate_name::OnlineConfig<#lt>>::Encoder })
            } else {
                Type::Verbatim(quote! { &#lt #ty })
            }
        };
        // Only reserve attributes that related to `serde`
        field.attrs = field
            .attrs
            .into_iter()
            .filter(|f| is_attr("serde", f))
            .collect();
        serialize_fields.push(field);
    }
    // Only reserve attributes that related to `serde`
    let attrs: Vec<_> = attrs.into_iter().filter(|a| is_attr("serde", a)).collect();
    Ok(quote! {
        #[doc(hidden)]
        #[derive(serde::Serialize)]
        #(#attrs)*
        pub struct #encoder_name<#lt> {
            #(#serialize_fields,)*
        }
        impl<#lt> From<&#lt #name> for #encoder_name<#lt> {
            fn from(#from_ident: &#lt #name) -> #encoder_name<#lt> {
                #encoder_name {
                    #(#construct_fields,)*
                }
            }
        }
    })
}
/// Generates `OnlineConfig::get_encoder`, which wraps `&self` in the
/// companion encoder struct for serialization.
fn get_encoder(encoder_name: &Ident, lt: &Lifetime) -> TokenStream {
    quote! {
        fn get_encoder(&#lt self) -> Self::Encoder {
            #encoder_name::from(self)
        }
    }
}
/// Generates the body of `OnlineConfig::update`, which applies the entries
/// of an incoming `ConfigChange` map to the matching fields of `self`.
fn update(fields: &Punctuated<Field, Comma>, crate_name: &Ident) -> Result<TokenStream> {
    let incoming = Ident::new("incoming", Span::call_site());
    let mut update_fields = Vec::with_capacity(fields.len());
    for field in fields {
        let (skip, hidden, submodule) = get_config_attrs(&field.attrs)?;
        // Skipped/hidden fields are never updated; unnamed fields cannot be
        // addressed by key.
        if skip || hidden || field.ident.is_none() {
            continue;
        }
        let name = field.ident.as_ref().unwrap();
        let name_lit = LitStr::new(&format!("{}", name), name.span());
        let f = if submodule {
            // Nested config: recurse with the nested change map.
            quote! {
                if let Some(#crate_name::ConfigValue::Module(v)) = #incoming.remove(#name_lit) {
                    #crate_name::OnlineConfig::update(&mut self.#name, v);
                }
            }
        } else if is_option_type(&field.ty) {
            // Optional field: an explicit `None` marker clears the value,
            // anything else is converted into `Some(..)`.
            quote! {
                if let Some(v) = #incoming.remove(#name_lit) {
                    if #crate_name::ConfigValue::None == v {
                        self.#name = None;
                    } else {
                        self.#name = Some(v.into());
                    }
                }
            }
        } else {
            quote! {
                if let Some(v) = #incoming.remove(#name_lit) {
                    self.#name = v.into();
                }
            }
        };
        update_fields.push(f);
    }
    Ok(quote! {
        fn update(&mut self, mut #incoming: #crate_name::ConfigChange) {
            #(#update_fields)*
        }
    })
}
/// Generates the body of `OnlineConfig::diff`, which compares `self` with
/// `incoming` and collects every changed field into a `ConfigChange` map.
fn diff(fields: &Punctuated<Field, Comma>, crate_name: &Ident) -> Result<TokenStream> {
    let diff_ident = Ident::new("diff_ident", Span::call_site());
    let incoming = Ident::new("incoming", Span::call_site());
    let mut diff_fields = Vec::with_capacity(fields.len());
    for field in fields {
        let (skip, hidden, submodule) = get_config_attrs(&field.attrs)?;
        // Skipped/hidden fields never appear in a diff; unnamed fields cannot
        // be addressed by key.
        if skip || hidden || field.ident.is_none() {
            continue;
        }
        let name = field.ident.as_ref().unwrap();
        let name_lit = LitStr::new(&format!("{}", name), name.span());
        let f = if submodule {
            // Nested config: recurse and record the sub-diff only when it is
            // non-empty.
            quote! {
                {
                    let diff = #crate_name::OnlineConfig::diff(&self.#name, &#incoming.#name);
                    if diff.len() != 0 {
                        #diff_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(diff));
                    }
                }
            }
        } else if is_option_type(&field.ty) {
            // Optional field: record the new value, or an explicit `None`
            // marker when the option was cleared.
            quote! {
                if self.#name != #incoming.#name {
                    if let Some(ref v) = #incoming.#name {
                        #diff_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(v.clone()));
                    } else {
                        #diff_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::None);
                    }
                }
            }
        } else {
            quote! {
                if self.#name != #incoming.#name {
                    #diff_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(#incoming.#name.clone()));
                }
            }
        };
        diff_fields.push(f);
    }
    Ok(quote! {
        #[allow(clippy::float_cmp)]
        // BUG FIX: the generated parameter was previously `mut #incoming`,
        // but the binding is never mutated (only read), so every derived
        // impl emitted an `unused_mut` warning. The `mut` is dropped here.
        fn diff(&self, #incoming: &Self) -> #crate_name::ConfigChange {
            let mut #diff_ident = std::collections::HashMap::default();
            #(#diff_fields)*
            #diff_ident
        }
    })
}
/// Generates the body of `OnlineConfig::typed`: dumps every named field into
/// a `ConfigChange` map describing the full configuration. Unlike `diff` and
/// `update`, skipped/hidden fields are still emitted, as `ConfigValue::Skip`.
fn typed(fields: &Punctuated<Field, Comma>, crate_name: &Ident) -> Result<TokenStream> {
    let typed_ident = Ident::new("typed_ident", Span::call_site());
    let mut typed_fields = Vec::with_capacity(fields.len());
    for field in fields {
        let (skip, hidden, submodule) = get_config_attrs(&field.attrs)?;
        // Only unnamed (tuple-struct) fields are dropped here; skip/hidden
        // are handled below so the field name still shows up in the dump.
        if field.ident.is_none() {
            continue;
        }
        let name = field.ident.as_ref().unwrap();
        let name_lit = LitStr::new(&format!("{}", name), name.span());
        // NOTE: the `submodule` arm is checked before `skip || hidden`, so a
        // field marked both submodule and skip still recurses — presumably
        // intentional; TODO confirm.
        let f = if submodule {
            quote! {
                {
                    let typed = #crate_name::OnlineConfig::typed(&self.#name);
                    #typed_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(typed));
                }
            }
        } else if skip || hidden {
            quote! {
                #typed_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::Skip);
            }
        } else if is_option_type(&field.ty) {
            // NOTE(review): a `None` value is reported as the inner type's
            // default rather than `ConfigValue::None` — confirm intended.
            quote! {
                #typed_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(self.#name.clone().unwrap_or_default()));
            }
        } else {
            quote! {
                #typed_ident.insert(#name_lit.to_owned(), #crate_name::ConfigValue::from(self.#name.clone()));
            }
        };
        typed_fields.push(f);
    }
    Ok(quote! {
        fn typed(&self) -> #crate_name::ConfigChange {
            let mut #typed_ident = std::collections::HashMap::default();
            #(#typed_fields)*
            #typed_ident
        }
    })
}
/// Collects the `#[online_config(...)]` markers attached to a field.
///
/// Returns the `(skip, hidden, submodule)` flags. Any other argument inside
/// an `#[online_config(...)]` attribute is rejected with a spanned error.
fn get_config_attrs(attrs: &[Attribute]) -> Result<(bool, bool, bool)> {
    let mut flags = (false, false, false);
    for attr in attrs.iter().filter(|a| is_attr("online_config", a)) {
        let name = attr.parse_args::<Ident>()?;
        if name == "skip" {
            flags.0 = true;
        } else if name == "hidden" {
            flags.1 = true;
        } else if name == "submodule" {
            flags.2 = true;
        } else {
            return Err(Error::new(
                name.span(),
                "expect #[online_config(skip)], #[online_config(hidden)] or #[online_config(submodule)]",
            ));
        }
    }
    Ok(flags)
}
/// Returns true when any segment of the attribute's path is named `name`
/// (e.g. matches `#[online_config(...)]` for `name == "online_config"`).
fn is_attr(name: &str, attr: &Attribute) -> bool {
    // Idiomatic iterator form of the original manual early-return loop.
    attr.path.segments.iter().any(|s| s.ident == name)
}
// Copied from https://stackoverflow.com/questions/55271857/how-can-i-get-the-t-from-an-optiont-when-using-syn.
/// Returns true when `ty` is an `Option<..>` spelled as `Option`,
/// `std::option::Option` or `core::option::Option` (and has no qualified
/// `<T as Trait>::` prefix).
fn is_option_type(ty: &Type) -> bool {
    fn extract_type_path(ty: &syn::Type) -> Option<&Path> {
        match *ty {
            syn::Type::Path(ref typepath) if typepath.qself.is_none() => Some(&typepath.path),
            _ => None,
        }
    }
    // Join the path segments into "a|b|c|" and compare against the three
    // accepted spellings. The original chained a redundant
    // `.iter().into_iter()` and allocated a `vec!` of candidates on every
    // call; a const array and `.any()` do the same work without allocation.
    fn is_option_path(path: &Path) -> bool {
        let idents_of_path = path.segments.iter().fold(String::new(), |mut acc, v| {
            acc.push_str(&v.ident.to_string());
            acc.push('|');
            acc
        });
        ["Option|", "std|option|Option|", "core|option|Option|"]
            .iter()
            .any(|s| idents_of_path == *s)
    }
    extract_type_path(ty).map_or(false, is_option_path)
}
| 33.439437 | 130 | 0.526493 |
11dfdbffca677273ec95017d1462b40598000c16 | 33,048 | //! CAN interface
//!
//! The `Can` interface can be used with the following CAN instances:
//!
//! # CAN1
//!
//! - TX = PA12 | PB9
//! - RX = PA11 | PB8
//! - Interrupt = CAN1
use core::marker::PhantomData;
use core::ptr;
use crate::afio::MAPR;
use crate::gpio::{
gpioa::{PA11, PA12},
gpiob::{PB8, PB9},
Alternate, Floating, Input, PushPull,
};
use crate::rcc::APB1;
use nb;
use stm32f103xx::{CAN, USB};
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord)]
pub struct Id {
    // Layout of `raw`, mirroring the bxCAN TIR/RIR register layout:
    //   standard part: 0xFFE00000 // 11 valid bits, raw[31:21]
    //   extended part: 0xFFFFFFF8 // 11+18 valid bits, raw[31:3]
    //   is_extended:   0x00000004 // IDE bit
    //   is_rtr:        0x00000002 // RTR bit
    raw: u32,
}
//TODO put a part of this in the HAL as Id trait?
impl Id {
    /// Creates a standard (11-bit) identifier; excess bits are masked off.
    pub fn new_standard(standard: u32) -> Id {
        // this could become a const function, when it becomes stable in Rust
        Id {
            raw: (standard & 0b11111111111) << 21,
        }
    }
    /// Creates an extended (29-bit) identifier; excess bits are masked off.
    pub fn new_extended(extended: u32) -> Id {
        // this could become a const function, when it becomes stable in Rust
        Id {
            raw: 0b100 | ((extended & 0b11111111111111111111111111111) << 3),
        }
    }
    /// Returns a copy of this id with the Remote Transmission Request bit set.
    pub fn with_rtr(&self) -> Id {
        Id {
            raw: self.raw | 0b10,
        }
    }
    /// Wraps a raw CAN_RIxR register value (already in the `raw` layout).
    fn from_received_register(register: u32) -> Id {
        Id { raw: register }
    }
    pub fn is_extended(&self) -> bool {
        0 != (self.raw & 0b100)
    }
    /// The 11-bit standard identifier (upper 11 bits of an extended id).
    pub fn standard(&self) -> u16 {
        ((self.raw & 0b11111111111000000000000000000000) >> 21) as u16
    }
    /// The full 29-bit extended identifier.
    pub fn extended(&self) -> u32 {
        (self.raw & 0b11111111111111111111111111111000) >> 3
    }
    /// Only the low 18 bits of the extended identifier (EXID[17:0]), as
    /// expected by the EXID field of the transmit mailbox register.
    pub fn extended_part(&self) -> u32 {
        // BUGFIX: the previous mask spanned 19 bits (raw[21:3]) instead of
        // 18 (raw[20:3]), leaking the lowest bit of the standard part into
        // the returned EXID value.
        (self.raw & 0b111111111111111111000) >> 3
    }
    pub fn is_rtr(&self) -> bool {
        0 != (self.raw & 0b10)
    }
    // these are private:
    fn as_32bit_filter(&self) -> u32 {
        // [31:0] = stdid[10:0], extid[17:0], IDE, RTR, 0
        self.raw
    }
    fn as_16bit_filter(&self) -> u32 {
        // [31:0] = stdid[10:0], IDE, RTR, extid[17:15]
        ((self.raw & 0b11111111111000000000000000000000) >> 16)
            | ((self.raw & 0b1100) << 1)
            | ((self.raw & 0b111000000000000000000) >> 18)
    }
}
pub struct Payload {
    dlc: u8,
    // Two 32-bit words mirror the CAN_TDLxR/CAN_TDHxR register pair. This
    // caps the payload at 8 bytes, which is fine here: classic CAN only
    // (no CAN FD on this microcontroller).
    data_low: u32,
    data_high: u32,
}
impl Payload {
    /// Packs up to the first 8 bytes of `data` into the two little-endian
    /// register words; `dlc` records the requested length.
    pub fn new(data: &[u8]) -> Payload {
        let mut low = 0u32;
        let mut high = 0u32;
        for (pos, &byte) in data.iter().take(8).enumerate() {
            if pos < 4 {
                low |= (byte as u32) << (8 * pos);
            } else {
                high |= (byte as u32) << (8 * (pos - 4));
            }
        }
        Payload {
            dlc: data.len() as u8,
            data_low: low,
            data_high: high,
        }
    }
    /// Length of the payload in bytes [0..8].
    pub fn len(&self) -> u8 {
        self.dlc
    }
    /// The packed data bytes as one little-endian 64-bit value.
    pub fn data_as_u64(&self) -> u64 {
        u64::from(self.data_low) | (u64::from(self.data_high) << 32)
    }
}
/// A CAN frame: an identifier plus up to 8 bytes of payload.
pub struct Frame {
    id: Id,
    data: Payload,
}
impl Frame {
    /// Assembles a frame from an identifier and payload.
    pub fn new(id: Id, data: Payload) -> Frame {
        // field-init shorthand (clippy: redundant_field_names)
        Frame { id, data }
    }
    /// The frame identifier.
    pub fn id(&self) -> &Id {
        &self.id
    }
    /// The frame payload.
    pub fn data(&self) -> &Payload {
        &self.data
    }
}
/// Master configuration for the bxCAN peripheral, applied by
/// `Can::configure` while the controller is held in initialization mode.
///
/// `bit_segment_1`, `bit_segment_2` and `time_quantum_length` are 1-based
/// and converted to the 0-based register encoding by `configure`.
/// NOTE(review): `synchronisation_jump_width` is written to the SJW field
/// unchanged (no `- 1`), unlike the other timing fields — confirm whether
/// callers are expected to pass the 0-based value here.
pub struct Configuration {
    /// In this mode, the internal counter of the CAN hardware is
    /// activated and used to generate the Time Stamp value stored
    /// in the CAN_RDTxR/CAN_TDTxR registers, respectively (for Rx
    /// and Tx mailboxes). The internal counter is captured on the
    /// sample point of the Start Of Frame bit in both reception and
    /// transmission.
    /// In this mode, if DLC == 8, the last two data bytes of the
    /// 8-byte message is replaced with a 16 bit timestamp: TIME[7:0]
    /// in data byte 6 and TIME[15:8] in data byte 7
    pub time_triggered_communication_mode: bool,
    /// Depending on the ABOM bit in the CAN_MCR register bxCAN will
    /// recover from Bus-Off (become error active again) either
    /// automatically or on software request. But in both cases the
    /// bxCAN has to wait at least for the recovery sequence specified
    /// in the CAN standard (128 occurrences of 11 consecutive recessive
    /// bits monitored on CANRX).
    /// If ABOM is set, the bxCAN will start the recovering sequence
    /// automatically after it has entered Bus-Off state.
    /// If ABOM is cleared, the software must initiate the recovering
    /// sequence by requesting bxCAN to enter and to leave initialization
    /// mode.
    pub automatic_bus_off_management: bool,
    /// On CAN bus activity detection, hardware automatically performs
    /// the wakeup sequence by clearing the SLEEP bit if the AWUM bit
    /// in the CAN_MCR register is set. If the AWUM bit is cleared,
    /// software has to clear the SLEEP bit when a wakeup interrupt
    /// occurs, in order to exit from Sleep mode.
    pub automatic_wake_up_mode: bool,
    /// This mode has been implemented in order to fulfil the requirement
    /// of the Time Triggered Communication option of the CAN standard.
    /// In this mode, each transmission is started only once. If the
    /// first attempt fails, due to an arbitration loss or an error, the
    /// hardware will not automatically restart the message transmission.
    /// At the end of the first transmission attempt, the hardware
    /// considers the request as completed and sets the RQCP bit in the
    /// CAN_TSR register. The result of the transmission is indicated in
    /// the CAN_TSR register by the TXOK, ALST and TERR bits.
    pub no_automatic_retransmission: bool,
    /// false: Once a receive FIFO is full the next incoming message will
    /// overwrite the previous one.
    /// true: Once a receive FIFO is full the next incoming message will
    /// be discarded.
    pub receive_fifo_locked_mode: bool,
    /// The transmit mailboxes can be configured as a transmit FIFO by
    /// setting the TXFP bit in the CAN_MCR register. In this mode the
    /// priority order is given by the transmit request order.
    pub transmit_fifo_priority: bool,
    /// In Silent mode, the bxCAN is able to receive valid data frames
    /// and valid remote frames, but it sends only recessive bits on the
    /// CAN bus and it cannot start a transmission. If the bxCAN has to
    /// send a dominant bit (ACK bit, overload flag, active error flag),
    /// the bit is rerouted internally so that the CAN Core monitors
    /// this dominant bit, although the CAN bus may remain in recessive
    /// state. Silent mode can be used to analyze the traffic on a CAN
    /// bus without affecting it by the transmission of dominant bits
    /// (Acknowledge Bits, Error Frames).
    /// It is also possible to combine Loop Back mode and Silent mode.
    pub silent_mode: bool,
    /// In Loop Back Mode, the bxCAN treats its own transmitted messages
    /// as received messages and stores them (if they pass acceptance
    /// filtering) in a Receive mailbox.
    /// It is also possible to combine Loop Back mode and Silent mode
    pub loopback_mode: bool,
    /// Specifies the maximum number of time quanta the CAN hardware is allowed to
    /// lengthen or shorten a bit to perform resynchronization: 1TQ..4TQ
    pub synchronisation_jump_width: u8,
    /// Specifies the number of time quanta in Bit Segment 1: 1TQ..16TQ
    /// defines the location of the sample point. It includes the PROP_SEG
    /// and PHASE_SEG1 of the CAN standard. Its duration is programmable
    /// between 1 and 16 time quanta but may be automatically lengthened
    /// to compensate for positive phase drifts due to differences in the
    /// frequency of the various nodes of the network.
    pub bit_segment_1: u8,
    /// Specifies the number of time quanta in Bit Segment 2: 1TQ..8TQ
    /// defines the location of the transmit point. It represents the
    /// PHASE_SEG2 of the CAN standard. Its duration is programmable
    /// between 1 and 8 time quanta but may also be automatically
    /// shortened to compensate for negative phase drifts.
    pub bit_segment_2: u8,
    /// Prescaling for time quantum: 1..1024
    /// Length of a time quanta: tq = (BRP[9:0]+1) x tPCLK
    pub time_quantum_length: u16,
}
pub const FILTERBANKCOUNT: FilterBankIndex = 14; //for STM32F105/STM32F107 (connectivity line, dual CAN) this should be 28
pub type FilterBankIndex = u8; //[0..FILTERBANKCOUNT-1] is valid
/// Matching mode of a filter bank (one FM1R bit per bank).
pub enum FilterMode {
    /// Identifier mask mode
    Mask,
    /// Identifier list mode
    List,
}
/// One id/mask (mask mode) or id/id (list mode) pair for a filter bank.
pub struct FilterData {
    /// Specifies the filter identification number
    pub id: Id,
    /// Specifies the filter mask number or a second identification number
    pub mask_or_id2: Id,
}
/// Scale of a filter bank (one FS1R bit per bank).
pub enum FilterInfo {
    /// One 32-bit filter for the STDID[10:0], EXTID[17:0], IDE and RTR bits.
    Whole(FilterData),
    /// Two 16-bit filters for the STDID[10:0], RTR, IDE and EXTID[17:15] bits (the rest of extid bits are ignored).
    Halves((FilterData, FilterData)),
}
/// Full description of one filter bank, consumed by `configure_filter_bank`.
pub struct FilterBankConfiguration {
    /// Id/mask versus id-list matching for this bank.
    pub mode: FilterMode,
    /// Scale (32-bit or 2x16-bit) plus the id/mask values themselves.
    pub info: FilterInfo,
    /// Specifies the FIFO (0 or 1) which will be assigned to the filter.
    /// This parameter can be a value of @ref CAN_filter_FIFO
    pub fifo_assignment: RxFifoIndex,
    /// Enable or disable the filter.
    pub active: bool,
}
/// Driver error kinds.
#[derive(Debug)]
pub enum Error {
    Unexpected, //we should never get this
    TooLongPayload, //tx mailbox can send up to 8 bytes at a time
    InvalidArgument,
    NotEmptyTxMailBox,
}
/// Marker trait tying a (TX, RX) pin pair to a CAN peripheral; `REMAP` is
/// the AFIO_MAPR CAN_REMAP value that selects this pin mapping.
pub trait Pins<CAN> {
    const REMAP: u8;
}
impl Pins<CAN> for (PA12<Alternate<PushPull>>, PA11<Input<Floating>>) {
    const REMAP: u8 = 0b00;
}
impl Pins<CAN> for (PB9<Alternate<PushPull>>, PB8<Input<Floating>>) {
    const REMAP: u8 = 0b10;
}
/// CAN abstraction
pub struct Can<CAN, PINS> {
    // Owned CAN register block.
    can: CAN,
    // The TX/RX pin pair, held so it cannot be reused while CAN is active.
    pins: PINS,
    /// The USB and CAN share a dedicated 512-byte SRAM memory for data
    /// transmission and reception, and so they cannot be used concurrently
    /// (the shared SRAM is accessed through CAN and USB exclusively).
    /// The USB and CAN can be used in the same application but not
    /// at the same time.
    _usb: USB,
}
/// To reduce power consumption, bxCAN has a low-power mode called Sleep mode.
/// In this mode, the bxCAN clock is stopped, however software can still access
/// the bxCAN mailboxes.
impl<PINS> Can<CAN, PINS> {
pub fn can1(can: CAN, pins: PINS, mapr: &mut MAPR, apb1: &mut APB1, usb: USB) -> Can<CAN, PINS>
where
PINS: Pins<CAN>,
{
// power up CAN peripheral
apb1.enr().modify(|_, w| w.canen().enabled());
// delay after an RCC peripheral clock enabling
apb1.enr().read();
// choose pin mapping
#[allow(unused_unsafe)]
mapr.mapr()
.modify(|_, w| unsafe { w.can_remap().bits(PINS::REMAP) });
Can {
can: can,
pins: pins,
_usb: usb,
}
}
/// moves from Sleep or Normal to Initialization mode
fn to_initialization(&mut self) -> nb::Result<(), ()> {
let msr = self.can.can_msr.read();
if msr.slak().bit_is_set() || msr.inak().bit_is_clear() {
// request exit from sleep and enter initialization modes
self.can
.can_mcr
.write(|w| w.sleep().clear_bit().inrq().set_bit());
Err(nb::Error::WouldBlock)
} else {
Ok(())
}
}
/// While in Initialization Mode, all message transfers to and from the
/// CAN bus are stopped and the status of the CAN bus output CANTX is
/// recessive (high).
pub fn configure(&mut self, config: &Configuration) {
let slept = self.is_sleeping();
// exit from sleep mode, request initialization
while let Err(nb::Error::WouldBlock) = self.to_initialization() {}
// Update register MCR
self.can.can_mcr.modify(|_, w| {
w.ttcm()
.bit(config.time_triggered_communication_mode)
.abom()
.bit(config.automatic_bus_off_management)
.awum()
.bit(config.automatic_wake_up_mode)
.nart()
.bit(config.no_automatic_retransmission)
.rflm()
.bit(config.receive_fifo_locked_mode)
.txfp()
.bit(config.transmit_fifo_priority)
});
//Set the bit timing register
self.can.can_btr.modify(|_, w| unsafe {
w.silm()
.bit(config.silent_mode)
.lbkm()
.bit(config.loopback_mode)
.sjw()
.bits(config.synchronisation_jump_width)
.ts1()
.bits(config.bit_segment_1 - 1)
.ts2()
.bits(config.bit_segment_2 - 1)
.brp()
.bits(config.time_quantum_length - 1)
});
//exit from initialization mode and return to the previous mode
if slept {
while let Err(nb::Error::WouldBlock) = self.to_sleep() {}
} else {
while let Err(nb::Error::WouldBlock) = self.to_normal() {}
}
}
//TODO this below can not be called if the can if is splitted. - what to do?
/// If automatic bus off management is disabled, the software must initiate
/// the recovering sequence by requesting bxCAN to enter and to leave initialization
/// mode. Note: In initialization mode, bxCAN does not monitor the CANRX signal,
/// therefore it cannot complete the recovery sequence. To recover, bxCAN must be
/// in normal mode.
pub fn recover_from_bus_off(&mut self) {
// exit from sleep mode, request initialization
while let Err(nb::Error::WouldBlock) = self.to_initialization() {}
while let Err(nb::Error::WouldBlock) = self.to_normal() {}
}
pub fn is_sleeping(&self) -> bool {
self.can.can_msr.read().slak().bit_is_set()
}
/// moves from Sleep to Normal mode
pub fn to_normal(&mut self) -> nb::Result<(), ()> {
let msr = self.can.can_msr.read();
if msr.slak().bit_is_set() || msr.inak().bit_is_set() {
// request exit from both sleep and initialization modes
self.can
.can_mcr
.write(|w| w.sleep().clear_bit().inrq().clear_bit());
Err(nb::Error::WouldBlock)
} else {
Ok(())
}
}
/// moves from Normal to Sleep mode
pub fn to_sleep(&mut self) -> nb::Result<(), ()> {
let msr = self.can.can_msr.read();
if msr.slak().bit_is_clear() || msr.inak().bit_is_set() {
// request exit from both sleep and initialization modes
self.can
.can_mcr
.write(|w| w.sleep().set_bit().inrq().clear_bit());
Err(nb::Error::WouldBlock)
} else {
Ok(())
}
}
/// releasing the resources
/// (required for example to use USB instead of CAN)
pub fn release(self, apb1: &mut APB1) -> (CAN, PINS, USB) {
apb1.enr().write(|w| w.canen().clear_bit());
(self.can, self.pins, self._usb)
}
pub fn activate_filter_bank(&mut self, index: FilterBankIndex, activate: bool) {
if index >= FILTERBANKCOUNT {
return; // Err(Error::InvalidArgument);
}
let bit = 1u32 << index;
if activate {
self.can
.can_fa1r
.modify(|r, w| unsafe { w.bits(r.bits() | bit) });
} else {
self.can
.can_fa1r
.modify(|r, w| unsafe { w.bits(r.bits() & (!bit)) });
}
}
pub fn configure_filter_bank(
&mut self,
index: FilterBankIndex,
config: &FilterBankConfiguration,
) {
if index >= FILTERBANKCOUNT {
return; // Err(Error::InvalidArgument);
}
// The filters values can be modified either deactivating the associated
// filter banks or by setting the FINIT bit. Moreover, the modification
// of the filter configuration (scale, mode and FIFO assignment) in
// CAN_FMxR, CAN_FSxR and CAN_FFAR registers can only be done when the
// filter initialization mode is set (FINIT=1) in the CAN_FMR register.
let bit = 1u32 << index;
let nbit = !bit;
// Initialization mode for the filter
self.can.can_fmr.write(|w| w.finit().bit(true));
//Filter deactivation
self.can
.can_fa1r
.modify(|r, w| unsafe { w.bits(r.bits() & nbit) });
//Filter Mode
match config.mode {
FilterMode::Mask => {
//Id/Mask mode for the filter
self.can
.can_fm1r
.modify(|r, w| unsafe { w.bits(r.bits() & nbit) });
}
FilterMode::List => {
//Identifier list mode for the filter
self.can
.can_fm1r
.modify(|r, w| unsafe { w.bits(r.bits() | bit) });
}
};
// Filter FIFO assignment
match config.fifo_assignment {
0 => {
self.can
.can_ffa1r
.modify(|r, w| unsafe { w.bits(r.bits() & nbit) });
}
1 => {
self.can
.can_ffa1r
.modify(|r, w| unsafe { w.bits(r.bits() | bit) });
}
_ => {
//return Err(Error::InvalidArgument);
}
}
//Depending on filter scale
match config.info {
FilterInfo::Halves((ref low, ref high)) => {
// 16-bit scale for the filter
self.can
.can_fs1r
.modify(|r, w| unsafe { w.bits(r.bits() & nbit) });
// First 16-bit identifier and First 16-bit mask
// Or First 16-bit identifier and Second 16-bit identifier
// Second 16-bit identifier and Second 16-bit mask
// Or Third 16-bit identifier and Fourth 16-bit identifier
self.fill_filter_registers(
index,
low.id.as_16bit_filter() | (low.mask_or_id2.as_16bit_filter() << 16),
high.id.as_16bit_filter() | (high.mask_or_id2.as_16bit_filter() << 16),
);
}
FilterInfo::Whole(ref whole) => {
// 32-bit scale for the filter
self.can
.can_fs1r
.modify(|r, w| unsafe { w.bits(r.bits() | bit) });
//32-bit identifier or First 32-bit identifier,
//32-bit mask or Second 32-bit identifier
self.fill_filter_registers(
index,
whole.id.as_32bit_filter(),
whole.mask_or_id2.as_32bit_filter(),
);
}
}
// Filter activation
if config.active {
self.can
.can_fa1r
.modify(|r, w| unsafe { w.bits(r.bits() | bit) });
}
// Leave the initialisation mode for the filter
self.can.can_fmr.write(|w| w.finit().bit(false));
}
//private helper function to get indexed access to the filter registers
fn fill_filter_registers(&self, index: FilterBankIndex, r1: u32, r2: u32) {
let offset = (index as u32)
* (((&self.can.f1r1 as *const _) as u32) - ((&self.can.f0r1 as *const _) as u32));
let addr1 = ((&self.can.f0r1 as *const _) as u32) + offset; //f0r1..f13r1
let addr2 = ((&self.can.f0r2 as *const _) as u32) + offset; //f0r2..f13r2
unsafe {
ptr::write_volatile(addr1 as *mut u32, r1);
ptr::write_volatile(addr2 as *mut u32, r2);
}
}
//TODO a join function may be needed also (which is the reverse of this one)...
pub fn split(self) -> (Tx<CAN>, Rx<CAN>) {
(Tx { _can: PhantomData }, Rx { _can: PhantomData })
}
/// interrupts for transmission:
/// CAN_IT_TME - Transmit mailbox empty Interrupt
///
/// interrupts for reception:
/// CAN_IT_FMP0 - FIFO 0 message pending interrupt
/// CAN_IT_FF0 - FIFO 0 full interrupt
/// CAN_IT_FOV0 - FIFO 0 overrun interrupt
/// CAN_IT_FMP1 - FIFO 1 message pending interrupt
/// CAN_IT_FF1 - FIFO 1 full interrupt
/// CAN_IT_FOV1 - FIFO 1 overrun interrupt
///
/// Operating Mode Interrupts:
/// CAN_IT_WKU - Wake-up interrupt
/// CAN_IT_SLK - Sleep acknowledge interrupt
///
/// Error Interrupts:
/// CAN_IT_EWG - Error warning Interrupt
/// CAN_IT_EPV - Error passive Interrupt
/// CAN_IT_BOF - Bus-off Interrupt
/// CAN_IT_LEC - Last error code Interrupt
/// CAN_IT_ERR - Error Interrupt
pub fn listen() {
//TODO
}
pub fn unlisten() {
//TODO
}
}
//TODO put a part of this in the HAL as TransmitMailbox trait?
pub trait TransmitMailbox {
const INDEX: TxMailBoxIndex;
fn is_empty(&self) -> bool;
fn has_the_lowest_priority(&self) -> bool;
fn was_transmission_error(&self) -> bool;
fn was_arbitration_lost(&self) -> bool;
fn has_transmission_succeeded(&self) -> bool;
fn has_last_request_completed(&self) -> bool;
fn request_abort_transmit(&mut self);
/// In order to transmit a message, the application must select
/// one empty transmit mailbox, set up the identifier, the data
/// length code (DLC) and the data before requesting the transmission
/// Once the mailbox has left empty state, the software no longer
/// has write access to the mailbox registers.
/// The hardware indicates a successful transmission by setting
/// the RQCP and TXOK bits in the CAN_TSR register.
fn request_transmit(&mut self, frame: &Frame) -> Result<(), Error>;
}
/// Can transmitter mailboxes
pub struct Tx<CAN> {
_can: PhantomData<CAN>,
}
impl Tx<CAN> {
pub fn split(
self,
) -> (
TxMailBox<CAN, TxMailBox0>,
TxMailBox<CAN, TxMailBox1>,
TxMailBox<CAN, TxMailBox2>,
) {
(
TxMailBox {
_can: PhantomData,
_index: PhantomData,
},
TxMailBox {
_can: PhantomData,
_index: PhantomData,
},
TxMailBox {
_can: PhantomData,
_index: PhantomData,
},
)
}
}
/// Can receiver fifos
pub struct Rx<CAN> {
_can: PhantomData<CAN>,
}
impl Rx<CAN> {
pub fn split(self) -> (RxFifo<CAN, RxFifo0>, RxFifo<CAN, RxFifo1>) {
(
RxFifo {
_can: PhantomData,
_index: PhantomData,
},
RxFifo {
_can: PhantomData,
_index: PhantomData,
},
)
}
}
pub struct TxMailBox<CAN, IDX> {
_can: PhantomData<CAN>,
_index: PhantomData<IDX>,
}
pub type TxMailBoxIndex = u8; //[0..2] is valid
//TODO search a better API for this...
///returns the index of an empty or the less important mailbox as candidate
pub fn recommend_transmitter() -> TxMailBoxIndex {
    //TODO return error in sleep, init mode?
    // NOTE(unsafe) atomic read with no side effects?
    // CODE in CAN_TSR is maintained by hardware; presumably it holds the
    // index of the next free / lowest-priority mailbox — TODO confirm
    // against the reference manual.
    // (clippy: let_and_return — return the expression directly.)
    unsafe { (*CAN::ptr()).can_tsr.read().code().bits() }
}
macro_rules! TxMailBox {
($CANX:ident, [
$($TxMailBoxi:ident: ($i:expr, $tmei:ident, $lowi:ident, $terri:ident,
$alsti:ident, $txoki:ident, $rqcpi:ident, $abrqi:ident, $can_tiir:ident,
$can_tdtir:ident, $can_tdlir:ident, $can_tdhir:ident),)+
]) => {
$(
//type state
pub struct $TxMailBoxi;
impl TransmitMailbox for TxMailBox<$CANX, $TxMailBoxi>
{
const INDEX: TxMailBoxIndex = $i;
fn is_empty(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$tmei().bit_is_set()
}
fn has_the_lowest_priority(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$lowi().bit_is_set()
}
fn was_transmission_error(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$terri().bit_is_set()
}
fn was_arbitration_lost(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$alsti().bit_is_set()
}
fn has_transmission_succeeded(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$txoki().bit_is_set()
}
// //16-bit timer value captured at the SOF transmission.
// fn get_transmission_time(&mut self) -> u16 {
// unsafe { (*$CANX::ptr()).$can_tdtir.read().time().bits() }
// }
fn has_last_request_completed(&self) -> bool {
// NOTE(unsafe) atomic read with no side effects
let tsr = unsafe { (*$CANX::ptr()).can_tsr.read() };
tsr.$rqcpi().bit_is_set()
}
fn request_abort_transmit(&mut self) {
unsafe { (*$CANX::ptr()).can_tsr.write(|w| w.$abrqi().set_bit()); }
}
//TODO non blocking on the top of this:
//TODO use a message struct as input
fn request_transmit(&mut self, frame: &Frame) -> Result<(), Error> {
if self.is_empty() {
if frame.data.dlc > 8 {
return Err(Error::TooLongPayload);
}
//fill message length [0..8]
unsafe { (*$CANX::ptr()).$can_tdtir.write(|w|
w
.dlc().bits(frame.data.dlc));
}
unsafe { (*$CANX::ptr()).$can_tdlir.write(|w| w.bits(frame.data.data_low)); }
unsafe { (*$CANX::ptr()).$can_tdhir.write(|w| w.bits(frame.data.data_high)); }
// Bits 31:21 STID[10:0]: Standard Identifier
// The standard part of the identifier.
// Bit 20:3 EXID[17:0]: Extended Identifier
// The extended part of the identifier.
// Bit 2 IDE: Identifier Extension
// This bit defines the identifier type of message in the mailbox.
// 0: Standard identifier.
// 1: Extended identifier.
// Bit 1 RTR: Remote Transmission Request
// 0: Data frame
// 1: Remote frame
// Bit 0 TXRQ: Transmit Mailbox Request
// Set by software to request the transmission for the corresponding mailbox.
// Cleared by hardware when the mailbox becomes empty.
let id = &frame.id;
unsafe { (*$CANX::ptr()).$can_tiir.write(|w|
w
.stid().bits(id.standard())
.exid().bits(id.extended_part())
.ide().bit(id.is_extended())
.rtr().bit(id.is_rtr())
.txrq().set_bit()); //request transmit
}
Ok(())
} else {
//this mailbox is not empty, so return the index of the less
//important mailbox as candidate for request_abort_transmit
Err(Error::NotEmptyTxMailBox)
}
}
}
)+
}
}
TxMailBox!(CAN, [
TxMailBox0: (0, tme0, low0, terr0, alst0, txok0, rqcp0, abrq0, can_ti0r, can_tdt0r, can_tdl0r, can_tdh0r),
TxMailBox1: (1, tme1, low1, terr1, alst1, txok1, rqcp1, abrq1, can_ti1r, can_tdt1r, can_tdl1r, can_tdh1r),
TxMailBox2: (2, tme2, low2, terr2, alst2, txok2, rqcp2, abrq2, can_ti2r, can_tdt2r, can_tdl2r, can_tdh2r),
]);
pub type RxFifoIndex = u8; //[0..1] is valid
pub type FilterMatchIndex = u8;
pub type TimeStamp = u16;
//TODO put a part of this in the HAL as ReceiveFifo trait?
pub trait ReceiveFifo {
const INDEX: RxFifoIndex;
fn has_overun(&self) -> bool;
fn is_full(&self) -> bool;
fn pending_count(&self) -> u8;
fn read(&mut self) -> nb::Result<(FilterMatchIndex, TimeStamp, Frame), ()>;
}
pub struct RxFifo<CAN, IDX> {
_can: PhantomData<CAN>,
_index: PhantomData<IDX>,
}
macro_rules! RxFifo {
($CANX:ident, [
$($RxFifoi:ident: ($i:expr,
$can_rfir:ident, $rfomi:ident, $fovri:ident, $fulli:ident, $fmpi:ident,
$can_riir:ident, $can_rdtir:ident, $can_rdlir:ident, $can_rdhir:ident),)+
]) => {
$(
//type state
pub struct $RxFifoi;
impl ReceiveFifo for RxFifo<$CANX, $RxFifoi> {
const INDEX: RxFifoIndex = $i;
fn has_overun(&self) -> bool {
unsafe { (*$CANX::ptr()).$can_rfir.read().$fovri().bit() }
}
fn is_full(&self) -> bool {
unsafe { (*$CANX::ptr()).$can_rfir.read().$fulli().bit() }
}
fn pending_count(&self) -> u8 {
unsafe { (*$CANX::ptr()).$can_rfir.read().$fmpi().bits() }
}
fn read(&mut self) -> nb::Result<(FilterMatchIndex, TimeStamp, Frame), ()> {
let n = self.pending_count();
if n < 1 {
//there are no messages in the fifo
Err(nb::Error::WouldBlock)
} else {
let rdtir = unsafe { (*$CANX::ptr()).$can_rdtir.read() };
let filter_match_index = rdtir.fmi().bits();
let time = rdtir.time().bits();
let frame = Frame {
id: Id::from_received_register(unsafe { (*$CANX::ptr()).$can_riir.read().bits() }),
data: Payload {
dlc: rdtir.dlc().bits(),
data_low: unsafe { (*$CANX::ptr()).$can_rdlir.read().bits() },
data_high: unsafe { (*$CANX::ptr()).$can_rdhir.read().bits() },
},
};
//after every info captured release fifo output mailbox:
unsafe { (*$CANX::ptr()).$can_rfir.write(|w| w.$rfomi().set_bit()) };
Ok((filter_match_index, time, frame))
}
}
}
)+
}
}
RxFifo!(CAN, [
RxFifo0: (0, can_rf0r, rfom0, fovr0, full0, fmp0, can_ri0r, can_rdt0r, can_rdl0r, can_rdh0r),
RxFifo1: (1, can_rf1r, rfom1, fovr1, full1, fmp1, can_ri1r, can_rdt1r, can_rdl1r, can_rdh1r),
]);
| 35.612069 | 116 | 0.541334 |
71c3f212bca281f0d9567582e95293a165308022 | 6,415 | use std::cell::RefCell;
use std::ffi::{CStr, CString, OsStr, OsString};
use std::io::{Error as IoError, ErrorKind, Result as IoResult};
use std::os::raw::c_void;
use std::os::unix::ffi::{OsStrExt, OsStringExt};
use std::os::unix::io::RawFd;
use std::ptr;
use std::rc::Rc;
use wayland_sys::server::*;
use calloop::generic::Generic;
use calloop::{LoopHandle, Source};
use Fd;
use super::globals::GlobalData;
use super::{ClientInner, GlobalInner};
use display::get_runtime_dir;
use {Interface, NewResource};
pub(crate) struct DisplayInner {
pub(crate) ptr: *mut wl_display,
source: Option<Source<Generic<Fd>>>,
rust_globals: Rc<RefCell<Vec<*mut wl_global>>>,
}
impl Drop for DisplayInner {
    fn drop(&mut self) {
        // Remove the event-loop source first so the display is no longer
        // dispatched during teardown. `if let` instead of `Option::map` for
        // a side effect (clippy: option_map_unit_fn); the redundant inner
        // block is gone as well.
        if let Some(source) = self.source.take() {
            source.remove();
        }
        unsafe {
            // Destroy clients before the display itself, matching the order
            // the original code used.
            ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_destroy_clients, self.ptr);
            ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_destroy, self.ptr);
        }
    }
}
impl DisplayInner {
pub(crate) fn new<Data: 'static>(handle: LoopHandle<Data>) -> Rc<RefCell<DisplayInner>> {
unsafe {
let ptr = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_create,);
// setup the client_created listener
let listener = signal::rust_listener_create(client_created);
ffi_dispatch!(
WAYLAND_SERVER_HANDLE,
wl_display_add_client_created_listener,
ptr,
listener
);
let rust_globals = Rc::new(RefCell::new(Vec::new()));
// setup the global filter
ffi_dispatch!(
WAYLAND_SERVER_HANDLE,
wl_display_set_global_filter,
ptr,
super::globals::global_filter,
&*rust_globals as *const RefCell<Vec<*mut wl_global>> as *mut _
);
let evl_ptr = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_get_event_loop, ptr);
let evl_fd = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_event_loop_get_fd, evl_ptr);
let mut evtsrc = Generic::new(Fd(evl_fd));
evtsrc.set_interest(::mio::Ready::readable());
evtsrc.set_pollopts(::mio::PollOpt::edge());
let source = Some(
handle
.insert_source(evtsrc, move |_, _| {
ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_event_loop_dispatch, evl_ptr, 0);
})
.map_err(Into::<::std::io::Error>::into)
.unwrap(),
);
Rc::new(RefCell::new(DisplayInner {
ptr,
source,
rust_globals,
}))
}
}
pub(crate) fn ptr(&self) -> *mut wl_display {
self.ptr
}
pub(crate) fn create_global<I: Interface, F1, F2>(
&mut self,
version: u32,
implementation: F1,
filter: Option<F2>,
) -> GlobalInner<I>
where
F1: FnMut(NewResource<I>, u32) + 'static,
F2: FnMut(ClientInner) -> bool + 'static,
{
let data = Box::new(GlobalData::new(implementation, filter));
unsafe {
let ptr = ffi_dispatch!(
WAYLAND_SERVER_HANDLE,
wl_global_create,
self.ptr,
I::c_interface(),
version as i32,
&*data as *const GlobalData<I> as *mut _,
super::globals::global_bind::<I>
);
self.rust_globals.borrow_mut().push(ptr);
GlobalInner::create(ptr, data, self.rust_globals.clone())
}
}
pub(crate) fn flush_clients(&mut self) {
unsafe { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_flush_clients, self.ptr) };
}
pub(crate) fn add_socket<S>(&mut self, name: Option<S>) -> IoResult<()>
where
S: AsRef<OsStr>,
{
let cname = match name.as_ref().map(|s| CString::new(s.as_ref().as_bytes())) {
Some(Ok(n)) => Some(n),
Some(Err(_)) => {
return Err(IoError::new(
ErrorKind::InvalidInput,
"nulls are not allowed in socket name",
))
}
None => None,
};
let ret = unsafe {
ffi_dispatch!(
WAYLAND_SERVER_HANDLE,
wl_display_add_socket,
self.ptr,
cname.as_ref().map(|s| s.as_ptr()).unwrap_or(ptr::null())
)
};
if ret == -1 {
// lets try to be helpful
let mut socket_name = get_runtime_dir()?;
match name {
Some(s) => socket_name.push(s.as_ref()),
None => socket_name.push("wayland-0"),
}
Err(IoError::new(
ErrorKind::PermissionDenied,
format!("could not bind socket {}", socket_name.to_string_lossy()),
))
} else {
Ok(())
}
}
pub(crate) fn add_socket_auto(&mut self) -> IoResult<OsString> {
let ret = unsafe { ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_add_socket_auto, self.ptr) };
if ret.is_null() {
// try to be helpful
let socket_name = get_runtime_dir()?;
Err(IoError::new(
ErrorKind::Other,
format!("no available wayland-* name in {}", socket_name.to_string_lossy()),
))
} else {
let sockname = unsafe { CStr::from_ptr(ret) };
Ok(<OsString as OsStringExt>::from_vec(sockname.to_bytes().into()))
}
}
pub(crate) unsafe fn add_socket_fd(&mut self, fd: RawFd) -> IoResult<()> {
let ret = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_display_add_socket_fd, self.ptr, fd);
if ret == 0 {
Ok(())
} else {
Err(IoError::new(ErrorKind::InvalidInput, "invalid socket fd"))
}
}
    /// Creates a new client from an already-connected socket `fd`.
    ///
    /// # Safety
    ///
    /// `fd` must be the server end of an open unix socket suitable for the
    /// wayland protocol; libwayland takes ownership of it.
    ///
    /// NOTE(review): `wl_client_create` can return a null pointer on
    /// failure — confirm that `ClientInner::from_ptr` tolerates that.
    pub unsafe fn create_client(&mut self, fd: RawFd) -> ClientInner {
        let ret = ffi_dispatch!(WAYLAND_SERVER_HANDLE, wl_client_create, self.ptr, fd);
        ClientInner::from_ptr(ret)
    }
}
/// `wl_listener` callback run by libwayland when a new client connects.
///
/// `data` points to the freshly created `wl_client`; wrapping it in a
/// `ClientInner` presumably sets up the per-client user data — see
/// `ClientInner::from_ptr`.
unsafe extern "C" fn client_created(_listener: *mut wl_listener, data: *mut c_void) {
    // init the client
    let _client = ClientInner::from_ptr(data as *mut wl_client);
}
| 32.729592 | 104 | 0.542634 |
f924a380996f738410c47b9adc39467ddf81aecd | 2,984 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{account::Account, executor::FakeExecutor, gas_costs};
use compiled_stdlib::transaction_scripts::StdlibScript;
use libra_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};
use libra_types::{
account_config::{self, BurnEvent, COIN1_NAME},
transaction::{authenticator::AuthenticationKey, TransactionArgument},
vm_status::KeptVMStatus,
};
use move_core_types::{
identifier::Identifier,
language_storage::{StructTag, TypeTag},
};
use std::convert::TryFrom;
use transaction_builder::{
encode_burn_txn_fees_script, encode_create_testing_account_script, encode_testnet_mint_script,
};
#[test]
fn burn_txn_fees() {
    // End-to-end check that transaction fees collected in Coin1 can be
    // burned by the treasury-compliance account, and that the burn event
    // reports exactly the amount of gas previously charged.
    let mut executor = FakeExecutor::from_genesis_file();
    let sender = Account::new();
    let dd = Account::new_genesis_account(account_config::testnet_dd_account_address());
    let tc = Account::new_blessed_tc();
    let libra_root = Account::new_libra_root();
    // Create the sender account on-chain via the libra-root account.
    executor.execute_and_apply(libra_root.signed_script_txn(
        encode_create_testing_account_script(
            account_config::coin1_tag(),
            *sender.address(),
            sender.auth_key_prefix(),
            false,
        ),
        1,
    ));
    // Fund the sender with 10_000_000 units of Coin1 from the testnet
    // designated dealer, so it can pay gas below.
    executor.execute_and_apply(dd.signed_script_txn(
        encode_testnet_mint_script(account_config::coin1_tag(), *sender.address(), 10_000_000),
        0,
    ));
    // Run one fee-paying transaction (a key rotation) from the sender and
    // record the gas it consumed; that gas is what we later expect to be
    // burned.
    let gas_used = {
        let privkey = Ed25519PrivateKey::generate_for_testing();
        let pubkey = privkey.public_key();
        let new_key_hash = AuthenticationKey::ed25519(&pubkey).to_vec();
        let args = vec![TransactionArgument::U8Vector(new_key_hash)];
        let status = executor.execute_and_apply(
            sender.create_signed_txn_with_args(
                StdlibScript::RotateAuthenticationKey
                    .compiled_bytes()
                    .into_vec(),
                vec![],
                args,
                0,
                gas_costs::TXN_RESERVED,
                1, // gas unit price of 1 so fees are actually charged
                COIN1_NAME.to_owned(),
            ),
        );
        assert_eq!(status.status().status(), Ok(KeptVMStatus::Executed));
        status.gas_used()
    };
    let coin1_ty = TypeTag::Struct(StructTag {
        address: account_config::CORE_CODE_ADDRESS,
        module: Identifier::new("Coin1").unwrap(),
        name: Identifier::new("Coin1").unwrap(),
        type_params: vec![],
    });
    // Burn the accumulated Coin1 transaction fees from the TC account.
    let output =
        executor.execute_and_apply(tc.signed_script_txn(encode_burn_txn_fees_script(coin1_ty), 0));
    let burn_events: Vec<_> = output
        .events()
        .iter()
        .filter_map(|event| BurnEvent::try_from(event).ok())
        .collect();
    // Exactly one burn event, denominated in Coin1, for precisely the gas
    // charged above.
    assert_eq!(burn_events.len(), 1);
    assert!(burn_events
        .iter()
        .any(|event| event.currency_code().as_str() == "Coin1"));
    burn_events
        .iter()
        .for_each(|event| assert_eq!(event.amount(), gas_used));
}
| 33.155556 | 99 | 0.637064 |
11ef2c3e1823438be508fc944fba1c22b424f3ac | 7,453 | // Copyright 2021 Parallel Finance Developer.
// This file is part of Parallel Finance.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Prices pallet
//!
//! ## Overview
//!
//! This pallet provides prices from the Oracle module by implementing the
//! `PriceFeeder` trait. In case of emergency, a price can be set directly
//! by the Oracle Collective.
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(clippy::unused_unit)]
use frame_support::{pallet_prelude::*, transactional};
use frame_system::pallet_prelude::*;
use orml_oracle::DataProviderExtended;
use orml_traits::DataProvider;
pub use pallet::*;
use primitives::*;
use sp_runtime::{
traits::{CheckedDiv, CheckedMul},
FixedU128,
};
use sp_std::vec::Vec;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[frame_support::pallet]
pub mod pallet {
    use super::*;
    /// Configuration trait of the prices pallet.
    #[pallet::config]
    pub trait Config: frame_system::Config {
        /// The overarching event type.
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
        /// The data source, such as Oracle.
        type Source: DataProvider<CurrencyId, TimeStampedPrice>
            + DataProviderExtended<CurrencyId, TimeStampedPrice>;
        /// The origin which may set prices feed to system.
        type FeederOrigin: EnsureOrigin<Self::Origin>;
        /// Currency used for staking
        #[pallet::constant]
        type StakingCurrency: Get<CurrencyId>;
        /// Currency used for liquid voucher
        #[pallet::constant]
        type LiquidCurrency: Get<CurrencyId>;
        /// The provider of the exchange rate between liquid currency and
        /// staking currency.
        type LiquidStakingExchangeRateProvider: ExchangeRateProvider;
    }
    #[pallet::event]
    #[pallet::generate_deposit(pub(crate) fn deposit_event)]
    pub enum Event<T: Config> {
        /// Set emergency price. \[currency_id, price_detail\]
        SetPrice(CurrencyId, PriceWithDecimal),
        /// Reset emergency price. \[currency_id\]
        ResetPrice(CurrencyId),
    }
    /// Mapping from currency id to its emergency price
    #[pallet::storage]
    #[pallet::getter(fn emergency_price)]
    pub type EmergencyPrice<T: Config> =
        StorageMap<_, Twox64Concat, CurrencyId, PriceWithDecimal, OptionQuery>;
    #[pallet::pallet]
    pub struct Pallet<T>(PhantomData<T>);
    #[pallet::hooks]
    impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}
    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// Set emergency price
        ///
        /// The stored price overrides the oracle feed until it is removed
        /// via `reset_price`. Only `FeederOrigin` may call this.
        // NOTE(review): hard-coded weight of 100 looks like a placeholder —
        // confirm whether it should come from benchmarking.
        #[pallet::weight(100)]
        #[transactional]
        pub fn set_price(
            origin: OriginFor<T>,
            currency_id: CurrencyId,
            price: PriceWithDecimal,
        ) -> DispatchResultWithPostInfo {
            T::FeederOrigin::ensure_origin(origin)?;
            <Pallet<T> as EmergencyPriceFeeder<CurrencyId, PriceWithDecimal>>::set_emergency_price(
                currency_id,
                price,
            );
            Ok(().into())
        }
        /// Reset emergency price
        ///
        /// Removes the emergency price so reads fall back to the oracle
        /// feed. Only `FeederOrigin` may call this.
        #[pallet::weight(100)]
        #[transactional]
        pub fn reset_price(
            origin: OriginFor<T>,
            currency_id: CurrencyId,
        ) -> DispatchResultWithPostInfo {
            T::FeederOrigin::ensure_origin(origin)?;
            <Pallet<T> as EmergencyPriceFeeder<CurrencyId, PriceWithDecimal>>::reset_emergency_price(
                currency_id,
            );
            Ok(().into())
        }
    }
}
impl<T: Config> Pallet<T> {
    /// Looks up the manually-set emergency price, if any.
    ///
    /// The returned timestamp is always zero, marking the value as an
    /// emergency price rather than one coming from the oracle.
    fn get_emergency_price(currency_id: &CurrencyId) -> Option<PriceDetail> {
        let stored = Self::emergency_price(currency_id)?;
        // Scale the raw price down by 10^decimal; bail out on overflow of
        // either the power or the division.
        let divisor = 10u128.checked_pow(stored.decimal.into())?;
        let price = stored.price.checked_div(&FixedU128::from_inner(divisor))?;
        Some((price, 0))
    }
}
impl<T: Config> PriceFeeder for Pallet<T> {
    /// Get price and timestamp by currency id.
    ///
    /// A timestamp of zero means the returned price is an emergency price
    /// set directly by governance rather than one reported by the oracle.
    fn get_price(currency_id: &CurrencyId) -> Option<PriceDetail> {
        // An emergency price, when present, always overrides the oracle feed.
        Self::get_emergency_price(currency_id).or_else(|| {
            if currency_id == &T::LiquidCurrency::get() {
                // The liquid currency has no direct feed: derive its price
                // from the staking-currency feed (scaled down by 10^decimal)
                // times the liquid/staking exchange rate.
                T::Source::get(&T::StakingCurrency::get()).and_then(|p| {
                    10u128.checked_pow(p.value.decimal.into()).and_then(|d| {
                        p.value
                            .price
                            .checked_div(&FixedU128::from_inner(d))
                            .and_then(|staking_currency_price| {
                                staking_currency_price.checked_mul(
                                    &T::LiquidStakingExchangeRateProvider::get_exchange_rate(),
                                )
                            })
                            .map(|price| (price, p.timestamp))
                    })
                })
            } else {
                // Fix: the original line was garbled by an HTML-entity
                // mis-encoding (`&curren` -> `¤`), producing `¤cy_id`,
                // which does not compile. `currency_id` is already a
                // `&CurrencyId`, so pass it straight through.
                T::Source::get(currency_id).and_then(|p| {
                    10u128.checked_pow(p.value.decimal.into()).and_then(|d| {
                        p.value
                            .price
                            .checked_div(&FixedU128::from_inner(d))
                            .map(|price| (price, p.timestamp))
                    })
                })
            }
        })
    }
}
impl<T: Config> EmergencyPriceFeeder<CurrencyId, PriceWithDecimal> for Pallet<T> {
    /// Set emergency price
    ///
    /// Stores the price verbatim and emits `SetPrice`; the scaling by
    /// `10^decimal` happens on read in `get_emergency_price`.
    fn set_emergency_price(currency_id: CurrencyId, price: PriceWithDecimal) {
        // set price direct
        EmergencyPrice::<T>::insert(currency_id, price);
        <Pallet<T>>::deposit_event(Event::SetPrice(currency_id, price));
    }
    /// Reset emergency price
    ///
    /// Removes the stored override and emits `ResetPrice`, so subsequent
    /// reads fall back to the oracle feed.
    fn reset_emergency_price(currency_id: CurrencyId) {
        EmergencyPrice::<T>::remove(currency_id);
        <Pallet<T>>::deposit_event(Event::ResetPrice(currency_id));
    }
}
impl<T: Config> DataProviderExtended<CurrencyId, TimeStampedPrice> for Pallet<T> {
    /// Oracle-style read that does not consult the emergency price.
    ///
    /// For the liquid currency the staking-currency feed's price is
    /// multiplied by the liquid/staking exchange rate; the `decimal` field
    /// is carried through unchanged, so (unlike `get_price`) no division by
    /// `10^decimal` is performed here.
    fn get_no_op(key: &CurrencyId) -> Option<TimeStampedPrice> {
        if key == &T::LiquidCurrency::get() {
            T::Source::get_no_op(&T::StakingCurrency::get()).and_then(|p| {
                p.value
                    .price
                    .checked_mul(&T::LiquidStakingExchangeRateProvider::get_exchange_rate())
                    .map(|price| TimeStampedPrice {
                        value: PriceWithDecimal {
                            price,
                            decimal: p.value.decimal,
                        },
                        timestamp: p.timestamp,
                    })
            })
        } else {
            T::Source::get_no_op(key)
        }
    }
    /// Forwards to the underlying data source.
    ///
    /// NOTE(review): this returns raw source values without the
    /// liquid-currency conversion applied in `get_no_op` above — confirm
    /// this asymmetry is intended.
    #[allow(clippy::complexity)]
    fn get_all_values() -> Vec<(CurrencyId, Option<TimeStampedPrice>)> {
        T::Source::get_all_values()
    }
}
| 34.99061 | 101 | 0.580572 |