hexsha | size | content | avg_line_length | max_line_length | alphanum_fraction |
---|---|---|---|---|---|
ef6db47ade0c272eb5c5d1e5e6d32c7146a4cd84 | 8,713 | // SPDX-License-Identifier: MIT
// Copyright (C) 2018-present iced project and contributors
//! iced-x86 JavaScript bindings
#![allow(unknown_lints)]
#![warn(absolute_paths_not_starting_with_crate)]
#![warn(anonymous_parameters)]
#![warn(elided_lifetimes_in_paths)]
#![warn(explicit_outlives_requirements)]
#![warn(invalid_html_tags)]
#![warn(keyword_idents)]
#![warn(macro_use_extern_crate)]
#![warn(meta_variable_misuse)]
#![warn(missing_docs)]
#![warn(non_ascii_idents)]
#![warn(trivial_casts)]
#![warn(trivial_numeric_casts)]
#![warn(unused_extern_crates)]
#![warn(unused_import_braces)]
#![warn(unused_lifetimes)]
#![warn(unused_must_use)]
#![warn(unused_qualifications)]
#![warn(unused_results)]
#![allow(clippy::cast_lossless)]
#![allow(clippy::collapsible_else_if)]
#![allow(clippy::collapsible_if)]
#![allow(clippy::field_reassign_with_default)]
#![allow(clippy::manual_range_contains)]
#![allow(clippy::manual_strip)]
#![allow(clippy::match_like_matches_macro)]
#![allow(clippy::match_ref_pats)]
#![allow(clippy::ptr_eq)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::type_complexity)]
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::wrong_self_convention)]
#![warn(clippy::cloned_instead_of_copied)]
#![warn(clippy::dbg_macro)]
#![warn(clippy::debug_assert_with_mut_call)]
#![warn(clippy::default_trait_access)]
#![warn(clippy::doc_markdown)]
#![warn(clippy::empty_line_after_outer_attr)]
#![warn(clippy::expect_used)]
#![warn(clippy::explicit_into_iter_loop)]
#![warn(clippy::explicit_iter_loop)]
#![warn(clippy::fallible_impl_from)]
#![warn(clippy::get_unwrap)]
#![warn(clippy::implicit_saturating_sub)]
#![warn(clippy::large_digit_groups)]
#![warn(clippy::let_unit_value)]
#![warn(clippy::match_bool)]
#![warn(clippy::match_on_vec_items)]
#![warn(clippy::match_wild_err_arm)]
#![warn(clippy::missing_errors_doc)]
#![warn(clippy::needless_borrow)]
#![warn(clippy::print_stderr)]
#![warn(clippy::print_stdout)]
#![warn(clippy::rc_buffer)]
#![warn(clippy::redundant_closure)]
#![warn(clippy::redundant_closure_for_method_calls)]
#![warn(clippy::same_functions_in_if_condition)]
#![warn(clippy::todo)]
#![warn(clippy::unimplemented)]
#![warn(clippy::unreadable_literal)]
#![warn(clippy::unused_self)]
#![warn(clippy::unwrap_in_result)]
#![warn(clippy::unwrap_used)]
#![warn(clippy::used_underscore_binding)]
#![warn(clippy::useless_let_if_seq)]
#![warn(clippy::useless_transmute)]
#![warn(clippy::zero_sized_map_values)]
#[cfg(all(feature = "encoder", feature = "block_encoder"))]
mod block_encoder;
#[cfg(all(feature = "encoder", feature = "block_encoder"))]
mod block_encoder_options;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod cc;
#[cfg(any(feature = "instr_api", all(feature = "encoder", feature = "op_code_info"), feature = "instr_create"))]
mod code;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod code_ext;
#[cfg(feature = "instr_api")]
mod code_size;
#[cfg(feature = "instr_info")]
#[cfg(feature = "instr_api")]
mod condition_code;
#[cfg(any(feature = "encoder", all(feature = "decoder", feature = "instr_info")))]
mod constant_offsets;
#[cfg(feature = "instr_info")]
mod cpuid_feature;
#[cfg(feature = "decoder")]
mod decoder;
#[cfg(feature = "decoder")]
mod decoder_error;
#[cfg(feature = "decoder")]
mod decoder_options;
#[cfg(feature = "encoder")]
mod encoder;
#[cfg(any(feature = "instr_info", all(feature = "encoder", feature = "op_code_info")))]
mod encoding_kind;
#[cfg(any(feature = "decoder", feature = "instr_info", feature = "encoder", feature = "instr_api", feature = "instr_create"))]
mod ex_utils;
#[cfg(feature = "fast_fmt")]
mod fast_fmt;
#[cfg(any(feature = "instr_info", all(feature = "instr_info", feature = "instr_api")))]
mod flow_control;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod format_mnemonic_options;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod formatter;
#[cfg(feature = "instr_info")]
mod info;
mod instruction;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod mandatory_prefix;
#[cfg(feature = "instr_create")]
mod memory_operand;
#[cfg(any(feature = "instr_api", feature = "instr_info", all(feature = "encoder", feature = "op_code_info")))]
mod memory_size;
#[cfg(feature = "instr_info")]
mod memory_size_ext;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
mod memory_size_options;
#[cfg(any(feature = "instr_api", all(feature = "encoder", feature = "op_code_info")))]
mod mnemonic;
#[cfg(feature = "instr_info")]
mod op_access;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod op_code_info;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod op_code_operand_kind;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod op_code_table_kind;
#[cfg(feature = "instr_api")]
mod op_kind;
#[cfg(any(feature = "instr_api", feature = "instr_info", feature = "instr_create"))]
mod register;
#[cfg(feature = "instr_info")]
mod register_ext;
#[cfg(feature = "instr_create")]
mod rep_prefix_kind;
#[cfg(feature = "instr_info")]
mod rflags_bits;
#[cfg(feature = "instr_api")]
mod rounding_control;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
mod tuple_type;
#[cfg(all(feature = "encoder", feature = "block_encoder"))]
pub use block_encoder::*;
#[cfg(all(feature = "encoder", feature = "block_encoder"))]
pub use block_encoder_options::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub use cc::*;
#[cfg(any(feature = "instr_api", all(feature = "encoder", feature = "op_code_info"), feature = "instr_create"))]
pub use code::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use code_ext::*;
#[cfg(feature = "instr_api")]
pub use code_size::*;
#[cfg(feature = "instr_info")]
#[cfg(feature = "instr_api")]
pub use condition_code::*;
#[cfg(any(feature = "encoder", all(feature = "decoder", feature = "instr_info")))]
pub use constant_offsets::*;
#[cfg(feature = "instr_info")]
pub use cpuid_feature::*;
#[cfg(feature = "decoder")]
pub use decoder::*;
#[cfg(feature = "decoder")]
pub use decoder_error::*;
#[cfg(feature = "decoder")]
pub use decoder_options::*;
#[cfg(feature = "encoder")]
pub use encoder::*;
#[cfg(any(feature = "instr_info", all(feature = "encoder", feature = "op_code_info")))]
pub use encoding_kind::*;
#[cfg(feature = "fast_fmt")]
pub use fast_fmt::*;
#[cfg(any(feature = "instr_info", all(feature = "instr_info", feature = "instr_api")))]
pub use flow_control::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub use format_mnemonic_options::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub use formatter::*;
#[cfg(feature = "instr_info")]
pub use info::*;
pub use instruction::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use mandatory_prefix::*;
#[cfg(feature = "instr_create")]
pub use memory_operand::*;
#[cfg(any(feature = "instr_api", feature = "instr_info", all(feature = "encoder", feature = "op_code_info")))]
pub use memory_size::*;
#[cfg(feature = "instr_info")]
pub use memory_size_ext::*;
#[cfg(any(feature = "gas", feature = "intel", feature = "masm", feature = "nasm"))]
pub use memory_size_options::*;
#[cfg(any(feature = "instr_api", all(feature = "encoder", feature = "op_code_info")))]
pub use mnemonic::*;
#[cfg(feature = "instr_info")]
pub use op_access::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use op_code_info::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use op_code_operand_kind::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use op_code_table_kind::*;
#[cfg(feature = "instr_api")]
pub use op_kind::*;
#[cfg(any(feature = "instr_api", feature = "instr_info", feature = "instr_create"))]
pub use register::*;
#[cfg(feature = "instr_info")]
pub use register_ext::*;
#[cfg(feature = "instr_create")]
pub use rep_prefix_kind::*;
#[cfg(feature = "instr_info")]
pub use rflags_bits::*;
#[cfg(feature = "instr_api")]
pub use rounding_control::*;
#[cfg(all(feature = "encoder", feature = "op_code_info"))]
pub use tuple_type::*;
use wasm_bindgen::prelude::*;
/// Gets feature flags.
///
/// Flag | Value
/// -----|-------
/// 0x01 | `VEX`
/// 0x02 | `EVEX`
/// 0x04 | `XOP`
/// 0x08 | `3DNow!`
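///
/// # Examples
///
/// A minimal sketch of decoding the returned flags; the JavaScript caller sees
/// the same numeric values on the returned number.
///
/// ```rust,ignore
/// let features = get_iced_features();
/// let has_vex = features & 0x01 != 0;
/// let has_evex = features & 0x02 != 0;
/// let has_xop = features & 0x04 != 0;
/// let has_3dnow = features & 0x08 != 0;
/// ```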
#[wasm_bindgen(js_name = "getIcedFeatures")]
pub fn get_iced_features() -> u32 {
#[allow(unused_mut)]
let mut flags = 0;
#[cfg(not(feature = "no_vex"))]
{
flags |= 1;
}
#[cfg(not(feature = "no_evex"))]
{
flags |= 2;
}
#[cfg(not(feature = "no_xop"))]
{
flags |= 4;
}
#[cfg(not(feature = "no_d3now"))]
{
flags |= 8;
}
flags
}
| 33.902724 | 126 | 0.693447 |
28b0fc496fe85e3e897e6906178cd7a17c8b51bc | 5,901 | use crate::error::RPCError;
use ckb_dao::DaoCalculator;
use ckb_fee_estimator::MAX_CONFIRM_BLOCKS;
use ckb_jsonrpc_types::{
Capacity, DryRunResult, EstimateResult, OutPoint, Script, Transaction, Uint64,
};
use ckb_logger::error;
use ckb_shared::{shared::Shared, Snapshot};
use ckb_store::ChainStore;
use ckb_types::{
core::cell::{resolve_transaction, CellProvider, CellStatus, HeaderChecker},
packed,
prelude::*,
H256,
};
use ckb_verification::ScriptVerifier;
use jsonrpc_core::{Error, Result};
use jsonrpc_derive::rpc;
use std::collections::HashSet;
#[rpc]
pub trait ExperimentRpc {
#[rpc(name = "_compute_transaction_hash")]
fn compute_transaction_hash(&self, tx: Transaction) -> Result<H256>;
#[rpc(name = "_compute_script_hash")]
fn compute_script_hash(&self, script: Script) -> Result<H256>;
#[rpc(name = "dry_run_transaction")]
fn dry_run_transaction(&self, _tx: Transaction) -> Result<DryRunResult>;
// Calculate the maximum withdraw one can get, given a referenced DAO cell,
// and a withdraw block hash
#[rpc(name = "calculate_dao_maximum_withdraw")]
fn calculate_dao_maximum_withdraw(&self, _out_point: OutPoint, _hash: H256)
-> Result<Capacity>;
// Estimate fee
#[rpc(name = "estimate_fee_rate")]
fn estimate_fee_rate(&self, expect_confirm_blocks: Uint64) -> Result<EstimateResult>;
}
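// A hedged sketch (not part of this module) of how `estimate_fee_rate` might be
// invoked over a plain JSON-RPC 2.0 transport; the hex-encoded "0xa" below
// assumes `Uint64` keeps its usual ckb_jsonrpc_types hex serialization and asks
// for confirmation within 10 blocks:
//
//   {"id": 1, "jsonrpc": "2.0", "method": "estimate_fee_rate", "params": ["0xa"]}
//
// On success the result is an `EstimateResult`, i.e. an object with a single
// `fee_rate` field.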
pub(crate) struct ExperimentRpcImpl {
pub shared: Shared,
}
impl ExperimentRpc for ExperimentRpcImpl {
fn compute_transaction_hash(&self, tx: Transaction) -> Result<H256> {
let tx: packed::Transaction = tx.into();
Ok(tx.calc_tx_hash().unpack())
}
fn compute_script_hash(&self, script: Script) -> Result<H256> {
let script: packed::Script = script.into();
Ok(script.calc_script_hash().unpack())
}
fn dry_run_transaction(&self, tx: Transaction) -> Result<DryRunResult> {
let tx: packed::Transaction = tx.into();
DryRunner::new(&self.shared).run(tx)
}
fn calculate_dao_maximum_withdraw(&self, out_point: OutPoint, hash: H256) -> Result<Capacity> {
let snapshot: &Snapshot = &self.shared.snapshot();
let consensus = snapshot.consensus();
let calculator = DaoCalculator::new(consensus, snapshot);
match calculator.maximum_withdraw(&out_point.into(), &hash.pack()) {
Ok(capacity) => Ok(capacity.into()),
Err(err) => {
error!("calculate_dao_maximum_withdraw error {:?}", err);
Err(Error::internal_error())
}
}
}
fn estimate_fee_rate(&self, expect_confirm_blocks: Uint64) -> Result<EstimateResult> {
let expect_confirm_blocks = expect_confirm_blocks.value() as usize;
// A tx needs 1 block to be proposed, then 2 more blocks to get confirmed,
// so the minimum number of expected confirm blocks is 3.
if expect_confirm_blocks < 3 || expect_confirm_blocks > MAX_CONFIRM_BLOCKS {
return Err(RPCError::custom(
RPCError::Invalid,
format!(
"expect_confirm_blocks should between 3 and {}, got {}",
MAX_CONFIRM_BLOCKS, expect_confirm_blocks
),
));
}
let tx_pool = self.shared.tx_pool_controller();
let fee_rate = tx_pool.estimate_fee_rate(expect_confirm_blocks);
if let Err(e) = fee_rate {
error!("send estimate_fee_rate request error {}", e);
return Err(Error::internal_error());
};
let fee_rate = fee_rate.unwrap();
if fee_rate.as_u64() == 0 {
return Err(RPCError::custom(
RPCError::Invalid,
"collected samples is not enough, please make sure node has peers and try later"
.into(),
));
}
Ok(EstimateResult {
fee_rate: fee_rate.as_u64().into(),
})
}
}
// DryRunner dry-runs the given transaction and returns the result, including execution cycles.
pub(crate) struct DryRunner<'a> {
shared: &'a Shared,
}
impl<'a> CellProvider for DryRunner<'a> {
fn cell(&self, out_point: &packed::OutPoint, with_data: bool) -> CellStatus {
let snapshot = self.shared.snapshot();
snapshot
.get_cell_meta(&out_point.tx_hash(), out_point.index().unpack())
.map(|mut cell_meta| {
if with_data {
cell_meta.mem_cell_data = snapshot
.get_cell_data(&out_point.tx_hash(), out_point.index().unpack());
}
CellStatus::live_cell(cell_meta)
}) // treat as live cell, regardless of live or dead
.unwrap_or(CellStatus::Unknown)
}
}
impl<'a> HeaderChecker for DryRunner<'a> {
fn check_valid(
&self,
block_hash: &packed::Byte32,
) -> std::result::Result<(), ckb_error::Error> {
self.shared.snapshot().check_valid(block_hash)
}
}
impl<'a> DryRunner<'a> {
pub(crate) fn new(shared: &'a Shared) -> Self {
Self { shared }
}
pub(crate) fn run(&self, tx: packed::Transaction) -> Result<DryRunResult> {
let snapshot: &Snapshot = &self.shared.snapshot();
match resolve_transaction(tx.into_view(), &mut HashSet::new(), self, self) {
Ok(resolved) => {
let consensus = snapshot.consensus();
let max_cycles = consensus.max_block_cycles;
match ScriptVerifier::new(&resolved, snapshot).verify(max_cycles) {
Ok(cycles) => Ok(DryRunResult {
cycles: cycles.into(),
}),
Err(err) => Err(RPCError::custom(RPCError::Invalid, format!("{:?}", err))),
}
}
Err(err) => Err(RPCError::custom(RPCError::Invalid, format!("{:?}", err))),
}
}
}
| 36.202454 | 99 | 0.606338 |
e2c816bb84df6c0dbbb6f224b30da9efb90d35cd | 50,636 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::archive::{Archive, ArchiveBuilder, ArchiveConfig, METADATA_FILENAME};
use super::linker::{Linker, GnuLinker, MsvcLinker};
use super::rpath::RPathConfig;
use super::rpath;
use super::svh::Svh;
use session::config;
use session::config::NoDebugInfo;
use session::config::{OutputFilenames, Input, OutputTypeBitcode, OutputTypeExe, OutputTypeObject};
use session::search_paths::PathKind;
use session::Session;
use metadata::common::LinkMeta;
use metadata::{encoder, cstore, filesearch, csearch, creader};
use metadata::filesearch::FileDoesntMatch;
use middle::ty::{self, Ty};
use rustc::ast_map::{PathElem, PathElems, PathName};
use trans::{CrateContext, CrateTranslation, gensym_name};
use util::common::time;
use util::ppaux;
use util::sha2::{Digest, Sha256};
use util::fs::fix_windows_verbatim_for_gcc;
use rustc_back::tempdir::TempDir;
use std::fs::{self, PathExt};
use std::io::{self, Read, Write};
use std::mem;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str;
use flate;
use serialize::hex::ToHex;
use syntax::ast;
use syntax::attr::AttrMetaMethods;
use syntax::codemap::Span;
use syntax::parse::token;
// RLIB LLVM-BYTECODE OBJECT LAYOUT
// Version 1
// Bytes Data
// 0..10 "RUST_OBJECT" encoded in ASCII
// 11..14 format version as little-endian u32
// 15..22 size in bytes of deflate compressed LLVM bitcode as
// little-endian u64
// 23.. compressed LLVM bitcode
// This is the "magic number" expected at the beginning of a LLVM bytecode
// object in an rlib.
pub const RLIB_BYTECODE_OBJECT_MAGIC: &'static [u8] = b"RUST_OBJECT";
// The version number this compiler will write to bytecode objects in rlibs
pub const RLIB_BYTECODE_OBJECT_VERSION: u32 = 1;
// The offset in bytes the bytecode object format version number can be found at
pub const RLIB_BYTECODE_OBJECT_VERSION_OFFSET: usize = 11;
// The offset in bytes the size of the compressed bytecode can be found at in
// format version 1
pub const RLIB_BYTECODE_OBJECT_V1_DATASIZE_OFFSET: usize =
RLIB_BYTECODE_OBJECT_VERSION_OFFSET + 4;
// The offset in bytes the compressed LLVM bytecode can be found at in format
// version 1
pub const RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET: usize =
RLIB_BYTECODE_OBJECT_V1_DATASIZE_OFFSET + 8;
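// A minimal sketch (not used by the compiler itself) of how a reader could
// decode the version 1 header laid out above. The offsets mirror the constants
// defined here, and the little-endian encoding matches what
// write_rlib_bytecode_object_v1 emits further down.
#[allow(dead_code)]
fn parse_rlib_bytecode_object_v1(data: &[u8]) -> Option<(u32, u64)> {
    if data.len() < RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET ||
       !data.starts_with(RLIB_BYTECODE_OBJECT_MAGIC) {
        return None;
    }
    // format version, little-endian u32 at bytes 11..14
    let mut version: u32 = 0;
    for i in 0..4 {
        version |= (data[RLIB_BYTECODE_OBJECT_VERSION_OFFSET + i] as u32) << (8 * i);
    }
    if version != RLIB_BYTECODE_OBJECT_VERSION {
        return None;
    }
    // size of the deflate-compressed bitcode, little-endian u64 at bytes 15..22
    let mut size: u64 = 0;
    for i in 0..8 {
        size |= (data[RLIB_BYTECODE_OBJECT_V1_DATASIZE_OFFSET + i] as u64) << (8 * i);
    }
    Some((version, size))
}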
/*
* Name mangling and its relationship to metadata. This is complex. Read
* carefully.
*
* The semantic model of Rust linkage is, broadly, that "there's no global
* namespace" between crates. Our aim is to preserve the illusion of this
* model despite the fact that it's not *quite* possible to implement on
* modern linkers. We initially didn't use system linkers at all, but have
* been convinced of their utility.
*
* There are a few issues to handle:
*
* - Linkers operate on a flat namespace, so we have to flatten names.
* We do this using the C++ namespace-mangling technique. Foo::bar
* symbols and such.
*
* - Symbols with the same name but different types need to get different
* linkage-names. We do this by hashing a string-encoding of the type into
* a fixed-size (currently 16-byte hex) cryptographic hash function (CHF:
* we use SHA256) to "prevent collisions". This is not airtight but 16 hex
* digits on uniform probability means you're going to need 2**32 same-name
* symbols in the same process before you're even hitting birthday-paradox
* collision probability.
*
* - Symbols in different crates but with same names "within" the crate need
* to get different linkage-names.
*
* - The hash shown in the filename needs to be predictable and stable for
* build tooling integration. It also needs to be using a hash function
* which is easy to use from Python, make, etc.
*
* So here is what we do:
*
* - Consider the package id; every crate has one (specified with crate_id
* attribute). If a package id isn't provided explicitly, we infer a
* versionless one from the output name. The version will end up being 0.0
* in this case. CNAME and CVERS are taken from this package id. For
* example, github.com/mozilla/CNAME#CVERS.
*
* - Define CMH as SHA256(crateid).
*
* - Define CMH8 as the first 8 characters of CMH.
*
* - Compile our crate to lib CNAME-CMH8-CVERS.so
*
* - Define STH(sym) as SHA256(CMH, type_str(sym))
*
* - Suffix a mangled sym with ::STH@CVERS, so that it is unique in the
* name, non-name metadata, and type sense, and versioned in the way
* system linkers understand.
*/
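// A hedged illustration of the scheme above: a package id like
// "github.com/mozilla/servo#0.1" gives CNAME = servo and CVERS = 0.1, CMH8 is
// the first 8 characters of CMH (the SHA256 of the crate id), and the dynamic
// library would be named something like libservo-<CMH8>-0.1.so on Linux-like
// targets.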
pub fn find_crate_name(sess: Option<&Session>,
attrs: &[ast::Attribute],
input: &Input) -> String {
let validate = |s: String, span: Option<Span>| {
creader::validate_crate_name(sess, &s[..], span);
s
};
// Look in attributes 100% of the time to make sure the attribute is marked
// as used. After doing this, however, we still prioritize a crate name from
// the command line over one found in the #[crate_name] attribute. If we
// find both we ensure that they're the same later on as well.
let attr_crate_name = attrs.iter().find(|at| at.check_name("crate_name"))
.and_then(|at| at.value_str().map(|s| (at, s)));
if let Some(sess) = sess {
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, ref name)) = attr_crate_name {
if *s != &name[..] {
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
sess.span_err(attr.span, &msg[..]);
}
}
return validate(s.clone(), None);
}
}
if let Some((attr, s)) = attr_crate_name {
return validate(s.to_string(), Some(attr.span));
}
if let Input::File(ref path) = *input {
if let Some(s) = path.file_stem().and_then(|s| s.to_str()) {
if s.starts_with("-") {
let msg = format!("crate names cannot start with a `-`, but \
`{}` has a leading hyphen", s);
if let Some(sess) = sess {
sess.err(&msg);
}
} else {
return validate(s.replace("-", "_"), None);
}
}
}
"rust_out".to_string()
}
pub fn build_link_meta(sess: &Session, krate: &ast::Crate,
name: String) -> LinkMeta {
let r = LinkMeta {
crate_name: name,
crate_hash: Svh::calculate(&sess.opts.cg.metadata, krate),
};
info!("{:?}", r);
return r;
}
fn truncated_hash_result(symbol_hasher: &mut Sha256) -> String {
let output = symbol_hasher.result_bytes();
// 64 bits should be enough to avoid collisions.
output[.. 8].to_hex().to_string()
}
// This calculates STH for a symbol, as defined above
fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>,
symbol_hasher: &mut Sha256,
t: Ty<'tcx>,
link_meta: &LinkMeta)
-> String {
// NB: do *not* use abbrevs here as we want the symbol names
// to be independent of one another in the crate.
symbol_hasher.reset();
symbol_hasher.input_str(&link_meta.crate_name);
symbol_hasher.input_str("-");
symbol_hasher.input_str(link_meta.crate_hash.as_str());
for meta in tcx.sess.crate_metadata.borrow().iter() {
symbol_hasher.input_str(&meta[..]);
}
symbol_hasher.input_str("-");
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t));
// Prefix with 'h' so that it never blends into adjacent digits
let mut hash = String::from("h");
hash.push_str(&truncated_hash_result(symbol_hasher));
hash
}
fn get_symbol_hash<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> String {
match ccx.type_hashcodes().borrow().get(&t) {
Some(h) => return h.to_string(),
None => {}
}
let mut symbol_hasher = ccx.symbol_hasher().borrow_mut();
let hash = symbol_hash(ccx.tcx(), &mut *symbol_hasher, t, ccx.link_meta());
ccx.type_hashcodes().borrow_mut().insert(t, hash.clone());
hash
}
// Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't!
// gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
pub fn sanitize(s: &str) -> String {
let mut result = String::new();
for c in s.chars() {
match c {
// Escape these with $ sequences
'@' => result.push_str("$SP$"),
'*' => result.push_str("$BP$"),
'&' => result.push_str("$RF$"),
'<' => result.push_str("$LT$"),
'>' => result.push_str("$GT$"),
'(' => result.push_str("$LP$"),
')' => result.push_str("$RP$"),
',' => result.push_str("$C$"),
// '.' doesn't occur in types and functions, so reuse it
// for ':' and '-'
'-' | ':' => result.push('.'),
// These are legal symbols
'a' ... 'z'
| 'A' ... 'Z'
| '0' ... '9'
| '_' | '.' | '$' => result.push(c),
_ => {
result.push('$');
for c in c.escape_unicode().skip(1) {
match c {
'{' => {},
'}' => result.push('$'),
c => result.push(c),
}
}
}
}
}
// Underscore-qualify anything that didn't start as an ident.
if !result.is_empty() &&
result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() {
return format!("_{}", &result[..]);
}
return result;
}
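// A hedged worked example of the escaping above: sanitizing "&foo<bar>" maps
// '&' to "$RF$", '<' to "$LT$" and '>' to "$GT$", giving "$RF$foo$LT$bar$GT$";
// since that no longer starts with an identifier character it then picks up
// the leading underscore, ending up as "_$RF$foo$LT$bar$GT$".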
pub fn mangle<PI: Iterator<Item=PathElem>>(path: PI,
hash: Option<&str>) -> String {
// Follow C++ namespace-mangling style, see
// http://en.wikipedia.org/wiki/Name_mangling for more info.
//
// It turns out that on OSX you can actually have arbitrary symbols in
// function names (at least when given to LLVM), but this is not possible
// when using unix's linker. Perhaps one day when we just use a linker from LLVM
// we won't need to do this name mangling. The problem with name mangling is
// that it seriously limits the available characters. For example we can't
// have things like &T in symbol names when one would theoretically
// want them for things like impls of traits on that type.
//
// To be able to work on all platforms and get *some* reasonable output, we
// use C++ name-mangling.
let mut n = String::from("_ZN"); // _Z == Begin name-sequence, N == nested
fn push(n: &mut String, s: &str) {
let sani = sanitize(s);
n.push_str(&format!("{}{}", sani.len(), sani));
}
// First, connect each component with <len, name> pairs.
for e in path {
push(&mut n, &token::get_name(e.name()))
}
match hash {
Some(s) => push(&mut n, s),
None => {}
}
n.push('E'); // End name-sequence.
n
}
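// A hedged worked example: mangling the two-element path `foo::bar` with the
// hash "h1234" yields "_ZN3foo3bar5h1234E" -- each component is emitted as
// <length><sanitized-name>, the hash is appended the same way, and the
// trailing 'E' closes the nested-name sequence.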
pub fn exported_name(path: PathElems, hash: &str) -> String {
mangle(path, Some(hash))
}
pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathElems,
t: Ty<'tcx>, id: ast::NodeId) -> String {
let mut hash = get_symbol_hash(ccx, t);
// Paths can be completely identical for different nodes,
// e.g. `fn foo() { { fn a() {} } { fn a() {} } }`, so we
// generate unique characters from the node id. For now
// hopefully 3 characters is enough to avoid collisions.
const EXTRA_CHARS: &'static str =
"abcdefghijklmnopqrstuvwxyz\
ABCDEFGHIJKLMNOPQRSTUVWXYZ\
0123456789";
let id = id as usize;
let extra1 = id % EXTRA_CHARS.len();
let id = id / EXTRA_CHARS.len();
let extra2 = id % EXTRA_CHARS.len();
let id = id / EXTRA_CHARS.len();
let extra3 = id % EXTRA_CHARS.len();
hash.push(EXTRA_CHARS.as_bytes()[extra1] as char);
hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
exported_name(path, &hash[..])
}
pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
t: Ty<'tcx>,
name: &str) -> String {
let s = ppaux::ty_to_string(ccx.tcx(), t);
let path = [PathName(token::intern(&s[..])),
gensym_name(name)];
let hash = get_symbol_hash(ccx, t);
mangle(path.iter().cloned(), Some(&hash[..]))
}
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
mangle(path.chain(Some(gensym_name(flav))), None)
}
pub fn get_cc_prog(sess: &Session) -> String {
match sess.opts.cg.linker {
Some(ref linker) => return linker.to_string(),
None => sess.target.target.options.linker.clone(),
}
}
pub fn get_ar_prog(sess: &Session) -> String {
sess.opts.cg.ar.clone().unwrap_or_else(|| {
sess.target.target.options.ar.clone()
})
}
pub fn remove(sess: &Session, path: &Path) {
match fs::remove_file(path) {
Ok(..) => {}
Err(e) => {
sess.err(&format!("failed to remove {}: {}",
path.display(),
e));
}
}
}
/// Perform the linkage portion of the compilation phase. This will generate all
/// of the requested outputs for this compilation session.
pub fn link_binary(sess: &Session,
trans: &CrateTranslation,
outputs: &OutputFilenames,
crate_name: &str) -> Vec<PathBuf> {
let mut out_filenames = Vec::new();
for &crate_type in sess.crate_types.borrow().iter() {
if invalid_output_for_target(sess, crate_type) {
sess.bug(&format!("invalid output type `{:?}` for target os `{}`",
crate_type, sess.opts.target_triple));
}
let out_file = link_binary_output(sess, trans, crate_type, outputs,
crate_name);
out_filenames.push(out_file);
}
// Remove the temporary object file and metadata if we aren't saving temps
if !sess.opts.cg.save_temps {
let obj_filename = outputs.temp_path(OutputTypeObject);
if !sess.opts.output_types.contains(&OutputTypeObject) {
remove(sess, &obj_filename);
}
remove(sess, &obj_filename.with_extension("metadata.o"));
}
out_filenames
}
/// Returns default crate type for target
///
/// Default crate type is used when the crate type isn't provided either
/// through command line arguments or through crate attributes
///
/// It is CrateTypeExecutable for all platforms but iOS, as there is no
/// way to run iOS binaries anyway without jailbreaking, and
/// interaction with Rust code through a static library is the only
/// option for now
pub fn default_output_for_target(sess: &Session) -> config::CrateType {
if !sess.target.target.options.executables {
config::CrateTypeStaticlib
} else {
config::CrateTypeExecutable
}
}
/// Checks if target supports crate_type as output
pub fn invalid_output_for_target(sess: &Session,
crate_type: config::CrateType) -> bool {
match (sess.target.target.options.dynamic_linking,
sess.target.target.options.executables, crate_type) {
(false, _, config::CrateTypeDylib) => true,
(_, false, config::CrateTypeExecutable) => true,
_ => false
}
}
fn is_writeable(p: &Path) -> bool {
match p.metadata() {
Err(..) => true,
Ok(m) => !m.permissions().readonly()
}
}
pub fn filename_for_input(sess: &Session,
crate_type: config::CrateType,
name: &str,
out_filename: &Path) -> PathBuf {
let libname = format!("{}{}", name, sess.opts.cg.extra_filename);
match crate_type {
config::CrateTypeRlib => {
out_filename.with_file_name(&format!("lib{}.rlib", libname))
}
config::CrateTypeDylib => {
let (prefix, suffix) = (&sess.target.target.options.dll_prefix,
&sess.target.target.options.dll_suffix);
out_filename.with_file_name(&format!("{}{}{}",
prefix,
libname,
suffix))
}
config::CrateTypeStaticlib => {
out_filename.with_file_name(&format!("lib{}.a", libname))
}
config::CrateTypeExecutable => {
let suffix = &sess.target.target.options.exe_suffix;
if suffix.is_empty() {
out_filename.to_path_buf()
} else {
out_filename.with_extension(&suffix[1..])
}
}
}
}
fn link_binary_output(sess: &Session,
trans: &CrateTranslation,
crate_type: config::CrateType,
outputs: &OutputFilenames,
crate_name: &str) -> PathBuf {
let obj_filename = outputs.temp_path(OutputTypeObject);
let out_filename = match outputs.single_output_file {
Some(ref file) => file.clone(),
None => {
let out_filename = outputs.path(OutputTypeExe);
filename_for_input(sess, crate_type, crate_name, &out_filename)
}
};
// Make sure the output and obj_filename are both writeable.
// Mac, FreeBSD, and Windows system linkers check this already --
// however, the Linux linker will happily overwrite a read-only file.
// We should be consistent.
let obj_is_writeable = is_writeable(&obj_filename);
let out_is_writeable = is_writeable(&out_filename);
if !out_is_writeable {
sess.fatal(&format!("output file {} is not writeable -- check its \
permissions.",
out_filename.display()));
}
else if !obj_is_writeable {
sess.fatal(&format!("object file {} is not writeable -- check its \
permissions.",
obj_filename.display()));
}
match crate_type {
config::CrateTypeRlib => {
link_rlib(sess, Some(trans), &obj_filename, &out_filename).build();
}
config::CrateTypeStaticlib => {
link_staticlib(sess, &obj_filename, &out_filename);
}
config::CrateTypeExecutable => {
link_natively(sess, trans, false, &obj_filename, &out_filename);
}
config::CrateTypeDylib => {
link_natively(sess, trans, true, &obj_filename, &out_filename);
}
}
out_filename
}
fn archive_search_paths(sess: &Session) -> Vec<PathBuf> {
let mut search = Vec::new();
sess.target_filesearch(PathKind::Native).for_each_lib_search_path(|path, _| {
search.push(path.to_path_buf());
FileDoesntMatch
});
return search;
}
// Create an 'rlib'
//
// An rlib in its current incarnation is essentially a renamed .a file. The
// rlib primarily contains the object file of the crate, but it also contains
// all of the object files from native libraries. This is done by unzipping
// native libraries and inserting all of the contents into this archive.
fn link_rlib<'a>(sess: &'a Session,
trans: Option<&CrateTranslation>, // None == no metadata/bytecode
obj_filename: &Path,
out_filename: &Path) -> ArchiveBuilder<'a> {
info!("preparing rlib from {:?} to {:?}", obj_filename, out_filename);
let handler = &sess.diagnostic().handler;
let config = ArchiveConfig {
handler: handler,
dst: out_filename.to_path_buf(),
lib_search_paths: archive_search_paths(sess),
slib_prefix: sess.target.target.options.staticlib_prefix.clone(),
slib_suffix: sess.target.target.options.staticlib_suffix.clone(),
ar_prog: get_ar_prog(sess),
};
let mut ab = ArchiveBuilder::create(config);
ab.add_file(obj_filename).unwrap();
for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() {
match kind {
cstore::NativeStatic => ab.add_native_library(&l).unwrap(),
cstore::NativeFramework | cstore::NativeUnknown => {}
}
}
// After adding all files to the archive, we need to update the
// symbol table of the archive.
ab.update_symbols();
let mut ab = match sess.target.target.options.is_like_osx {
// For OSX/iOS, we must be careful to update symbols only when adding
// object files. We're about to start adding non-object files, so run
// `ar` now to process the object files.
true => ab.build().extend(),
false => ab,
};
// Note that it is important that we add all of our non-object "magical
// files" *after* all of the object files in the archive. The reason for
// this is as follows:
//
// * When performing LTO, this archive will be modified to remove
// obj_filename from above. The reason for this is described below.
//
// * When the system linker looks at an archive, it will attempt to
// determine the architecture of the archive in order to see whether it's
// linkable.
//
// The algorithm for this detection is: iterate over the files in the
// archive. Skip magical SYMDEF names. Interpret the first file as an
// object file. Read architecture from the object file.
//
// * As one can probably see, if "metadata" and "foo.bc" were placed
// before all of the objects, then the architecture of this archive would
// not be correctly inferred once 'foo.o' is removed.
//
// Basically, all this means is that this code should not be moved above the
// object-file handling code above.
match trans {
Some(trans) => {
// Instead of putting the metadata in an object file section, rlibs
// contain the metadata in a separate file. We use a temp directory
// here so concurrent builds in the same directory don't try to use
// the same filename for metadata (stomping over one another)
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
let metadata = tmpdir.path().join(METADATA_FILENAME);
match fs::File::create(&metadata).and_then(|mut f| {
f.write_all(&trans.metadata)
}) {
Ok(..) => {}
Err(e) => {
sess.fatal(&format!("failed to write {}: {}",
metadata.display(), e));
}
}
ab.add_file(&metadata).unwrap();
remove(sess, &metadata);
// For LTO purposes, the bytecode of this library is also inserted
// into the archive. If codegen_units > 1, we insert each of the
// bitcode files.
for i in 0..sess.opts.cg.codegen_units {
// Note that we make sure that the bytecode filename in the
// archive is never exactly 16 bytes long by adding a 16 byte
// extension to it. This is to work around a bug in LLDB that
// would cause it to crash if the name of a file in an archive
// was exactly 16 bytes.
let bc_filename = obj_filename.with_extension(&format!("{}.bc", i));
let bc_deflated_filename = obj_filename.with_extension(
&format!("{}.bytecode.deflate", i));
let mut bc_data = Vec::new();
match fs::File::open(&bc_filename).and_then(|mut f| {
f.read_to_end(&mut bc_data)
}) {
Ok(..) => {}
Err(e) => sess.fatal(&format!("failed to read bytecode: {}",
e))
}
let bc_data_deflated = flate::deflate_bytes(&bc_data[..]);
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
Ok(file) => file,
Err(e) => {
sess.fatal(&format!("failed to create compressed \
bytecode file: {}", e))
}
};
match write_rlib_bytecode_object_v1(&mut bc_file_deflated,
&bc_data_deflated) {
Ok(()) => {}
Err(e) => {
sess.fatal(&format!("failed to write compressed \
bytecode: {}", e));
}
};
ab.add_file(&bc_deflated_filename).unwrap();
remove(sess, &bc_deflated_filename);
// See the bottom of back::write::run_passes for an explanation
// of when we do and don't keep .0.bc files around.
let user_wants_numbered_bitcode =
sess.opts.output_types.contains(&OutputTypeBitcode) &&
sess.opts.cg.codegen_units > 1;
if !sess.opts.cg.save_temps && !user_wants_numbered_bitcode {
remove(sess, &bc_filename);
}
}
// After adding all files to the archive, we need to update the
// symbol table of the archive. This currently dies on OSX (see
// #11162), and isn't necessary there anyway
if !sess.target.target.options.is_like_osx {
ab.update_symbols();
}
}
None => {}
}
ab
}
fn write_rlib_bytecode_object_v1(writer: &mut Write,
bc_data_deflated: &[u8]) -> io::Result<()> {
let bc_data_deflated_size: u64 = bc_data_deflated.len() as u64;
try!(writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC));
try!(writer.write_all(&[1, 0, 0, 0]));
try!(writer.write_all(&[
(bc_data_deflated_size >> 0) as u8,
(bc_data_deflated_size >> 8) as u8,
(bc_data_deflated_size >> 16) as u8,
(bc_data_deflated_size >> 24) as u8,
(bc_data_deflated_size >> 32) as u8,
(bc_data_deflated_size >> 40) as u8,
(bc_data_deflated_size >> 48) as u8,
(bc_data_deflated_size >> 56) as u8,
]));
try!(writer.write_all(&bc_data_deflated));
let number_of_bytes_written_so_far =
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
mem::size_of_val(&RLIB_BYTECODE_OBJECT_VERSION) + // version
mem::size_of_val(&bc_data_deflated_size) + // data size field
bc_data_deflated_size as usize; // actual data
// If the number of bytes written to the object so far is odd, add a
// padding byte to make it even. This works around a crash bug in LLDB
// (see issue #15950)
if number_of_bytes_written_so_far % 2 == 1 {
try!(writer.write_all(&[0]));
}
return Ok(());
}
// Create a static archive
//
// This is essentially the same thing as an rlib, but it also involves adding
// all of the upstream crates' objects into the archive. This will slurp in
// all of the native libraries of upstream dependencies as well.
//
// Additionally, there's no way for us to link dynamic libraries, so we warn
// about all dynamic library dependencies, noting that they're not linked in.
//
// There's no need to include metadata in a static archive, so ensure to not
// link in the metadata object file (and also don't prepare the archive with a
// metadata file).
fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
let ab = link_rlib(sess, None, obj_filename, out_filename);
let mut ab = match sess.target.target.options.is_like_osx {
true => ab.build().extend(),
false => ab,
};
if sess.target.target.options.morestack {
ab.add_native_library("morestack").unwrap();
}
if !sess.target.target.options.no_compiler_rt {
ab.add_native_library("compiler-rt").unwrap();
}
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
let mut all_native_libs = vec![];
for &(cnum, ref path) in &crates {
let ref name = sess.cstore.get_crate_data(cnum).name;
let p = match *path {
Some(ref p) => p.clone(), None => {
sess.err(&format!("could not find rlib for: `{}`",
name));
continue
}
};
ab.add_rlib(&p, &name[..], sess.lto()).unwrap();
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
all_native_libs.extend(native_libs);
}
ab.update_symbols();
let _ = ab.build();
if !all_native_libs.is_empty() {
sess.note("link against the following native artifacts when linking against \
this static library");
sess.note("the order and any duplication can be significant on some platforms, \
and so may need to be preserved");
}
for &(kind, ref lib) in &all_native_libs {
let name = match kind {
cstore::NativeStatic => "static library",
cstore::NativeUnknown => "library",
cstore::NativeFramework => "framework",
};
sess.note(&format!("{}: {}", name, *lib));
}
}
// Create a dynamic library or executable
//
// This will invoke the system linker/cc to create the resulting file. This
// links to all upstream files as well.
fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
obj_filename: &Path, out_filename: &Path) {
info!("preparing dylib? ({}) from {:?} to {:?}", dylib, obj_filename,
out_filename);
let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir");
// The invocations of cc share some flags across platforms
let pname = get_cc_prog(sess);
let mut cmd = Command::new(&pname[..]);
let root = sess.target_filesearch(PathKind::Native).get_lib_path();
cmd.args(&sess.target.target.options.pre_link_args);
for obj in &sess.target.target.options.pre_link_objects {
cmd.arg(root.join(obj));
}
{
let mut linker = if sess.target.target.options.is_like_msvc {
Box::new(MsvcLinker { cmd: &mut cmd, sess: &sess }) as Box<Linker>
} else {
Box::new(GnuLinker { cmd: &mut cmd, sess: &sess }) as Box<Linker>
};
link_args(&mut *linker, sess, dylib, tmpdir.path(),
trans, obj_filename, out_filename);
if !sess.target.target.options.no_compiler_rt {
linker.link_staticlib("compiler-rt");
}
}
for obj in &sess.target.target.options.post_link_objects {
cmd.arg(root.join(obj));
}
cmd.args(&sess.target.target.options.post_link_args);
if sess.opts.debugging_opts.print_link_args {
println!("{:?}", &cmd);
}
// May have not found libraries in the right formats.
sess.abort_if_errors();
// Invoke the system linker
info!("{:?}", &cmd);
let prog = time(sess.time_passes(), "running linker", (), |()| cmd.output());
match prog {
Ok(prog) => {
if !prog.status.success() {
sess.err(&format!("linking with `{}` failed: {}",
pname,
prog.status));
sess.note(&format!("{:?}", &cmd));
let mut output = prog.stderr.clone();
output.push_all(&prog.stdout);
sess.note(str::from_utf8(&output[..]).unwrap());
sess.abort_if_errors();
}
info!("linker stderr:\n{}", String::from_utf8(prog.stderr).unwrap());
info!("linker stdout:\n{}", String::from_utf8(prog.stdout).unwrap());
},
Err(e) => {
sess.fatal(&format!("could not exec the linker `{}`: {}", pname, e));
}
}
// On OSX, debuggers need this utility to get run to do some munging of
// the symbols
if sess.target.target.options.is_like_osx && sess.opts.debuginfo != NoDebugInfo {
match Command::new("dsymutil").arg(out_filename).output() {
Ok(..) => {}
Err(e) => sess.fatal(&format!("failed to run dsymutil: {}", e)),
}
}
}
fn link_args(cmd: &mut Linker,
sess: &Session,
dylib: bool,
tmpdir: &Path,
trans: &CrateTranslation,
obj_filename: &Path,
out_filename: &Path) {
// The default library location, we need this to find the runtime.
// The location of crates will be determined as needed.
let lib_path = sess.target_filesearch(PathKind::All).get_lib_path();
// target descriptor
let t = &sess.target.target;
cmd.include_path(&fix_windows_verbatim_for_gcc(&lib_path));
cmd.add_object(obj_filename);
cmd.output_filename(out_filename);
// Stack growth requires statically linking a __morestack function. Note
// that this is listed *before* all other libraries. Due to the usage of the
// --as-needed flag below, the standard library may only be useful for its
// rust_stack_exhausted function. In this case, we must ensure that the
// libmorestack.a file appears *before* the standard library (so we put it
// at the very front).
//
// Most of the time this is sufficient, except for when LLVM gets super
// clever. If, for example, we have a main function `fn main() {}`, LLVM
// will optimize out calls to `__morestack` entirely because the function
// doesn't need any stack at all!
//
// To get around this snag, we specially tell the linker to always include
// all contents of this library. This way we're guaranteed that the linker
// will include the __morestack symbol 100% of the time, always resolving
// references to it even if the object above didn't use it.
if t.options.morestack {
cmd.link_whole_staticlib("morestack", &[lib_path]);
}
// When linking a dynamic library, we put the metadata into a section of the
// executable. This metadata is in a separate object file from the main
// object file, so we link that in here.
if dylib {
cmd.add_object(&obj_filename.with_extension("metadata.o"));
}
// Try to strip as much out of the generated object by removing unused
// sections if possible. See more comments in linker.rs
cmd.gc_sections(dylib);
let used_link_args = sess.cstore.get_used_link_args().borrow();
if !dylib && t.options.position_independent_executables {
let empty_vec = Vec::new();
let empty_str = String::new();
let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec);
let mut args = args.iter().chain(used_link_args.iter());
let relocation_model = sess.opts.cg.relocation_model.as_ref()
.unwrap_or(&empty_str);
if (t.options.relocation_model == "pic" || *relocation_model == "pic")
&& !args.any(|x| *x == "-static") {
cmd.position_independent_executable();
}
}
// Pass optimization flags down to the linker.
cmd.optimize();
// We want to prevent the compiler from accidentally leaking in any system
// libraries, so we explicitly ask gcc to not link to any libraries by
// default. Note that this does not happen for windows because windows pulls
// in some large number of libraries and I couldn't quite figure out which
// subset we wanted.
cmd.no_default_libraries();
// Take careful note of the ordering of the arguments we pass to the linker
// here. Linkers will assume that things on the left depend on things to the
// right. Things on the right cannot depend on things on the left. This is
// all formally implemented in terms of resolving symbols (libs on the right
// resolve unknown symbols of libs on the left, but not vice versa).
//
// For this reason, we have organized the arguments we pass to the linker as
// such:
//
// 1. The local object that LLVM just generated
// 2. Upstream rust libraries
// 3. Local native libraries
// 4. Upstream native libraries
//
// This is generally fairly natural, but some may expect 2 and 3 to be
// swapped. The reason that all native libraries are put last is that it's
// not recommended for a native library to depend on a symbol from a rust
// crate. If this is the case then a staticlib crate is recommended, solving
// the problem.
//
// Additionally, it is occasionally the case that upstream rust libraries
// depend on a local native library. In the case of libraries such as
// lua/glfw/etc the name of the library isn't the same across all platforms,
// so only the consumer crate of a library knows the actual name. This means
// that downstream crates will provide the #[link] attribute which upstream
// crates will depend on. Hence local native libraries are after our
// upstream rust crates.
//
// In theory this means that a symbol in an upstream native library will be
// shadowed by a local native library when it wouldn't have been before, but
// this kind of behavior is pretty platform specific and generally not
// recommended anyway, so I don't think we're shooting ourselves in the foot
// much with that.
add_upstream_rust_crates(cmd, sess, dylib, tmpdir, trans);
add_local_native_libraries(cmd, sess);
add_upstream_native_libraries(cmd, sess);
// # Telling the linker what we're doing
if dylib {
cmd.build_dylib(out_filename);
}
// FIXME (#2397): At some point we want to rpath our guesses as to
// where extern libraries might live, based on the
// addl_lib_search_paths
if sess.opts.cg.rpath {
let sysroot = sess.sysroot();
let target_triple = &sess.opts.target_triple;
let mut get_install_prefix_lib_path = || {
let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX");
let tlib = filesearch::relative_target_lib_path(sysroot, target_triple);
let mut path = PathBuf::from(install_prefix);
path.push(&tlib);
path
};
let mut rpath_config = RPathConfig {
used_crates: sess.cstore.get_used_crates(cstore::RequireDynamic),
out_filename: out_filename.to_path_buf(),
has_rpath: sess.target.target.options.has_rpath,
is_like_osx: sess.target.target.options.is_like_osx,
get_install_prefix_lib_path: &mut get_install_prefix_lib_path,
};
cmd.args(&rpath::get_rpath_flags(&mut rpath_config));
}
// Finally add all the linker arguments provided on the command line along
// with any #[link_args] attributes found inside the crate
if let Some(ref args) = sess.opts.cg.link_args {
cmd.args(args);
}
cmd.args(&used_link_args);
}
// # Native library linking
//
// User-supplied library search paths (-L on the command line). These are
// the same paths used to find Rust crates, so some of them may have been
// added already by the previous crate linking code. This only allows them
// to be found at compile time so it is still entirely up to outside
// forces to make sure that library can be found at runtime.
//
// Also note that the native libraries linked here are only the ones located
// in the current crate. Upstream crates with native library dependencies
// may have their native library pulled in above.
fn add_local_native_libraries(cmd: &mut Linker, sess: &Session) {
sess.target_filesearch(PathKind::All).for_each_lib_search_path(|path, k| {
match k {
PathKind::Framework => { cmd.framework_path(path); }
_ => { cmd.include_path(&fix_windows_verbatim_for_gcc(path)); }
}
FileDoesntMatch
});
let libs = sess.cstore.get_used_libraries();
let libs = libs.borrow();
let staticlibs = libs.iter().filter_map(|&(ref l, kind)| {
if kind == cstore::NativeStatic {Some(l)} else {None}
});
let others = libs.iter().filter(|&&(_, kind)| {
kind != cstore::NativeStatic
});
// Some platforms take hints about whether a library is static or dynamic.
// For those that support this, we ensure we pass the option if the library
// was flagged "static" (most defaults are dynamic) to ensure that if
// libfoo.a and libfoo.so both exist that the right one is chosen.
cmd.hint_static();
let search_path = archive_search_paths(sess);
for l in staticlibs {
// Here we explicitly ask that the entire archive is included into the
// result artifact. For more details see #15460, but the gist is that
// the linker will strip away any unused objects in the archive if we
// don't otherwise explicitly reference them. This can occur for
// libraries which are just providing bindings, libraries with generic
// functions, etc.
cmd.link_whole_staticlib(l, &search_path);
}
cmd.hint_dynamic();
for &(ref l, kind) in others {
match kind {
cstore::NativeUnknown => cmd.link_dylib(l),
cstore::NativeFramework => cmd.link_framework(l),
cstore::NativeStatic => unreachable!(),
}
}
}
// # Rust Crate linking
//
// Rust crates are not considered at all when creating an rlib output. All
// dependencies will be linked when producing the final output (instead of
// the intermediate rlib version)
fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session,
dylib: bool, tmpdir: &Path,
trans: &CrateTranslation) {
// All of the heavy lifting has previously been accomplished by the
// dependency_format module of the compiler. This is just crawling the
// output of that module, adding crates as necessary.
//
// Linking to a rlib involves just passing it to the linker (the linker
// will slurp up the object files inside), and linking to a dynamic library
// involves just passing the right -l flag.
let data = if dylib {
trans.crate_formats.get(&config::CrateTypeDylib).unwrap()
} else {
trans.crate_formats.get(&config::CrateTypeExecutable).unwrap()
};
// Invoke get_used_crates to ensure that we get a topological sorting of
// crates.
let deps = sess.cstore.get_used_crates(cstore::RequireDynamic);
for &(cnum, _) in &deps {
// We may not pass all crates through to the linker. Some crates may
// appear statically in an existing dylib, meaning we'll pick up all the
// symbols from the dylib.
let kind = match data[cnum as usize - 1] {
Some(t) => t,
None => continue
};
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
match kind {
cstore::RequireDynamic => {
add_dynamic_crate(cmd, sess, &src.dylib.unwrap().0)
}
cstore::RequireStatic => {
add_static_crate(cmd, sess, tmpdir, &src.rlib.unwrap().0)
}
}
}
// Converts a library file-stem into a cc -l argument
fn unlib<'a>(config: &config::Config, stem: &'a str) -> &'a str {
if stem.starts_with("lib") && !config.target.options.is_like_windows {
&stem[3..]
} else {
stem
}
}
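// For example, a file-stem of "libfoo" becomes "foo" on non-Windows-like
// targets, so the linker is asked for `-lfoo`; on Windows-like targets the
// stem is passed through unchanged.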
// Adds the static "rlib" versions of all crates to the command line.
fn add_static_crate(cmd: &mut Linker, sess: &Session, tmpdir: &Path,
cratepath: &Path) {
// When performing LTO on an executable output, all of the
// bytecode from the upstream libraries has already been
// included in our object file output. We need to modify all of
// the upstream archives to remove their corresponding object
// file to make sure we don't pull the same code in twice.
//
// We must continue to link to the upstream archives to be sure
// to pull in native static dependencies. As the final caveat,
// on Linux it is apparently illegal to link to a blank archive,
// so if an archive no longer has any object files in it after
// we remove `lib.o`, then don't link against it at all.
//
// If we're not doing LTO, then our job is simply to just link
// against the archive.
if sess.lto() {
let name = cratepath.file_name().unwrap().to_str().unwrap();
let name = &name[3..name.len() - 5]; // chop off lib/.rlib
time(sess.time_passes(),
&format!("altering {}.rlib", name),
(), |()| {
let dst = tmpdir.join(cratepath.file_name().unwrap());
match fs::copy(&cratepath, &dst) {
Ok(..) => {}
Err(e) => {
sess.fatal(&format!("failed to copy {} to {}: {}",
cratepath.display(),
dst.display(), e));
}
}
// Fix up permissions of the copy, as fs::copy() preserves
// permissions, but the original file may have been installed
// by a package manager and may be read-only.
match fs::metadata(&dst).and_then(|m| {
let mut perms = m.permissions();
perms.set_readonly(false);
fs::set_permissions(&dst, perms)
}) {
Ok(..) => {}
Err(e) => {
sess.fatal(&format!("failed to chmod {} when preparing \
for LTO: {}", dst.display(), e));
}
}
let handler = &sess.diagnostic().handler;
let config = ArchiveConfig {
handler: handler,
dst: dst.clone(),
lib_search_paths: archive_search_paths(sess),
slib_prefix: sess.target.target.options.staticlib_prefix.clone(),
slib_suffix: sess.target.target.options.staticlib_suffix.clone(),
ar_prog: get_ar_prog(sess),
};
let mut archive = Archive::open(config);
archive.remove_file(&format!("{}.o", name));
let files = archive.files();
if files.iter().any(|s| s.ends_with(".o")) {
cmd.link_rlib(&dst);
}
});
} else {
cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath));
}
}
// Same thing as above, but for dynamic crates instead of static crates.
fn add_dynamic_crate(cmd: &mut Linker, sess: &Session, cratepath: &Path) {
// If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format.
assert!(!sess.lto());
// Just need to tell the linker about where the library lives and
// what its name is
if let Some(dir) = cratepath.parent() {
cmd.include_path(&fix_windows_verbatim_for_gcc(dir));
}
let filestem = cratepath.file_stem().unwrap().to_str().unwrap();
cmd.link_dylib(&unlib(&sess.target, filestem));
}
}
// Link in all of our upstream crates' native dependencies. Remember that
// all of these upstream native dependencies are all non-static
// dependencies. We've got two cases then:
//
// 1. The upstream crate is an rlib. In this case we *must* link in the
// native dependency because the rlib is just an archive.
//
// 2. The upstream crate is a dylib. In order to use the dylib, we have to
// have the dependency present on the system somewhere. Thus, we don't
// gain a whole lot from not linking in the dynamic dependency to this
// crate as well.
//
// The use case for this is a little subtle. In theory the native
// dependencies of a crate are purely an implementation detail of the crate
// itself, but the problem arises with generic and inlined functions. If a
// generic function calls a native function, then the generic function must
// be instantiated in the target crate, meaning that the native symbol must
// also be resolved in the target crate.
fn add_upstream_native_libraries(cmd: &mut Linker, sess: &Session) {
// Be sure to use a topological sorting of crates because there may be
// interdependencies between native libraries. When passing -nodefaultlibs,
// for example, almost all native libraries depend on libc, so we have to
// make sure that's all the way at the right (liblibc is near the base of
// the dependency chain).
//
// This passes RequireStatic, but the actual requirement doesn't matter,
// we're just getting an ordering of crate numbers, we're not worried about
// the paths.
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
for (cnum, _) in crates {
let libs = csearch::get_native_libraries(&sess.cstore, cnum);
for &(kind, ref lib) in &libs {
match kind {
cstore::NativeUnknown => cmd.link_dylib(lib),
cstore::NativeFramework => cmd.link_framework(lib),
cstore::NativeStatic => {
sess.bug("statics shouldn't be propagated");
}
}
}
}
}
| 40.411812 | 98 | 0.599771 |
67bf8791692142c6d14e5f9c4c2e8ed4a70f6c0b | 5,318 | #![deny(missing_docs)]
//! Holochain utilities for websocket serving and connecting.
//!
//! To establish an outgoing connection, use [`connect`]
//! which will return a tuple
//! ([`WebsocketSender`], [`WebsocketReceiver`])
//!
//! To open a listening socket, use [`WebsocketListener::bind`]
//! which will give you a [`WebsocketListener`]
//! which is an async Stream whose items resolve to that same tuple (
//! [`WebsocketSender`],
//! [`WebsocketReceiver`]
//! ).
//!
//! If you want to be able to shutdown the stream use [`WebsocketListener::bind_with_handle`]
//! which will give you a tuple ([`ListenerHandle`], [`ListenerStream`]).
//! You can use [`ListenerHandle::close`] to close immediately or
//! [`ListenerHandle::close_on`] to close on a future completing.
//!
//! # Example
//!
//! ```
//! use holochain_serialized_bytes::prelude::*;
//! use holochain_websocket::*;
//!
//! use std::convert::TryInto;
//! use tokio::stream::StreamExt;
//! use url2::prelude::*;
//!
//! #[derive(serde::Serialize, serde::Deserialize, SerializedBytes, Debug)]
//! struct TestMessage(pub String);
//!
//! #[tokio::main]
//! async fn main() {
//! // Create a new server listening for connections
//! let mut server = WebsocketListener::bind(
//! url2!("ws://127.0.0.1:0"),
//! std::sync::Arc::new(WebsocketConfig::default()),
//! )
//! .await
//! .unwrap();
//!
//! // Get the address of the server
//! let binding = server.local_addr().clone();
//!
//! tokio::task::spawn(async move {
//! // Handle new connections
//! while let Some(Ok((_send, mut recv))) = server.next().await {
//! tokio::task::spawn(async move {
//! // Receive a message and echo it back
//! if let Some((msg, resp)) = recv.next().await {
//! // Deserialize the message
//! let msg: TestMessage = msg.try_into().unwrap();
//! // If this message is a request then we can respond
//! if resp.is_request() {
//! let msg = TestMessage(format!("echo: {}", msg.0));
//! resp.respond(msg.try_into().unwrap()).await.unwrap();
//! }
//! }
//! });
//! }
//! });
//!
//! // Connect the client to the server
//! let (mut send, _recv) = connect(binding, std::sync::Arc::new(WebsocketConfig::default()))
//! .await
//! .unwrap();
//!
//! let msg = TestMessage("test".to_string());
//! // Make a request and get the echoed response
//! let rsp: TestMessage = send.request(msg).await.unwrap();
//!
//! assert_eq!("echo: test", &rsp.0,);
//! }
//!
//! ```
//!
use std::io::Error;
use std::io::ErrorKind;
use std::sync::Arc;
use holochain_serialized_bytes::prelude::*;
use stream_cancel::Valve;
use tracing::instrument;
use url2::Url2;
use util::url_to_addr;
use websocket::Websocket;
mod websocket_config;
pub use websocket_config::*;
#[allow(missing_docs)]
mod error;
pub use error::*;
mod websocket_listener;
pub use websocket_listener::*;
mod websocket_sender;
pub use websocket_sender::*;
mod websocket_receiver;
pub use websocket_receiver::*;
mod websocket;
mod util;
#[instrument(skip(config))]
/// Create a new external websocket connection.
pub async fn connect(
url: Url2,
config: Arc<WebsocketConfig>,
) -> WebsocketResult<(WebsocketSender, WebsocketReceiver)> {
let addr = url_to_addr(&url, config.scheme).await?;
let socket = tokio::net::TcpStream::connect(addr).await?;
socket.set_keepalive(Some(std::time::Duration::from_secs(
config.tcp_keepalive_s as u64,
)))?;
let (socket, _) = tokio_tungstenite::client_async_with_config(
url.as_str(),
socket,
Some(config.to_tungstenite()),
)
.await
.map_err(|e| Error::new(ErrorKind::Other, e))?;
tracing::debug!("Client connected");
    // No-op valve because we don't have a listener to shut down the
    // ends when creating a client
let (exit, valve) = Valve::new();
exit.disable();
Websocket::create_ends(config, socket, valve)
}
#[derive(Debug, serde::Serialize, serde::Deserialize, SerializedBytes)]
#[serde(tag = "type")]
/// The messages actually sent over the wire by this library.
/// If you want to implement your own server or client you
/// will need this type or be able to serialize / deserialize it.
pub enum WireMessage {
/// A message without a response.
Signal {
#[serde(with = "serde_bytes")]
/// Actual bytes of the message serialized as [message pack](https://msgpack.org/).
data: Vec<u8>,
},
/// A request that requires a response.
Request {
/// The id of this request.
/// Note ids are recycled once they are used.
id: u64,
#[serde(with = "serde_bytes")]
/// Actual bytes of the message serialized as [message pack](https://msgpack.org/).
data: Vec<u8>,
},
/// The response to a request.
Response {
/// The id of the request that this response is for.
id: u64,
#[serde(with = "serde_bytes")]
/// Actual bytes of the message serialized as [message pack](https://msgpack.org/).
data: Option<Vec<u8>>,
},
}
| 31.844311 | 97 | 0.604551 |
01312181a96bbec4f9fb5425949919794a2ea309 | 487 | use crate::beam::{Beam, BeamIntersect};
pub struct BeamIter {
beam: Beam,
}
impl Beam {
const fn iter(self) -> BeamIter {
BeamIter { beam: self }
}
}
impl Iterator for BeamIter {
type Item = BeamIntersect;
fn next(&mut self) -> Option<Self::Item> {
self.beam.next_intersect()
}
}
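// The IntoIterator impl below lets a `Beam` be consumed directly in a `for`
// loop, e.g. `for intersect in beam { /* handle each BeamIntersect */ }`,
// which is just a convenience over calling `beam.iter()` by hand.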
impl IntoIterator for Beam {
type Item = BeamIntersect;
type IntoIter = BeamIter;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
| 16.793103 | 46 | 0.601643 |
16b95cfc61f09791850a4ad6ba274de440d99d3c | 26 | pub mod vm;
pub mod stack; | 13 | 14 | 0.730769 |
262de24b609817f8ef68f282340c2fae6be83755 | 15,249 | #![deny(missing_docs)]
//! A search API for piston_meta
extern crate piston_meta;
use std::sync::Arc;
use piston_meta::{ MetaData, ParseError, Range };
/// Used to search through meta data.
pub struct Search<'a> {
/// The previous range of search.
/// Used in errors if there is no meta data left.
pub range: Option<Range>,
/// The data to search.
pub data: &'a [Range<MetaData>],
}
impl<'a> Search<'a> {
/// Creates a new search.
pub fn new(data: &'a [Range<MetaData>]) -> Search<'a> {
Search {
data: data,
range: None
}
}
/// Searches anywhere in meta data for a string.
/// Calls closure on the first match.
pub fn for_string<T, F>(
&'a self,
name: &str,
val: &str,
f: F
) -> Result<T, Range<ParseError>>
where F: FnOnce(Search<'a>) -> Result<T, Range<ParseError>>
{
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find string `{}`:`{}`",
name, val))
));
}
for (i, d) in self.data.iter().enumerate() {
match &d.data {
&MetaData::String(ref n, ref v) => {
if &**n == name && &**v == val {
return f(Search {
data: &self.data[i + 1..],
range: Some(d.range())
})
}
}
_ => {}
}
}
Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find string `{}`:`{}`",
name, val))
))
}
/// Searches anywhere in meta data for a f64.
/// Calls closure on the first match.
pub fn for_f64<T, F>(
&'a self,
name: &str,
val: f64,
f: F
) -> Result<T, Range<ParseError>>
where F: FnOnce(Search<'a>) -> Result<T, Range<ParseError>>
{
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find f64 `{}`:`{}`",
name, val))
));
}
for (i, d) in self.data.iter().enumerate() {
match &d.data {
&MetaData::F64(ref n, v) => {
if &**n == name && v == val {
return f(Search {
data: &self.data[i + 1..],
range: Some(d.range())
})
}
}
_ => {}
}
}
Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find f64 `{}`:`{}`",
name, val))
))
}
/// Searches anywhere in meta data for a bool.
/// Calls closure on the first match.
pub fn for_bool<T, F>(
&'a self,
name: &str,
val: bool,
f: F
) -> Result<T, Range<ParseError>>
where F: FnOnce(Search<'a>) -> Result<T, Range<ParseError>>
{
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find bool `{}`:`{}`",
name, val))
));
}
for (i, d) in self.data.iter().enumerate() {
match &d.data {
&MetaData::Bool(ref n, v) => {
if &**n == name && v == val {
return f(Search {
data: &self.data[i + 1..],
range: Some(d.range())
})
}
}
_ => {}
}
}
Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find bool `{}`:`{}`",
name, val))
))
}
/// Searches anywhere in meta data for a node.
/// Calls closure on the first match.
pub fn for_node<T, F>(
&'a self,
name: &str,
f: F
) -> Result<T, Range<ParseError>>
where F: FnOnce(Search<'a>) -> Result<T, Range<ParseError>>
{
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find node `{}`", name))
));
}
for (i, d) in self.data.iter().enumerate() {
match &d.data {
&MetaData::StartNode(ref n) => {
if &**n == name {
return f(Search {
data: &self.data[i + 1..],
range: Some(d.range())
})
}
}
_ => {}
}
}
Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find node `{}`", name))
))
}
/// Searches anywhere in meta data for an end node.
/// Calls closure on the first match.
pub fn for_end_node<T, F>(
&'a self,
name: &str,
f: F
) -> Result<T, Range<ParseError>>
where F: FnOnce(Search<'a>) -> Result<T, Range<ParseError>>
{
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find end node `{}`", name))
));
}
for (i, d) in self.data.iter().enumerate() {
match &d.data {
&MetaData::EndNode(ref n) => {
if &**n == name {
return f(Search {
data: &self.data[i + 1..],
range: Some(d.range())
})
}
}
_ => {}
}
}
Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Could not find end node `{}`", name))
))
}
/// Reads next as bool value.
pub fn bool(&mut self, name: &str) -> Result<bool, Range<ParseError>> {
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Expected bool `{}`", name))
));
}
let range = self.data[0].range();
match &self.data[0].data {
&MetaData::Bool(ref n, v) => {
if &**n == name {
self.data = &self.data[1..];
self.range = Some(range);
Ok(v)
} else {
Err(range.wrap(ParseError::Conversion(
format!("Expected name `{}` found `{}`", name, n))))
}
}
val => {
Err(range.wrap(ParseError::Conversion(
format!("Expected bool `{}`, found `{:?}`", name, val))))
}
}
}
/// Reads next as f64 value.
pub fn f64(&mut self, name: &str) -> Result<f64, Range<ParseError>> {
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Expected f64 `{}`", name))
));
}
let range = self.data[0].range();
match &self.data[0].data {
&MetaData::F64(ref n, v) => {
if &**n == name {
self.data = &self.data[1..];
self.range = Some(range);
Ok(v)
} else {
Err(range.wrap(ParseError::Conversion(
format!("Expected name `{}`, found `{}`", name, n))))
}
}
val => {
Err(range.wrap(ParseError::Conversion(
format!("Expected f64 `{}`, found `{:?}`", name, val))))
}
}
}
/// Reads next as string value.
pub fn string(
&mut self,
name: &str
) -> Result<Arc<String>, Range<ParseError>> {
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Expected string `{}`", name))
));
}
let range = self.data[0].range();
match &self.data[0].data {
&MetaData::String(ref n, ref v) => {
if &**n == name {
self.data = &self.data[1..];
self.range = Some(range);
Ok(v.clone())
} else {
Err(range.wrap(ParseError::Conversion(
format!("Expected name `{}`, found `{}`", name, n))))
}
}
val => {
Err(range.wrap(ParseError::Conversion(
format!("Expected string `{}`, found `{:?}`", name, val))))
}
}
}
/// Reads next as node.
pub fn node(&mut self, name: &str) -> Result<(), Range<ParseError>> {
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Expected node `{}`", name))
));
}
let range = self.data[0].range();
match &self.data[0].data {
&MetaData::StartNode(ref n) => {
if &**n == name {
self.data = &self.data[1..];
self.range = Some(range);
Ok(())
} else {
Err(range.wrap(ParseError::Conversion(
format!("Expected name `{}`, found `{}`", name, n))))
}
}
val => {
Err(range.wrap(ParseError::Conversion(
format!("Expected node `{}`, found `{:?}`", name, val))))
}
}
}
/// Reads next as end node.
pub fn end_node(&mut self, name: &str) -> Result<(), Range<ParseError>> {
if self.data.len() == 0 {
return Err(self.range.unwrap_or(Range::empty(0)).wrap(
ParseError::Conversion(format!("Expected end node `{}`", name))
));
}
let range = self.data[0].range();
match &self.data[0].data {
&MetaData::EndNode(ref n) => {
if &**n == name {
self.data = &self.data[1..];
self.range = Some(range);
Ok(())
} else {
Err(range.wrap(ParseError::Conversion(
format!("Expected name `{}`, found `{}`", name, n))))
}
}
val => {
Err(range.wrap(ParseError::Conversion(
format!("Expected end node `{}`, found `{:?}`", name, val))))
}
}
}
}
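// Minimal usage sketch (mirrors the tests below; the rules source and input
// text are placeholders):
//
//     let rules = syntax(rules_src).unwrap();
//     let mut data = vec![];
//     parse(&rules, text, &mut data).unwrap();
//     let mut s = Search::new(&data);
//     let val = s.f64("val").unwrap();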
#[cfg(test)]
mod tests {
use super::*;
use piston_meta::*;
#[test]
fn search_for_string() {
let text = "a 1 b 2";
let rules = r#"
0 document = .r?([..""!:"name" .w? .$:"val" .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let s = Search::new(&data);
let a = stderr_unwrap(text, s.for_string("name", "a", |mut s| s.f64("val")));
assert_eq!(a, 1.0);
let b = stderr_unwrap(text, s.for_string("name", "b", |mut s| s.f64("val")));
assert_eq!(b, 2.0);
let c = s.for_string("name", "c", |mut s| s.f64("val"));
assert!(c.is_err());
}
#[test]
fn search_for_f64() {
let text = "a 1 b 2";
let rules = r#"
0 document = .r?([..""!:"name" .w? .$:"val" .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let s = Search::new(&data);
let a = stderr_unwrap(text, s.for_f64("val", 1.0, |mut s| s.string("name")));
assert_eq!(&**a, "b");
}
#[test]
fn search_for_bool() {
let text = "a true b false";
let rules = r#"
0 document = .r?([..""!:"name" .w? {"true":"val" "false":!"val"} .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let s = Search::new(&data);
let a = stderr_unwrap(text, s.for_bool("val", true, |mut s| s.string("name")));
assert_eq!(&**a, "b");
}
#[test]
fn search_for_end_node() {
let text = "true false";
let rules = r#"
0 proposition = {"true":"val" "false":!"val"}
0 document = .r?([proposition:"proposition" .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let mut s = Search::new(&data);
stderr_unwrap(text, s.node("proposition"));
assert!(s.for_end_node("proposition", |mut s| {
stderr_unwrap(text, s.node("proposition"));
assert_eq!(s.bool("val"), Ok(false));
stderr_unwrap(text, s.end_node("proposition"));
Ok(())
}).is_ok());
}
#[test]
fn f64() {
let text = "1 2";
let rules = r#"
0 document = .r?([.$:"val" .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let mut s = Search::new(&data);
let res = (s.f64("val").unwrap(), s.f64("val").unwrap());
assert_eq!(res, (1.0, 2.0));
}
#[test]
fn bool() {
let text = "true false";
let rules = r#"
0 document = .r?([{"true":"val" "false":!"val"} .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let mut s = Search::new(&data);
let res = (s.bool("val").unwrap(), s.bool("val").unwrap());
assert_eq!(res, (true, false));
}
#[test]
fn node() {
let text = "true false";
let rules = r#"
0 proposition = {"true":"val" "false":!"val"}
0 document = .r?([proposition:"proposition" .w?])
"#;
let rules = stderr_unwrap(rules, syntax(rules));
let mut data = vec![];
stderr_unwrap(text, parse(&rules, text, &mut data));
let mut s = Search::new(&data);
stderr_unwrap(text, s.node("proposition"));
assert_eq!(s.bool("val"), Ok(true));
stderr_unwrap(text, s.end_node("proposition"));
stderr_unwrap(text, s.node("proposition"));
assert_eq!(s.bool("val"), Ok(false));
stderr_unwrap(text, s.end_node("proposition"));
}
}
| 33.15 | 87 | 0.43793 |
e6c88341bd610093e212f9087ae5a075b741a4ed | 17,715 | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
//! Garbage-collected Storage structures for AST nodes.
use crate::{Node, Path, SourceManager, Visitor};
use juno_support::atom_table::{Atom, AtomTable};
use libc::c_void;
use memoffset::offset_of;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::{
cell::{Cell, UnsafeCell},
pin::Pin,
ptr::NonNull,
sync::atomic::{AtomicU32, Ordering},
};
/// ID which indicates a `StorageEntry` is free.
const FREE_ENTRY: u32 = 0;
/// A single entry in the heap.
#[derive(Debug)]
struct StorageEntry<'ctx> {
/// ID of the context to which this entry belongs.
/// Top bit is used as a mark bit, and flips meaning every time a GC happens.
/// If this field is `0`, then this entry is free.
ctx_id_markbit: Cell<u32>,
/// Refcount of how many [`NodeRc`] point to this node.
/// Entry may only be freed if this number is `0` and no other entries reference this entry
/// directly.
count: Cell<u32>,
/// Actual node stored in this entry.
inner: Node<'ctx>,
}
impl<'ctx> StorageEntry<'ctx> {
unsafe fn from_node<'a>(node: &'a Node<'a>) -> &'a StorageEntry<'a> {
let inner_offset = offset_of!(StorageEntry, inner) as isize;
let inner = node as *const Node<'a>;
&*(inner.offset(-inner_offset) as *const StorageEntry<'a>)
}
#[inline]
fn set_markbit(&self, bit: bool) {
let id = self.ctx_id_markbit.get();
if bit {
self.ctx_id_markbit.set(id | 1 << 31);
} else {
self.ctx_id_markbit.set(id & !(1 << 31));
}
}
#[inline]
fn markbit(&self) -> bool {
(self.ctx_id_markbit.get() >> 31) != 0
}
fn is_free(&self) -> bool {
self.ctx_id_markbit.get() == FREE_ENTRY
}
}
/// Structure pointed to by `Context` and `NodeRc` to facilitate panicking if there are
/// outstanding `NodeRc`s when the `Context` is dropped.
#[derive(Debug)]
struct NodeRcCounter {
/// ID of the context owning the counter.
ctx_id: u32,
/// Number of [`NodeRc`]s allocated in this `Context`.
/// Must be `0` when `Context` is dropped.
count: Cell<usize>,
}
/// The storage for AST nodes.
///
/// Can be used to allocate and free nodes.
/// Nodes allocated in one `Context` must not be referenced by another `Context`'s AST.
#[derive(Debug)]
pub struct Context<'ast> {
/// Unique number used to identify this context.
id: u32,
/// List of all the nodes stored in this context.
/// Each element is a "chunk" of nodes.
/// None of the chunks are ever resized after allocation.
nodes: UnsafeCell<Vec<Vec<StorageEntry<'ast>>>>,
/// First element of the free list if there is one.
free: UnsafeCell<Vec<NonNull<StorageEntry<'ast>>>>,
/// `NodeRc` count stored in a `Box` to ensure that `NodeRc`s can also point to it
/// and decrement the count on drop.
/// Placed separately to guard against `Context` moving, though relying on that behavior is
/// technically unsafe.
noderc_count: Pin<Box<NodeRcCounter>>,
/// Capacity at which to allocate the next chunk.
/// Doubles every chunk until reaching [`MAX_CHUNK_CAPACITY`].
next_chunk_capacity: Cell<usize>,
/// All identifiers are kept here.
atom_tab: AtomTable,
/// Source manager of this context.
source_mgr: SourceManager,
/// `true` if `1` indicates an entry is marked, `false` if `0` indicates an entry is marked.
/// Flipped every time GC occurs.
markbit_marked: bool,
/// Whether strict mode has been forced.
strict_mode: bool,
/// Whether to warn about undefined variables in strict mode functions.
pub warn_undefined: bool,
}
const MIN_CHUNK_CAPACITY: usize = 1 << 10;
const MAX_CHUNK_CAPACITY: usize = MIN_CHUNK_CAPACITY * (1 << 10);
impl Default for Context<'_> {
fn default() -> Self {
Self::new()
}
}
impl<'ast> Context<'ast> {
/// Allocate a new `Context` with a new ID.
pub fn new() -> Self {
static NEXT_ID: AtomicU32 = AtomicU32::new(FREE_ENTRY + 1);
let id = NEXT_ID.fetch_add(1, Ordering::Relaxed);
let result = Self {
id,
nodes: Default::default(),
free: Default::default(),
noderc_count: Pin::new(Box::new(NodeRcCounter {
ctx_id: id,
count: Cell::new(0),
})),
atom_tab: Default::default(),
source_mgr: Default::default(),
next_chunk_capacity: Cell::new(MIN_CHUNK_CAPACITY),
markbit_marked: true,
strict_mode: false,
warn_undefined: false,
};
result.new_chunk();
result
}
/// Allocate a new `Node` in this `Context`.
pub(crate) fn alloc<'s>(&'s self, n: Node<'_>) -> &'s Node<'s> {
let free = unsafe { &mut *self.free.get() };
let nodes: &mut Vec<Vec<StorageEntry>> = unsafe { &mut *self.nodes.get() };
// Transmutation is safe here, because `Node`s can only be allocated through
// this path and only one GCLock can be made available at a time per thread.
let node: Node<'ast> = unsafe { std::mem::transmute(n) };
let entry = if let Some(mut entry) = free.pop() {
let entry = unsafe { entry.as_mut() };
debug_assert!(
entry.ctx_id_markbit.get() == FREE_ENTRY,
"Incorrect context ID"
);
debug_assert!(entry.count.get() == 0, "Freed entry has pointers to it");
entry.ctx_id_markbit.set(self.id);
entry.set_markbit(!self.markbit_marked);
entry.inner = node;
entry
} else {
let chunk = nodes.last().unwrap();
if chunk.len() >= chunk.capacity() {
self.new_chunk();
}
let chunk = nodes.last_mut().unwrap();
let entry = StorageEntry {
ctx_id_markbit: Cell::new(self.id),
count: Cell::new(0),
inner: node,
};
entry.set_markbit(!self.markbit_marked);
chunk.push(entry);
chunk.last().unwrap()
};
&entry.inner
}
/// Allocate a new chunk in the node storage.
fn new_chunk(&self) {
let nodes = unsafe { &mut *self.nodes.get() };
let capacity = self.next_chunk_capacity.get();
nodes.push(Vec::with_capacity(capacity));
// Double the capacity if there's room.
if capacity < MAX_CHUNK_CAPACITY {
self.next_chunk_capacity.set(capacity * 2);
}
}
/// Return the atom table.
pub fn atom_table(&self) -> &AtomTable {
&self.atom_tab
}
/// Add a string to the identifier table.
#[inline]
pub fn atom<V: Into<String> + AsRef<str>>(&self, value: V) -> Atom {
self.atom_tab.atom(value)
}
/// Obtain the contents of an atom from the atom table.
#[inline]
pub fn str(&self, index: Atom) -> &str {
self.atom_tab.str(index)
}
/// Return an immutable reference to SourceManager
pub fn sm(&self) -> &SourceManager {
&self.source_mgr
}
/// Return a mutable reference to SourceManager
pub fn sm_mut(&mut self) -> &mut SourceManager {
&mut self.source_mgr
}
/// Return true if strict mode has been forced globally.
pub fn strict_mode(&self) -> bool {
self.strict_mode
}
/// Enable strict mode. Note that it cannot be unset.
pub fn enable_strict_mode(&mut self) {
self.strict_mode = true;
}
pub fn gc(&mut self) {
let nodes = unsafe { &mut *self.nodes.get() };
let free = unsafe { &mut *self.free.get() };
// Begin by collecting all the roots: entries with non-zero refcount.
let mut roots: Vec<&StorageEntry> = vec![];
for chunk in nodes.iter() {
for entry in chunk.iter() {
if entry.is_free() {
continue;
}
debug_assert!(
entry.markbit() != self.markbit_marked,
"Entry marked before start of GC: {:?}\nentry.markbit()={}\nmarkbit_marked={}",
&entry,
entry.markbit(),
self.markbit_marked,
);
if entry.count.get() > 0 {
roots.push(entry);
}
}
}
struct Marker {
markbit_marked: bool,
}
impl<'gc> Visitor<'gc> for Marker {
fn call(&mut self, gc: &'gc GCLock, node: &'gc Node<'gc>, _path: Option<Path<'gc>>) {
let entry = unsafe { StorageEntry::from_node(node) };
if entry.markbit() == self.markbit_marked {
// Stop visiting early if we've already marked this part,
// because we must have also marked all the children.
return;
}
entry.set_markbit(self.markbit_marked);
node.visit_children(gc, self);
}
}
// Use a visitor to mark every node reachable from roots.
let mut marker = Marker {
markbit_marked: self.markbit_marked,
};
{
let gc = GCLock::new(self);
for root in &roots {
root.inner.visit(&gc, &mut marker, None);
}
}
for chunk in nodes.iter_mut() {
for entry in chunk.iter_mut() {
if entry.is_free() {
// Skip free entries.
continue;
}
if entry.count.get() > 0 {
// Keep referenced entries alive.
continue;
}
if entry.markbit() == self.markbit_marked {
// Keep marked entries alive.
continue;
}
// Passed all checks, this entry is free.
entry.ctx_id_markbit.set(FREE_ENTRY);
free.push(unsafe { NonNull::new_unchecked(entry as *mut StorageEntry) });
}
}
self.markbit_marked = !self.markbit_marked;
}
}
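// Rough usage sketch (only names defined in this file; how `Node`s are built
// is left out because that lives elsewhere in the crate):
//
//     let mut ctx = Context::new();
//     {
//         let gc = GCLock::new(&mut ctx);
//         // ... allocate nodes through the lock, keep roots alive with
//         // NodeRc::from_node(&gc, node) ...
//     } // GCLock dropped here; at most one may exist per thread
//     ctx.gc(); // entries unreachable from NodeRc roots are freed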
impl Drop for Context<'_> {
/// Ensure that there are no outstanding `NodeRc`s into this `Context` which will be
/// invalidated once it is dropped.
///
/// # Panics
///
/// Will panic if there are any `NodeRc`s stored when this `Context` is dropped.
fn drop(&mut self) {
if self.noderc_count.count.get() > 0 {
#[cfg(debug_assertions)]
{
// In debug mode, provide more information on which node was leaked.
let nodes = unsafe { &*self.nodes.get() };
for chunk in nodes {
for entry in chunk {
assert!(
entry.count.get() == 0,
"NodeRc must not outlive Context: {:#?}\n",
&entry.inner
);
}
}
}
// In release mode, just panic immediately.
panic!("NodeRc must not outlive Context");
}
}
}
thread_local! {
/// Whether there exists a `GCLock` on the current thread.
static GCLOCK_IN_USE: Cell<bool> = Cell::new(false);
}
/// A way to view the [`Context`].
///
/// Provides the user the ability to create new nodes and dereference [`NodeRc`].
///
/// **At most one is allowed to be active in any thread at any time.**
/// This is to ensure no `&Node` can be shared between `Context`s.
pub struct GCLock<'ast, 'ctx> {
ctx: &'ctx mut Context<'ast>,
}
impl Drop for GCLock<'_, '_> {
fn drop(&mut self) {
GCLOCK_IN_USE.with(|flag| {
flag.set(false);
});
}
}
impl<'ast, 'ctx> GCLock<'ast, 'ctx> {
/// # Panics
///
/// Will panic if there is already an active `GCLock` on this thread.
pub fn new(ctx: &'ctx mut Context<'ast>) -> Self {
GCLOCK_IN_USE.with(|flag| {
if flag.get() {
panic!("Attempt to create multiple GCLocks in a single thread");
}
flag.set(true);
});
GCLock { ctx }
}
/// Allocate a node in the `ctx`.
#[inline]
pub(crate) fn alloc(&self, n: Node) -> &Node {
self.ctx.alloc(n)
}
/// Return a reference to the owning Context.
pub fn ctx(&self) -> &Context<'ast> {
self.ctx
}
/// Add a string to the identifier table.
#[inline]
pub fn atom<V: Into<String> + AsRef<str>>(&self, value: V) -> Atom {
self.ctx.atom(value)
}
/// Obtain the contents of an atom from the atom table.
#[inline]
pub fn str(&self, index: Atom) -> &str {
self.ctx.str(index)
}
/// Return an immutable reference to SourceManager.
#[inline]
pub fn sm(&self) -> &SourceManager {
self.ctx.sm()
}
/// Return a mutable reference to SourceManager.
#[inline]
pub fn sm_mut(&mut self) -> &mut SourceManager {
self.ctx.sm_mut()
}
}
/// A wrapper around Node&, with "shallow" hashing and equality, suitable for
/// hash tables.
#[derive(Debug, Copy, Clone)]
pub struct NodePtr<'gc>(pub &'gc Node<'gc>);
impl<'gc> NodePtr<'gc> {
pub fn from_node(node: &'gc Node<'gc>) -> Self {
Self(node)
}
}
impl<'gc> PartialEq for NodePtr<'gc> {
fn eq(&self, other: &Self) -> bool {
std::ptr::eq(self.0, other.0)
}
}
impl Eq for NodePtr<'_> {}
impl Hash for NodePtr<'_> {
fn hash<H: Hasher>(&self, state: &mut H) {
(self.0 as *const Node).hash(state)
}
}
impl<'gc> Deref for NodePtr<'gc> {
type Target = Node<'gc>;
fn deref(&self) -> &'gc Self::Target {
self.0
}
}
impl<'gc> AsRef<Node<'gc>> for NodePtr<'gc> {
fn as_ref(&self) -> &'gc Node<'gc> {
self.0
}
}
impl<'gc> From<&'gc Node<'gc>> for NodePtr<'gc> {
fn from(node: &'gc Node<'gc>) -> Self {
NodePtr(node)
}
}
/// Reference counted pointer to a [`Node`] in any [`Context`].
///
/// It can be used to keep references to `Node`s outside of the lifetime of a [`GCLock`],
/// but the only way to dereference and inspect the `Node` is to use a `GCLock`.
#[derive(Debug, Eq)]
pub struct NodeRc {
/// The `NodeRcCounter` counting for the `Context` to which this belongs.
counter: NonNull<NodeRcCounter>,
/// Pointer to the `StorageEntry` containing the `Node`.
/// Stored as `c_void` to avoid specifying lifetimes, as dereferencing is checked manually.
entry: NonNull<c_void>,
}
impl Hash for NodeRc {
fn hash<H: Hasher>(&self, state: &mut H) {
self.entry.hash(state)
}
}
impl PartialEq for NodeRc {
fn eq(&self, other: &Self) -> bool {
self.entry == other.entry
}
}
impl Drop for NodeRc {
fn drop(&mut self) {
let entry = unsafe { self.entry().as_mut() };
let c = entry.count.get();
debug_assert!(c > 0);
entry.count.set(c - 1);
let noderc_count = unsafe { self.counter.as_mut() };
let c = noderc_count.count.get();
debug_assert!(c > 0);
noderc_count.count.set(c - 1);
}
}
impl Clone for NodeRc {
/// Cloning a `NodeRc` increments refcounts on the entry and the context.
fn clone(&self) -> Self {
let mut cloned = NodeRc { ..*self };
let entry = unsafe { cloned.entry().as_mut() };
let c = entry.count.get();
entry.count.set(c + 1);
let noderc_count = unsafe { cloned.counter.as_mut() };
let c = noderc_count.count.get();
noderc_count.count.set(c + 1);
cloned
}
}
impl NodeRc {
/// Turn a node reference into a `NodeRc` for storage outside `GCLock`.
pub fn from_node<'gc>(gc: &'gc GCLock, node: &'gc Node<'gc>) -> NodeRc {
let inner_offset = offset_of!(StorageEntry, inner) as isize;
let inner = node as *const Node<'gc>;
unsafe {
let entry: &mut StorageEntry = &mut *(inner.offset(-inner_offset) as *mut StorageEntry);
Self::from_entry(gc, entry)
}
}
/// Return the actual `Node` that `self` points to.
///
/// # Panics
///
/// Will panic if `gc` is not for the same context as this `NodeRc` was created in.
pub fn node<'gc>(&'_ self, gc: &'gc GCLock<'_, '_>) -> &'gc Node {
unsafe {
assert_eq!(
self.counter.as_ref().ctx_id,
gc.ctx.id,
"Attempt to derefence NodeRc allocated context {} in context {}",
self.counter.as_ref().ctx_id,
gc.ctx.id
);
&self.entry().as_ref().inner
}
}
/// Get the pointer to the `StorageEntry`.
unsafe fn entry(&self) -> NonNull<StorageEntry> {
let outer = self.entry.as_ptr() as *mut StorageEntry;
NonNull::new_unchecked(outer)
}
unsafe fn from_entry(gc: &GCLock, entry: &StorageEntry<'_>) -> NodeRc {
let c = entry.count.get();
entry.count.set(c + 1);
let c = gc.ctx.noderc_count.count.get();
gc.ctx.noderc_count.count.set(c + 1);
NodeRc {
counter: NonNull::new_unchecked(gc.ctx.noderc_count.as_ref().get_ref()
as *const NodeRcCounter
as *mut NodeRcCounter),
entry: NonNull::new_unchecked(entry as *const StorageEntry as *mut c_void),
}
}
}
| 30.91623 | 100 | 0.556026 |
ebb9ef4116f27fcca12f0471147be32ed1369e6f | 165 | pub mod image;
mod process;
pub mod resolution;
pub mod server;
pub use process::start_lol;
pub use process::Process;
mod lol_config;
pub use lol_config::LolConfig;
| 18.333333 | 30 | 0.781818 |
03d993096663b8a6b5bcf6e4e819cb5aaaf6d281 | 1,438 | // Copyright (c) The cargo-guppy Contributors
// SPDX-License-Identifier: MIT OR Apache-2.0
//! Print out crates in a workspace in topological order.
//!
//! The into_iter_ids and into_iter_metadatas iterators return packages in topological order. Note
//! that into_iter_links returns links in "link order" -- see its documentation for more.
use guppy::graph::{DependencyDirection, PackageGraph};
use guppy::Error;
fn main() -> Result<(), Error> {
// `guppy` accepts `cargo metadata` JSON output. Use a pre-existing fixture for these examples.
let fixture = include_str!("../fixtures/metadata_libra.json");
let package_graph = PackageGraph::from_json(fixture)?;
// Non-workspace packages cannot depend on packages within the workspace, so the reverse
// transitive deps of workspace packages are exactly the set of workspace packages.
let select = package_graph.select_reverse(package_graph.workspace().member_ids())?;
// Here we pass in `Forward` -- by default, select_reverse will cause packages
// to be returned in reverse topo order.
for package in select.into_iter_metadatas(Some(DependencyDirection::Forward)) {
// All selected packages are in the workspace.
let workspace_path = package
.workspace_path()
.expect("packages in workspace should have workspace path");
println!("{}: {:?}", package.name(), workspace_path);
}
Ok(())
}
| 43.575758 | 99 | 0.710709 |
c1506bf4724e5471b88dfd6e6980a5f9af8c6260 | 926 | /*
* YNAB API Endpoints
*
* Our API uses a REST based design, leverages the JSON data format, and relies upon HTTPS for transport. We respond with meaningful HTTP response codes and if an error occurs, we include error details in the response body. API Documentation is at https://api.youneedabudget.com
*
* The version of the OpenAPI document: 1.0.0
*
* Generated by: https://openapi-generator.tech
*/
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct BudgetSettings {
#[serde(rename = "date_format")]
pub date_format: crate::models::DateFormat,
#[serde(rename = "currency_format")]
pub currency_format: crate::models::CurrencyFormat,
}
impl BudgetSettings {
pub fn new(date_format: crate::models::DateFormat, currency_format: crate::models::CurrencyFormat) -> BudgetSettings {
BudgetSettings {
date_format,
currency_format,
}
}
}
| 28.9375 | 279 | 0.700864 |
2f24e8a5487666c22f06024e311612888bf2e4c7 | 4,057 | use core::marker::PhantomData;
use spin::{Mutex, MutexGuard};
pub mod x86_io {
/// Read a single byte from the port.
pub unsafe fn inb(port: u16) -> u8 {
let result: u8;
asm!("inb %dx, %al" : "={al}"(result) : "{dx}"(port) :: "volatile");
result
}
/// Write a single byte to the port.
pub unsafe fn outb(value: u8, port: u16) {
asm!("outb %al, %dx" :: "{dx}"(port), "{al}"(value) :: "volatile");
}
/// Read a word from the port.
pub unsafe fn inw(port: u16) -> u16 {
let result: u16;
asm!("inw %dx, %ax" : "={ax}"(result) : "{dx}"(port) :: "volatile");
result
}
/// Write a word to the port.
pub unsafe fn outw(value: u16, port: u16) {
asm!("outw %ax, %dx" :: "{dx}"(port), "{ax}"(value) :: "volatile");
}
/// Read a dword from `port`.
pub unsafe fn inl(port: u16) -> u32 {
let result: u32;
asm!("inl %dx, %eax" : "={eax}"(result) : "{dx}"(port) :: "volatile");
result
}
/// Write a dword to the `port`.
pub unsafe fn outl(value: u32, port: u16) {
asm!("outl %eax, %dx" :: "{dx}"(port), "{eax}"(value) :: "volatile");
}
}
use self::x86_io::{inb, inl, inw, outb, outl, outw};
/// Nice little type that allows us to specify the size of the value read without using inb
/// directly.
pub trait InOut {
unsafe fn port_in(port: u16) -> Self;
unsafe fn port_out(port: u16, value: Self);
}
impl InOut for u8 {
unsafe fn port_in(port: u16) -> u8 {
inb(port)
}
unsafe fn port_out(port: u16, value: u8) {
outb(value, port);
}
}
impl InOut for u16 {
unsafe fn port_in(port: u16) -> u16 {
inw(port)
}
unsafe fn port_out(port: u16, value: u16) {
outw(value, port);
}
}
impl InOut for u32 {
unsafe fn port_in(port: u16) -> u32 {
inl(port)
}
unsafe fn port_out(port: u16, value: u32) {
outl(value, port);
}
}
/// An `InOut`-sized port. This could be any of the types implementing `InOut`.
#[derive(Debug)]
pub struct Port<T: InOut> {
/// Port address.
port: u16,
/// Zero-byte placeholder. This is only here so that we can have a
/// type parameter `T` without a compiler error.
phantom: PhantomData<T>,
}
impl<T: InOut> Port<T> {
/// Create a port which can handle values of `T` size.
pub const unsafe fn new(port: u16) -> Port<T> {
Port {
port,
phantom: PhantomData,
}
}
/// Read a value from `self.port`.
pub fn read(&mut self) -> T {
unsafe { T::port_in(self.port) }
}
/// Write a value to `self.port`.
pub fn write(&mut self, value: T) {
unsafe { T::port_out(self.port, value); }
}
}
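// Illustrative sketch (not part of the original module): polling the standard
// PC keyboard controller with the `Port` wrapper above. The port numbers are
// the conventional 0x64 (status) and 0x60 (data); the function itself is
// hypothetical.
#[allow(dead_code)]
fn read_scancode_example() -> u8 {
    // Constructing a port is unsafe because an arbitrary I/O address could
    // alias hardware this code does not own.
    let mut status: Port<u8> = unsafe { Port::new(0x64) };
    let mut data: Port<u8> = unsafe { Port::new(0x60) };
    // Bit 0 of the status register is set once the output buffer holds a byte.
    while (status.read() & 1) == 0 {}
    data.read()
}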
/// An `InOut` sized port that is synchronized using a spinlock. See [Port]
pub struct SynchronizedPort<T: InOut> {
inner: Mutex<Port<T>>,
}
impl<'a, T: InOut> SynchronizedPort<T> {
    /// Create a port which can handle values of `T` size.
pub const unsafe fn new(port: u16) -> SynchronizedPort<T> {
SynchronizedPort {
inner: Mutex::new(Port::new(port))
}
}
/// Read a value from `self.port`. Synchronized over context of this read.
pub fn read(&self) -> T {
self.inner.lock().read()
}
/// Write a value to `self.port`. Synchronized over context of this write.
#[allow(dead_code)] // Part of API
pub fn write(&self, value: T) {
self.inner.lock().write(value)
}
/// Operates a closure on the synchronized port. Synchronized over the whole context of the
/// closure.
pub fn with_lock<R, F: FnOnce(MutexGuard<'a, Port<T>>) -> R>(&'a self, f: F) -> R {
f(self.inner.lock())
}
/// Locks the port and returns a mutex guard over the port
pub fn lock(&'a self) -> MutexGuard<'a, Port<T>> {
self.inner.lock()
}
} | 28.370629 | 96 | 0.539068 |
0e551ac964795c0f909496db4ddeaa73f462c490 | 11,014 | extern crate hamcrest;
extern crate cargo;
use std::collections::HashMap;
use hamcrest::{assert_that, equal_to, contains};
use cargo::core::source::{SourceId, GitReference};
use cargo::core::dependency::Kind::{self, Development};
use cargo::core::{Dependency, PackageId, Summary, Registry};
use cargo::util::{CargoResult, ToUrl};
use cargo::core::resolver::{self, Method};
fn resolve<R: Registry>(pkg: PackageId, deps: Vec<Dependency>,
registry: &mut R)
-> CargoResult<Vec<PackageId>> {
let summary = Summary::new(pkg, deps, HashMap::new()).unwrap();
let method = Method::Everything;
Ok(try!(resolver::resolve(&summary, &method, registry)).iter().map(|p| {
p.clone()
}).collect())
}
trait ToDep {
fn to_dep(self) -> Dependency;
}
impl ToDep for &'static str {
fn to_dep(self) -> Dependency {
let url = "http://example.com".to_url().unwrap();
let source_id = SourceId::for_registry(&url);
Dependency::parse(self, Some("1.0.0"), &source_id).unwrap()
}
}
impl ToDep for Dependency {
fn to_dep(self) -> Dependency {
self
}
}
trait ToPkgId {
fn to_pkgid(&self) -> PackageId;
}
impl ToPkgId for &'static str {
fn to_pkgid(&self) -> PackageId {
PackageId::new(*self, "1.0.0", ®istry_loc()).unwrap()
}
}
impl ToPkgId for (&'static str, &'static str) {
fn to_pkgid(&self) -> PackageId {
let (name, vers) = *self;
PackageId::new(name, vers, ®istry_loc()).unwrap()
}
}
macro_rules! pkg {
($pkgid:expr => [$($deps:expr),+]) => ({
let d: Vec<Dependency> = vec![$($deps.to_dep()),+];
Summary::new($pkgid.to_pkgid(), d, HashMap::new()).unwrap()
});
($pkgid:expr) => (
Summary::new($pkgid.to_pkgid(), Vec::new(), HashMap::new()).unwrap()
)
}
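// e.g. `pkg!("foo" => ["bar", dep_req("baz", "1.0")])` builds a Summary for
// `foo` 1.0.0 depending on `bar` and `baz`, while `pkg!(("foo", "1.0.1"))`
// builds a dependency-free Summary at an explicit version.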
fn registry_loc() -> SourceId {
let remote = "http://example.com".to_url().unwrap();
SourceId::for_registry(&remote)
}
fn pkg(name: &str) -> Summary {
Summary::new(pkg_id(name), Vec::new(), HashMap::new()).unwrap()
}
fn pkg_id(name: &str) -> PackageId {
PackageId::new(name, "1.0.0", ®istry_loc()).unwrap()
}
fn pkg_id_loc(name: &str, loc: &str) -> PackageId {
let remote = loc.to_url();
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&remote.unwrap(), master);
PackageId::new(name, "1.0.0", &source_id).unwrap()
}
fn pkg_loc(name: &str, loc: &str) -> Summary {
Summary::new(pkg_id_loc(name, loc), Vec::new(), HashMap::new()).unwrap()
}
fn dep(name: &str) -> Dependency { dep_req(name, "1.0.0") }
fn dep_req(name: &str, req: &str) -> Dependency {
let url = "http://example.com".to_url().unwrap();
let source_id = SourceId::for_registry(&url);
Dependency::parse(name, Some(req), &source_id).unwrap()
}
fn dep_loc(name: &str, location: &str) -> Dependency {
let url = location.to_url().unwrap();
let master = GitReference::Branch("master".to_string());
let source_id = SourceId::for_git(&url, master);
Dependency::parse(name, Some("1.0.0"), &source_id).unwrap()
}
fn dep_kind(name: &str, kind: Kind) -> Dependency {
dep(name).clone_inner().set_kind(kind).into_dependency()
}
fn registry(pkgs: Vec<Summary>) -> Vec<Summary> {
pkgs
}
fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {
names.iter().map(|name| name.to_pkgid()).collect()
}
fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {
names.iter()
.map(|&(name, loc)| pkg_id_loc(name, loc)).collect()
}
#[test]
fn test_resolving_empty_dependency_list() {
let res = resolve(pkg_id("root"), Vec::new(),
&mut registry(vec!())).unwrap();
assert_that(&res, equal_to(&names(&["root"])));
}
#[test]
fn test_resolving_only_package() {
let mut reg = registry(vec!(pkg("foo")));
let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg);
assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly());
}
#[test]
fn test_resolving_one_dep() {
let mut reg = registry(vec!(pkg("foo"), pkg("bar")));
let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg);
assert_that(&res.unwrap(), contains(names(&["root", "foo"])).exactly());
}
#[test]
fn test_resolving_multiple_deps() {
let mut reg = registry(vec!(pkg!("foo"), pkg!("bar"), pkg!("baz")));
let res = resolve(pkg_id("root"), vec![dep("foo"), dep("baz")],
&mut reg).unwrap();
assert_that(&res, contains(names(&["root", "foo", "baz"])).exactly());
}
#[test]
fn test_resolving_transitive_deps() {
let mut reg = registry(vec!(pkg!("foo"), pkg!("bar" => ["foo"])));
let res = resolve(pkg_id("root"), vec![dep("bar")], &mut reg).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar"])));
}
#[test]
fn test_resolving_common_transitive_deps() {
let mut reg = registry(vec!(pkg!("foo" => ["bar"]), pkg!("bar")));
let res = resolve(pkg_id("root"), vec![dep("foo"), dep("bar")],
&mut reg).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar"])));
}
#[test]
fn test_resolving_with_same_name() {
let list = vec![pkg_loc("foo", "http://first.example.com"),
pkg_loc("bar", "http://second.example.com")];
let mut reg = registry(list);
let res = resolve(pkg_id("root"),
vec![dep_loc("foo", "http://first.example.com"),
dep_loc("bar", "http://second.example.com")],
&mut reg);
let mut names = loc_names(&[("foo", "http://first.example.com"),
("bar", "http://second.example.com")]);
names.push(pkg_id("root"));
assert_that(&res.unwrap(), contains(names).exactly());
}
#[test]
fn test_resolving_with_dev_deps() {
let mut reg = registry(vec!(
pkg!("foo" => ["bar", dep_kind("baz", Development)]),
pkg!("baz" => ["bat", dep_kind("bam", Development)]),
pkg!("bar"),
pkg!("bat")
));
let res = resolve(pkg_id("root"),
vec![dep("foo"), dep_kind("baz", Development)],
&mut reg).unwrap();
assert_that(&res, contains(names(&["root", "foo", "bar", "baz"])));
}
#[test]
fn resolving_with_many_versions() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.1")),
pkg!(("foo", "1.0.2")),
));
let res = resolve(pkg_id("root"), vec![dep("foo")], &mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.2")])));
}
#[test]
fn resolving_with_specific_version() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.1")),
pkg!(("foo", "1.0.2")),
));
let res = resolve(pkg_id("root"), vec![dep_req("foo", "=1.0.1")],
&mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.1")])));
}
#[test]
fn resolving_incompat_versions() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.1")),
pkg!(("foo", "1.0.2")),
pkg!("bar" => [dep_req("foo", "=1.0.2")]),
));
assert!(resolve(pkg_id("root"), vec![
dep_req("foo", "=1.0.1"),
dep("bar"),
], &mut reg).is_err());
}
#[test]
fn resolving_backtrack() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.2") => [dep("bar")]),
pkg!(("foo", "1.0.1") => [dep("baz")]),
pkg!("bar" => [dep_req("foo", "=2.0.2")]),
pkg!("baz"),
));
let res = resolve(pkg_id("root"), vec![
dep_req("foo", "^1"),
], &mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.1"),
("baz", "1.0.0")])));
}
#[test]
fn resolving_allows_multiple_compatible_versions() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.0")),
pkg!(("foo", "2.0.0")),
pkg!(("foo", "0.1.0")),
pkg!(("foo", "0.2.0")),
pkg!("bar" => ["d1", "d2", "d3", "d4"]),
pkg!("d1" => [dep_req("foo", "1")]),
pkg!("d2" => [dep_req("foo", "2")]),
pkg!("d3" => [dep_req("foo", "0.1")]),
pkg!("d4" => [dep_req("foo", "0.2")]),
));
let res = resolve(pkg_id("root"), vec![
dep("bar"),
], &mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.0"),
("foo", "2.0.0"),
("foo", "0.1.0"),
("foo", "0.2.0"),
("d1", "1.0.0"),
("d2", "1.0.0"),
("d3", "1.0.0"),
("d4", "1.0.0"),
("bar", "1.0.0")])));
}
#[test]
fn resolving_with_deep_backtracking() {
let mut reg = registry(vec!(
pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]),
pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]),
pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"),
dep_req("other", "1")]),
pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]),
pkg!(("baz", "1.0.2") => [dep_req("other", "2")]),
pkg!(("baz", "1.0.1")),
pkg!(("dep_req", "1.0.0")),
pkg!(("dep_req", "2.0.0")),
));
let res = resolve(pkg_id("root"), vec![
dep_req("foo", "1"),
], &mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.0"),
("bar", "2.0.0"),
("baz", "1.0.1")])));
}
#[test]
fn resolving_but_no_exists() {
let mut reg = registry(vec!(
));
let res = resolve(pkg_id("root"), vec![
dep_req("foo", "1"),
], &mut reg);
assert!(res.is_err());
assert_eq!(res.err().unwrap().to_string(), "\
no matching package named `foo` found (required by `root`)
location searched: registry http://example.com/
version required: ^1\
");
}
#[test]
fn resolving_cycle() {
let mut reg = registry(vec!(
pkg!("foo" => ["foo"]),
));
let _ = resolve(pkg_id("root"), vec![
dep_req("foo", "1"),
], &mut reg);
}
#[test]
fn hard_equality() {
extern crate env_logger;
let mut reg = registry(vec!(
pkg!(("foo", "1.0.1")),
pkg!(("foo", "1.0.0")),
pkg!(("bar", "1.0.0") => [dep_req("foo", "1.0.0")]),
));
let res = resolve(pkg_id("root"), vec![
dep_req("bar", "1"),
dep_req("foo", "=1.0.0"),
], &mut reg).unwrap();
assert_that(&res, contains(names(&[("root", "1.0.0"),
("foo", "1.0.0"),
("bar", "1.0.0")])));
}
| 29.370667 | 76 | 0.498184 |
d9e83550b8eaeebfc2bc2ff62a4d3c823f9f9416 | 28,990 | //#![crate_name = "doc"]
use plexrbac::persistence::models::*;
use plexrbac::common::Constants;
use chrono::{NaiveDateTime, Utc};
use std::collections::HashMap;
//////////////////////////////////////////////////////////////////////////////////////////////
///
/// This module defines the common domain model
///
//////////////////////////////////////////////////////////////////////////////////////////////
/// SecurityRealm defines abstraction for security realm that encompasses roles/claims
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SecurityRealm {
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
}
impl SecurityRealm {
/// Creates instance from persistent realm
pub fn from(realm: &PSecurityRealm) -> SecurityRealm {
SecurityRealm::new(realm.id.as_str(), realm.description.clone())
}
/// Creates instance of persistent realm
pub fn to(&self) -> PSecurityRealm {
PSecurityRealm::new(self.id.as_str(), self.description.clone())
}
/// Creates new instance of realm
pub fn new(id: &str, description: Option<String>) -> SecurityRealm {
SecurityRealm{
id: id.to_string(),
description: description.clone()
}
}
}
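// Like `SecurityRealm` above, every domain type in this module mirrors a
// persistent (`P*`) row: `from` lifts the stored row into the in-memory model
// and `to` flattens it back for persistence.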
impl std::fmt::Display for SecurityRealm {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.id)
}
}
/// Organization represents org that principal users belong to
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Organization {
#[serde(skip_deserializing)]
pub id: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub parent_id: Option<String>,
pub name: String,
pub url: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(skip_serializing, skip_deserializing)]
pub groups: HashMap<String, Group>,
#[serde(skip_serializing, skip_deserializing)]
pub roles: HashMap<String, Role>,
#[serde(skip_serializing, skip_deserializing)]
pub claims: Vec<ClaimClaimable>,
#[serde(skip_serializing, skip_deserializing)]
pub resources: Vec<Resource>,
#[serde(skip_serializing, skip_deserializing)]
pub license_policy: Option<LicensePolicy>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(skip_deserializing)]
pub created_by: Option<String>,
#[serde(skip_deserializing)]
pub created_at: Option<NaiveDateTime>,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(skip_deserializing)]
pub updated_by: Option<String>,
#[serde(skip_deserializing)]
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Organization {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut buf = String::from("");
for (_, group) in &self.groups {
buf.push_str(format!("\tgroup: {}\n", group).as_str());
}
for (_, role) in &self.roles {
buf.push_str(format!("{}", role).as_str());
}
for claim in &self.claims {
buf.push_str(format!("{}", claim).as_str());
}
write!(f, "org: {}\n{}", self.name, buf)
}
}
impl Organization {
/// Creates instance from persistent organization
pub fn from(org: &POrganization) -> Organization {
Organization {
id: org.id.clone(),
parent_id: org.parent_id.clone(),
name: org.name.clone(),
url: org.url.clone(),
description: org.description.clone(),
groups: HashMap::new(),
roles: HashMap::new(),
claims: vec![],
resources: vec![],
license_policy: None,
created_at: org.created_at.clone(),
created_by: org.created_by.clone(),
updated_at:org.updated_at.clone(),
updated_by: org.updated_by.clone(),
}
}
/// Creates instance of persistent organization
pub fn to(&self) -> POrganization {
POrganization::new(self.id.as_str(), self.parent_id.clone(), self.name.as_str(), self.url.as_str(), self.description.clone())
}
pub fn new(id: &str, parent_id: Option<String>, name: &str, url: &str, description: Option<String>) -> Organization {
Organization {
id: id.to_string(),
parent_id: parent_id,
name: name.to_string(),
url: url.to_string(),
description: description,
groups: HashMap::new(),
roles: HashMap::new(),
claims: vec![],
resources: vec![],
license_policy: None,
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// Principal represents user of the organization and belongs to an organization
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Principal {
#[serde(skip_deserializing)]
pub id: String,
pub organization_id: String,
pub username: String,
pub description: Option<String>,
#[serde(skip_serializing, skip_deserializing)]
pub groups: HashMap<String, Group>,
#[serde(skip_serializing, skip_deserializing)]
pub roles: HashMap<String, Role>,
#[serde(skip_serializing, skip_deserializing)]
pub claims: Vec<ClaimClaimable>,
#[serde(skip_serializing, skip_deserializing)]
pub resources: Vec<Resource>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Principal {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut buf = String::from("");
for (_, group) in &self.groups {
buf.push_str(format!("\tgroup: {}\n", group).as_str());
}
for (_, role) in &self.roles {
buf.push_str(format!("{}", role).as_str());
}
for claim in &self.claims {
buf.push_str(format!("{}", claim).as_str());
}
for resource in &self.resources {
buf.push_str(format!("\n\t\t{}", resource).as_str());
}
write!(f, "user: {}\n{}", self.username, buf)
}
}
impl Principal {
/// Creates instance from persistent principal
pub fn from(principal: &PPrincipal) -> Principal {
Principal {
id: principal.id.clone(),
username: principal.username.clone(),
organization_id: principal.organization_id.clone(),
description: principal.description.clone(),
groups: HashMap::new(),
roles: HashMap::new(),
claims: vec![],
resources: vec![],
created_at: principal.created_at.clone(),
created_by: principal.created_by.clone(),
updated_at: principal.updated_at.clone(),
updated_by: principal.updated_by.clone(),
}
}
/// Creates instance of persistent principal
pub fn to(&self) -> PPrincipal {
PPrincipal ::new(self.id.as_str(), self.organization_id.as_str(), self.username.as_str(), self.description.clone())
}
pub fn new(id: &str, organization_id: &str, username: &str, description: Option<String>) -> Principal {
Principal {
id: id.to_string(),
username: username.to_string(),
organization_id: organization_id.to_string(),
description: description,
groups: HashMap::new(),
roles: HashMap::new(),
claims: vec![],
resources: vec![],
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// An organization can have one or more groups, where each group is associated with Principal or users. A user can be associated
/// with multiple groups and each group can inherit from another group.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Group {
#[serde(skip_deserializing)]
pub id: String,
pub parent_id: Option<String>,
pub organization_id: String,
pub name: String,
pub description: Option<String>,
#[serde(skip_serializing, skip_deserializing)]
pub roles: HashMap<String, Role>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Group {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut buf = String::from("");
for (_, role) in &self.roles {
buf.push_str(format!("{}", role).as_str());
}
write!(f, "group: {}\n{}", self.name, buf)
}
}
impl Group {
/// Creates instance from persistent group
pub fn from(group: &PGroup) -> Group {
Group {
id: group.id.clone(),
parent_id: group.parent_id.clone(),
organization_id: group.organization_id.clone(),
name: group.name.clone(),
description: group.description.clone(),
roles: HashMap::new(),
created_at: group.created_at.clone(),
created_by: group.created_by.clone(),
updated_at: group.updated_at.clone(),
updated_by: group.updated_by.clone(),
}
}
/// Creates instance of persistent group
pub fn to(&self) -> PGroup {
PGroup::new(self.id.as_str(), self.organization_id.as_str(), self.name.as_str(), self.description.clone(), self.parent_id.clone())
}
pub fn new(id: &str, organization_id: &str, name: &str, description: Option<String>, parent_id: Option<String>) -> Group {
Group {
id: id.to_string(),
parent_id: parent_id,
organization_id: organization_id.to_string(),
name: name.to_string(),
description: description,
roles: HashMap::new(),
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// Resource represents target object that needs to be secured within a security realm
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Resource {
#[serde(skip_deserializing)]
pub id: String,
pub realm_id: String,
pub resource_name: String,
pub description: Option<String>,
pub allowable_actions: Option<String>,
#[serde(skip_serializing, skip_deserializing)]
pub instances: HashMap<String, ResourceInstance>,
#[serde(skip_serializing, skip_deserializing)]
pub quotas: HashMap<String, ResourceQuota>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Resource {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "resource: {}", self.resource_name)
}
}
impl Resource {
/// Creates instance from persistent resource
pub fn from(resource: &PResource) -> Resource {
Resource {
id: resource.id.clone(),
realm_id: resource.realm_id.clone(),
resource_name: resource.resource_name.clone(),
description: resource.description.clone(),
allowable_actions: resource.allowable_actions.clone(),
instances: HashMap::new(),
quotas: HashMap::new(),
created_at: resource.created_at.clone(),
created_by: resource.created_by.clone(),
updated_at: resource.updated_at.clone(),
updated_by: resource.updated_by.clone()
}
}
/// Creates instance of persistent resource
pub fn to(&self) -> PResource {
PResource::new(self.id.as_str(), self.realm_id.as_str(), self.resource_name.as_str(), self.description.clone(), self.allowable_actions.clone())
}
pub fn new(id: &str, realm_id: &str, resource_name: &str, description: Option<String>, allowable_actions: Option<String>) -> Resource {
Resource {
id: id.to_string(),
realm_id: realm_id.to_string(),
resource_name: resource_name.to_string(),
description: description,
allowable_actions: allowable_actions,
instances: HashMap::new(),
quotas: HashMap::new(),
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// ResourceInstance represents an instance of target object in case number of objects need constraints
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ResourceInstance {
#[serde(skip_deserializing)]
pub id: String,
#[serde(skip_deserializing)]
pub resource_id: String,
#[serde(skip_deserializing)]
pub license_policy_id: String,
pub scope: String,
pub ref_id: String,
pub status: String,
pub description: Option<String>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl ResourceInstance {
/// Creates instance from persistent resource instance
pub fn from(instance: &PResourceInstance) -> ResourceInstance {
ResourceInstance {
id: instance.id.clone(),
resource_id: instance.resource_id.clone(),
scope: instance.scope.clone(),
license_policy_id: instance.license_policy_id.clone(),
ref_id: instance.ref_id.clone(),
status: instance.status.clone(),
description: instance.description.clone(),
created_at: instance.created_at.clone(),
created_by: instance.created_by.clone(),
updated_at: instance.updated_at.clone(),
updated_by: instance.updated_by.clone()
}
}
/// Creates instance of persistent resource instance
pub fn to(&self) -> PResourceInstance {
PResourceInstance::new(self.id.as_str(), self.resource_id.as_str(), self.license_policy_id.as_str(), self.scope.as_str(), self.ref_id.as_str(), self.status.as_str(), self.description.clone())
}
pub fn new(id: &str, resource_id: &str, license_policy_id: &str, scope: &str, ref_id: &str, status: &str, description: Option<String>) -> ResourceInstance {
ResourceInstance {
id: id.to_string(),
resource_id: resource_id.to_string(),
license_policy_id: license_policy_id.to_string(),
scope: scope.to_string(),
ref_id: ref_id.to_string(),
status: status.to_string(),
            description,
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// ResourceQuota represents the maximum quota for the number of instances of the target object
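///
/// # Example (illustrative sketch; assumes chrono's `Utc`/`NaiveDate` are in scope and the ids are made up)
/// ```ignore
/// let quota = ResourceQuota::new("quota-1", "res-1", "policy-1", "", 10,
///     Utc::now().naive_utc(), NaiveDate::from_ymd(2100, 1, 1).and_hms(0, 0, 0));
/// assert_eq!(10, quota.max_value);
/// ```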
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ResourceQuota {
#[serde(skip_deserializing)]
pub id: String,
#[serde(skip_deserializing)]
pub resource_id: String,
pub scope: String,
#[serde(skip_deserializing)]
pub license_policy_id: String,
pub max_value: i32,
pub effective_at: NaiveDateTime,
pub expired_at: NaiveDateTime,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl ResourceQuota {
/// Creates quota from persistent resource quota
pub fn from(quota: &PResourceQuota) -> ResourceQuota {
ResourceQuota {
id: quota.id.clone(),
resource_id: quota.resource_id.clone(),
license_policy_id: quota.license_policy_id.clone(),
scope: quota.scope.clone(),
max_value: quota.max_value.clone(),
effective_at: quota.effective_at.clone(),
expired_at: quota.expired_at.clone(),
created_at: quota.created_at.clone(),
created_by: quota.created_by.clone(),
updated_at: quota.updated_at.clone(),
updated_by: quota.updated_by.clone()
}
}
    /// Converts into the persistent resource quota representation
pub fn to(&self) -> PResourceQuota {
PResourceQuota::new(self.id.as_str(), self.resource_id.as_str(), self.license_policy_id.as_str(), self.scope.as_str(), self.max_value, self.effective_at.clone(), self.expired_at.clone())
}
pub fn new(id: &str, resource_id: &str, license_policy_id: &str, scope: &str, max_value: i32, effective_at: NaiveDateTime, expired_at: NaiveDateTime) -> ResourceQuota {
ResourceQuota {
id: id.to_string(),
resource_id: resource_id.to_string(),
license_policy_id: license_policy_id.to_string(),
scope: scope.to_string(),
            max_value,
            effective_at,
            expired_at,
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// RoleRoleable defines mapping of role and roleable
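///
/// # Example (illustrative sketch; assumes a `role` value built elsewhere and made-up ids)
/// ```ignore
/// let mapping = RoleRoleable::Group(role, "group-1".to_string(), "".to_string());
/// ```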
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum RoleRoleable {
Group(Role, String, String), // id, constraints
Principal(Role, String, String), // id, constraints
}
/// Role defines an abstraction for granting claims/capabilities/permissions to a group of users
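///
/// # Example (illustrative sketch; the ids are made up)
/// ```ignore
/// let role = Role::new("role-1", "realm-1", "org-1", "admin", None, None);
/// assert_eq!("admin", role.name);
/// ```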
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Role {
#[serde(skip_deserializing)]
pub id: String,
pub parent_id: Option<String>,
pub realm_id: String,
pub organization_id: String,
pub name: String,
pub description: Option<String>,
#[serde(skip_serializing, skip_deserializing)]
pub claims: Vec<ClaimClaimable>, // All claims mapped to role
pub constraints: Option<String>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Role {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let mut buf = String::new();
for claim in &self.claims {
buf.push_str(format!("{}", claim).as_str());
}
write!(f, "\trole: {}\n{}", self.name, buf)
}
}
impl Role {
/// Creates instance from persistent role
pub fn from(role: &PRole) -> Role {
Role {
id: role.id.clone(),
parent_id: role.parent_id.clone(),
organization_id: role.organization_id.clone(),
realm_id: role.realm_id.clone(),
name: role.name.clone(),
description: role.description.clone(),
claims: vec![],
constraints: None,
created_at: role.created_at.clone(),
created_by: role.created_by.clone(),
updated_at: role.updated_at.clone(),
updated_by: role.updated_by.clone(),
}
}
    /// Converts into the persistent role representation
pub fn to(&self) -> PRole {
PRole::new(self.id.as_str(), self.realm_id.as_str(), self.organization_id.as_str(), self.name.as_str(), self.description.clone(), self.parent_id.clone())
}
pub fn new(id: &str, realm_id: &str, organization_id: &str, name: &str, description: Option<String>, parent_id: Option<String>) -> Role {
Role {
id: id.to_string(),
            parent_id,
organization_id: organization_id.to_string(),
realm_id: realm_id.to_string(),
name: name.to_string(),
            description,
claims: vec![],
constraints: None,
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
/// ClaimClaimable defines mapping of claim and claimable
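///
/// # Example (illustrative sketch; assumes a `claim` value built elsewhere)
/// ```ignore
/// let mapped = ClaimClaimable::Realm(claim, "myrealm".to_string());
/// ```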
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum ClaimClaimable {
Realm(Claim, String), // realm
LicensePolicy(Claim, String, String, String), // realm, scope, constraints
Role(Claim, String, String, String, String), // realm, role-id, scope, constraints
Principal(Claim, String, String, String, String), // realm, principal-id, scope, constraints
}
impl std::fmt::Display for ClaimClaimable {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
ClaimClaimable::Realm(claim, realm) => write!(f, "\n\trealm-claim: {}\t\trealm: {}", claim, realm),
ClaimClaimable::LicensePolicy(claim, _realm, scope, constraints) => write!(f, "\n\tlicense-claim: {}\t\tscope: {}, constraints: {}", claim, scope, constraints),
ClaimClaimable::Role(claim, _, _realm, scope, constraints) => write!(f, "\n\trole-claim: {}\t\tscope: {}, constraints: {}", claim, scope, constraints),
ClaimClaimable::Principal(claim, _, _realm, scope, constraints) => write!(f, "\n\tprincipal-claim: {}\t\tscope: {}, constraints: {}", claim, scope, constraints),
}
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ClaimResource {
pub claim: Claim,
pub scope: String,
pub constraints: String,
pub resource: Resource,
}
impl ClaimResource {
pub fn new(claim: Claim, scope: String, constraints: String, resource: Resource) -> ClaimResource {
        ClaimResource { claim, scope, constraints, resource }
}
}
/// Claim defines mapping of target resource that needs protection and action that can be performed
/// on those resources.
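///
/// # Example (illustrative sketch; the ids and the "READ" action are made up)
/// ```ignore
/// let claim = Claim::new("claim-1", "realm-1", "res-1", "READ", "Allow", None);
/// assert_eq!("READ", claim.action);
/// assert_eq!("Allow", claim.effect());
/// ```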
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Claim {
#[serde(skip_deserializing)]
pub id: String,
pub realm_id: String,
#[serde(skip_deserializing)]
pub resource_id: String,
pub action: String,
pub effect: Option<String>,
pub description: Option<String>,
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl std::fmt::Display for Claim {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "claim: {} - {}\n", self.action, self.resource_id)
}
}
impl Claim {
/// Creates instance from persistent claim
pub fn from(claim: &PClaim) -> Claim {
Claim {
id: claim.id.clone(),
realm_id: claim.realm_id.clone(),
resource_id: claim.resource_id.clone(),
action: claim.action.clone(),
effect: claim.effect.clone(),
description: claim.description.clone(),
created_at: claim.created_at.clone(),
created_by: claim.created_by.clone(),
updated_at: claim.updated_at.clone(),
updated_by: claim.updated_by.clone(),
}
}
    /// Converts into the persistent claim representation
pub fn to(&self) -> PClaim {
PClaim::new(self.id.as_str(), self.realm_id.as_str(), self.resource_id.as_str(), self.action.as_str(), self.effect().as_str(), self.description.clone())
}
pub fn new(id: &str, realm_id: &str, resource_id: &str, action: &str, effect: &str, description: Option<String>) -> Claim {
Claim {
id: id.to_string(),
realm_id: realm_id.to_string(),
resource_id: resource_id.to_string(),
action: action.to_string(),
effect: Some(effect.to_string()),
            description,
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
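    /// Returns the claim's effect; when the effect is unset or empty it falls back to the
    /// default `Constants::Allow` value.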
pub fn effect(&self) -> String {
if let Some(effect) = self.effect.clone() {
            if !effect.is_empty() {
return effect;
}
}
Constants::Allow.to_string()
}
}
/// LicensePolicy defines what an organization can access
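///
/// # Example (illustrative sketch; assumes chrono's `Utc`/`NaiveDate` are in scope and the ids are made up)
/// ```ignore
/// let policy = LicensePolicy::new("policy-1", "org-1", "basic", None,
///     Utc::now().naive_utc(), NaiveDate::from_ymd(2100, 1, 1).and_hms(0, 0, 0));
/// assert_eq!("basic", policy.name);
/// ```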
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct LicensePolicy {
#[serde(skip_deserializing)]
pub id: String,
pub organization_id: String,
pub name: String,
pub description: Option<String>,
pub effective_at: NaiveDateTime,
pub expired_at: NaiveDateTime,
#[serde(skip_serializing, skip_deserializing)]
pub claims: Vec<ClaimClaimable>, // All claims mapped to organization via license policy
pub created_by: Option<String>,
pub created_at: Option<NaiveDateTime>,
pub updated_by: Option<String>,
pub updated_at: Option<NaiveDateTime>,
}
impl LicensePolicy {
/// Creates instance from persistent license-policy
pub fn from(policy: &PLicensePolicy) -> LicensePolicy {
LicensePolicy {
id: policy.id.clone(),
organization_id: policy.organization_id.clone(),
name: policy.name.clone(),
description: policy.description.clone(),
effective_at: policy.effective_at.clone(),
expired_at: policy.expired_at.clone(),
claims: vec![],
created_at: policy.created_at.clone(),
created_by: policy.created_by.clone(),
updated_at: policy.updated_at.clone(),
updated_by: policy.updated_by.clone(),
}
}
    /// Converts into the persistent license-policy representation
pub fn to(&self) -> PLicensePolicy {
PLicensePolicy::new(self.id.as_str(), self.organization_id.as_str(), self.name.as_str(), self.description.clone(), self.effective_at.clone(), self.expired_at.clone())
}
pub fn new(id: &str, organization_id: &str, name: &str, description: Option<String>, effective_at: NaiveDateTime, expired_at: NaiveDateTime) -> LicensePolicy {
LicensePolicy {
id: id.to_string(),
organization_id: organization_id.to_string(),
name: name.to_string(),
            description,
            effective_at,
            expired_at,
claims: vec![],
created_at: Some(Utc::now().naive_utc()),
created_by: None,
updated_at: Some(Utc::now().naive_utc()),
updated_by: None
}
}
}
#[cfg(test)]
mod tests {
extern crate uuid as uuu;
use self::uuu::Uuid;
use plexrbac::domain::models::*;
use chrono::{NaiveDate, Utc};
#[test]
fn test_create_realm() {
let r = SecurityRealm::new("test", None);
assert_eq!("test", r.id);
}
#[test]
fn test_create_org() {
let o = Organization::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), None, "test", "url", None);
assert_eq!("test", o.name);
}
#[test]
fn test_create_principal() {
let g = Principal::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "22", "test", None);
assert_eq!("test", g.username);
}
#[test]
fn test_create_group() {
let g = Group::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "2", "test", None, None);
assert_eq!("test", g.name);
}
#[test]
fn test_create_role() {
let r = Role::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "11", "22", "test", None, None);
assert_eq!("test", r.name);
}
#[test]
fn test_create_resource() {
let r = Resource::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "11", "app", None, None);
assert_eq!("app", r.resource_name);
}
#[test]
fn test_create_resource_instance() {
let r = ResourceInstance::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "11", "22", "", "ref", "INFLIGHT", None);
assert_eq!("22", r.license_policy_id);
}
#[test]
fn test_create_resource_quota() {
let r = ResourceQuota::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "11", "22", "", 0, Utc::now().naive_utc(), NaiveDate::from_ymd(2100, 1, 1).and_hms(0, 0, 0));
assert_eq!("22", r.license_policy_id);
}
#[test]
fn test_create_claim() {
let r = Claim::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "11", "22", "action", "allow", None);
assert_eq!("action", r.action);
}
#[test]
fn test_create_license_policy() {
let license_policy = PLicensePolicy::new(Uuid::new_v4().to_hyphenated().to_string().as_str(), "99", "mylicense_policy", None, Utc::now().naive_utc(), NaiveDate::from_ymd(2100, 1, 1).and_hms(0, 0, 0));
assert_eq!("mylicense_policy", license_policy.name);
}
}
| 36.836086 | 208 | 0.613901 |
01f137e377515759c8c390f6c85c4a3ba3093d72 | 41,826 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
pub struct StartStreamTranscriptionOutput {
/// <p>An identifier for the streaming transcription.</p>
pub request_id: std::option::Option<std::string::String>,
/// <p>The language code for the input audio stream.</p>
pub language_code: std::option::Option<crate::model::LanguageCode>,
/// <p>The sample rate for the input audio stream. Use 8,000 Hz for low quality audio and 16,000 Hz
/// for high quality audio.</p>
pub media_sample_rate_hertz: std::option::Option<i32>,
/// <p>The encoding used for the input audio stream.</p>
pub media_encoding: std::option::Option<crate::model::MediaEncoding>,
/// <p>The name of the vocabulary used when processing the stream.</p>
pub vocabulary_name: std::option::Option<std::string::String>,
/// <p>An identifier for a specific transcription session.</p>
pub session_id: std::option::Option<std::string::String>,
/// <p>Represents the stream of transcription events from Amazon Transcribe to your application.</p>
pub transcript_result_stream: aws_smithy_http::event_stream::Receiver<
crate::model::TranscriptResultStream,
crate::error::StartStreamTranscriptionError,
>,
/// <p>The name of the vocabulary filter used in your real-time stream.</p>
pub vocabulary_filter_name: std::option::Option<std::string::String>,
/// <p>The vocabulary filtering method used in the real-time stream.</p>
pub vocabulary_filter_method: std::option::Option<crate::model::VocabularyFilterMethod>,
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub show_speaker_label: bool,
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub enable_channel_identification: bool,
/// <p>The number of channels identified in the stream.</p>
pub number_of_channels: std::option::Option<i32>,
/// <p>Shows whether partial results stabilization has been enabled in the stream.</p>
pub enable_partial_results_stabilization: bool,
/// <p>If partial results stabilization has been enabled in the stream, shows the stability
/// level.</p>
pub partial_results_stability: std::option::Option<crate::model::PartialResultsStability>,
/// <p>Shows whether content identification was enabled in this stream.</p>
pub content_identification_type: std::option::Option<crate::model::ContentIdentificationType>,
/// <p>Shows whether content redaction was enabled in this stream.</p>
pub content_redaction_type: std::option::Option<crate::model::ContentRedactionType>,
/// <p>Lists the PII entity types you specified in your request.</p>
pub pii_entity_types: std::option::Option<std::string::String>,
#[allow(missing_docs)] // documentation missing in model
pub language_model_name: std::option::Option<std::string::String>,
}
impl StartStreamTranscriptionOutput {
/// <p>An identifier for the streaming transcription.</p>
pub fn request_id(&self) -> std::option::Option<&str> {
self.request_id.as_deref()
}
/// <p>The language code for the input audio stream.</p>
pub fn language_code(&self) -> std::option::Option<&crate::model::LanguageCode> {
self.language_code.as_ref()
}
/// <p>The sample rate for the input audio stream. Use 8,000 Hz for low quality audio and 16,000 Hz
/// for high quality audio.</p>
pub fn media_sample_rate_hertz(&self) -> std::option::Option<i32> {
self.media_sample_rate_hertz
}
/// <p>The encoding used for the input audio stream.</p>
pub fn media_encoding(&self) -> std::option::Option<&crate::model::MediaEncoding> {
self.media_encoding.as_ref()
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn vocabulary_name(&self) -> std::option::Option<&str> {
self.vocabulary_name.as_deref()
}
/// <p>An identifier for a specific transcription session.</p>
pub fn session_id(&self) -> std::option::Option<&str> {
self.session_id.as_deref()
}
/// <p>Represents the stream of transcription events from Amazon Transcribe to your application.</p>
pub fn transcript_result_stream(
&self,
) -> &aws_smithy_http::event_stream::Receiver<
crate::model::TranscriptResultStream,
crate::error::StartStreamTranscriptionError,
> {
&self.transcript_result_stream
}
/// <p>The name of the vocabulary filter used in your real-time stream.</p>
pub fn vocabulary_filter_name(&self) -> std::option::Option<&str> {
self.vocabulary_filter_name.as_deref()
}
/// <p>The vocabulary filtering method used in the real-time stream.</p>
pub fn vocabulary_filter_method(
&self,
) -> std::option::Option<&crate::model::VocabularyFilterMethod> {
self.vocabulary_filter_method.as_ref()
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn show_speaker_label(&self) -> bool {
self.show_speaker_label
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn enable_channel_identification(&self) -> bool {
self.enable_channel_identification
}
/// <p>The number of channels identified in the stream.</p>
pub fn number_of_channels(&self) -> std::option::Option<i32> {
self.number_of_channels
}
/// <p>Shows whether partial results stabilization has been enabled in the stream.</p>
pub fn enable_partial_results_stabilization(&self) -> bool {
self.enable_partial_results_stabilization
}
/// <p>If partial results stabilization has been enabled in the stream, shows the stability
/// level.</p>
pub fn partial_results_stability(
&self,
) -> std::option::Option<&crate::model::PartialResultsStability> {
self.partial_results_stability.as_ref()
}
/// <p>Shows whether content identification was enabled in this stream.</p>
pub fn content_identification_type(
&self,
) -> std::option::Option<&crate::model::ContentIdentificationType> {
self.content_identification_type.as_ref()
}
/// <p>Shows whether content redaction was enabled in this stream.</p>
pub fn content_redaction_type(
&self,
) -> std::option::Option<&crate::model::ContentRedactionType> {
self.content_redaction_type.as_ref()
}
/// <p>Lists the PII entity types you specified in your request.</p>
pub fn pii_entity_types(&self) -> std::option::Option<&str> {
self.pii_entity_types.as_deref()
}
#[allow(missing_docs)] // documentation missing in model
pub fn language_model_name(&self) -> std::option::Option<&str> {
self.language_model_name.as_deref()
}
}
impl std::fmt::Debug for StartStreamTranscriptionOutput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StartStreamTranscriptionOutput");
formatter.field("request_id", &self.request_id);
formatter.field("language_code", &self.language_code);
formatter.field("media_sample_rate_hertz", &self.media_sample_rate_hertz);
formatter.field("media_encoding", &self.media_encoding);
formatter.field("vocabulary_name", &self.vocabulary_name);
formatter.field("session_id", &self.session_id);
formatter.field("transcript_result_stream", &self.transcript_result_stream);
formatter.field("vocabulary_filter_name", &self.vocabulary_filter_name);
formatter.field("vocabulary_filter_method", &self.vocabulary_filter_method);
formatter.field("show_speaker_label", &self.show_speaker_label);
formatter.field(
"enable_channel_identification",
&self.enable_channel_identification,
);
formatter.field("number_of_channels", &self.number_of_channels);
formatter.field(
"enable_partial_results_stabilization",
&self.enable_partial_results_stabilization,
);
formatter.field("partial_results_stability", &self.partial_results_stability);
formatter.field(
"content_identification_type",
&self.content_identification_type,
);
formatter.field("content_redaction_type", &self.content_redaction_type);
formatter.field("pii_entity_types", &self.pii_entity_types);
formatter.field("language_model_name", &self.language_model_name);
formatter.finish()
}
}
/// See [`StartStreamTranscriptionOutput`](crate::output::StartStreamTranscriptionOutput)
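///
/// The sketch below is illustrative only; `transcript_receiver` is an assumed placeholder for an
/// `aws_smithy_http::event_stream::Receiver` obtained from the transport layer:
/// ```ignore
/// let output = crate::output::StartStreamTranscriptionOutput::builder()
///     .request_id("example-request-id")
///     .transcript_result_stream(transcript_receiver)
///     .build()?;
/// ```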
pub mod start_stream_transcription_output {
/// A builder for [`StartStreamTranscriptionOutput`](crate::output::StartStreamTranscriptionOutput)
#[non_exhaustive]
#[derive(std::default::Default, std::fmt::Debug)]
pub struct Builder {
pub(crate) request_id: std::option::Option<std::string::String>,
pub(crate) language_code: std::option::Option<crate::model::LanguageCode>,
pub(crate) media_sample_rate_hertz: std::option::Option<i32>,
pub(crate) media_encoding: std::option::Option<crate::model::MediaEncoding>,
pub(crate) vocabulary_name: std::option::Option<std::string::String>,
pub(crate) session_id: std::option::Option<std::string::String>,
pub(crate) transcript_result_stream: std::option::Option<
aws_smithy_http::event_stream::Receiver<
crate::model::TranscriptResultStream,
crate::error::StartStreamTranscriptionError,
>,
>,
pub(crate) vocabulary_filter_name: std::option::Option<std::string::String>,
pub(crate) vocabulary_filter_method:
std::option::Option<crate::model::VocabularyFilterMethod>,
pub(crate) show_speaker_label: std::option::Option<bool>,
pub(crate) enable_channel_identification: std::option::Option<bool>,
pub(crate) number_of_channels: std::option::Option<i32>,
pub(crate) enable_partial_results_stabilization: std::option::Option<bool>,
pub(crate) partial_results_stability:
std::option::Option<crate::model::PartialResultsStability>,
pub(crate) content_identification_type:
std::option::Option<crate::model::ContentIdentificationType>,
pub(crate) content_redaction_type: std::option::Option<crate::model::ContentRedactionType>,
pub(crate) pii_entity_types: std::option::Option<std::string::String>,
pub(crate) language_model_name: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>An identifier for the streaming transcription.</p>
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.request_id = Some(input.into());
self
}
/// <p>An identifier for the streaming transcription.</p>
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.request_id = input;
self
}
/// <p>The language code for the input audio stream.</p>
pub fn language_code(mut self, input: crate::model::LanguageCode) -> Self {
self.language_code = Some(input);
self
}
/// <p>The language code for the input audio stream.</p>
pub fn set_language_code(
mut self,
input: std::option::Option<crate::model::LanguageCode>,
) -> Self {
self.language_code = input;
self
}
/// <p>The sample rate for the input audio stream. Use 8,000 Hz for low quality audio and 16,000 Hz
/// for high quality audio.</p>
pub fn media_sample_rate_hertz(mut self, input: i32) -> Self {
self.media_sample_rate_hertz = Some(input);
self
}
/// <p>The sample rate for the input audio stream. Use 8,000 Hz for low quality audio and 16,000 Hz
/// for high quality audio.</p>
pub fn set_media_sample_rate_hertz(mut self, input: std::option::Option<i32>) -> Self {
self.media_sample_rate_hertz = input;
self
}
/// <p>The encoding used for the input audio stream.</p>
pub fn media_encoding(mut self, input: crate::model::MediaEncoding) -> Self {
self.media_encoding = Some(input);
self
}
/// <p>The encoding used for the input audio stream.</p>
pub fn set_media_encoding(
mut self,
input: std::option::Option<crate::model::MediaEncoding>,
) -> Self {
self.media_encoding = input;
self
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn vocabulary_name(mut self, input: impl Into<std::string::String>) -> Self {
self.vocabulary_name = Some(input.into());
self
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn set_vocabulary_name(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.vocabulary_name = input;
self
}
/// <p>An identifier for a specific transcription session.</p>
pub fn session_id(mut self, input: impl Into<std::string::String>) -> Self {
self.session_id = Some(input.into());
self
}
/// <p>An identifier for a specific transcription session.</p>
pub fn set_session_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.session_id = input;
self
}
/// <p>Represents the stream of transcription events from Amazon Transcribe to your application.</p>
pub fn transcript_result_stream(
mut self,
input: aws_smithy_http::event_stream::Receiver<
crate::model::TranscriptResultStream,
crate::error::StartStreamTranscriptionError,
>,
) -> Self {
self.transcript_result_stream = Some(input);
self
}
/// <p>Represents the stream of transcription events from Amazon Transcribe to your application.</p>
pub fn set_transcript_result_stream(
mut self,
input: std::option::Option<
aws_smithy_http::event_stream::Receiver<
crate::model::TranscriptResultStream,
crate::error::StartStreamTranscriptionError,
>,
>,
) -> Self {
self.transcript_result_stream = input;
self
}
/// <p>The name of the vocabulary filter used in your real-time stream.</p>
pub fn vocabulary_filter_name(mut self, input: impl Into<std::string::String>) -> Self {
self.vocabulary_filter_name = Some(input.into());
self
}
/// <p>The name of the vocabulary filter used in your real-time stream.</p>
pub fn set_vocabulary_filter_name(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.vocabulary_filter_name = input;
self
}
/// <p>The vocabulary filtering method used in the real-time stream.</p>
pub fn vocabulary_filter_method(
mut self,
input: crate::model::VocabularyFilterMethod,
) -> Self {
self.vocabulary_filter_method = Some(input);
self
}
/// <p>The vocabulary filtering method used in the real-time stream.</p>
pub fn set_vocabulary_filter_method(
mut self,
input: std::option::Option<crate::model::VocabularyFilterMethod>,
) -> Self {
self.vocabulary_filter_method = input;
self
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn show_speaker_label(mut self, input: bool) -> Self {
self.show_speaker_label = Some(input);
self
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn set_show_speaker_label(mut self, input: std::option::Option<bool>) -> Self {
self.show_speaker_label = input;
self
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn enable_channel_identification(mut self, input: bool) -> Self {
self.enable_channel_identification = Some(input);
self
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn set_enable_channel_identification(
mut self,
input: std::option::Option<bool>,
) -> Self {
self.enable_channel_identification = input;
self
}
/// <p>The number of channels identified in the stream.</p>
pub fn number_of_channels(mut self, input: i32) -> Self {
self.number_of_channels = Some(input);
self
}
/// <p>The number of channels identified in the stream.</p>
pub fn set_number_of_channels(mut self, input: std::option::Option<i32>) -> Self {
self.number_of_channels = input;
self
}
/// <p>Shows whether partial results stabilization has been enabled in the stream.</p>
pub fn enable_partial_results_stabilization(mut self, input: bool) -> Self {
self.enable_partial_results_stabilization = Some(input);
self
}
/// <p>Shows whether partial results stabilization has been enabled in the stream.</p>
pub fn set_enable_partial_results_stabilization(
mut self,
input: std::option::Option<bool>,
) -> Self {
self.enable_partial_results_stabilization = input;
self
}
/// <p>If partial results stabilization has been enabled in the stream, shows the stability
/// level.</p>
pub fn partial_results_stability(
mut self,
input: crate::model::PartialResultsStability,
) -> Self {
self.partial_results_stability = Some(input);
self
}
/// <p>If partial results stabilization has been enabled in the stream, shows the stability
/// level.</p>
pub fn set_partial_results_stability(
mut self,
input: std::option::Option<crate::model::PartialResultsStability>,
) -> Self {
self.partial_results_stability = input;
self
}
/// <p>Shows whether content identification was enabled in this stream.</p>
pub fn content_identification_type(
mut self,
input: crate::model::ContentIdentificationType,
) -> Self {
self.content_identification_type = Some(input);
self
}
/// <p>Shows whether content identification was enabled in this stream.</p>
pub fn set_content_identification_type(
mut self,
input: std::option::Option<crate::model::ContentIdentificationType>,
) -> Self {
self.content_identification_type = input;
self
}
/// <p>Shows whether content redaction was enabled in this stream.</p>
pub fn content_redaction_type(mut self, input: crate::model::ContentRedactionType) -> Self {
self.content_redaction_type = Some(input);
self
}
/// <p>Shows whether content redaction was enabled in this stream.</p>
pub fn set_content_redaction_type(
mut self,
input: std::option::Option<crate::model::ContentRedactionType>,
) -> Self {
self.content_redaction_type = input;
self
}
/// <p>Lists the PII entity types you specified in your request.</p>
pub fn pii_entity_types(mut self, input: impl Into<std::string::String>) -> Self {
self.pii_entity_types = Some(input.into());
self
}
/// <p>Lists the PII entity types you specified in your request.</p>
pub fn set_pii_entity_types(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.pii_entity_types = input;
self
}
#[allow(missing_docs)] // documentation missing in model
pub fn language_model_name(mut self, input: impl Into<std::string::String>) -> Self {
self.language_model_name = Some(input.into());
self
}
#[allow(missing_docs)] // documentation missing in model
pub fn set_language_model_name(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.language_model_name = input;
self
}
/// Consumes the builder and constructs a [`StartStreamTranscriptionOutput`](crate::output::StartStreamTranscriptionOutput)
pub fn build(
self,
) -> std::result::Result<
crate::output::StartStreamTranscriptionOutput,
aws_smithy_http::operation::BuildError,
> {
            Ok(crate::output::StartStreamTranscriptionOutput {
                request_id: self.request_id,
                language_code: self.language_code,
                media_sample_rate_hertz: self.media_sample_rate_hertz,
                media_encoding: self.media_encoding,
                vocabulary_name: self.vocabulary_name,
                session_id: self.session_id,
                transcript_result_stream: self.transcript_result_stream.ok_or(
                    aws_smithy_http::operation::BuildError::MissingField {
                        field: "transcript_result_stream",
                        details: "transcript_result_stream was not specified but it is required when building StartStreamTranscriptionOutput",
                    },
                )?,
                vocabulary_filter_name: self.vocabulary_filter_name,
                vocabulary_filter_method: self.vocabulary_filter_method,
                show_speaker_label: self.show_speaker_label.unwrap_or_default(),
                enable_channel_identification: self.enable_channel_identification.unwrap_or_default(),
                number_of_channels: self.number_of_channels,
                enable_partial_results_stabilization: self.enable_partial_results_stabilization.unwrap_or_default(),
                partial_results_stability: self.partial_results_stability,
                content_identification_type: self.content_identification_type,
                content_redaction_type: self.content_redaction_type,
                pii_entity_types: self.pii_entity_types,
                language_model_name: self.language_model_name,
            })
}
}
}
impl StartStreamTranscriptionOutput {
/// Creates a new builder-style object to manufacture [`StartStreamTranscriptionOutput`](crate::output::StartStreamTranscriptionOutput)
pub fn builder() -> crate::output::start_stream_transcription_output::Builder {
crate::output::start_stream_transcription_output::Builder::default()
}
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
pub struct StartMedicalStreamTranscriptionOutput {
/// <p>An identifier for the streaming transcription.</p>
pub request_id: std::option::Option<std::string::String>,
/// <p>The language code for the response transcript. For Amazon Transcribe Medical, this is US English
/// (en-US).</p>
pub language_code: std::option::Option<crate::model::LanguageCode>,
/// <p>The sample rate of the input audio in Hertz.</p>
pub media_sample_rate_hertz: std::option::Option<i32>,
/// <p>The encoding used for the input audio stream.</p>
pub media_encoding: std::option::Option<crate::model::MediaEncoding>,
/// <p>The name of the vocabulary used when processing the stream.</p>
pub vocabulary_name: std::option::Option<std::string::String>,
/// <p>The specialty in the medical domain.</p>
pub specialty: std::option::Option<crate::model::Specialty>,
/// <p>The type of audio that was transcribed. </p>
pub r#type: std::option::Option<crate::model::Type>,
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub show_speaker_label: bool,
/// <p>Optional. An identifier for the transcription session. If you don't provide a session
/// ID, Amazon Transcribe generates one for you and returns it in the response.</p>
pub session_id: std::option::Option<std::string::String>,
/// <p>Represents the stream of transcription events from Amazon Transcribe Medical to your application. </p>
pub transcript_result_stream: aws_smithy_http::event_stream::Receiver<
crate::model::MedicalTranscriptResultStream,
crate::error::StartMedicalStreamTranscriptionError,
>,
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub enable_channel_identification: bool,
/// <p>The number of channels identified in the stream.</p>
pub number_of_channels: std::option::Option<i32>,
/// <p>If the value is <code>PHI</code>, indicates that you've configured your stream to
/// identify personal health information.</p>
pub content_identification_type:
std::option::Option<crate::model::MedicalContentIdentificationType>,
}
impl StartMedicalStreamTranscriptionOutput {
/// <p>An identifier for the streaming transcription.</p>
pub fn request_id(&self) -> std::option::Option<&str> {
self.request_id.as_deref()
}
/// <p>The language code for the response transcript. For Amazon Transcribe Medical, this is US English
/// (en-US).</p>
pub fn language_code(&self) -> std::option::Option<&crate::model::LanguageCode> {
self.language_code.as_ref()
}
/// <p>The sample rate of the input audio in Hertz.</p>
pub fn media_sample_rate_hertz(&self) -> std::option::Option<i32> {
self.media_sample_rate_hertz
}
/// <p>The encoding used for the input audio stream.</p>
pub fn media_encoding(&self) -> std::option::Option<&crate::model::MediaEncoding> {
self.media_encoding.as_ref()
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn vocabulary_name(&self) -> std::option::Option<&str> {
self.vocabulary_name.as_deref()
}
/// <p>The specialty in the medical domain.</p>
pub fn specialty(&self) -> std::option::Option<&crate::model::Specialty> {
self.specialty.as_ref()
}
/// <p>The type of audio that was transcribed. </p>
pub fn r#type(&self) -> std::option::Option<&crate::model::Type> {
self.r#type.as_ref()
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn show_speaker_label(&self) -> bool {
self.show_speaker_label
}
/// <p>Optional. An identifier for the transcription session. If you don't provide a session
/// ID, Amazon Transcribe generates one for you and returns it in the response.</p>
pub fn session_id(&self) -> std::option::Option<&str> {
self.session_id.as_deref()
}
/// <p>Represents the stream of transcription events from Amazon Transcribe Medical to your application. </p>
pub fn transcript_result_stream(
&self,
) -> &aws_smithy_http::event_stream::Receiver<
crate::model::MedicalTranscriptResultStream,
crate::error::StartMedicalStreamTranscriptionError,
> {
&self.transcript_result_stream
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn enable_channel_identification(&self) -> bool {
self.enable_channel_identification
}
/// <p>The number of channels identified in the stream.</p>
pub fn number_of_channels(&self) -> std::option::Option<i32> {
self.number_of_channels
}
/// <p>If the value is <code>PHI</code>, indicates that you've configured your stream to
/// identify personal health information.</p>
pub fn content_identification_type(
&self,
) -> std::option::Option<&crate::model::MedicalContentIdentificationType> {
self.content_identification_type.as_ref()
}
}
impl std::fmt::Debug for StartMedicalStreamTranscriptionOutput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("StartMedicalStreamTranscriptionOutput");
formatter.field("request_id", &self.request_id);
formatter.field("language_code", &self.language_code);
formatter.field("media_sample_rate_hertz", &self.media_sample_rate_hertz);
formatter.field("media_encoding", &self.media_encoding);
formatter.field("vocabulary_name", &self.vocabulary_name);
formatter.field("specialty", &self.specialty);
formatter.field("r#type", &self.r#type);
formatter.field("show_speaker_label", &self.show_speaker_label);
formatter.field("session_id", &self.session_id);
formatter.field("transcript_result_stream", &self.transcript_result_stream);
formatter.field(
"enable_channel_identification",
&self.enable_channel_identification,
);
formatter.field("number_of_channels", &self.number_of_channels);
formatter.field(
"content_identification_type",
&self.content_identification_type,
);
formatter.finish()
}
}
/// See [`StartMedicalStreamTranscriptionOutput`](crate::output::StartMedicalStreamTranscriptionOutput)
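///
/// The sketch below is illustrative only; `transcript_receiver` is an assumed placeholder for an
/// `aws_smithy_http::event_stream::Receiver` obtained from the transport layer:
/// ```ignore
/// let output = crate::output::StartMedicalStreamTranscriptionOutput::builder()
///     .request_id("example-request-id")
///     .transcript_result_stream(transcript_receiver)
///     .build()?;
/// ```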
pub mod start_medical_stream_transcription_output {
/// A builder for [`StartMedicalStreamTranscriptionOutput`](crate::output::StartMedicalStreamTranscriptionOutput)
#[non_exhaustive]
#[derive(std::default::Default, std::fmt::Debug)]
pub struct Builder {
pub(crate) request_id: std::option::Option<std::string::String>,
pub(crate) language_code: std::option::Option<crate::model::LanguageCode>,
pub(crate) media_sample_rate_hertz: std::option::Option<i32>,
pub(crate) media_encoding: std::option::Option<crate::model::MediaEncoding>,
pub(crate) vocabulary_name: std::option::Option<std::string::String>,
pub(crate) specialty: std::option::Option<crate::model::Specialty>,
pub(crate) r#type: std::option::Option<crate::model::Type>,
pub(crate) show_speaker_label: std::option::Option<bool>,
pub(crate) session_id: std::option::Option<std::string::String>,
pub(crate) transcript_result_stream: std::option::Option<
aws_smithy_http::event_stream::Receiver<
crate::model::MedicalTranscriptResultStream,
crate::error::StartMedicalStreamTranscriptionError,
>,
>,
pub(crate) enable_channel_identification: std::option::Option<bool>,
pub(crate) number_of_channels: std::option::Option<i32>,
pub(crate) content_identification_type:
std::option::Option<crate::model::MedicalContentIdentificationType>,
}
impl Builder {
/// <p>An identifier for the streaming transcription.</p>
pub fn request_id(mut self, input: impl Into<std::string::String>) -> Self {
self.request_id = Some(input.into());
self
}
/// <p>An identifier for the streaming transcription.</p>
pub fn set_request_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.request_id = input;
self
}
/// <p>The language code for the response transcript. For Amazon Transcribe Medical, this is US English
/// (en-US).</p>
pub fn language_code(mut self, input: crate::model::LanguageCode) -> Self {
self.language_code = Some(input);
self
}
/// <p>The language code for the response transcript. For Amazon Transcribe Medical, this is US English
/// (en-US).</p>
pub fn set_language_code(
mut self,
input: std::option::Option<crate::model::LanguageCode>,
) -> Self {
self.language_code = input;
self
}
/// <p>The sample rate of the input audio in Hertz.</p>
pub fn media_sample_rate_hertz(mut self, input: i32) -> Self {
self.media_sample_rate_hertz = Some(input);
self
}
/// <p>The sample rate of the input audio in Hertz.</p>
pub fn set_media_sample_rate_hertz(mut self, input: std::option::Option<i32>) -> Self {
self.media_sample_rate_hertz = input;
self
}
/// <p>The encoding used for the input audio stream.</p>
pub fn media_encoding(mut self, input: crate::model::MediaEncoding) -> Self {
self.media_encoding = Some(input);
self
}
/// <p>The encoding used for the input audio stream.</p>
pub fn set_media_encoding(
mut self,
input: std::option::Option<crate::model::MediaEncoding>,
) -> Self {
self.media_encoding = input;
self
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn vocabulary_name(mut self, input: impl Into<std::string::String>) -> Self {
self.vocabulary_name = Some(input.into());
self
}
/// <p>The name of the vocabulary used when processing the stream.</p>
pub fn set_vocabulary_name(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.vocabulary_name = input;
self
}
/// <p>The specialty in the medical domain.</p>
pub fn specialty(mut self, input: crate::model::Specialty) -> Self {
self.specialty = Some(input);
self
}
/// <p>The specialty in the medical domain.</p>
pub fn set_specialty(
mut self,
input: std::option::Option<crate::model::Specialty>,
) -> Self {
self.specialty = input;
self
}
/// <p>The type of audio that was transcribed. </p>
pub fn r#type(mut self, input: crate::model::Type) -> Self {
self.r#type = Some(input);
self
}
/// <p>The type of audio that was transcribed. </p>
pub fn set_type(mut self, input: std::option::Option<crate::model::Type>) -> Self {
self.r#type = input;
self
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn show_speaker_label(mut self, input: bool) -> Self {
self.show_speaker_label = Some(input);
self
}
/// <p>Shows whether speaker identification was enabled in the stream.</p>
pub fn set_show_speaker_label(mut self, input: std::option::Option<bool>) -> Self {
self.show_speaker_label = input;
self
}
/// <p>Optional. An identifier for the transcription session. If you don't provide a session
/// ID, Amazon Transcribe generates one for you and returns it in the response.</p>
pub fn session_id(mut self, input: impl Into<std::string::String>) -> Self {
self.session_id = Some(input.into());
self
}
/// <p>Optional. An identifier for the transcription session. If you don't provide a session
/// ID, Amazon Transcribe generates one for you and returns it in the response.</p>
pub fn set_session_id(mut self, input: std::option::Option<std::string::String>) -> Self {
self.session_id = input;
self
}
/// <p>Represents the stream of transcription events from Amazon Transcribe Medical to your application. </p>
pub fn transcript_result_stream(
mut self,
input: aws_smithy_http::event_stream::Receiver<
crate::model::MedicalTranscriptResultStream,
crate::error::StartMedicalStreamTranscriptionError,
>,
) -> Self {
self.transcript_result_stream = Some(input);
self
}
/// <p>Represents the stream of transcription events from Amazon Transcribe Medical to your application. </p>
pub fn set_transcript_result_stream(
mut self,
input: std::option::Option<
aws_smithy_http::event_stream::Receiver<
crate::model::MedicalTranscriptResultStream,
crate::error::StartMedicalStreamTranscriptionError,
>,
>,
) -> Self {
self.transcript_result_stream = input;
self
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn enable_channel_identification(mut self, input: bool) -> Self {
self.enable_channel_identification = Some(input);
self
}
/// <p>Shows whether channel identification has been enabled in the stream.</p>
pub fn set_enable_channel_identification(
mut self,
input: std::option::Option<bool>,
) -> Self {
self.enable_channel_identification = input;
self
}
/// <p>The number of channels identified in the stream.</p>
pub fn number_of_channels(mut self, input: i32) -> Self {
self.number_of_channels = Some(input);
self
}
/// <p>The number of channels identified in the stream.</p>
pub fn set_number_of_channels(mut self, input: std::option::Option<i32>) -> Self {
self.number_of_channels = input;
self
}
/// <p>If the value is <code>PHI</code>, indicates that you've configured your stream to
/// identify personal health information.</p>
pub fn content_identification_type(
mut self,
input: crate::model::MedicalContentIdentificationType,
) -> Self {
self.content_identification_type = Some(input);
self
}
/// <p>If the value is <code>PHI</code>, indicates that you've configured your stream to
/// identify personal health information.</p>
pub fn set_content_identification_type(
mut self,
input: std::option::Option<crate::model::MedicalContentIdentificationType>,
) -> Self {
self.content_identification_type = input;
self
}
/// Consumes the builder and constructs a [`StartMedicalStreamTranscriptionOutput`](crate::output::StartMedicalStreamTranscriptionOutput)
pub fn build(
self,
) -> std::result::Result<
crate::output::StartMedicalStreamTranscriptionOutput,
aws_smithy_http::operation::BuildError,
> {
            Ok(crate::output::StartMedicalStreamTranscriptionOutput {
                request_id: self.request_id,
                language_code: self.language_code,
                media_sample_rate_hertz: self.media_sample_rate_hertz,
                media_encoding: self.media_encoding,
                vocabulary_name: self.vocabulary_name,
                specialty: self.specialty,
                r#type: self.r#type,
                show_speaker_label: self.show_speaker_label.unwrap_or_default(),
                session_id: self.session_id,
                transcript_result_stream: self.transcript_result_stream.ok_or(
                    aws_smithy_http::operation::BuildError::MissingField {
                        field: "transcript_result_stream",
                        details: "transcript_result_stream was not specified but it is required when building StartMedicalStreamTranscriptionOutput",
                    },
                )?,
                enable_channel_identification: self.enable_channel_identification.unwrap_or_default(),
                number_of_channels: self.number_of_channels,
                content_identification_type: self.content_identification_type,
            })
}
}
}
impl StartMedicalStreamTranscriptionOutput {
/// Creates a new builder-style object to manufacture [`StartMedicalStreamTranscriptionOutput`](crate::output::StartMedicalStreamTranscriptionOutput)
pub fn builder() -> crate::output::start_medical_stream_transcription_output::Builder {
crate::output::start_medical_stream_transcription_output::Builder::default()
}
}
| 47.31448 | 243 | 0.620786 |
f8b11c8918c234c1bcebde943e013a6be8d84349 | 8,354 | #[macro_use]
extern crate lazy_static;
mod cfglib;
mod services;
mod utils;
use parking_lot::RwLock;
use cfglib::*;
use utils::PatternsCache;
use actix_web::{
HttpServer,
App
};
use simple_logger::SimpleLogger;
use clap::{ArgMatches, Arg};
fn arg_matches<'a>() -> ArgMatches<'a> {
let about = format!("Fast, lightweight RESTful API services for processing, parsing & modifying UTF-8 text messages.
\nAuthor: {}\nSource: https://github.com/DK26/fast-webhooks", env!("CARGO_PKG_AUTHORS"));
clap::App::new(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.about(about.as_str())
.arg(
Arg::with_name("listen")
.short("l")
.long("listen")
.value_name("INTERFACE IP:PORT")
.takes_value(true)
.help("Sets the listening interface for incoming HTTP connections. (Default: 127.0.0.1:8080)")
)
.arg(
Arg::with_name("server_hostname")
.short("n")
.long("server_hostname")
.value_name("HOSTNAME:PORT")
.takes_value(true)
.help("Sets the server hostname. Used by the application router as a hostname for url generation. (Default: localhost:8080)")
)
.arg(
Arg::with_name("workers")
.short("w")
.long("workers")
.value_name("N")
.takes_value(true)
.help("Sets the N number of workers. (Default: Logical CPUs count)")
)
.arg(
Arg::with_name("backlog")
.short("b")
.long("backlog")
.value_name("N")
.takes_value(true)
.help("Sets the maximum N number of pending connections that can be waiting to be served. (Default: 2048)")
)
.arg(
Arg::with_name("max_connections")
.short("c")
.long("max_connections")
.value_name("N")
.takes_value(true)
.help("Sets the maximum per-worker number of N concurrent connections. (Default: 25000)")
)
.arg(
Arg::with_name("max_connection_rate")
.short("r")
.long("max_connection_rate")
.value_name("N")
.takes_value(true)
.help("Sets the maximum N per-worker concurrent connection establish process. (Default: 256)")
)
.arg(
Arg::with_name("keep_alive")
.short("k")
.long("keep_alive")
.value_name("N")
.takes_value(true)
.help("Sets server keep-alive setting in N seconds. (Default: 5)")
)
.arg(
Arg::with_name("client_timeout")
.short("t")
.long("client_timeout")
.value_name("N")
.takes_value(true)
.help("Sets server client timeout in N milliseconds for the first request. To disable timeout set value to 0. (Default: 5000)")
)
.arg(
Arg::with_name("client_shutdown")
.short("s")
.long("client_shutdown")
.value_name("N")
.takes_value(true)
.help("Sets server connection shutdown timeout in N milliseconds. To disable timeout set value to 0. (Default: 5000)")
)
.arg(
Arg::with_name("shutdown_timeout")
.short("d")
.long("shutdown_timeout")
.value_name("N")
.takes_value(true)
.help("Sets the timeout for graceful workers shutdown in N seconds. (Default: 30)")
)
.arg(
Arg::with_name("alt_encoding")
.short("a")
.long("alt_encoding")
.value_name("ENCODING")
.takes_value(true)
.help("Sets the alternative encoding for decoding, in case decoding with the default UTF-8 fails. (Default: UTF-8)")
)
.arg(
Arg::with_name("regex_patterns_limit")
.long("regex_patterns_limit")
.value_name("N")
.takes_value(true)
.help("Sets the in-memory cached patterns limit. Clears cache after threshold. (Default: 10000)")
)
.arg(
Arg::with_name("regex_patterns_capacity")
.long("regex_patterns_capacity")
.value_name("N")
.takes_value(true)
.help("Sets the initial amount of N capacity for cached patterns. (Default: 10000)")
).arg(
Arg::with_name("log_level")
.short("g")
.long("log_level")
.value_name("LEVEL")
.takes_value(true)
.help(r#"Sets the log level for the logger. (Available levels: "OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE")"#)
)
.get_matches()
}
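// Illustrative invocation (the binary name and flag values below are assumptions, not documented defaults):
//   fast-webhooks --listen 0.0.0.0:8080 --workers 4 --log_level INFO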
lazy_static! {
static ref CFG: Config = cfglib::init_cfg({
let cfg = arg_matches();
println!("{} {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION"));
log::info!("Initializing service...");
cfg
});
static ref PATTERNS_CACHE: RwLock<PatternsCache> = {
let cache = PatternsCache::with_capacity(CFG.cache.regex_patterns_capacity)
.limit(CFG.cache.regex_patterns_limit);
RwLock::new(cache)
};
}
pub const DEFAULT_CHARSET : &str = "utf-8";
#[actix_web::main]
async fn main() -> std::io::Result<()> {
let log_level = match CFG.logger.log_level.parse() {
Ok(level) => level,
Err(e) => {
let default_log_level = cfglib::default_logger_level();
eprintln!("Error: {}\nSetting the log level to '{}'", e, default_log_level);
default_log_level.parse().unwrap()
}
};
SimpleLogger::new()
.with_level(log_level)
.init().unwrap();
// Configurations
// Service
log::debug!("listen = {}", CFG.service.listen);
log::debug!("server_hostname = {}", CFG.service.server_hostname);
log::debug!("workers = {}", CFG.service.workers);
log::debug!("backlog = {}", CFG.service.backlog);
log::debug!("max_connections = {}", CFG.service.max_connections);
log::debug!("max_connection_rate = {}", CFG.service.max_connection_rate);
log::debug!("keep_alive = {}", CFG.service.keep_alive);
log::debug!("client_timeout = {}", CFG.service.client_timeout);
log::debug!("client_shutdown = {}", CFG.service.client_shutdown);
log::debug!("shutdown_timeout = {}", CFG.service.shutdown_timeout);
// Common
log::debug!("alt_encoding = {}", CFG.common.alt_encoding);
// Cache
log::debug!("regex_patterns_capacity = {}", CFG.cache.regex_patterns_capacity);
log::debug!("regex_patterns_limit = {}", CFG.cache.regex_patterns_limit);
// Logger
log::debug!("log_level = {}", CFG.logger.log_level);
HttpServer::new(|| {
App::new()
.service(services::welcome)
.service(services::echo)
.service(services::unescape)
// .service(services::form_test)
// .service(services::json_test)
.service(services::unescape_charset)
.service(services::decode_base64)
.service(services::decode_base64_charset)
.service(services::decode_mime_header)
.service(services::decode_mime_header_rfc822)
.service(services::decode_quoted_printable)
.service(services::decode_quoted_printable_charset)
.service(services::decode_auto)
.service(services::decode_auto_charset)
.service(services::regex_capture_group)
})
.server_hostname(&CFG.service.server_hostname)
.workers(CFG.service.workers)
.backlog(CFG.service.backlog)
.max_connections(CFG.service.max_connections)
.max_connection_rate(CFG.service.max_connection_rate)
.keep_alive(CFG.service.keep_alive)
.client_timeout(CFG.service.client_timeout)
.client_shutdown(CFG.service.client_shutdown)
.shutdown_timeout(CFG.service.shutdown_timeout)
.bind(&CFG.service.listen)?
.run()
.await
} | 36.480349 | 143 | 0.559492 |
4a355b6904f4aab1fed3542ab30fb584f1900f7c | 16,084 | //! Utilities for handling characters in the Unicode "Halfwidth and Fullwidth Forms" block.
use std::mem::transmute;
/// Checks if `ch` is in the Unicode "Halfwidth and Fullwidth Forms" block.
///
/// # Example
/// ```rust
/// assert_eq!(unicode_hfwidth::is_nonstandard_width('カ'), false);
/// assert_eq!(unicode_hfwidth::is_nonstandard_width('カ'), true);
/// ```
pub fn is_nonstandard_width(ch: char) -> bool {
match ch as u32 {
        0xff00..=0xffee => true,
_ => false,
}
}
/// Returns the standard-width form for `ch`. If `ch` is not in the Unicode
/// "Halfwidth and Fullwidth forms" block, returns `None`.
///
/// # Example
/// ```rust
/// assert_eq!(unicode_hfwidth::to_standard_width('カ'), None);
/// assert_eq!(unicode_hfwidth::to_standard_width('カ'), Some('カ'));
/// assert_eq!(unicode_hfwidth::to_standard_width('a'), Some('a'));
/// ```
pub fn to_standard_width(ch: char) -> Option<char> {
match ch as u32 {
        0xff01..=0xff60 => to_halfwidth(ch),
        0xff61..=0xffdc => to_fullwidth(ch),
        0xffe0..=0xffe6 => to_halfwidth(ch),
        0xffe8..=0xffee => to_fullwidth(ch),
_ => None
}
}
/// Returns the half-width form for `ch`. If no half-width form for `ch` exists,
/// or `ch` is already in half-width form, returns `None`.
///
/// # Example
/// ```rust
/// assert_eq!(unicode_hfwidth::to_halfwidth('カ'), Some('カ'));
/// assert_eq!(unicode_hfwidth::to_halfwidth('a'), None);
/// ```
pub fn to_halfwidth(ch: char) -> Option<char> {
let ch = ch as u32;
unsafe {
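        // Safety note: every u32 written below maps to a valid Unicode scalar value
        // (none fall in the surrogate range), so transmuting it to `char` stays sound.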
match ch {
/* Full-width variant characters */
            0xff01..=0xff5e => Some(transmute(ch - 0xff01 + 0x0021)),
            0xff5f..=0xff60 => Some(transmute(ch - 0xff5f + 0x2985)),
            0xffe0..=0xffe1 => Some(transmute(ch - 0xffe0 + 0x00a2)),
0xffe2 => Some(transmute(0x00acu32)),
0xffe3 => Some(transmute(0x00afu32)),
0xffe4 => Some(transmute(0x00a6u32)),
0xffe5 => Some(transmute(0x00a5u32)),
0xffe6 => Some(transmute(0x20a9u32)),
/* Natural full-width characters */
0x3002 => Some(transmute(0xff61u32)),
0x300c => Some(transmute(0xff62u32)),
0x300d => Some(transmute(0xff63u32)),
0x3001 => Some(transmute(0xff64u32)),
0x30fb => Some(transmute(0xff65u32)),
0x30f2 => Some(transmute(0xff66u32)),
0x30a1 => Some(transmute(0xff67u32)),
0x30a3 => Some(transmute(0xff68u32)),
0x30a5 => Some(transmute(0xff69u32)),
0x30a7 => Some(transmute(0xff6au32)),
0x30a9 => Some(transmute(0xff6bu32)),
0x30e3 => Some(transmute(0xff6cu32)),
0x30e5 => Some(transmute(0xff6du32)),
0x30e7 => Some(transmute(0xff6eu32)),
0x30c3 => Some(transmute(0xff6fu32)),
0x30fc => Some(transmute(0xff70u32)),
0x30a2 => Some(transmute(0xff71u32)),
0x30a4 => Some(transmute(0xff72u32)),
0x30a6 => Some(transmute(0xff73u32)),
0x30a8 => Some(transmute(0xff74u32)),
0x30aa => Some(transmute(0xff75u32)),
0x30ab => Some(transmute(0xff76u32)),
0x30ad => Some(transmute(0xff77u32)),
0x30af => Some(transmute(0xff78u32)),
0x30b1 => Some(transmute(0xff79u32)),
0x30b3 => Some(transmute(0xff7au32)),
0x30b5 => Some(transmute(0xff7bu32)),
0x30b7 => Some(transmute(0xff7cu32)),
0x30b9 => Some(transmute(0xff7du32)),
0x30bb => Some(transmute(0xff7eu32)),
0x30bd => Some(transmute(0xff7fu32)),
0x30bf => Some(transmute(0xff80u32)),
0x30c1 => Some(transmute(0xff81u32)),
0x30c4 => Some(transmute(0xff82u32)),
0x30c6 => Some(transmute(0xff83u32)),
0x30c8 => Some(transmute(0xff84u32)),
0x30ca => Some(transmute(0xff85u32)),
0x30cb => Some(transmute(0xff86u32)),
0x30cc => Some(transmute(0xff87u32)),
0x30cd => Some(transmute(0xff88u32)),
0x30ce => Some(transmute(0xff89u32)),
0x30cf => Some(transmute(0xff8au32)),
0x30d2 => Some(transmute(0xff8bu32)),
0x30d5 => Some(transmute(0xff8cu32)),
0x30d8 => Some(transmute(0xff8du32)),
0x30db => Some(transmute(0xff8eu32)),
0x30de => Some(transmute(0xff8fu32)),
0x30df => Some(transmute(0xff90u32)),
0x30e0 => Some(transmute(0xff91u32)),
0x30e1 => Some(transmute(0xff92u32)),
0x30e2 => Some(transmute(0xff93u32)),
0x30e4 => Some(transmute(0xff94u32)),
0x30e6 => Some(transmute(0xff95u32)),
0x30e8 => Some(transmute(0xff96u32)),
0x30e9 => Some(transmute(0xff97u32)),
0x30ea => Some(transmute(0xff98u32)),
0x30eb => Some(transmute(0xff99u32)),
0x30ec => Some(transmute(0xff9au32)),
0x30ed => Some(transmute(0xff9bu32)),
0x30ef => Some(transmute(0xff9cu32)),
0x30f3 => Some(transmute(0xff9du32)),
0x3099 => Some(transmute(0xff9eu32)),
0x309a => Some(transmute(0xff9fu32)),
0x3164 => Some(transmute(0xffa0u32)),
0x3131 => Some(transmute(0xffa1u32)),
0x3132 => Some(transmute(0xffa2u32)),
0x3133 => Some(transmute(0xffa3u32)),
0x3134 => Some(transmute(0xffa4u32)),
0x3135 => Some(transmute(0xffa5u32)),
0x3136 => Some(transmute(0xffa6u32)),
0x3137 => Some(transmute(0xffa7u32)),
0x3138 => Some(transmute(0xffa8u32)),
0x3139 => Some(transmute(0xffa9u32)),
0x313a => Some(transmute(0xffaau32)),
0x313b => Some(transmute(0xffabu32)),
0x313c => Some(transmute(0xffacu32)),
0x313d => Some(transmute(0xffadu32)),
0x313e => Some(transmute(0xffaeu32)),
0x313f => Some(transmute(0xffafu32)),
0x3140 => Some(transmute(0xffb0u32)),
0x3141 => Some(transmute(0xffb1u32)),
0x3142 => Some(transmute(0xffb2u32)),
0x3143 => Some(transmute(0xffb3u32)),
0x3144 => Some(transmute(0xffb4u32)),
0x3145 => Some(transmute(0xffb5u32)),
0x3146 => Some(transmute(0xffb6u32)),
0x3147 => Some(transmute(0xffb7u32)),
0x3148 => Some(transmute(0xffb8u32)),
0x3149 => Some(transmute(0xffb9u32)),
0x314a => Some(transmute(0xffbau32)),
0x314b => Some(transmute(0xffbbu32)),
0x314c => Some(transmute(0xffbcu32)),
0x314d => Some(transmute(0xffbdu32)),
0x314e => Some(transmute(0xffbeu32)),
0x314f => Some(transmute(0xffc2u32)),
0x3150 => Some(transmute(0xffc3u32)),
0x3151 => Some(transmute(0xffc4u32)),
0x3152 => Some(transmute(0xffc5u32)),
0x3153 => Some(transmute(0xffc6u32)),
0x3154 => Some(transmute(0xffc7u32)),
0x3155 => Some(transmute(0xffcau32)),
0x3156 => Some(transmute(0xffcbu32)),
0x3157 => Some(transmute(0xffccu32)),
0x3158 => Some(transmute(0xffcdu32)),
0x3159 => Some(transmute(0xffceu32)),
0x315a => Some(transmute(0xffcfu32)),
0x315b => Some(transmute(0xffd2u32)),
0x315c => Some(transmute(0xffd3u32)),
0x315d => Some(transmute(0xffd4u32)),
0x315e => Some(transmute(0xffd5u32)),
0x315f => Some(transmute(0xffd6u32)),
0x3160 => Some(transmute(0xffd7u32)),
0x3161 => Some(transmute(0xffdau32)),
0x3162 => Some(transmute(0xffdbu32)),
0x3163 => Some(transmute(0xffdcu32)),
0x2502 => Some(transmute(0xffe8u32)),
0x2190 => Some(transmute(0xffe9u32)),
0x2191 => Some(transmute(0xffeau32)),
0x2192 => Some(transmute(0xffebu32)),
0x2193 => Some(transmute(0xffecu32)),
0x25a0 => Some(transmute(0xffedu32)),
0x25cb => Some(transmute(0xffeeu32)),
_ => None
}
}
}
/// Returns the full-width form for `ch`. If no full-width form for `ch` exists,
/// or `ch` is already in full-width form, returns `None`.
///
/// # Example
/// ```rust
/// assert_eq!(unicode_hfwidth::to_fullwidth('a'), Some('a'));
/// assert_eq!(unicode_hfwidth::to_fullwidth('カ'), None);
/// ```
pub fn to_fullwidth(ch: char) -> Option<char> {
let ch = ch as u32;
unsafe {
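// SAFETY: as in `to_halfwidth`, every produced value is a valid Unicode
// scalar value, so the u32-to-char transmutes are sound.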
match ch {
/* Half-width variant characters */
0xff61 => Some(transmute(0x3002u32)),
0xff62 => Some(transmute(0x300cu32)),
0xff63 => Some(transmute(0x300du32)),
0xff64 => Some(transmute(0x3001u32)),
0xff65 => Some(transmute(0x30fbu32)),
0xff66 => Some(transmute(0x30f2u32)),
0xff67 => Some(transmute(0x30a1u32)),
0xff68 => Some(transmute(0x30a3u32)),
0xff69 => Some(transmute(0x30a5u32)),
0xff6a => Some(transmute(0x30a7u32)),
0xff6b => Some(transmute(0x30a9u32)),
0xff6c => Some(transmute(0x30e3u32)),
0xff6d => Some(transmute(0x30e5u32)),
0xff6e => Some(transmute(0x30e7u32)),
0xff6f => Some(transmute(0x30c3u32)),
0xff70 => Some(transmute(0x30fcu32)),
0xff71 => Some(transmute(0x30a2u32)),
0xff72 => Some(transmute(0x30a4u32)),
0xff73 => Some(transmute(0x30a6u32)),
0xff74 => Some(transmute(0x30a8u32)),
0xff75 => Some(transmute(0x30aau32)),
0xff76 => Some(transmute(0x30abu32)),
0xff77 => Some(transmute(0x30adu32)),
0xff78 => Some(transmute(0x30afu32)),
0xff79 => Some(transmute(0x30b1u32)),
0xff7a => Some(transmute(0x30b3u32)),
0xff7b => Some(transmute(0x30b5u32)),
0xff7c => Some(transmute(0x30b7u32)),
0xff7d => Some(transmute(0x30b9u32)),
0xff7e => Some(transmute(0x30bbu32)),
0xff7f => Some(transmute(0x30bdu32)),
0xff80 => Some(transmute(0x30bfu32)),
0xff81 => Some(transmute(0x30c1u32)),
0xff82 => Some(transmute(0x30c4u32)),
0xff83 => Some(transmute(0x30c6u32)),
0xff84 => Some(transmute(0x30c8u32)),
0xff85 => Some(transmute(0x30cau32)),
0xff86 => Some(transmute(0x30cbu32)),
0xff87 => Some(transmute(0x30ccu32)),
0xff88 => Some(transmute(0x30cdu32)),
0xff89 => Some(transmute(0x30ceu32)),
0xff8a => Some(transmute(0x30cfu32)),
0xff8b => Some(transmute(0x30d2u32)),
0xff8c => Some(transmute(0x30d5u32)),
0xff8d => Some(transmute(0x30d8u32)),
0xff8e => Some(transmute(0x30dbu32)),
0xff8f => Some(transmute(0x30deu32)),
0xff90 => Some(transmute(0x30dfu32)),
0xff91 => Some(transmute(0x30e0u32)),
0xff92 => Some(transmute(0x30e1u32)),
0xff93 => Some(transmute(0x30e2u32)),
0xff94 => Some(transmute(0x30e4u32)),
0xff95 => Some(transmute(0x30e6u32)),
0xff96 => Some(transmute(0x30e8u32)),
0xff97 => Some(transmute(0x30e9u32)),
0xff98 => Some(transmute(0x30eau32)),
0xff99 => Some(transmute(0x30ebu32)),
0xff9a => Some(transmute(0x30ecu32)),
0xff9b => Some(transmute(0x30edu32)),
0xff9c => Some(transmute(0x30efu32)),
0xff9d => Some(transmute(0x30f3u32)),
0xff9e => Some(transmute(0x3099u32)),
0xff9f => Some(transmute(0x309au32)),
0xffa0 => Some(transmute(0x3164u32)),
0xffa1 => Some(transmute(0x3131u32)),
0xffa2 => Some(transmute(0x3132u32)),
0xffa3 => Some(transmute(0x3133u32)),
0xffa4 => Some(transmute(0x3134u32)),
0xffa5 => Some(transmute(0x3135u32)),
0xffa6 => Some(transmute(0x3136u32)),
0xffa7 => Some(transmute(0x3137u32)),
0xffa8 => Some(transmute(0x3138u32)),
0xffa9 => Some(transmute(0x3139u32)),
0xffaa => Some(transmute(0x313au32)),
0xffab => Some(transmute(0x313bu32)),
0xffac => Some(transmute(0x313cu32)),
0xffad => Some(transmute(0x313du32)),
0xffae => Some(transmute(0x313eu32)),
0xffaf => Some(transmute(0x313fu32)),
0xffb0 => Some(transmute(0x3140u32)),
0xffb1 => Some(transmute(0x3141u32)),
0xffb2 => Some(transmute(0x3142u32)),
0xffb3 => Some(transmute(0x3143u32)),
0xffb4 => Some(transmute(0x3144u32)),
0xffb5 => Some(transmute(0x3145u32)),
0xffb6 => Some(transmute(0x3146u32)),
0xffb7 => Some(transmute(0x3147u32)),
0xffb8 => Some(transmute(0x3148u32)),
0xffb9 => Some(transmute(0x3149u32)),
0xffba => Some(transmute(0x314au32)),
0xffbb => Some(transmute(0x314bu32)),
0xffbc => Some(transmute(0x314cu32)),
0xffbd => Some(transmute(0x314du32)),
0xffbe => Some(transmute(0x314eu32)),
0xffc2 => Some(transmute(0x314fu32)),
0xffc3 => Some(transmute(0x3150u32)),
0xffc4 => Some(transmute(0x3151u32)),
0xffc5 => Some(transmute(0x3152u32)),
0xffc6 => Some(transmute(0x3153u32)),
0xffc7 => Some(transmute(0x3154u32)),
0xffca => Some(transmute(0x3155u32)),
0xffcb => Some(transmute(0x3156u32)),
0xffcc => Some(transmute(0x3157u32)),
0xffcd => Some(transmute(0x3158u32)),
0xffce => Some(transmute(0x3159u32)),
0xffcf => Some(transmute(0x315au32)),
0xffd2 => Some(transmute(0x315bu32)),
0xffd3 => Some(transmute(0x315cu32)),
0xffd4 => Some(transmute(0x315du32)),
0xffd5 => Some(transmute(0x315eu32)),
0xffd6 => Some(transmute(0x315fu32)),
0xffd7 => Some(transmute(0x3160u32)),
0xffda => Some(transmute(0x3161u32)),
0xffdb => Some(transmute(0x3162u32)),
0xffdc => Some(transmute(0x3163u32)),
0xffe8 => Some(transmute(0x2502u32)),
0xffe9 => Some(transmute(0x2190u32)),
0xffea => Some(transmute(0x2191u32)),
0xffeb => Some(transmute(0x2192u32)),
0xffec => Some(transmute(0x2193u32)),
0xffed => Some(transmute(0x25a0u32)),
0xffee => Some(transmute(0x25cbu32)),
/* Natural half-width characters */
0x0021..=0x007e => Some(transmute(ch - 0x0021 + 0xff01)),
0x2985..=0x2986 => Some(transmute(ch - 0x2985 + 0xff5f)),
0x00a2..=0x00a3 => Some(transmute(ch - 0x00a2 + 0xffe0)),
0x00ac => Some(transmute(0xffe2u32)),
0x00af => Some(transmute(0xffe3u32)),
0x00a6 => Some(transmute(0xffe4u32)),
0x00a5 => Some(transmute(0xffe5u32)),
0x20a9 => Some(transmute(0xffe6u32)),
_ => None,
}
}
}
#[test]
fn test_katakana() {
let full = "アイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン";
let half = "アイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン";
for (f, h) in full.chars().zip(half.chars()) {
assert_eq!(to_fullwidth(h).unwrap(), f);
}
}
#[test]
fn test_katakana_rev() {
let full = "アイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン";
let half = "アイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン";
for (f, h) in full.chars().zip(half.chars()) {
assert_eq!(to_halfwidth(f).unwrap(), h);
}
}
#[test]
fn test_a() {
assert_eq!(to_fullwidth('a').unwrap(), 'a');
}
| 44.430939 | 91 | 0.565096 |
16de20f65929f6a733fb68e39d54261b1ae3a7be | 6,631 | // Generated from definition io.k8s.api.core.v1.StorageOSVolumeSource
/// Represents a StorageOS persistent volume resource.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct StorageOSVolumeSource {
/// Filesystem type to mount. Must be a filesystem type supported by the host operating system. Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
pub fs_type: Option<String>,
/// Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts.
pub read_only: Option<bool>,
/// SecretRef specifies the secret to use for obtaining the StorageOS API credentials. If not specified, default values will be attempted.
pub secret_ref: Option<::v1_12::api::core::v1::LocalObjectReference>,
/// VolumeName is the human-readable name of the StorageOS volume. Volume names are only unique within a namespace.
pub volume_name: Option<String>,
/// VolumeNamespace specifies the scope of the volume within StorageOS. If no namespace is specified then the Pod's namespace will be used. This allows the Kubernetes name scoping to be mirrored within StorageOS for tighter integration. Set VolumeName to any name to override the default behaviour. Set to "default" if you are not using namespaces within StorageOS. Namespaces that do not pre-exist within StorageOS will be created.
pub volume_namespace: Option<String>,
}
impl<'de> ::serde::Deserialize<'de> for StorageOSVolumeSource {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: ::serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_fs_type,
Key_read_only,
Key_secret_ref,
Key_volume_name,
Key_volume_namespace,
Other,
}
impl<'de> ::serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: ::serde::Deserializer<'de> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: ::serde::de::Error {
Ok(match v {
"fsType" => Field::Key_fs_type,
"readOnly" => Field::Key_read_only,
"secretRef" => Field::Key_secret_ref,
"volumeName" => Field::Key_volume_name,
"volumeNamespace" => Field::Key_volume_namespace,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = StorageOSVolumeSource;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "struct StorageOSVolumeSource")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: ::serde::de::MapAccess<'de> {
let mut value_fs_type: Option<String> = None;
let mut value_read_only: Option<bool> = None;
let mut value_secret_ref: Option<::v1_12::api::core::v1::LocalObjectReference> = None;
let mut value_volume_name: Option<String> = None;
let mut value_volume_namespace: Option<String> = None;
while let Some(key) = ::serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_fs_type => value_fs_type = ::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_read_only => value_read_only = ::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_secret_ref => value_secret_ref = ::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_volume_name => value_volume_name = ::serde::de::MapAccess::next_value(&mut map)?,
Field::Key_volume_namespace => value_volume_namespace = ::serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: ::serde::de::IgnoredAny = ::serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(StorageOSVolumeSource {
fs_type: value_fs_type,
read_only: value_read_only,
secret_ref: value_secret_ref,
volume_name: value_volume_name,
volume_namespace: value_volume_namespace,
})
}
}
deserializer.deserialize_struct(
"StorageOSVolumeSource",
&[
"fsType",
"readOnly",
"secretRef",
"volumeName",
"volumeNamespace",
],
Visitor,
)
}
}
impl ::serde::Serialize for StorageOSVolumeSource {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: ::serde::Serializer {
let mut state = serializer.serialize_struct(
"StorageOSVolumeSource",
0 +
self.fs_type.as_ref().map_or(0, |_| 1) +
self.read_only.as_ref().map_or(0, |_| 1) +
self.secret_ref.as_ref().map_or(0, |_| 1) +
self.volume_name.as_ref().map_or(0, |_| 1) +
self.volume_namespace.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.fs_type {
::serde::ser::SerializeStruct::serialize_field(&mut state, "fsType", value)?;
}
if let Some(value) = &self.read_only {
::serde::ser::SerializeStruct::serialize_field(&mut state, "readOnly", value)?;
}
if let Some(value) = &self.secret_ref {
::serde::ser::SerializeStruct::serialize_field(&mut state, "secretRef", value)?;
}
if let Some(value) = &self.volume_name {
::serde::ser::SerializeStruct::serialize_field(&mut state, "volumeName", value)?;
}
if let Some(value) = &self.volume_namespace {
::serde::ser::SerializeStruct::serialize_field(&mut state, "volumeNamespace", value)?;
}
::serde::ser::SerializeStruct::end(state)
}
}
| 47.028369 | 438 | 0.565526 |
bb9a4eccc28425e022b7674e15f5f1d29d245830 | 5,021 | use std::marker::PhantomData;
use std::rc::Rc;
use super::result::PgResult;
use super::row::PgRow;
use super::PgConnection;
/// The type returned by various [`Connection`] methods.
/// Acts as an iterator over `T`.
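///
/// A rough usage sketch (the query is hypothetical; see the `fun_with_row_iters`
/// test below for a complete, runnable example):
///
/// ```ignore
/// let mut row_iter = conn.load(&query)?;
/// while let Some(row) = row_iter.next() {
///     let row = row?;
///     // deserialize the fields of `row` as needed
/// }
/// ```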
#[allow(missing_debug_implementations)]
pub struct Cursor<'a> {
current_row: usize,
db_result: Rc<PgResult>,
// We reference the connection here so that
// we could possibly use it in future changes
// to the cursor.
// This may be required to conditionally implement
// loading items using libpq's single row mode.
p: PhantomData<&'a mut PgConnection>,
}
impl Cursor<'_> {
pub(super) fn new(db_result: PgResult) -> Self {
Cursor {
current_row: 0,
db_result: Rc::new(db_result),
p: PhantomData,
}
}
}
impl ExactSizeIterator for Cursor<'_> {
fn len(&self) -> usize {
self.db_result.num_rows() - self.current_row
}
}
impl Iterator for Cursor<'_> {
type Item = crate::QueryResult<PgRow>;
fn next(&mut self) -> Option<Self::Item> {
if self.current_row < self.db_result.num_rows() {
let row = self.db_result.clone().get_row(self.current_row);
self.current_row += 1;
Some(Ok(row))
} else {
None
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.current_row = (self.current_row + n).min(self.db_result.num_rows());
self.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
fn count(self) -> usize
where
Self: Sized,
{
self.len()
}
}
#[test]
fn fun_with_row_iters() {
crate::table! {
#[allow(unused_parens)]
users(id) {
id -> Integer,
name -> Text,
}
}
use crate::deserialize::{FromSql, FromSqlRow};
use crate::pg::Pg;
use crate::prelude::*;
use crate::row::{Field, Row};
use crate::sql_types;
let conn = &mut crate::test_helpers::connection();
crate::sql_query(
"CREATE TABLE IF NOT EXISTS users(id INTEGER PRIMARY KEY, name TEXT NOT NULL);",
)
.execute(conn)
.unwrap();
crate::insert_into(users::table)
.values(vec![
(users::id.eq(1), users::name.eq("Sean")),
(users::id.eq(2), users::name.eq("Tess")),
])
.execute(conn)
.unwrap();
let query = users::table.select((users::id, users::name));
let expected = vec![(1, String::from("Sean")), (2, String::from("Tess"))];
let row_iter = conn.load(&query).unwrap();
for (row, expected) in row_iter.zip(&expected) {
let row = row.unwrap();
let deserialized = <(i32, String) as FromSqlRow<
(sql_types::Integer, sql_types::Text),
_,
>>::build_from_row(&row)
.unwrap();
assert_eq!(&deserialized, expected);
}
{
let collected_rows = conn.load(&query).unwrap().collect::<Vec<_>>();
for (row, expected) in collected_rows.iter().zip(&expected) {
let deserialized = row
.as_ref()
.map(|row| {
<(i32, String) as FromSqlRow<
(sql_types::Integer, sql_types::Text),
_,
>>::build_from_row(row).unwrap()
})
.unwrap();
assert_eq!(&deserialized, expected);
}
}
let mut row_iter = conn.load(&query).unwrap();
let first_row = row_iter.next().unwrap().unwrap();
let first_fields = (first_row.get(0).unwrap(), first_row.get(1).unwrap());
let first_values = (first_fields.0.value(), first_fields.1.value());
let second_row = row_iter.next().unwrap().unwrap();
let second_fields = (second_row.get(0).unwrap(), second_row.get(1).unwrap());
let second_values = (second_fields.0.value(), second_fields.1.value());
assert!(row_iter.next().is_none());
assert_eq!(
<i32 as FromSql<sql_types::Integer, Pg>>::from_nullable_sql(first_values.0).unwrap(),
expected[0].0
);
assert_eq!(
<String as FromSql<sql_types::Text, Pg>>::from_nullable_sql(first_values.1).unwrap(),
expected[0].1
);
assert_eq!(
<i32 as FromSql<sql_types::Integer, Pg>>::from_nullable_sql(second_values.0).unwrap(),
expected[1].0
);
assert_eq!(
<String as FromSql<sql_types::Text, Pg>>::from_nullable_sql(second_values.1).unwrap(),
expected[1].1
);
let first_fields = (first_row.get(0).unwrap(), first_row.get(1).unwrap());
let first_values = (first_fields.0.value(), first_fields.1.value());
assert_eq!(
<i32 as FromSql<sql_types::Integer, Pg>>::from_nullable_sql(first_values.0).unwrap(),
expected[0].0
);
assert_eq!(
<String as FromSql<sql_types::Text, Pg>>::from_nullable_sql(first_values.1).unwrap(),
expected[0].1
);
}
| 28.207865 | 94 | 0.571201 |
bb8c5f175b8c0cb9304c455217a36b495c4ff148 | 47,991 | //! # The MIR Visitor
//!
//! ## Overview
//!
//! There are two visitors, one for immutable and one for mutable references,
//! but both are generated by the following macro. The code is written according
//! to the following conventions:
//!
//! - introduce a `visit_foo` and a `super_foo` method for every MIR type
//! - `visit_foo`, by default, calls `super_foo`
//! - `super_foo`, by default, destructures the `foo` and calls `visit_foo`
//!
//! This allows you as a user to override `visit_foo` for the types you are
//! interested in, and invoke (within that method) `self.super_foo`
//! to get the default behavior. Just as in an OO
//! language, you should never call `super` methods ordinarily except
//! in that circumstance.
//!
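//! As a small sketch (the `TypeCollector` type below is illustrative only, not
//! part of this module), a read-only visitor that just records every type it
//! sees would override `visit_ty` and defer to `super_ty`:
//!
//! ```rust
//! struct TypeCollector<'tcx> {
//!     tys: Vec<Ty<'tcx>>,
//! }
//!
//! impl<'tcx> Visitor<'tcx> for TypeCollector<'tcx> {
//!     fn visit_ty(&mut self, ty: Ty<'tcx>, _: TyContext) {
//!         // Record the type, then fall back to the default behavior.
//!         self.tys.push(ty);
//!         self.super_ty(ty);
//!     }
//! }
//! ```
//!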
//! For the most part, we do not destructure things external to the
//! MIR, e.g., types, spans, etc, but simply visit them and stop. This
//! avoids duplication with other visitors like `TypeFoldable`.
//!
//! ## Updating
//!
//! The code is written in a very deliberate style intended to minimize
//! the chance of things being overlooked. You'll notice that we always
//! use pattern matching to reference fields and we ensure that all
//! matches are exhaustive.
//!
//! For example, the `super_basic_block_data` method begins like this:
//!
//! ```rust
//! fn super_basic_block_data(&mut self,
//! block: BasicBlock,
//! data: & $($mutability)? BasicBlockData<'tcx>) {
//! let BasicBlockData {
//! statements,
//! terminator,
//! is_cleanup: _
//! } = *data;
//!
//! for statement in statements {
//! self.visit_statement(block, statement);
//! }
//!
//! ...
//! }
//! ```
//!
//! Here we used `let BasicBlockData { <fields> } = *data` deliberately,
//! rather than writing `data.statements` in the body. This is because if one
//! adds a new field to `BasicBlockData`, one will be forced to revise this code,
//! and hence one will (hopefully) invoke the correct visit methods (if any).
//!
//! For this to work, ALL MATCHES MUST BE EXHAUSTIVE IN FIELDS AND VARIANTS.
//! That means you never write `..` to skip over fields, nor do you write `_`
//! to skip over variants in a `match`.
//!
//! The only place that `_` is acceptable is to match a field (or
//! variant argument) that does not require visiting, as in
//! `is_cleanup` above.
use crate::mir::*;
use crate::ty::subst::SubstsRef;
use crate::ty::{CanonicalUserTypeAnnotation, Ty};
use rustc_span::Span;
macro_rules! make_mir_visitor {
($visitor_trait_name:ident, $($mutability:ident)?) => {
pub trait $visitor_trait_name<'tcx> {
// Override these, and call `self.super_xxx` to revert back to the
// default behavior.
fn visit_body(
&mut self,
body: &$($mutability)? Body<'tcx>,
) {
self.super_body(body);
}
fn visit_basic_block_data(&mut self,
block: BasicBlock,
data: & $($mutability)? BasicBlockData<'tcx>) {
self.super_basic_block_data(block, data);
}
fn visit_source_scope_data(&mut self,
scope_data: & $($mutability)? SourceScopeData<'tcx>) {
self.super_source_scope_data(scope_data);
}
fn visit_statement(&mut self,
statement: & $($mutability)? Statement<'tcx>,
location: Location) {
self.super_statement(statement, location);
}
fn visit_assign(&mut self,
place: & $($mutability)? Place<'tcx>,
rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_assign(place, rvalue, location);
}
fn visit_terminator(&mut self,
terminator: & $($mutability)? Terminator<'tcx>,
location: Location) {
self.super_terminator(terminator, location);
}
fn visit_assert_message(&mut self,
msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
self.super_assert_message(msg, location);
}
fn visit_rvalue(&mut self,
rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
self.super_rvalue(rvalue, location);
}
fn visit_operand(&mut self,
operand: & $($mutability)? Operand<'tcx>,
location: Location) {
self.super_operand(operand, location);
}
fn visit_ascribe_user_ty(&mut self,
place: & $($mutability)? Place<'tcx>,
variance: & $($mutability)? ty::Variance,
user_ty: & $($mutability)? UserTypeProjection,
location: Location) {
self.super_ascribe_user_ty(place, variance, user_ty, location);
}
fn visit_coverage(&mut self,
coverage: & $($mutability)? Coverage,
location: Location) {
self.super_coverage(coverage, location);
}
fn visit_retag(&mut self,
kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
location: Location) {
self.super_retag(kind, place, location);
}
fn visit_place(&mut self,
place: & $($mutability)? Place<'tcx>,
context: PlaceContext,
location: Location) {
self.super_place(place, context, location);
}
visit_place_fns!($($mutability)?);
fn visit_constant(&mut self,
constant: & $($mutability)? Constant<'tcx>,
location: Location) {
self.super_constant(constant, location);
}
fn visit_span(&mut self,
span: & $($mutability)? Span) {
self.super_span(span);
}
fn visit_source_info(&mut self,
source_info: & $($mutability)? SourceInfo) {
self.super_source_info(source_info);
}
fn visit_ty(&mut self,
ty: $(& $mutability)? Ty<'tcx>,
_: TyContext) {
self.super_ty(ty);
}
fn visit_user_type_projection(
&mut self,
ty: & $($mutability)? UserTypeProjection,
) {
self.super_user_type_projection(ty);
}
fn visit_user_type_annotation(
&mut self,
index: UserTypeAnnotationIndex,
ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
self.super_user_type_annotation(index, ty);
}
fn visit_region(&mut self,
region: & $($mutability)? ty::Region<'tcx>,
_: Location) {
self.super_region(region);
}
fn visit_const(&mut self,
constant: & $($mutability)? &'tcx ty::Const<'tcx>,
_: Location) {
self.super_const(constant);
}
fn visit_substs(&mut self,
substs: & $($mutability)? SubstsRef<'tcx>,
_: Location) {
self.super_substs(substs);
}
fn visit_local_decl(&mut self,
local: Local,
local_decl: & $($mutability)? LocalDecl<'tcx>) {
self.super_local_decl(local, local_decl);
}
fn visit_var_debug_info(&mut self,
var_debug_info: & $($mutability)* VarDebugInfo<'tcx>) {
self.super_var_debug_info(var_debug_info);
}
fn visit_local(&mut self,
_local: & $($mutability)? Local,
_context: PlaceContext,
_location: Location) {
}
fn visit_source_scope(&mut self,
scope: & $($mutability)? SourceScope) {
self.super_source_scope(scope);
}
// The `super_xxx` methods comprise the default behavior and are
// not meant to be overridden.
fn super_body(
&mut self,
body: &$($mutability)? Body<'tcx>,
) {
let span = body.span;
if let Some(yield_ty) = &$($mutability)? body.yield_ty {
self.visit_ty(
yield_ty,
TyContext::YieldTy(SourceInfo::outermost(span))
);
}
// for best performance, we want to use an iterator rather
// than a for-loop, to avoid calling `body::Body::invalidate` for
// each basic block.
macro_rules! basic_blocks {
(mut) => (body.basic_blocks_mut().iter_enumerated_mut());
() => (body.basic_blocks().iter_enumerated());
}
for (bb, data) in basic_blocks!($($mutability)?) {
self.visit_basic_block_data(bb, data);
}
for scope in &$($mutability)? body.source_scopes {
self.visit_source_scope_data(scope);
}
self.visit_ty(
&$($mutability)? body.return_ty(),
TyContext::ReturnTy(SourceInfo::outermost(body.span))
);
for local in body.local_decls.indices() {
self.visit_local_decl(local, & $($mutability)? body.local_decls[local]);
}
macro_rules! type_annotations {
(mut) => (body.user_type_annotations.iter_enumerated_mut());
() => (body.user_type_annotations.iter_enumerated());
}
for (index, annotation) in type_annotations!($($mutability)?) {
self.visit_user_type_annotation(
index, annotation
);
}
for var_debug_info in &$($mutability)? body.var_debug_info {
self.visit_var_debug_info(var_debug_info);
}
self.visit_span(&$($mutability)? body.span);
for const_ in &$($mutability)? body.required_consts {
let location = START_BLOCK.start_location();
self.visit_constant(const_, location);
}
}
fn super_basic_block_data(&mut self,
block: BasicBlock,
data: & $($mutability)? BasicBlockData<'tcx>) {
let BasicBlockData {
statements,
terminator,
is_cleanup: _
} = data;
let mut index = 0;
for statement in statements {
let location = Location { block, statement_index: index };
self.visit_statement(statement, location);
index += 1;
}
if let Some(terminator) = terminator {
let location = Location { block, statement_index: index };
self.visit_terminator(terminator, location);
}
}
fn super_source_scope_data(
&mut self,
scope_data: & $($mutability)? SourceScopeData<'tcx>,
) {
let SourceScopeData {
span,
parent_scope,
inlined,
inlined_parent_scope,
local_data: _,
} = scope_data;
self.visit_span(span);
if let Some(parent_scope) = parent_scope {
self.visit_source_scope(parent_scope);
}
if let Some((callee, callsite_span)) = inlined {
let location = START_BLOCK.start_location();
self.visit_span(callsite_span);
let ty::Instance { def: callee_def, substs: callee_substs } = callee;
match callee_def {
ty::InstanceDef::Item(_def_id) => {}
ty::InstanceDef::Intrinsic(_def_id) |
ty::InstanceDef::VtableShim(_def_id) |
ty::InstanceDef::ReifyShim(_def_id) |
ty::InstanceDef::Virtual(_def_id, _) |
ty::InstanceDef::ClosureOnceShim { call_once: _def_id } |
ty::InstanceDef::DropGlue(_def_id, None) => {}
ty::InstanceDef::FnPtrShim(_def_id, ty) |
ty::InstanceDef::DropGlue(_def_id, Some(ty)) |
ty::InstanceDef::CloneShim(_def_id, ty) => {
// FIXME(eddyb) use a better `TyContext` here.
self.visit_ty(ty, TyContext::Location(location));
}
}
self.visit_substs(callee_substs, location);
}
if let Some(inlined_parent_scope) = inlined_parent_scope {
self.visit_source_scope(inlined_parent_scope);
}
}
fn super_statement(&mut self,
statement: & $($mutability)? Statement<'tcx>,
location: Location) {
let Statement {
source_info,
kind,
} = statement;
self.visit_source_info(source_info);
match kind {
StatementKind::Assign(
box(ref $($mutability)? place, ref $($mutability)? rvalue)
) => {
self.visit_assign(place, rvalue, location);
}
StatementKind::FakeRead(_, place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
location
);
}
StatementKind::SetDiscriminant { place, .. } => {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location
);
}
StatementKind::StorageLive(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageLive),
location
);
}
StatementKind::StorageDead(local) => {
self.visit_local(
local,
PlaceContext::NonUse(NonUseContext::StorageDead),
location
);
}
StatementKind::LlvmInlineAsm(asm) => {
for output in & $($mutability)? asm.outputs[..] {
self.visit_place(
output,
PlaceContext::MutatingUse(MutatingUseContext::AsmOutput),
location
);
}
for (span, input) in & $($mutability)? asm.inputs[..] {
self.visit_span(span);
self.visit_operand(input, location);
}
}
StatementKind::Retag(kind, place) => {
self.visit_retag(kind, place, location);
}
StatementKind::AscribeUserType(
box(ref $($mutability)? place, ref $($mutability)? user_ty),
variance
) => {
self.visit_ascribe_user_ty(place, variance, user_ty, location);
}
StatementKind::Coverage(coverage) => {
self.visit_coverage(
coverage,
location
)
}
StatementKind::Nop => {}
}
}
fn super_assign(&mut self,
place: &$($mutability)? Place<'tcx>,
rvalue: &$($mutability)? Rvalue<'tcx>,
location: Location) {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location
);
self.visit_rvalue(rvalue, location);
}
fn super_terminator(&mut self,
terminator: &$($mutability)? Terminator<'tcx>,
location: Location) {
let Terminator { source_info, kind } = terminator;
self.visit_source_info(source_info);
match kind {
TerminatorKind::Goto { .. } |
TerminatorKind::Resume |
TerminatorKind::Abort |
TerminatorKind::GeneratorDrop |
TerminatorKind::Unreachable |
TerminatorKind::FalseEdge { .. } |
TerminatorKind::FalseUnwind { .. } => {
}
TerminatorKind::Return => {
// `return` logically moves from the return place `_0`. Note that the place
// cannot be changed by any visitor, though.
let $($mutability)? local = RETURN_PLACE;
self.visit_local(
& $($mutability)? local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Move),
location,
);
assert_eq!(
local,
RETURN_PLACE,
"`MutVisitor` tried to mutate return place of `return` terminator"
);
}
TerminatorKind::SwitchInt {
discr,
switch_ty,
targets: _
} => {
self.visit_operand(discr, location);
self.visit_ty(switch_ty, TyContext::Location(location));
}
TerminatorKind::Drop {
place,
target: _,
unwind: _,
} => {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
location
);
}
TerminatorKind::DropAndReplace {
place,
value,
target: _,
unwind: _,
} => {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Drop),
location
);
self.visit_operand(value, location);
}
TerminatorKind::Call {
func,
args,
destination,
cleanup: _,
from_hir_call: _,
fn_span: _
} => {
self.visit_operand(func, location);
for arg in args {
self.visit_operand(arg, location);
}
if let Some((destination, _)) = destination {
self.visit_place(
destination,
PlaceContext::MutatingUse(MutatingUseContext::Call),
location
);
}
}
TerminatorKind::Assert {
cond,
expected: _,
msg,
target: _,
cleanup: _,
} => {
self.visit_operand(cond, location);
self.visit_assert_message(msg, location);
}
TerminatorKind::Yield {
value,
resume: _,
resume_arg,
drop: _,
} => {
self.visit_operand(value, location);
self.visit_place(
resume_arg,
PlaceContext::MutatingUse(MutatingUseContext::Yield),
location,
);
}
TerminatorKind::InlineAsm {
template: _,
operands,
options: _,
line_spans: _,
destination: _,
} => {
for op in operands {
match op {
InlineAsmOperand::In { value, .. }
| InlineAsmOperand::Const { value } => {
self.visit_operand(value, location);
}
InlineAsmOperand::Out { place, .. } => {
if let Some(place) = place {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location,
);
}
}
InlineAsmOperand::InOut { in_value, out_place, .. } => {
self.visit_operand(in_value, location);
if let Some(out_place) = out_place {
self.visit_place(
out_place,
PlaceContext::MutatingUse(MutatingUseContext::Store),
location,
);
}
}
InlineAsmOperand::SymFn { value } => {
self.visit_constant(value, location);
}
InlineAsmOperand::SymStatic { def_id: _ } => {}
}
}
}
}
}
fn super_assert_message(&mut self,
msg: & $($mutability)? AssertMessage<'tcx>,
location: Location) {
use crate::mir::AssertKind::*;
match msg {
BoundsCheck { len, index } => {
self.visit_operand(len, location);
self.visit_operand(index, location);
}
Overflow(_, l, r) => {
self.visit_operand(l, location);
self.visit_operand(r, location);
}
OverflowNeg(op) | DivisionByZero(op) | RemainderByZero(op) => {
self.visit_operand(op, location);
}
ResumedAfterReturn(_) | ResumedAfterPanic(_) => {
// Nothing to visit
}
}
}
fn super_rvalue(&mut self,
rvalue: & $($mutability)? Rvalue<'tcx>,
location: Location) {
match rvalue {
Rvalue::Use(operand) => {
self.visit_operand(operand, location);
}
Rvalue::Repeat(value, _) => {
self.visit_operand(value, location);
}
Rvalue::ThreadLocalRef(_) => {}
Rvalue::Ref(r, bk, path) => {
self.visit_region(r, location);
let ctx = match bk {
BorrowKind::Shared => PlaceContext::NonMutatingUse(
NonMutatingUseContext::SharedBorrow
),
BorrowKind::Shallow => PlaceContext::NonMutatingUse(
NonMutatingUseContext::ShallowBorrow
),
BorrowKind::Unique => PlaceContext::NonMutatingUse(
NonMutatingUseContext::UniqueBorrow
),
BorrowKind::Mut { .. } =>
PlaceContext::MutatingUse(MutatingUseContext::Borrow),
};
self.visit_place(path, ctx, location);
}
Rvalue::AddressOf(m, path) => {
let ctx = match m {
Mutability::Mut => PlaceContext::MutatingUse(
MutatingUseContext::AddressOf
),
Mutability::Not => PlaceContext::NonMutatingUse(
NonMutatingUseContext::AddressOf
),
};
self.visit_place(path, ctx, location);
}
Rvalue::Len(path) => {
self.visit_place(
path,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
location
);
}
Rvalue::Cast(_cast_kind, operand, ty) => {
self.visit_operand(operand, location);
self.visit_ty(ty, TyContext::Location(location));
}
Rvalue::BinaryOp(_bin_op, lhs, rhs)
| Rvalue::CheckedBinaryOp(_bin_op, lhs, rhs) => {
self.visit_operand(lhs, location);
self.visit_operand(rhs, location);
}
Rvalue::UnaryOp(_un_op, op) => {
self.visit_operand(op, location);
}
Rvalue::Discriminant(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Inspect),
location
);
}
Rvalue::NullaryOp(_op, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
Rvalue::Aggregate(kind, operands) => {
let kind = &$($mutability)? **kind;
match kind {
AggregateKind::Array(ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
AggregateKind::Tuple => {
}
AggregateKind::Adt(
_adt_def,
_variant_index,
substs,
_user_substs,
_active_field_index
) => {
self.visit_substs(substs, location);
}
AggregateKind::Closure(
_,
closure_substs
) => {
self.visit_substs(closure_substs, location);
}
AggregateKind::Generator(
_,
generator_substs,
_movability,
) => {
self.visit_substs(generator_substs, location);
}
}
for operand in operands {
self.visit_operand(operand, location);
}
}
}
}
fn super_operand(&mut self,
operand: & $($mutability)? Operand<'tcx>,
location: Location) {
match operand {
Operand::Copy(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location
);
}
Operand::Move(place) => {
self.visit_place(
place,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Move),
location
);
}
Operand::Constant(constant) => {
self.visit_constant(constant, location);
}
}
}
fn super_ascribe_user_ty(&mut self,
place: & $($mutability)? Place<'tcx>,
_variance: & $($mutability)? ty::Variance,
user_ty: & $($mutability)? UserTypeProjection,
location: Location) {
self.visit_place(
place,
PlaceContext::NonUse(NonUseContext::AscribeUserTy),
location
);
self.visit_user_type_projection(user_ty);
}
fn super_coverage(&mut self,
_coverage: & $($mutability)? Coverage,
_location: Location) {
}
fn super_retag(&mut self,
_kind: & $($mutability)? RetagKind,
place: & $($mutability)? Place<'tcx>,
location: Location) {
self.visit_place(
place,
PlaceContext::MutatingUse(MutatingUseContext::Retag),
location,
);
}
fn super_local_decl(&mut self,
local: Local,
local_decl: & $($mutability)? LocalDecl<'tcx>) {
let LocalDecl {
mutability: _,
ty,
user_ty,
source_info,
internal: _,
local_info: _,
is_block_tail: _,
} = local_decl;
self.visit_ty(ty, TyContext::LocalDecl {
local,
source_info: *source_info,
});
if let Some(user_ty) = user_ty {
for (user_ty, _) in & $($mutability)? user_ty.contents {
self.visit_user_type_projection(user_ty);
}
}
self.visit_source_info(source_info);
}
fn super_var_debug_info(&mut self,
var_debug_info: & $($mutability)? VarDebugInfo<'tcx>) {
let VarDebugInfo {
name: _,
source_info,
value,
} = var_debug_info;
self.visit_source_info(source_info);
let location = START_BLOCK.start_location();
match value {
VarDebugInfoContents::Const(c) => self.visit_constant(c, location),
VarDebugInfoContents::Place(place) =>
self.visit_place(
place,
PlaceContext::NonUse(NonUseContext::VarDebugInfo),
location
),
}
}
fn super_source_scope(&mut self,
_scope: & $($mutability)? SourceScope) {
}
fn super_constant(&mut self,
constant: & $($mutability)? Constant<'tcx>,
location: Location) {
let Constant {
span,
user_ty,
literal,
} = constant;
self.visit_span(span);
drop(user_ty); // no visit method for this
self.visit_const(literal, location);
}
fn super_span(&mut self, _span: & $($mutability)? Span) {
}
fn super_source_info(&mut self, source_info: & $($mutability)? SourceInfo) {
let SourceInfo {
span,
scope,
} = source_info;
self.visit_span(span);
self.visit_source_scope(scope);
}
fn super_user_type_projection(
&mut self,
_ty: & $($mutability)? UserTypeProjection,
) {
}
fn super_user_type_annotation(
&mut self,
_index: UserTypeAnnotationIndex,
ty: & $($mutability)? CanonicalUserTypeAnnotation<'tcx>,
) {
self.visit_span(& $($mutability)? ty.span);
self.visit_ty(& $($mutability)? ty.inferred_ty, TyContext::UserTy(ty.span));
}
fn super_ty(&mut self, _ty: $(& $mutability)? Ty<'tcx>) {
}
fn super_region(&mut self, _region: & $($mutability)? ty::Region<'tcx>) {
}
fn super_const(&mut self, _const: & $($mutability)? &'tcx ty::Const<'tcx>) {
}
fn super_substs(&mut self, _substs: & $($mutability)? SubstsRef<'tcx>) {
}
// Convenience methods
fn visit_location(
&mut self,
body: &$($mutability)? Body<'tcx>,
location: Location
) {
macro_rules! basic_blocks {
(mut) => (body.basic_blocks_mut());
() => (body.basic_blocks());
}
let basic_block = & $($mutability)? basic_blocks!($($mutability)?)[location.block];
if basic_block.statements.len() == location.statement_index {
if let Some(ref $($mutability)? terminator) = basic_block.terminator {
self.visit_terminator(terminator, location)
}
} else {
let statement = & $($mutability)?
basic_block.statements[location.statement_index];
self.visit_statement(statement, location)
}
}
}
}
}
macro_rules! visit_place_fns {
(mut) => {
fn tcx<'a>(&'a self) -> TyCtxt<'tcx>;
fn super_place(
&mut self,
place: &mut Place<'tcx>,
context: PlaceContext,
location: Location,
) {
self.visit_local(&mut place.local, context, location);
if let Some(new_projection) = self.process_projection(&place.projection, location) {
place.projection = self.tcx().intern_place_elems(&new_projection);
}
}
fn process_projection(
&mut self,
projection: &'a [PlaceElem<'tcx>],
location: Location,
) -> Option<Vec<PlaceElem<'tcx>>> {
let mut projection = Cow::Borrowed(projection);
for i in 0..projection.len() {
if let Some(&elem) = projection.get(i) {
if let Some(elem) = self.process_projection_elem(elem, location) {
// This converts the borrowed projection into `Cow::Owned(_)` and returns a
// clone of the projection so we can mutate and reintern later.
let vec = projection.to_mut();
vec[i] = elem;
}
}
}
match projection {
Cow::Borrowed(_) => None,
Cow::Owned(vec) => Some(vec),
}
}
fn process_projection_elem(
&mut self,
elem: PlaceElem<'tcx>,
location: Location,
) -> Option<PlaceElem<'tcx>> {
match elem {
PlaceElem::Index(local) => {
let mut new_local = local;
self.visit_local(
&mut new_local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location,
);
if new_local == local { None } else { Some(PlaceElem::Index(new_local)) }
}
PlaceElem::Deref
| PlaceElem::Field(..)
| PlaceElem::ConstantIndex { .. }
| PlaceElem::Subslice { .. }
| PlaceElem::Downcast(..) => None,
}
}
};
() => {
fn visit_projection(
&mut self,
place_ref: PlaceRef<'tcx>,
context: PlaceContext,
location: Location,
) {
self.super_projection(place_ref, context, location);
}
fn visit_projection_elem(
&mut self,
local: Local,
proj_base: &[PlaceElem<'tcx>],
elem: PlaceElem<'tcx>,
context: PlaceContext,
location: Location,
) {
self.super_projection_elem(local, proj_base, elem, context, location);
}
fn super_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
let mut context = context;
if !place.projection.is_empty() {
if context.is_use() {
// ^ Only change the context if it is a real use, not a "use" in debuginfo.
context = if context.is_mutating_use() {
PlaceContext::MutatingUse(MutatingUseContext::Projection)
} else {
PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection)
};
}
}
self.visit_local(&place.local, context, location);
self.visit_projection(place.as_ref(), context, location);
}
fn super_projection(
&mut self,
place_ref: PlaceRef<'tcx>,
context: PlaceContext,
location: Location,
) {
// FIXME: Use PlaceRef::iter_projections, once that exists.
let mut cursor = place_ref.projection;
while let &[ref proj_base @ .., elem] = cursor {
cursor = proj_base;
self.visit_projection_elem(place_ref.local, cursor, elem, context, location);
}
}
fn super_projection_elem(
&mut self,
_local: Local,
_proj_base: &[PlaceElem<'tcx>],
elem: PlaceElem<'tcx>,
_context: PlaceContext,
location: Location,
) {
match elem {
ProjectionElem::Field(_field, ty) => {
self.visit_ty(ty, TyContext::Location(location));
}
ProjectionElem::Index(local) => {
self.visit_local(
&local,
PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy),
location,
);
}
ProjectionElem::Deref
| ProjectionElem::Subslice { from: _, to: _, from_end: _ }
| ProjectionElem::ConstantIndex { offset: _, min_length: _, from_end: _ }
| ProjectionElem::Downcast(_, _) => {}
}
}
};
}
make_mir_visitor!(Visitor,);
make_mir_visitor!(MutVisitor, mut);
pub trait MirVisitable<'tcx> {
fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>);
}
impl<'tcx> MirVisitable<'tcx> for Statement<'tcx> {
fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) {
visitor.visit_statement(self, location)
}
}
impl<'tcx> MirVisitable<'tcx> for Terminator<'tcx> {
fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) {
visitor.visit_terminator(self, location)
}
}
impl<'tcx> MirVisitable<'tcx> for Option<Terminator<'tcx>> {
fn apply(&self, location: Location, visitor: &mut dyn Visitor<'tcx>) {
visitor.visit_terminator(self.as_ref().unwrap(), location)
}
}
/// Extra information passed to `visit_ty` and friends to give context
/// about where the type etc appears.
#[derive(Debug)]
pub enum TyContext {
LocalDecl {
/// The index of the local variable we are visiting.
local: Local,
/// The source location where this local variable was declared.
source_info: SourceInfo,
},
/// The inferred type of a user type annotation.
UserTy(Span),
/// The return type of the function.
ReturnTy(SourceInfo),
YieldTy(SourceInfo),
/// A type found at some location.
Location(Location),
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum NonMutatingUseContext {
/// Being inspected in some way, like loading a len.
Inspect,
/// Consumed as part of an operand.
Copy,
/// Consumed as part of an operand.
Move,
/// Shared borrow.
SharedBorrow,
/// Shallow borrow.
ShallowBorrow,
/// Unique borrow.
UniqueBorrow,
/// AddressOf for *const pointer.
AddressOf,
/// Used as base for another place, e.g., `x` in `x.y`. Will not mutate the place.
/// For example, the projection `x.y` is not marked as a mutation in these cases:
///
/// z = x.y;
/// f(&x.y);
///
Projection,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum MutatingUseContext {
/// Appears as LHS of an assignment.
Store,
/// Can often be treated as a `Store`, but needs to be separate because
/// ASM is allowed to read outputs as well, so a `Store`-`AsmOutput` sequence
/// cannot be simplified the way a `Store`-`Store` can be.
AsmOutput,
/// Destination of a call.
Call,
/// Destination of a yield.
Yield,
/// Being dropped.
Drop,
/// Mutable borrow.
Borrow,
/// AddressOf for *mut pointer.
AddressOf,
/// Used as base for another place, e.g., `x` in `x.y`. Could potentially mutate the place.
/// For example, the projection `x.y` is marked as a mutation in these cases:
///
/// x.y = ...;
/// f(&mut x.y);
///
Projection,
/// Retagging, a "Stacked Borrows" shadow state operation
Retag,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum NonUseContext {
/// Starting a storage live range.
StorageLive,
/// Ending a storage live range.
StorageDead,
/// User type annotation assertions for NLL.
AscribeUserTy,
/// The data of an user variable, for debug info.
VarDebugInfo,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum PlaceContext {
NonMutatingUse(NonMutatingUseContext),
MutatingUse(MutatingUseContext),
NonUse(NonUseContext),
}
impl PlaceContext {
/// Returns `true` if this place context represents a drop.
#[inline]
pub fn is_drop(&self) -> bool {
matches!(self, PlaceContext::MutatingUse(MutatingUseContext::Drop))
}
/// Returns `true` if this place context represents a borrow.
pub fn is_borrow(&self) -> bool {
matches!(
self,
PlaceContext::NonMutatingUse(
NonMutatingUseContext::SharedBorrow
| NonMutatingUseContext::ShallowBorrow
| NonMutatingUseContext::UniqueBorrow
) | PlaceContext::MutatingUse(MutatingUseContext::Borrow)
)
}
/// Returns `true` if this place context represents a storage live or storage dead marker.
#[inline]
pub fn is_storage_marker(&self) -> bool {
matches!(
self,
PlaceContext::NonUse(NonUseContext::StorageLive | NonUseContext::StorageDead)
)
}
/// Returns `true` if this place context represents a use that potentially changes the value.
#[inline]
pub fn is_mutating_use(&self) -> bool {
matches!(self, PlaceContext::MutatingUse(..))
}
/// Returns `true` if this place context represents a use that does not change the value.
#[inline]
pub fn is_nonmutating_use(&self) -> bool {
matches!(self, PlaceContext::NonMutatingUse(..))
}
/// Returns `true` if this place context represents a use.
#[inline]
pub fn is_use(&self) -> bool {
!matches!(self, PlaceContext::NonUse(..))
}
/// Returns `true` if this place context represents an assignment statement.
pub fn is_place_assignment(&self) -> bool {
matches!(
self,
PlaceContext::MutatingUse(
MutatingUseContext::Store
| MutatingUseContext::Call
| MutatingUseContext::AsmOutput,
)
)
}
}
| 38.088095 | 99 | 0.435915 |
5bbbbbe1e090314ea83b94a0fcc95897ed10475e | 1,676 | use std::path::Path;
use anyhow::Result;
use parser::{
parse::{Encoding, StandardEncoder, UnsignedLEB128Encoder},
types::metadata::shared::BufferSpec,
};
use crate::parse_rows;
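/// Reads every part file named in `part_files` from `path` and parses it into rows of `T`.
/// The encoder (LEB128 or standard), optional compression, and optional length field are
/// all selected from `buffer_spec`. Returns one `Vec<T>` of rows per part file.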
pub fn load_component_data_with_serde<T>(
part_files: &[String],
buffer_spec: &BufferSpec,
path: &Path,
) -> Result<Vec<Vec<T>>>
where
T: serde::de::DeserializeOwned,
{
let data = match buffer_spec.uses_leb128() {
true => _load_data_with_serde::<T, UnsignedLEB128Encoder>(
part_files,
buffer_spec.uses_compression(),
buffer_spec.appends_length(),
path,
),
false => _load_data_with_serde::<T, StandardEncoder>(
part_files,
buffer_spec.uses_compression(),
buffer_spec.appends_length(),
path,
),
}?;
Ok(data)
}
fn _load_data_with_serde<T, E>(
part_files: &[String],
is_compressed: bool,
has_appended_length: bool,
path: &Path,
) -> Result<Vec<Vec<T>>>
where
E: Encoding,
T: serde::de::DeserializeOwned,
{
let mut data = Vec::new();
for part_name in part_files {
let part_data = {
let part_path = path.join(part_name);
let file = std::fs::read(&part_path)?;
if is_compressed {
parse_rows::<T, E>(&parser::load::compression::decompress_part_file(&file)?)?
} else {
let mut file: &[u8] = &file;
if has_appended_length {
file = &file[4..];
};
parse_rows::<T, E>(file)?
}
};
data.push(part_data);
}
Ok(data)
}
| 23.942857 | 93 | 0.551909 |
8f874c5a4cc5a8d0ecd2326fa1211d3f427b5326 | 4,196 | use super::*;
macro_rules! yield_from {
($x: expr) => { for e in $x { yield e } };
}
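/// Emits the instruction stream that sorts `stack`: each element is pushed onto an
/// auxiliary stack at its insertion point, always taking whichever rotation
/// direction (rotating both stacks together when possible) is cheapest, and the
/// auxiliary stack is finally rotated into position and pushed back in order.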
pub fn smart_insert<S: Stack<N>>(mut stack: S) -> impl Iterator<Item = Instruction> {
use self::Instruction::*;
IterGen(move || {
let mut side_stack = S::default();
while stack.len() > 0 {
let a_len = stack.len();
let b_len = side_stack.len();
let (rot_a, rot_b, _) = {
let neighboring_rotations = (-100..100).map(|delta| {
let x = stack.peek(delta);
let rot_a = if delta < 0 {
a_len - (delta % (a_len as isize)).abs() as usize - 1
} else {
delta as usize % a_len
};
let rot_b = match side_stack.insert_index(&x) {
Some(index) => index,
None => side_stack.maximum()
.map(|(_, idx)| idx)
.unwrap_or(0)
};
(rot_a, rot_b, x)
});
let best_rotation = neighboring_rotations
.min_by(|(rot_a1, rot_b1, x1), (rot_a2, rot_b2, x2)| {
use std::cmp::{min, Ordering::Equal};
let (rot_cost1, rrot_cost1) = rotation_costs(a_len, b_len, *rot_a1, *rot_b1);
let (rot_cost2, rrot_cost2) = rotation_costs(a_len, b_len, *rot_a2, *rot_b2);
let min_cost1 = min(rot_cost1, rrot_cost1);
let min_cost2 = min(rot_cost2, rrot_cost2);
match min_cost1.cmp(&min_cost2) {
Equal => x1.cmp(x2),
ord => ord
}
}).unwrap();
best_rotation
};
let (rot_cost, rrot_cost) = rotation_costs(a_len, b_len, rot_a, rot_b);
use std::cmp::min;
// Optimize output
match rot_cost <= rrot_cost {
true => { // rotate
let rot_both = min(rot_a, rot_b);
let rot_a_only = rot_a.saturating_sub(rot_both);
let rot_b_only = rot_b.saturating_sub(rot_both);
for _ in 0..rot_both { yield RotateBoth }
for _ in 0..rot_a_only { yield RotateA }
for _ in 0..rot_b_only { yield RotateB }
yield PushB
},
false => { // rrotate
let rrot_a = a_len - rot_a;
let rrot_b = b_len - rot_b;
let rot_both = min(rrot_a, rrot_b);
let rot_a_only = rrot_a.saturating_sub(rot_both);
let rot_b_only = rrot_b.saturating_sub(rot_both);
for _ in 0..rot_both { yield RRotateBoth }
for _ in 0..rot_a_only { yield RRotateA }
for _ in 0..rot_b_only { yield RRotateB }
yield PushB
}
}
stack.rotate_n(rot_a);
let x = stack.pop().unwrap();
side_stack.rotate_n(rot_b);
side_stack.push(x);
}
if let Some((_, min_idx)) = side_stack.maximum() {
let (instr, n) = shortest_rotation(&side_stack, min_idx);
yield_from!(repeat_n(instr, n));
}
for _ in 0..side_stack.len() { yield PushA }
})
}
fn shortest_rotation(stack: &impl Stack<N>, at: usize) -> (Instruction, usize) {
use std::cmp::Ordering::Greater;
let mid = stack.len() / 2;
match at.cmp(&mid) {
Greater => (Instruction::RRotateB, stack.len() - at),
_ => (Instruction::RotateB, at),
}
}
fn rotation_costs(a_len: usize, b_len: usize, rot_a: usize, rot_b: usize)
-> (usize, usize)
{
let rrot_a = a_len - rot_a;
let rrot_b = b_len - rot_b;
let rot_cost = (rot_a as isize - rot_b as isize).abs();
let rrot_cost = (rrot_a as isize - rrot_b as isize).abs();
(rot_cost as usize, rrot_cost as usize)
}
| 36.172414 | 101 | 0.465682 |
1606dc413fb56527d0d947acb192f896e9a73747 | 7,155 | //! Serde Integration for InfluxDB. Provides deserialization of query returns.
//!
//! When querying multiple series in the same query (e.g. with a regex query), it might be desirable to flat map
//! the resulting series into a single `Vec` like so. The example assumes, that there are weather readings in multiple
//! series named `weather_<city_name>` (e.g. `weather_berlin`, or `weather_london`). Since we're using a Regex query,
//! we don't actually know which series will be returned. To assign the city name to the series, we can use the series
//! `name`, which InfluxDB provides alongside query results.
//!
//! ```rust,no_run
//! use futures::prelude::*;
//! use influxdb::client::InfluxDbClient;
//! use influxdb::query::InfluxDbQuery;
//! use serde::Deserialize;
//!
//! #[derive(Deserialize)]
//! struct WeatherWithoutCityName {
//! temperature: i32,
//! }
//!
//! #[derive(Deserialize)]
//! struct Weather {
//! city_name: String,
//! weather: WeatherWithoutCityName,
//! }
//!
//! let mut rt = tokio::runtime::current_thread::Runtime::new().unwrap();
//! let client = InfluxDbClient::new("http://localhost:8086", "test");
//! let query = InfluxDbQuery::raw_read_query(
//! "SELECT temperature FROM /weather_[a-z]*$/ WHERE time > now() - 1m ORDER BY DESC",
//! );
//! let _result = rt
//! .block_on(client.json_query(query))
//! .map(|mut db_result| db_result.deserialize_next::<WeatherWithoutCityName>())
//! .map(|it| {
//! it.map(|series_vec| {
//! series_vec
//! .series
//! .into_iter()
//! .map(|mut city_series| {
//! let city_name =
//! city_series.name.split("_").collect::<Vec<&str>>().remove(2);
//! Weather {
//! weather: city_series.values.remove(0),
//! city_name: city_name.to_string(),
//! }
//! })
//! .collect::<Vec<Weather>>()
//! })
//! });
//! ```
use crate::client::InfluxDbClient;
use serde::de::DeserializeOwned;
use futures::{Future, Stream};
use reqwest::r#async::{Client, Decoder};
use reqwest::{StatusCode, Url};
use std::mem;
use serde::Deserialize;
use serde_json;
use crate::error::InfluxDbError;
use crate::query::read_query::InfluxDbReadQuery;
use crate::query::InfluxDbQuery;
use futures::future::Either;
#[derive(Deserialize)]
#[doc(hidden)]
struct _DatabaseError {
error: String,
}
#[derive(Deserialize, Debug)]
#[doc(hidden)]
pub struct DatabaseQueryResult {
pub results: Vec<serde_json::Value>,
}
impl DatabaseQueryResult {
pub fn deserialize_next<T: 'static>(
&mut self,
) -> impl Future<Item = InfluxDbReturn<T>, Error = InfluxDbError>
where
T: DeserializeOwned,
{
match serde_json::from_value::<InfluxDbReturn<T>>(self.results.remove(0)) {
Ok(item) => futures::future::result(Ok(item)),
Err(err) => futures::future::err(InfluxDbError::DeserializationError {
error: format!("could not deserialize: {}", err),
}),
}
}
}
#[derive(Deserialize, Debug)]
#[doc(hidden)]
pub struct InfluxDbReturn<T> {
pub series: Vec<InfluxDbSeries<T>>,
}
#[derive(Deserialize, Debug)]
/// Represents a returned series from InfluxDB
pub struct InfluxDbSeries<T> {
pub name: String,
pub values: Vec<T>,
}
impl InfluxDbClient {
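/// Sends the read query `q` to the database's `/query` endpoint and returns the raw
/// [`DatabaseQueryResult`] for later deserialization with `deserialize_next`.
/// Only `SELECT` and `SHOW` statements are accepted; anything else resolves to an
/// `InfluxDbError::InvalidQueryError`.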
pub fn json_query(
&self,
q: InfluxDbReadQuery,
) -> impl Future<Item = DatabaseQueryResult, Error = InfluxDbError> {
use futures::future;
let query = q.build().unwrap();
let basic_parameters: Vec<(String, String)> = self.into();
let client = {
let read_query = query.get();
let mut url = match Url::parse_with_params(
format!("{url}/query", url = self.database_url()).as_str(),
basic_parameters,
) {
Ok(url) => url,
Err(err) => {
let error = InfluxDbError::UrlConstructionError {
error: format!("{}", err),
};
return Either::B(future::err::<DatabaseQueryResult, InfluxDbError>(error));
}
};
url.query_pairs_mut().append_pair("q", &read_query.clone());
if read_query.contains("SELECT") || read_query.contains("SHOW") {
Client::new().get(url.as_str())
} else {
let error = InfluxDbError::InvalidQueryError {
error: String::from(
"Only SELECT and SHOW queries supported with JSON deserialization",
),
};
return Either::B(future::err::<DatabaseQueryResult, InfluxDbError>(error));
}
};
Either::A(
client
.send()
.map_err(|err| InfluxDbError::ConnectionError { error: err })
.and_then(
|res| -> future::FutureResult<reqwest::r#async::Response, InfluxDbError> {
match res.status() {
StatusCode::UNAUTHORIZED => {
futures::future::err(InfluxDbError::AuthorizationError)
}
StatusCode::FORBIDDEN => {
futures::future::err(InfluxDbError::AuthenticationError)
}
_ => futures::future::ok(res),
}
},
)
.and_then(|mut res| {
let body = mem::replace(res.body_mut(), Decoder::empty());
body.concat2().map_err(|err| InfluxDbError::ProtocolError {
error: format!("{}", err),
})
})
.and_then(|body| {
// Try parsing InfluxDBs { "error": "error message here" }
if let Ok(error) = serde_json::from_slice::<_DatabaseError>(&body) {
return futures::future::err(InfluxDbError::DatabaseError {
error: error.error.to_string(),
});
} else {
// Json has another structure, let's try actually parsing it to the type we're deserializing
let from_slice = serde_json::from_slice::<DatabaseQueryResult>(&body);
let deserialized = match from_slice {
Ok(deserialized) => deserialized,
Err(err) => {
return futures::future::err(InfluxDbError::DeserializationError {
error: format!("serde error: {}", err),
})
}
};
return futures::future::result(Ok(deserialized));
}
}),
)
}
}
| 36.136364 | 118 | 0.519776 |
e45dec890d079840e1430a19cb8c4aa971cc9beb | 2,863 | // Copyright 2021 Vector 35 Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use binaryninjacore_sys::BNFileAccessor;
use std::io::{Read, Seek, SeekFrom, Write};
use std::marker::PhantomData;
use std::slice;
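/// Adapts any `Read + Write + Seek` value into a [`BNFileAccessor`] whose C
/// callbacks forward length, read, and write requests back to the wrapped
/// Rust object.
///
/// A minimal usage sketch (the in-memory cursor is illustrative only):
///
/// ```ignore
/// let mut backing = std::io::Cursor::new(vec![0u8; 0x1000]);
/// let accessor = FileAccessor::new(&mut backing);
/// // `accessor.api_object` is the raw `BNFileAccessor` handed to core APIs.
/// ```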
pub struct FileAccessor<'a> {
pub(crate) api_object: BNFileAccessor,
_ref: PhantomData<&'a mut ()>,
}
impl<'a> FileAccessor<'a> {
pub fn new<F>(f: &'a mut F) -> Self
where
F: 'a + Read + Write + Seek + Sized,
{
use std::os::raw::c_void;
extern "C" fn cb_get_length<F>(ctxt: *mut c_void) -> u64
where
F: Read + Write + Seek + Sized,
{
let f = unsafe { &mut *(ctxt as *mut F) };
match f.seek(SeekFrom::End(0)) {
Ok(len) => len,
Err(_) => 0,
}
}
extern "C" fn cb_read<F>(
ctxt: *mut c_void,
dest: *mut c_void,
offset: u64,
len: usize,
) -> usize
where
F: Read + Write + Seek + Sized,
{
let f = unsafe { &mut *(ctxt as *mut F) };
let dest = unsafe { slice::from_raw_parts_mut(dest as *mut u8, len) };
            if f.seek(SeekFrom::Start(offset)).is_err() {
debug!("Failed to seek to offset {:x}", offset);
return 0;
}
match f.read(dest) {
Ok(len) => len,
Err(_) => 0,
}
}
extern "C" fn cb_write<F>(
ctxt: *mut c_void,
offset: u64,
src: *const c_void,
len: usize,
) -> usize
where
F: Read + Write + Seek + Sized,
{
let f = unsafe { &mut *(ctxt as *mut F) };
let src = unsafe { slice::from_raw_parts(src as *const u8, len) };
            if f.seek(SeekFrom::Start(offset)).is_err() {
return 0;
}
match f.write(src) {
Ok(len) => len,
Err(_) => 0,
}
}
Self {
api_object: BNFileAccessor {
context: f as *mut F as *mut _,
getLength: Some(cb_get_length::<F>),
read: Some(cb_read::<F>),
write: Some(cb_write::<F>),
},
_ref: PhantomData,
}
}
}
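// Usage sketch: any value implementing Read + Write + Seek can back the
// accessor, e.g. an in-memory cursor (the names below are illustrative only).
//
//     let mut backing = std::io::Cursor::new(vec![0u8; 4096]);
//     let accessor = FileAccessor::new(&mut backing);
//     // accessor.api_object is the BNFileAccessor handed to the core API.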
| 28.63 | 82 | 0.495983 |
ab38cd4a550d2c8f9ddc84a2f5ddab1fe529a63f | 3,166 | #![cfg_attr(
not(any(feature = "vulkan", feature = "metal",)),
allow(dead_code, unused_extern_crates, unused_imports)
)]
use std::sync::{Arc, RwLock};
use env_logger;
#[cfg(not(any(feature = "vulkan", feature = "metal",)))]
use gfx_backend_empty as back;
#[cfg(feature = "metal")]
use gfx_backend_metal as back;
#[cfg(feature = "vulkan")]
use gfx_backend_vulkan as back;
use winit;
use crate::backend_state::create_backend;
use crate::renderer_state::{RendererState, UserState};
use crate::screen_size_state::ScreenSizeState;
use spectrum_audio::shared_data::SharedData;
fn init() -> (
winit::event_loop::EventLoop<()>,
RendererState<back::Backend>,
) {
env_logger::init();
let event_loop = winit::event_loop::EventLoop::new();
let min_screen_state = ScreenSizeState::new_default_min(1.0);
let start_screen_state = ScreenSizeState::new_default_start(1.0);
let wb = winit::window::WindowBuilder::new()
.with_min_inner_size(min_screen_state.logical_size())
.with_inner_size(start_screen_state.logical_size())
.with_title("colour-uniform".to_string());
let backend = create_backend(wb, &event_loop);
let renderer_state = unsafe { RendererState::new(backend) };
(event_loop, renderer_state)
}
#[cfg(any(feature = "vulkan", feature = "metal",))]
pub fn run(shared_data: Arc<RwLock<SharedData>>) {
let (event_loop, mut renderer_state) = init();
let mut user_state = UserState::new(shared_data);
renderer_state.draw(&user_state);
event_loop.run(move |event, _, control_flow| {
*control_flow = winit::event_loop::ControlFlow::Poll;
match event {
winit::event::Event::WindowEvent { event, .. } => match event {
winit::event::WindowEvent::KeyboardInput {
input:
winit::event::KeyboardInput {
virtual_keycode: Some(winit::event::VirtualKeyCode::Escape),
..
},
..
}
| winit::event::WindowEvent::CloseRequested => {
*control_flow = winit::event_loop::ControlFlow::Exit
}
winit::event::WindowEvent::Resized(size) => {
renderer_state.resize(size);
}
winit::event::WindowEvent::ScaleFactorChanged { scale_factor, .. } => {
renderer_state.change_dpi(scale_factor);
}
winit::event::WindowEvent::CursorMoved { position, .. } => {
user_state.cursor_pos = position;
}
_ => (),
},
winit::event::Event::RedrawRequested(_) => {
renderer_state.draw(&user_state);
}
winit::event::Event::MainEventsCleared => {
renderer_state.backend.window.request_redraw();
}
_ => (),
};
});
}
#[cfg(not(any(feature = "vulkan", feature = "metal",)))]
pub fn run() {
println!("You need to enable the native API feature (vulkan/metal) in order to test the LL");
}
| 35.573034 | 97 | 0.583386 |
ebc688ec55ca726015053f1a58e470c77c08de46 | 3,242 | //! Pack card definitions
use super::*;
use crate::math::SafeMath;
use borsh::{BorshDeserialize, BorshSerialize};
use solana_program::{
msg,
program_error::ProgramError,
program_pack::{IsInitialized, Pack, Sealed},
pubkey::Pubkey,
};
/// Pack card
#[repr(C)]
#[derive(Debug, Clone, PartialEq, BorshSerialize, BorshDeserialize, BorshSchema, Default)]
pub struct PackCard {
/// Account type - PackCard
pub account_type: AccountType,
/// Pack set
pub pack_set: Pubkey,
/// Master edition account
pub master: Pubkey,
/// Metadata account
pub metadata: Pubkey,
/// Program token account which holds MasterEdition token
pub token_account: Pubkey,
    /// How many instances (editions) of this card exist in this pack
pub max_supply: u32,
/// Fixed probability, should be filled if PackSet distribution_type is "fixed"
pub weight: u16,
}
impl PackCard {
/// Prefix used to generate account
pub const PREFIX: &'static str = "card";
/// Initialize a PackCard
pub fn init(&mut self, params: InitPackCardParams) {
self.account_type = AccountType::PackCard;
self.pack_set = params.pack_set;
self.master = params.master;
self.metadata = params.metadata;
self.token_account = params.token_account;
self.max_supply = params.max_supply;
self.weight = params.weight;
}
/// Decrement supply value
pub fn decrement_supply(&mut self) -> Result<(), ProgramError> {
self.max_supply = self.max_supply.error_decrement()?;
Ok(())
}
}
/// Initialize a PackCard params
pub struct InitPackCardParams {
/// Pack set
pub pack_set: Pubkey,
/// Master edition account
pub master: Pubkey,
/// Metadata account
pub metadata: Pubkey,
/// Program token account which holds MasterEdition token
pub token_account: Pubkey,
/// How many instances of this card will exists in a packs
pub max_supply: u32,
/// Fixed probability, should be filled if PackSet distribution_type is "fixed"
pub weight: u16,
}
impl Sealed for PackCard {}
impl Pack for PackCard {
const LEN: usize = 145;
fn pack_into_slice(&self, dst: &mut [u8]) {
let mut slice = dst;
self.serialize(&mut slice).unwrap()
}
fn unpack_from_slice(src: &[u8]) -> Result<Self, ProgramError> {
let mut src_mut = src;
Self::deserialize(&mut src_mut).map_err(|_| {
msg!("Failed to deserialize");
ProgramError::InvalidAccountData
})
}
}
impl IsInitialized for PackCard {
fn is_initialized(&self) -> bool {
self.account_type != AccountType::Uninitialized
&& self.account_type == AccountType::PackCard
}
}
impl MasterEditionHolder for PackCard {
fn get_pack_set(&self) -> Pubkey {
self.pack_set
}
fn get_master_edition(&self) -> Pubkey {
self.master
}
fn get_master_metadata(&self) -> Pubkey {
self.metadata
}
fn get_token_account(&self) -> Pubkey {
self.token_account
}
fn decrement_supply(&mut self) -> Result<(), ProgramError> {
self.max_supply = self.max_supply.error_decrement()?;
Ok(())
}
}
| 27.243697 | 90 | 0.645589 |
56f5144db571caac40dfd56ff3dd085d1e7bebc1 | 5,598 | use actix_web::HttpMessage;
use diesel;
use diesel::prelude::*;
use serde_json;
use abac::prelude::*;
use abac::schema::*;
use iam::models::{Account, Namespace};
use shared::db::{create_account, create_namespace, AccountKind, NamespaceKind};
use shared::{self, IAM_NAMESPACE_ID};
#[must_use]
fn before_each(conn: &PgConnection) -> (Account, Namespace) {
conn.begin_test_transaction()
.expect("Failed to begin transaction");
let iam_account = create_account(conn, AccountKind::Iam);
let iam_namespace = create_namespace(conn, NamespaceKind::Iam(iam_account.id));
diesel::insert_into(abac_subject::table)
.values(NewAbacSubject {
inbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "uri".to_owned(),
value: "account/25a0c367-756a-42e1-ac5a-e7a2b6b64420".to_owned(),
},
outbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "role".to_owned(),
value: "client".to_owned(),
},
})
.execute(conn)
.unwrap();
diesel::insert_into(abac_object::table)
.values(NewAbacObject {
inbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "uri".to_owned(),
value: "room/1".to_owned(),
},
outbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "type".to_owned(),
value: "room".to_owned(),
},
})
.execute(conn)
.unwrap();
diesel::insert_into(abac_action::table)
.values(vec![
NewAbacAction {
inbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "operation".to_owned(),
value: "create".to_owned(),
},
outbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "operation".to_owned(),
value: "any".to_owned(),
},
},
NewAbacAction {
inbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "operation".to_owned(),
value: "read".to_owned(),
},
outbound: AbacAttribute {
namespace_id: iam_namespace.id,
key: "operation".to_owned(),
value: "any".to_owned(),
},
},
])
.execute(conn)
.unwrap();
(iam_account, iam_namespace)
}
#[test]
fn with_permission() {
let shared::Server { mut srv, pool } = shared::build_server();
{
let conn = pool.get().expect("Failed to get connection from pool");
let (_account, namespace) = before_each(&conn);
diesel::insert_into(abac_policy::table)
.values(NewAbacPolicy {
subject: vec![AbacAttribute {
namespace_id: namespace.id,
key: "role".to_owned(),
value: "client".to_owned(),
}],
object: vec![AbacAttribute {
namespace_id: namespace.id,
key: "type".to_owned(),
value: "room".to_owned(),
}],
action: vec![AbacAttribute {
namespace_id: namespace.id,
key: "operation".to_owned(),
value: "any".to_owned(),
}],
namespace_id: namespace.id,
})
.execute(&conn)
.unwrap();
}
let req =
shared::build_anonymous_request(&srv, serde_json::to_string(&build_request()).unwrap());
let resp = srv.execute(req.send()).unwrap();
let body = srv.execute(resp.body()).unwrap();
let resp_json = r#"{
"jsonrpc": "2.0",
"result": true,
"id": "qwerty"
}"#;
assert_eq!(body, shared::strip_json(resp_json));
}
#[test]
fn without_permission() {
let shared::Server { mut srv, pool } = shared::build_server();
{
let conn = pool.get().expect("Failed to get connection from pool");
let _ = before_each(&conn);
}
let req =
shared::build_anonymous_request(&srv, serde_json::to_string(&build_request()).unwrap());
let resp = srv.execute(req.send()).unwrap();
let body = srv.execute(resp.body()).unwrap();
let resp_json = r#"{
"jsonrpc": "2.0",
"result": false,
"id": "qwerty"
}"#;
assert_eq!(body, shared::strip_json(resp_json));
}
fn build_request() -> serde_json::Value {
json!({
"jsonrpc": "2.0",
"method": "authorize",
"params": [{
"namespace_ids": [*IAM_NAMESPACE_ID],
"subject": [
{
"namespace_id": *IAM_NAMESPACE_ID,
"key": "uri",
"value": "account/25a0c367-756a-42e1-ac5a-e7a2b6b64420"
}
],
"object": [
{
"namespace_id": *IAM_NAMESPACE_ID,
"key": "uri",
"value": "room/1"
}
],
"action": [
{
"namespace_id": *IAM_NAMESPACE_ID,
"key": "operation",
"value": "read"
}
],
}],
"id": "qwerty",
})
}
| 30.928177 | 96 | 0.483387 |
b9442b9cf443c803bcea00a67e27c6bb780ce7fa | 7,135 | #![no_std]
#![no_main]
use core::ops::Range;
use serde::{Deserialize, Serialize};
use icecap_start_generic::declare_generic_main;
use icecap_core::config::*;
use icecap_core::prelude::*;
use icecap_core::ring_buffer::{BufferedRingBuffer, RingBuffer};
use virtio_drivers::VirtIOConsole;
use virtio_drivers::VirtIOHeader;
use virtio_drivers::DeviceType;
declare_generic_main!(main);
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Config {
virtio_region: Range<usize>,
virtio_irq_handlers: Vec<IRQHandler>,
virtio_pool_region: Range<usize>,
virtio_pool_pages: Vec<SmallPage>,
event_nfn: Notification,
client_ring_buffer: UnmanagedRingBufferConfig,
badges: Badges,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
struct Badges {
irq: Badge,
client: Badge,
}
// Pool to manage pages which are accessible by both the guest and the host
struct VirtioPool {
pool: &'static mut [u8],
paddr: usize,
mark: usize,
}
impl VirtioPool {
fn new(vaddr: usize, paddr: usize, len: usize) -> VirtioPool {
VirtioPool {
pool: unsafe {
core::slice::from_raw_parts_mut(vaddr as *mut u8, len)
},
            paddr,
mark: 0,
}
}
fn alloc<'a>(&'a mut self, size: usize) -> Fallible<&'a mut [u8]> {
        let count = (size + virtio_drivers::PAGE_SIZE - 1) / virtio_drivers::PAGE_SIZE;
let ppages = unsafe { virtio_dma_alloc(count) };
if ppages == 0 {
bail!("virtio_pool: out of pages");
}
Ok(unsafe {
core::slice::from_raw_parts_mut(
virtio_phys_to_virt(ppages) as *mut u8,
count * virtio_drivers::PAGE_SIZE,
)
})
}
#[allow(unused)]
fn dealloc(&mut self, pages: &mut [u8]) {
unsafe {
virtio_dma_dealloc(
virtio_virt_to_phys(pages.as_ptr() as usize),
pages.len() / virtio_drivers::PAGE_SIZE
)
};
}
}
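// Note: VirtioPool is a simple bump allocator over one physically contiguous
// region shared with the device. alloc() only advances `mark` and dealloc()
// never reclaims, which is acceptable here because the server allocates its
// few DMA buffers once at startup. The phys/virt helpers below translate by a
// constant linear offset between `paddr` and the pool's base address.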
static mut VIRTIO_POOL: Option<VirtioPool> = None;
// virtio pool page mappings for virtio-drivers
#[no_mangle]
pub unsafe extern "C" fn virtio_dma_alloc(pages: usize) -> usize {
debug_println!("virtio_pool: allocating {}x{} pages", pages, virtio_drivers::PAGE_SIZE);
let pool = VIRTIO_POOL.as_mut().unwrap();
    if pool.mark + pages * virtio_drivers::PAGE_SIZE > pool.pool.len() {
        debug_println!("virtio_pool: out of pages ({}/{})!",
            pool.mark / virtio_drivers::PAGE_SIZE,
            pool.pool.len() / virtio_drivers::PAGE_SIZE
        );
return 0;
}
let old_mark = pool.mark;
    pool.mark += pages * virtio_drivers::PAGE_SIZE;
let p = &mut pool.pool[old_mark] as *mut _ as usize;
debug_println!("virtio_pool: allocating {}x{} pages -> {:012x}", pages, virtio_drivers::PAGE_SIZE, virtio_virt_to_phys(p as usize));
virtio_virt_to_phys(p as usize)
}
#[no_mangle]
pub unsafe extern "C" fn virtio_dma_dealloc(paddr: usize, _pages: usize) -> i32 {
debug_println!("virtio_pool: deallocating {:012x}", paddr);
let pool = VIRTIO_POOL.as_mut().unwrap();
debug_assert!(pool.pool.as_ptr_range().contains(&(virtio_phys_to_virt(paddr) as *const u8)));
0
}
#[no_mangle]
pub unsafe extern "C" fn virtio_phys_to_virt(paddr: usize) -> usize {
let pool = VIRTIO_POOL.as_mut().unwrap();
debug_assert!(paddr >= pool.paddr && paddr < pool.paddr + pool.pool.len(),
"virtio_pool: invalid paddr {:012x}", paddr);
paddr - pool.paddr + (pool.pool.as_ptr() as usize)
}
#[no_mangle]
pub unsafe extern "C" fn virtio_virt_to_phys(vaddr: usize) -> usize {
let pool = VIRTIO_POOL.as_mut().unwrap();
debug_assert!(vaddr >= pool.pool.as_ptr() as usize && vaddr < pool.pool.as_ptr() as usize + pool.pool.len(),
"virtio_pool: invalid vaddr {:012x}", vaddr);
vaddr - (pool.pool.as_ptr() as usize) + pool.paddr
}
// entry point
fn main(config: Config) -> Fallible<()> {
// setup the virtio pool
unsafe {
VIRTIO_POOL = Some(VirtioPool::new(
config.virtio_pool_region.start,
config.virtio_pool_pages[0].paddr()?,
config.virtio_pool_region.end - config.virtio_pool_region.start,
));
}
// find a virtio driver that reports as a console device
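    // Each candidate slot is 0x200 bytes apart (hence the step_by(512)); offset
    // 0x008 inside a virtio-mmio slot is the DeviceID register, which is what is
    // compared against DeviceType::Console below.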
let (virtio_i, virtio_mmio, virtio_irq_handler) = match
config.virtio_region.clone()
.step_by(512)
.zip(&config.virtio_irq_handlers)
.enumerate()
.find(|(_, (mmio, _))| {
let id = unsafe { core::ptr::read_volatile((mmio+8) as *const u32) };
id == DeviceType::Console as u32
})
{
Some((i, (mmio, irq_handler))) => (i, mmio, irq_handler),
None => {
bail!("virtio-console-server: could not find a virtio-console");
}
};
debug_println!("found virtio-console at virtio{}@{:012x}", virtio_i, virtio_mmio);
let header = unsafe { &mut *(virtio_mmio as *mut VirtIOHeader) };
let mut console = VirtIOConsole::new(header)?;
// we start off with all irqs registered, but we only need to listen to one,
// disable the others
for irq_handler in config.virtio_irq_handlers.iter() {
if irq_handler.raw() != virtio_irq_handler.raw() {
irq_handler.clear()?;
}
}
// begin processing requests
let mut rb = BufferedRingBuffer::new(RingBuffer::unmanaged_from_config(
&config.client_ring_buffer,
));
let send_page = unsafe { VIRTIO_POOL.as_mut() }.unwrap().alloc(virtio_drivers::PAGE_SIZE)?;
    // we may have already received data to send, but lost the notification
// during initialization, so there may already be data in our ring buffer
// we need to write out
rb.rx_callback();
rb.tx_callback();
if let Some(chars) = rb.rx() {
for chunk in chars.chunks(virtio_drivers::PAGE_SIZE) {
send_page[..chunk.len()].copy_from_slice(chunk);
console.send_slice(&send_page[..chunk.len()])?;
}
}
rb.ring_buffer().enable_notify_read();
rb.ring_buffer().enable_notify_write();
loop {
let badge = config.event_nfn.wait();
if badge & config.badges.irq != 0 {
loop {
console.ack_interrupt()?;
let mut buffer = [0; 512];
let recved = console.recv_slice(&mut buffer)?;
if recved == 0 {
break;
}
rb.tx(&buffer[..recved]);
}
virtio_irq_handler.ack()?;
}
if badge & config.badges.client != 0 {
rb.rx_callback();
rb.tx_callback();
if let Some(chars) = rb.rx() {
for chunk in chars.chunks(virtio_drivers::PAGE_SIZE) {
send_page[..chunk.len()].copy_from_slice(chunk);
console.send_slice(&send_page[..chunk.len()])?;
}
}
rb.ring_buffer().enable_notify_read();
rb.ring_buffer().enable_notify_write();
}
}
}
| 32.285068 | 136 | 0.604205 |
1d780c77a47fe66e41845b649aedc1437930478b | 4,716 | // Copyright 2016 Mozilla Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![recursion_limit="128"]
extern crate app_dirs;
extern crate base64;
extern crate bincode;
extern crate byteorder;
#[cfg(feature = "chrono")]
extern crate chrono;
#[macro_use]
extern crate clap;
#[cfg(feature = "rust-crypto")]
extern crate crypto;
#[cfg(unix)]
extern crate daemonize;
extern crate env_logger;
#[macro_use]
extern crate error_chain;
extern crate filetime;
#[macro_use]
extern crate futures;
extern crate futures_cpupool;
#[cfg(feature = "hyper")]
extern crate hyper;
#[cfg(feature = "hyper-tls")]
extern crate hyper_tls;
#[cfg(feature = "jsonwebtoken")]
extern crate jsonwebtoken as jwt;
#[cfg(windows)]
extern crate kernel32;
extern crate local_encoding;
#[macro_use]
extern crate log;
extern crate lru_disk_cache;
extern crate fern;
#[cfg(test)]
extern crate itertools;
extern crate libc;
#[cfg(windows)]
extern crate mio_named_pipes;
extern crate native_tls;
extern crate num_cpus;
extern crate number_prefix;
#[cfg(feature = "openssl")]
extern crate openssl;
extern crate ring;
#[cfg(feature = "redis")]
extern crate redis;
extern crate regex;
extern crate retry;
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate tempdir;
extern crate tempfile;
extern crate time;
extern crate tokio_core;
extern crate tokio_io;
extern crate tokio_process;
extern crate tokio_proto;
extern crate tokio_service;
extern crate tokio_serde_bincode;
#[cfg(feature = "gcs")]
extern crate url;
extern crate uuid;
#[cfg(windows)]
extern crate winapi;
extern crate which;
extern crate zip;
// To get macros in scope, this has to be first.
#[cfg(test)]
#[macro_use]
mod test;
#[macro_use]
mod errors;
mod cache;
mod client;
mod cmdline;
mod commands;
mod compiler;
mod jobserver;
mod mock_command;
mod protocol;
mod server;
#[cfg(feature = "simple-s3")]
mod simples3;
mod util;
use std::env;
use std::io::Write;
fn main() {
init_logging();
std::process::exit(match cmdline::parse() {
Ok(cmd) => {
match commands::run_command(cmd) {
Ok(s) => s,
Err(e) => {
let stderr = &mut std::io::stderr();
writeln!(stderr, "error: {}", e).unwrap();
for e in e.iter().skip(1) {
writeln!(stderr, "caused by: {}", e).unwrap();
}
2
}
}
}
Err(e) => {
println!("sccache: {}", e);
cmdline::get_app().print_help().unwrap();
            println!();
1
}
});
}
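// Logging is configured from one of two environment variables: if RUST_LOG is
// set, plain env_logger is used; otherwise SCCACHE_LOG_LEVEL selects a level
// for a fern logger that writes to both stdout and sccache.log.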
fn init_logging() {
match if env::var("RUST_LOG").is_ok() {
env_logger::init()
.map_err(|e| format!("{:?}", e))
} else {
match env::var("SCCACHE_LOG_LEVEL") {
Ok(log_level) => {
let log_level = match &*log_level.to_lowercase() {
"off" => log::LogLevelFilter::Off,
"trace" => log::LogLevelFilter::Trace,
"debug" => log::LogLevelFilter::Debug,
"info" => log::LogLevelFilter::Info,
"warn" => log::LogLevelFilter::Warn,
"error" => log::LogLevelFilter::Error,
_ => panic!("Invalid log level {}", log_level),
};
let logger_config = fern::DispatchConfig {
format: Box::new(|msg: &str, level: &log::LogLevel, _location: &log::LogLocation| {
format!("[{}][{}] {}", time::now().strftime("%Y-%m-%d][%H:%M:%S").unwrap(), level, msg)
}),
//TODO: only the server process should output to the log file.
output: vec![fern::OutputConfig::stdout(), fern::OutputConfig::file("sccache.log")],
level: log::LogLevelFilter::Trace,
};
fern::init_global_logger(logger_config, log_level)
.map_err(|e| format!("{:?}", e))
},
Err(_) => Ok(()),
}
} {
Ok(_) => (),
        Err(e) => panic!("Failed to initialize logging: {}", e),
}
}
| 27.905325 | 111 | 0.592875 |
2628a99a53a34366dd35af511a691da123e0e5f0 | 6,497 | use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use crate::{SendgridError, SendgridResult};
macro_rules! add_field {
// Create a setter that appends.
(
$(#[$outer:meta])*
$method:ident << $field:ident: $ty:ty
) => {
$(#[$outer])*
pub fn $method(mut self, data: $ty) -> Mail<'a> {
self.$field.push(data);
self
}
};
// Create a setter that stores.
(
$(#[$outer:meta])*
$method:ident = $field:ident: $ty:ty
) => {
$(#[$outer])*
pub fn $method(mut self, data: $ty) -> Mail<'a> {
self.$field = data;
self
}
};
// Create a setter that inserts into a map.
(
$(#[$outer:meta])*
$method:ident <- $field:ident: $ty:ty
) => {
$(#[$outer])*
pub fn $method(mut self, id: String, data: $ty) -> Mail<'a> {
self.$field.insert(id, data);
self
}
};
}
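// Illustrative expansion: the "append" arm of the macro, e.g.
// `add_field!(add_cc << cc: &'a str)`, generates roughly
//
//     pub fn add_cc(mut self, data: &'a str) -> Mail<'a> {
//         self.cc.push(data);
//         self
//     }
//
// while the `=` arm assigns the field and the `<-` arm inserts `(id, data)`
// into a map-backed field.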
/// A destination is a combination of an email address and a name to whom emails can be sent.
#[derive(Debug)]
pub struct Destination<'a> {
/// The email address to which the email will be sent.
pub address: &'a str,
/// The display name of the recipient.
pub name: &'a str,
}
impl<'a> From<(&'a str, &'a str)> for Destination<'a> {
fn from((address, name): (&'a str, &'a str)) -> Self {
Self {
address,
name,
}
}
}
/// This is a representation of a valid SendGrid message. It has support for
/// all of the fields in the V2 API.
#[derive(Debug, Default)]
pub struct Mail<'a> {
/// The list of people to whom the email will be sent.
pub to: Vec<Destination<'a>>,
/// The list of people that are CC'd in this email.
pub cc: Vec<&'a str>,
/// The list of people that are BCC'd in this email.
pub bcc: Vec<&'a str>,
/// The email address that will be used as sender.
pub from: &'a str,
/// The subject field of the email.
pub subject: &'a str,
/// When the client is sufficiently modern (this should almost always be the case), the email is
/// displayed as HTML.
pub html: &'a str,
/// This is used as a fallback when either the client is too old or the HTML field was not
/// provided.
pub text: &'a str,
/// This is the name that will be used as sender.
pub from_name: &'a str,
/// This is the email address that is used as a reply to field.
pub reply_to: &'a str,
/// The date added to the header of this email. For example `Thu, 21 Dec 2000 16:01:07 +0200`.
pub date: &'a str,
/// The attachments of this email, smaller than 7MB.
pub attachments: HashMap<String, String>,
/// Content IDs of the files to be used as inline images. Content IDs should match the content
/// IDS used in the HTML markup.
pub content: HashMap<String, &'a str>,
/// A collection of key/value pairs in JSON format. This is specifically for non-SendGrid custom
/// extension headers. Each key represents a header name and the value the header value.
///
/// ### Example
/// ```json
/// {"X-Accept-Language": "en", "X-Mailer": "MyApp"}
/// ```
pub headers: HashMap<String, &'a str>,
/// The `X-SMTPAPI` header that is used.
pub x_smtpapi: &'a str,
}
impl<'a> Mail<'a> {
/// Returns a new Mail struct to send with a client. All of the fields are
/// initially empty.
pub fn new() -> Mail<'a> {
Mail::default()
}
add_field!(
/// Adds a CC recipient to the Mail struct.
add_cc << cc: &'a str
);
add_field!(
/// Adds a to recipient to the Mail struct.
add_to << to: Destination<'a>
);
add_field!(
/// Set the from address for the Mail struct. This can be changed, but there
/// is only one from address per message.
add_from = from: &'a str
);
add_field!(
/// Set the subject of the message.
add_subject = subject: &'a str
);
add_field!(
/// This function sets the HTML content for the message.
add_html = html: &'a str
);
add_field!(
/// Set the text content of the message.
add_text = text: &'a str
);
add_field!(
/// Add a BCC address to the message.
add_bcc << bcc: &'a str
);
add_field!(
/// Set the from name for the message.
add_from_name = from_name: &'a str
);
add_field!(
/// Set the reply to address for the message.
add_reply_to = reply_to: &'a str
);
    // TODO(richo) Should this be a chrono::Utc ?
add_field!(
/// Set the date for the message. This must be a valid RFC 822 timestamp.
add_date = date: &'a str
);
/// Convenience method when using Mail as a builder.
pub fn build(self) -> Mail<'a> {
self
}
/// Add an attachment for the message. You can pass the name of a file as a
/// path on the file system.
///
/// # Examples
///
/// ```ignore
/// let message = Mail::new()
/// .add_attachment("/path/to/file/contents.txt");
/// ```
pub fn add_attachment<P: AsRef<Path>>(mut self, path: P) -> SendgridResult<Mail<'a>> {
let mut file = File::open(&path)?;
let mut data = String::new();
file.read_to_string(&mut data)?;
if let Some(name) = path.as_ref().to_str() {
self.attachments.insert(String::from(name), data);
} else {
return Err(SendgridError::InvalidFilename);
}
Ok(self)
}
add_field!(
/// Add content for inline images in the message.
add_content <- content: &'a str
);
add_field!(
/// Add a custom header for the message. These are usually prefixed with
/// 'X' or 'x' per the RFC specifications.
add_header <- headers: &'a str
);
/// Used internally for string encoding. Not needed for message building.
pub(crate) fn make_header_string(&mut self) -> SendgridResult<String> {
let string = serde_json::to_string(&self.headers)?;
Ok(string)
}
add_field!(
/// Add an X-SMTPAPI string to the message. This can be done by using the `serde_json` crate
/// to JSON encode a map or custom struct. Alternatively a regular `String` type can be
/// escaped and used.
add_x_smtpapi = x_smtpapi: &'a str
);
}
| 29.940092 | 100 | 0.567801 |
569afe653394e29b4308be6169b3e918036c3449 | 3,702 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::{balance_ap, encode_mint_transaction, encode_transfer_transaction, seqnum_ap, MockVM};
use config::config::VMConfig;
use failure::Result;
use state_view::StateView;
use types::{
access_path::AccessPath,
account_address::{AccountAddress, ADDRESS_LENGTH},
write_set::WriteOp,
};
use vm_runtime::VMExecutor;
fn gen_address(index: u8) -> AccountAddress {
AccountAddress::new([index; ADDRESS_LENGTH])
}
struct MockStateView;
impl StateView for MockStateView {
fn get(&self, _access_path: &AccessPath) -> Result<Option<Vec<u8>>> {
Ok(None)
}
fn multi_get(&self, _access_paths: &[AccessPath]) -> Result<Vec<Option<Vec<u8>>>> {
unimplemented!();
}
fn is_genesis(&self) -> bool {
false
}
}
#[test]
fn test_mock_vm_different_senders() {
let amount = 100;
let mut txns = vec![];
for i in 0..10 {
txns.push(encode_mint_transaction(gen_address(i), amount));
}
let outputs = MockVM::execute_block(
txns.clone(),
&VMConfig::empty_whitelist_FOR_TESTING(),
&MockStateView,
);
for (output, txn) in itertools::zip_eq(outputs.iter(), txns.iter()) {
let sender = txn.sender();
assert_eq!(
output.write_set().iter().cloned().collect::<Vec<_>>(),
vec![
(
balance_ap(sender),
WriteOp::Value(amount.to_le_bytes().to_vec())
),
(
seqnum_ap(sender),
WriteOp::Value(1u64.to_le_bytes().to_vec())
),
]
);
}
}
#[test]
fn test_mock_vm_same_sender() {
let amount = 100;
let sender = gen_address(1);
let mut txns = vec![];
for _i in 0..10 {
txns.push(encode_mint_transaction(sender, amount));
}
let outputs = MockVM::execute_block(
txns,
&VMConfig::empty_whitelist_FOR_TESTING(),
&MockStateView,
);
for (i, output) in outputs.iter().enumerate() {
assert_eq!(
output.write_set().iter().cloned().collect::<Vec<_>>(),
vec![
(
balance_ap(sender),
WriteOp::Value((amount * (i as u64 + 1)).to_le_bytes().to_vec())
),
(
seqnum_ap(sender),
WriteOp::Value((i as u64 + 1).to_le_bytes().to_vec())
),
]
);
}
}
#[test]
fn test_mock_vm_payment() {
let mut txns = vec![];
txns.push(encode_mint_transaction(gen_address(0), 100));
txns.push(encode_mint_transaction(gen_address(1), 100));
txns.push(encode_transfer_transaction(
gen_address(0),
gen_address(1),
50,
));
let output = MockVM::execute_block(
txns,
&VMConfig::empty_whitelist_FOR_TESTING(),
&MockStateView,
);
let mut output_iter = output.iter();
output_iter.next();
output_iter.next();
assert_eq!(
output_iter
.next()
.unwrap()
.write_set()
.iter()
.cloned()
.collect::<Vec<_>>(),
vec![
(
balance_ap(gen_address(0)),
WriteOp::Value(50u64.to_le_bytes().to_vec())
),
(
seqnum_ap(gen_address(0)),
WriteOp::Value(2u64.to_le_bytes().to_vec())
),
(
balance_ap(gen_address(1)),
WriteOp::Value(150u64.to_le_bytes().to_vec())
),
]
);
}
| 25.888112 | 97 | 0.529984 |
14d9a06ef625ef6dec2c12922b56b4d07e315e5b | 5,528 | use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
#[test]
fn vmovlpd_1() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(Direct(XMM3)),
operand2: Some(Direct(XMM2)),
operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)),
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 233, 18, 25],
OperandSize::Dword,
)
}
#[test]
fn vmovlpd_2() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(Direct(XMM3)),
operand2: Some(Direct(XMM3)),
operand3: Some(IndirectScaledDisplaced(
RSI,
Eight,
1014789822,
Some(OperandSize::Qword),
None,
)),
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 225, 18, 28, 245, 190, 118, 124, 60],
OperandSize::Qword,
)
}
#[test]
fn vmovlpd_3() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(Direct(XMM2)),
operand2: Some(Direct(XMM3)),
operand3: Some(IndirectScaledDisplaced(
EAX,
Four,
1476870238,
Some(OperandSize::Qword),
None,
)),
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 225, 18, 20, 133, 94, 64, 7, 88],
OperandSize::Dword,
)
}
#[test]
fn vmovlpd_4() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(Direct(XMM15)),
operand2: Some(Direct(XMM15)),
operand3: Some(IndirectScaledDisplaced(
RCX,
Eight,
1814399509,
Some(OperandSize::Qword),
None,
)),
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 1, 18, 60, 205, 21, 138, 37, 108],
OperandSize::Qword,
)
}
#[test]
fn vmovlpd_5() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(IndirectScaledIndexedDisplaced(
EBX,
ESI,
Two,
1618573408,
Some(OperandSize::Qword),
None,
)),
operand2: Some(Direct(XMM4)),
operand3: None,
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 249, 19, 164, 115, 96, 120, 121, 96],
OperandSize::Dword,
)
}
#[test]
fn vmovlpd_6() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(IndirectScaledIndexed(
RAX,
RBX,
Eight,
Some(OperandSize::Qword),
None,
)),
operand2: Some(Direct(XMM3)),
operand3: None,
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 249, 19, 28, 216],
OperandSize::Qword,
)
}
#[test]
fn vmovlpd_7() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(IndirectScaledIndexedDisplaced(
ECX,
EDI,
Eight,
674533733,
Some(OperandSize::Qword),
None,
)),
operand2: Some(Direct(XMM7)),
operand3: None,
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[197, 249, 19, 188, 249, 101, 145, 52, 40],
OperandSize::Dword,
)
}
#[test]
fn vmovlpd_8() {
run_test(
&Instruction {
mnemonic: Mnemonic::VMOVLPD,
operand1: Some(IndirectScaledIndexedDisplaced(
RBX,
RSI,
Eight,
2090827822,
Some(OperandSize::Qword),
None,
)),
operand2: Some(Direct(XMM17)),
operand3: None,
operand4: None,
lock: false,
rounding_mode: None,
merge_mode: None,
sae: false,
mask: None,
broadcast: None,
},
&[98, 225, 253, 8, 19, 140, 243, 46, 128, 159, 124],
OperandSize::Qword,
)
}
| 25.013575 | 95 | 0.445188 |
fcdf0712f04f87a3f0be0c282d8093f6ddb3a882 | 3,367 | use super::{operate, PathSubcommandArguments};
use crate::prelude::*;
use nu_engine::WholeStreamCommand;
use nu_errors::ShellError;
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
use nu_source::Tagged;
use std::path::Path;
pub struct PathBasename;
struct PathBasenameArguments {
rest: Vec<ColumnPath>,
replace: Option<Tagged<String>>,
}
impl PathSubcommandArguments for PathBasenameArguments {
fn get_column_paths(&self) -> &Vec<ColumnPath> {
&self.rest
}
}
impl WholeStreamCommand for PathBasename {
fn name(&self) -> &str {
"path basename"
}
fn signature(&self) -> Signature {
Signature::build("path basename")
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
.named(
"replace",
SyntaxShape::String,
"Return original path with basename replaced by this string",
Some('r'),
)
}
fn usage(&self) -> &str {
"Get the final component of a path"
}
fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> {
let tag = args.call_info.name_tag.clone();
let cmd_args = Arc::new(PathBasenameArguments {
rest: args.rest(0)?,
replace: args.get_flag("replace")?,
});
Ok(operate(args.input, &action, tag.span, cmd_args))
}
#[cfg(windows)]
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Get basename of a path",
example: "echo 'C:\\Users\\joe\\test.txt' | path basename",
result: Some(vec![Value::from("test.txt")]),
},
Example {
description: "Replace basename of a path",
example: "echo 'C:\\Users\\joe\\test.txt' | path basename -r 'spam.png'",
result: Some(vec![Value::from(UntaggedValue::filepath(
"C:\\Users\\joe\\spam.png",
))]),
},
]
}
#[cfg(not(windows))]
fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Get basename of a path",
example: "echo '/home/joe/test.txt' | path basename",
result: Some(vec![Value::from("test.txt")]),
},
Example {
description: "Replace basename of a path",
example: "echo '/home/joe/test.txt' | path basename -r 'spam.png'",
result: Some(vec![Value::from(UntaggedValue::filepath(
"/home/joe/spam.png",
))]),
},
]
}
}
fn action(path: &Path, tag: Tag, args: &PathBasenameArguments) -> Value {
let untagged = match args.replace {
Some(ref basename) => UntaggedValue::filepath(path.with_file_name(&basename.item)),
None => UntaggedValue::string(match path.file_name() {
Some(filename) => filename.to_string_lossy(),
None => "".into(),
}),
};
untagged.into_value(tag)
}
#[cfg(test)]
mod tests {
use super::PathBasename;
use super::ShellError;
#[test]
fn examples_work_as_expected() -> Result<(), ShellError> {
use crate::examples::test as test_examples;
test_examples(PathBasename {})
}
}
| 29.79646 | 91 | 0.550936 |
08f2c4f697273af0fbaa3ff5d182cd059b61e782 | 2,602 | use crate::relayer::block_proposal_process::{BlockProposalProcess, Status};
use crate::relayer::tests::helper::{build_chain, new_transaction};
use ckb_network::PeerIndex;
use ckb_types::packed::{self, ProposalShortId};
use ckb_types::prelude::*;
#[test]
fn test_no_unknown() {
let (relayer, always_success_out_point) = build_chain(5);
let peer_index: PeerIndex = 100.into();
let transaction = new_transaction(&relayer, 1, &always_success_out_point);
let transactions = vec![transaction.clone()];
// known tx
{
relayer
.shared
.state()
.mark_as_known_tx(transaction.hash().to_owned());
}
let content = packed::BlockProposal::new_builder()
.transactions(transactions.into_iter().map(|tx| tx.data()).pack())
.build();
let process = BlockProposalProcess::new(content.as_reader(), &relayer, peer_index);
let r = process.execute();
assert_eq!(r.ok(), Some(Status::NoUnknown));
}
#[test]
fn test_no_asked() {
let (relayer, always_success_out_point) = build_chain(5);
let peer_index: PeerIndex = 100.into();
let transaction = new_transaction(&relayer, 1, &always_success_out_point);
let transactions = vec![transaction.clone()];
let content = packed::BlockProposal::new_builder()
.transactions(transactions.into_iter().map(|tx| tx.data()).pack())
.build();
let process = BlockProposalProcess::new(content.as_reader(), &relayer, peer_index);
let r = process.execute();
assert_eq!(r.ok(), Some(Status::NoAsked));
let known = relayer.shared.state().already_known_tx(&transaction.hash());
assert_eq!(known, false);
}
#[test]
fn test_ok() {
let (relayer, always_success_out_point) = build_chain(5);
let peer_index: PeerIndex = 100.into();
let transaction = new_transaction(&relayer, 1, &always_success_out_point);
let transactions = vec![transaction.clone()];
let proposals: Vec<ProposalShortId> = transactions
.iter()
.map(|tx| tx.proposal_short_id())
.collect();
// Before asked proposals
{
relayer.shared.state().insert_inflight_proposals(proposals);
}
let content = packed::BlockProposal::new_builder()
.transactions(transactions.into_iter().map(|tx| tx.data()).pack())
.build();
let process = BlockProposalProcess::new(content.as_reader(), &relayer, peer_index);
let r = process.execute();
assert_eq!(r.ok(), Some(Status::Ok));
let known = relayer.shared.state().already_known_tx(&transaction.hash());
assert_eq!(known, true);
}
| 32.123457 | 87 | 0.667179 |
9165beed564563590d4f4d95a5a7ec14f9d58c1b | 4,179 | use std::cmp::Ordering;
use hibitset::BitSet;
use amethyst_core::{
nalgebra::{Point3, Vector3},
specs::prelude::{Entities, Entity, Join, Read, ReadStorage, System, Write},
GlobalTransform,
};
use crate::{
cam::{ActiveCamera, Camera},
hidden::{Hidden, HiddenPropagate},
transparent::Transparent,
};
/// Resource for controlling what entities should be rendered, and whether to draw them ordered or
/// not, which is useful for transparent surfaces.
#[derive(Default)]
pub struct SpriteVisibility {
/// Visible entities that can be drawn in any order
pub visible_unordered: BitSet,
/// Visible entities that need to be drawn in the given order
pub visible_ordered: Vec<Entity>,
}
/// Determines which entities should be drawn, and sorts transparent entities back to front based on
/// their position on the Z axis.
///
/// The sprite render pass should draw all sprites without semi-transparent pixels, then draw the
/// sprites with semi-transparent pixels from far to near.
///
/// Note that this should run after `GlobalTransform` has been updated for the current frame, and
/// before rendering occurs.
#[derive(Default)]
pub struct SpriteVisibilitySortingSystem {
centroids: Vec<Internals>,
transparent: Vec<Internals>,
}
#[derive(Clone)]
struct Internals {
entity: Entity,
transparent: bool,
centroid: Point3<f32>,
from_camera: Vector3<f32>,
}
impl SpriteVisibilitySortingSystem {
/// Returns a new sprite visibility sorting system
pub fn new() -> Self {
Default::default()
}
}
impl<'a> System<'a> for SpriteVisibilitySortingSystem {
type SystemData = (
Entities<'a>,
Write<'a, SpriteVisibility>,
ReadStorage<'a, Hidden>,
ReadStorage<'a, HiddenPropagate>,
Option<Read<'a, ActiveCamera>>,
ReadStorage<'a, Camera>,
ReadStorage<'a, Transparent>,
ReadStorage<'a, GlobalTransform>,
);
fn run(
&mut self,
(entities, mut visibility, hidden, hidden_prop, active, camera, transparent, global): Self::SystemData,
    ) {
let origin = Point3::origin();
// The camera position is used to determine culling, but the sprites are ordered based on
// the Z coordinate
let camera: Option<&GlobalTransform> = active
.and_then(|a| global.get(a.entity))
.or_else(|| (&camera, &global).join().map(|cg| cg.1).next());
let camera_backward = camera
.map(|c| c.0.column(2).xyz().into())
.unwrap_or_else(Vector3::z);
let camera_centroid = camera
.map(|g| g.0.transform_point(&origin))
.unwrap_or_else(|| origin);
self.centroids.clear();
self.centroids.extend(
(&*entities, &global, !&hidden, !&hidden_prop)
.join()
.map(|(entity, global, _, _)| (entity, global.0.transform_point(&origin)))
.map(|(entity, centroid)| Internals {
entity,
transparent: transparent.contains(entity),
centroid,
from_camera: centroid - camera_centroid,
})
// filter entities behind the camera
.filter(|c| c.from_camera.dot(&camera_backward) < 0.),
);
self.transparent.clear();
self.transparent
.extend(self.centroids.iter().filter(|c| c.transparent).cloned());
// Note: Smaller Z values are placed first, so that semi-transparent sprite colors blend
// correctly. This is opposite to the mesh visibility sorting system.
self.transparent.sort_by(|a, b| {
a.centroid
.z
.partial_cmp(&b.centroid.z)
.unwrap_or(Ordering::Equal)
});
visibility.visible_unordered.clear();
for c in &self.centroids {
if !c.transparent {
visibility.visible_unordered.add(c.entity.id());
}
}
visibility.visible_ordered.clear();
visibility
.visible_ordered
.extend(self.transparent.iter().map(|c| c.entity));
}
}
| 33.432 | 111 | 0.611151 |
2f8aae3e3f392662e02816d3fece28749c1d1dc9 | 1,785 | #[derive(Debug)]
pub struct BootRecord {
version: u8,
system_identifier: String,
boot_identifier: String,
system_use: Vec<u8>,
}
impl BootRecord {
    pub fn parse(bytes: &[u8]) -> ::error::Result<Self> {
match parser::boot_record(bytes) {
Ok((_, record)) => Ok(record),
Err(err) => Err(err.into()),
}
}
}
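// Layout note: an ISO 9660 boot record occupies a single 2048-byte volume
// descriptor: type (1) + "CD001" (5) + version (1) + boot system id (32)
// + boot id (32) + system use (1977) = 2048 bytes, matching the parser and
// the test below.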
mod parser {
use nom::be_u8;
use super::BootRecord;
#[cfg_attr(rustfmt, rustfmt_skip)]
named!(pub boot_record(&[u8]) -> BootRecord,
do_parse!(
tag!(b"\0") >>
tag!(b"CD001") >>
version: be_u8 >>
sys_id: map_res!(null_terminated!(32), ::std::str::from_utf8) >>
boot_id: map_res!(null_terminated!(32), ::std::str::from_utf8) >>
sys_use: take!(1977) >>
(BootRecord {
version,
system_identifier: sys_id.to_owned(),
boot_identifier: boot_id.to_owned(),
system_use: sys_use.to_vec(),
})
)
);
#[cfg(test)]
mod tests {
#[test]
fn test_boot_record() {
let mut buf = Vec::new();
buf.extend(b"\x00CD001\x01EL TORITO SPECIFICATION");
buf.extend(&vec![0; 2050]);
let (remaining, record) = super::boot_record(&buf).unwrap();
assert_eq!(remaining, &buf[2048..]);
assert_eq!(record.version, 1);
assert_eq!(record.system_identifier, "EL TORITO SPECIFICATION");
}
}
}
| 31.875 | 77 | 0.446499 |
6747917515a2c6ac30c9446072fdba5960af5db0 | 749 | use crate::framework::types::Vec2I;
pub trait RendererTrait {
fn load_textures(&mut self, base_path: &str, filenames: &[&str]);
fn load_sprite_sheet(&mut self, filename: &str);
fn clear(&mut self);
fn set_texture_color_mod(&mut self, tex_name: &str, r: u8, g: u8, b: u8);
fn set_sprite_texture_color_mod(&mut self, sprite_name: &str, r: u8, g: u8, b: u8);
fn draw_str(&mut self, tex_name: &str, x: i32, y: i32, text: &str);
fn draw_sprite(&mut self, sprite_name: &str, pos: &Vec2I);
fn draw_sprite_rot(&mut self, sprite_name: &str, pos: &Vec2I, angle: u8,
center: Option<&Vec2I>);
fn set_draw_color(&mut self, r: u8, g: u8, b: u8);
fn fill_rect(&mut self, dst: Option<[&Vec2I; 2]>);
}
| 46.8125 | 87 | 0.634179 |
2f9a0bf9e357c293035eeaf061b5fd0d81a47e16 | 2,218 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::TryInto;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use serde::Serialize;
use crate::KVMeta;
use crate::SledSerde;
/// Some value bound with a seq number
#[derive(Serialize, Deserialize, Debug, Default, Clone, Eq, PartialEq)]
pub struct SeqV<T = Vec<u8>> {
pub seq: u64,
pub meta: Option<KVMeta>,
pub data: T,
}
impl<T: Serialize + DeserializeOwned> SledSerde for SeqV<T> {}
pub trait IntoSeqV<T> {
type Error;
fn into_seqv(self) -> Result<SeqV<T>, Self::Error>;
}
impl<T, V> IntoSeqV<T> for SeqV<V>
where V: TryInto<T>
{
type Error = <V as TryInto<T>>::Error;
fn into_seqv(self) -> Result<SeqV<T>, Self::Error> {
Ok(SeqV {
seq: self.seq,
meta: self.meta,
data: self.data.try_into()?,
})
}
}
impl<T> SeqV<T> {
pub fn new(seq: u64, data: T) -> Self {
Self {
seq,
meta: None,
data,
}
}
pub fn with_meta(seq: u64, meta: Option<KVMeta>, data: T) -> Self {
Self { seq, meta, data }
}
pub fn get_expire_at(&self) -> u64 {
match self.meta {
None => u64::MAX,
Some(ref m) => match m.expire_at {
None => u64::MAX,
Some(exp_at) => exp_at,
},
}
}
pub fn set_seq(mut self, seq: u64) -> SeqV<T> {
self.seq = seq;
self
}
pub fn set_meta(mut self, m: Option<KVMeta>) -> SeqV<T> {
self.meta = m;
self
}
pub fn set_value(mut self, v: T) -> SeqV<T> {
self.data = v;
self
}
}
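// Minimal usage sketch for the builder-style setters above (illustrative only).
#[cfg(test)]
mod seqv_sketch {
    use super::SeqV;

    #[test]
    fn builder_style_setters() {
        // A value with no metadata never expires.
        let v = SeqV::new(1, b"hello".to_vec());
        assert_eq!(v.get_expire_at(), u64::MAX);

        // Setters consume and return the value, so they can be chained.
        let v = v.set_seq(2).set_value(b"world".to_vec());
        assert_eq!(v.seq, 2);
        assert_eq!(v.data, b"world".to_vec());
    }
}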
| 24.373626 | 75 | 0.581154 |
bfa3894d89fa190f3be5599cdacd031f1552d82b | 3,124 | use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::{dev::ServiceRequest, dev::ServiceResponse, Error};
use actix_utils::future::{ok, Ready};
use std::future::Future;
use actix_web::web::Query;
use serde::Deserialize;
use actix_web::dev::{Transform, Service};
#[derive(Debug, Deserialize)]
struct Auth {
#[serde(rename = "u")]
username: String,
#[serde(rename = "p")]
password: Option<String>,
#[serde(rename = "t", default)]
token: String,
#[serde(rename = "s", default)]
salt: String,
#[serde(rename = "c", default)]
client: String,
#[serde(rename = "v", default)]
version: String,
}
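// Illustrative request: a Subsonic-style URL such as
//
//     /rest/ping?u=alice&t=<hex md5(password + salt)>&s=c19b2d&c=app&v=1.16.1
//
// deserializes into Auth { username, token, salt, client, version, .. };
// alternatively `p` carries the password directly, optionally prefixed with "enc:".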
pub struct SonicAuth;
impl<S> Transform<S, ServiceRequest> for SonicAuth
where
S: Service<ServiceRequest, Response=ServiceResponse, Error=Error>,
S::Future: 'static,
{
type Response = ServiceResponse;
type Error = Error;
type Transform = SonicAuthMiddleware<S>;
type InitError = ();
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ok(SonicAuthMiddleware { service })
}
}
pub struct SonicAuthMiddleware<S> {
service: S,
}
impl<S> Service<ServiceRequest> for SonicAuthMiddleware<S>
where
S: Service<ServiceRequest, Response=ServiceResponse, Error=Error>,
S::Future: 'static,
{
type Response = ServiceResponse;
type Error = Error;
type Future = Pin<Box<dyn Future<Output=Result<Self::Response, Self::Error>>>>;
fn poll_ready(&self, ctx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.service.poll_ready(ctx)
}
fn call(&self, req: ServiceRequest) -> Self::Future {
let query = Query::<Auth>::from_query(req.query_string());
match query {
Ok(query) => {
let query = query.into_inner();
// t = md5(password+s)
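                    // This mirrors the Subsonic token scheme: the client picks a
                    // random salt `s` and sends t = hex(md5(password + s)), so the
                    // server can recompute the digest from its stored password
                    // without the password itself appearing in the URL.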
if query.username == std::env::var("ANNI_USER").unwrap()
&& match query.password {
None => { query.token == format!("{:x}", md5::compute(std::env::var("ANNI_PASSWD").unwrap() + &query.salt)) }
Some(password) => {
let password = if password.starts_with("enc:") { &password[4..] } else { &password };
password == std::env::var("ANNI_PASSWD_HEX").unwrap()
}
} {
let fut = self.service.call(req);
Box::pin(async {
let res = fut.await?;
Ok(res)
})
} else {
// wrong password
Box::pin(async {
let res = req.error_response(std::io::Error::new(std::io::ErrorKind::InvalidInput, ""));
Ok(res)
})
}
}
Err(_) => Box::pin(async {
let res = req.error_response(std::io::Error::new(std::io::ErrorKind::InvalidInput, ""));
Ok(res)
})
}
}
} | 32.884211 | 129 | 0.533291 |
cc2b2410b34b4b4755fc7c03a387a5e999b85da5 | 9,793 | // -*- mode: rust; -*-
//
// This file is part of curve25519-dalek.
// Copyright (c) 2018 Henry de Valence
// See LICENSE for licensing information.
//
// Authors:
// - Henry de Valence <hdevalence@hdevalence.ca>
#![allow(non_snake_case)]
use traits::Identity;
use std::ops::{Add, Neg, Sub};
use subtle::Choice;
use subtle::ConditionallySelectable;
use edwards;
use window::{LookupTable, NafLookupTable5, NafLookupTable8};
use super::constants;
use super::field::{F51x4Reduced, F51x4Unreduced, Lanes, Shuffle};
#[derive(Copy, Clone, Debug)]
pub struct ExtendedPoint(pub(super) F51x4Unreduced);
#[derive(Copy, Clone, Debug)]
pub struct CachedPoint(pub(super) F51x4Reduced);
impl From<edwards::EdwardsPoint> for ExtendedPoint {
fn from(P: edwards::EdwardsPoint) -> ExtendedPoint {
ExtendedPoint(F51x4Unreduced::new(&P.X, &P.Y, &P.Z, &P.T))
}
}
impl From<ExtendedPoint> for edwards::EdwardsPoint {
fn from(P: ExtendedPoint) -> edwards::EdwardsPoint {
let reduced = F51x4Reduced::from(P.0);
let tmp = F51x4Unreduced::from(reduced).split();
edwards::EdwardsPoint {
X: tmp[0],
Y: tmp[1],
Z: tmp[2],
T: tmp[3],
}
}
}
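// The conversion below stores a readdition-friendly form of the point: starting
// from (X, Y, Z, T) it keeps 121666*(Y-X), 121666*(Y+X), 2*121666*Z and
// -2*121665*T, i.e. a (Y-X, Y+X, 2Z, 2dT) ProjectiveNiels-style representation
// scaled by 121666, which is harmless because the coordinates are projective.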
impl From<ExtendedPoint> for CachedPoint {
fn from(P: ExtendedPoint) -> CachedPoint {
let mut x = P.0;
x = x.blend(&x.diff_sum(), Lanes::AB);
x = &F51x4Reduced::from(x) * (121666, 121666, 2 * 121666, 2 * 121665);
x = x.blend(&x.negate_lazy(), Lanes::D);
CachedPoint(F51x4Reduced::from(x))
}
}
impl Default for ExtendedPoint {
fn default() -> ExtendedPoint {
ExtendedPoint::identity()
}
}
impl Identity for ExtendedPoint {
fn identity() -> ExtendedPoint {
constants::EXTENDEDPOINT_IDENTITY
}
}
impl ExtendedPoint {
pub fn double(&self) -> ExtendedPoint {
// (Y1 X1 T1 Z1) -- uses vpshufd (1c latency @ 1/c)
let mut tmp0 = self.0.shuffle(Shuffle::BADC);
// (X1+Y1 X1+Y1 X1+Y1 X1+Y1) -- can use vpinserti128
let mut tmp1 = (self.0 + tmp0).shuffle(Shuffle::ABAB);
// (X1 Y1 Z1 X1+Y1)
tmp0 = self.0.blend(&tmp1, Lanes::D);
tmp1 = F51x4Reduced::from(tmp0).square();
// Now tmp1 = (S1 S2 S3 S4)
// We want to compute
//
// + | S1 | S1 | S1 | S1 |
// + | S2 | | | S2 |
// + | | | S3 | |
// + | | | S3 | |
// + | |16p |16p |16p |
// - | | S2 | S2 | |
// - | | | | S4 |
// =======================
// S5 S6 S8 S9
let zero = F51x4Unreduced::zero();
let S1_S1_S1_S1 = tmp1.shuffle(Shuffle::AAAA);
let S2_S2_S2_S2 = tmp1.shuffle(Shuffle::BBBB);
let S2_S2_S2_S4 = S2_S2_S2_S2.blend(&tmp1, Lanes::D).negate_lazy();
tmp0 = S1_S1_S1_S1 + zero.blend(&(tmp1 + tmp1), Lanes::C);
tmp0 = tmp0 + zero.blend(&S2_S2_S2_S2, Lanes::AD);
tmp0 = tmp0 + zero.blend(&S2_S2_S2_S4, Lanes::BCD);
let tmp2 = F51x4Reduced::from(tmp0);
ExtendedPoint(&tmp2.shuffle(Shuffle::DBBD) * &tmp2.shuffle(Shuffle::CACA))
}
pub fn mul_by_pow_2(&self, k: u32) -> ExtendedPoint {
let mut tmp: ExtendedPoint = *self;
for _ in 0..k {
tmp = tmp.double();
}
tmp
}
}
impl<'a, 'b> Add<&'b CachedPoint> for &'a ExtendedPoint {
type Output = ExtendedPoint;
/// Add an `ExtendedPoint` and a `CachedPoint`.
fn add(self, other: &'b CachedPoint) -> ExtendedPoint {
let mut tmp = self.0;
tmp = tmp.blend(&tmp.diff_sum(), Lanes::AB);
// tmp = (Y1-X1 Y1+X1 Z1 T1) = (S0 S1 Z1 T1)
tmp = &F51x4Reduced::from(tmp) * &other.0;
// tmp = (S0*S2' S1*S3' Z1*Z2' T1*T2') = (S8 S9 S10 S11)
tmp = tmp.shuffle(Shuffle::ABDC);
// tmp = (S8 S9 S11 S10)
let tmp = F51x4Reduced::from(tmp.diff_sum());
// tmp = (S9-S8 S9+S8 S10-S11 S10+S11) = (S12 S13 S14 S15)
let t0 = tmp.shuffle(Shuffle::ADDA);
// t0 = (S12 S15 S15 S12)
let t1 = tmp.shuffle(Shuffle::CBCB);
// t1 = (S14 S13 S14 S13)
// Return (S12*S14 S15*S13 S15*S14 S12*S13) = (X3 Y3 Z3 T3)
ExtendedPoint(&t0 * &t1)
}
}
impl Default for CachedPoint {
fn default() -> CachedPoint {
CachedPoint::identity()
}
}
impl Identity for CachedPoint {
fn identity() -> CachedPoint {
constants::CACHEDPOINT_IDENTITY
}
}
impl ConditionallySelectable for CachedPoint {
fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self {
CachedPoint(F51x4Reduced::conditional_select(&a.0, &b.0, choice))
}
fn conditional_assign(&mut self, other: &Self, choice: Choice) {
self.0.conditional_assign(&other.0, choice);
}
}
impl<'a> Neg for &'a CachedPoint {
type Output = CachedPoint;
fn neg(self) -> CachedPoint {
let swapped = self.0.shuffle(Shuffle::BACD);
CachedPoint(swapped.blend(&(-self.0), Lanes::D))
}
}
impl<'a, 'b> Sub<&'b CachedPoint> for &'a ExtendedPoint {
type Output = ExtendedPoint;
/// Implement subtraction by negating the point and adding.
fn sub(self, other: &'b CachedPoint) -> ExtendedPoint {
self + &(-other)
}
}
impl<'a> From<&'a edwards::EdwardsPoint> for LookupTable<CachedPoint> {
fn from(point: &'a edwards::EdwardsPoint) -> Self {
let P = ExtendedPoint::from(*point);
let mut points = [CachedPoint::from(P); 8];
for i in 0..7 {
points[i + 1] = (&P + &points[i]).into();
}
LookupTable(points)
}
}
impl<'a> From<&'a edwards::EdwardsPoint> for NafLookupTable5<CachedPoint> {
fn from(point: &'a edwards::EdwardsPoint) -> Self {
let A = ExtendedPoint::from(*point);
let mut Ai = [CachedPoint::from(A); 8];
let A2 = A.double();
for i in 0..7 {
Ai[i + 1] = (&A2 + &Ai[i]).into();
}
// Now Ai = [A, 3A, 5A, 7A, 9A, 11A, 13A, 15A]
NafLookupTable5(Ai)
}
}
impl<'a> From<&'a edwards::EdwardsPoint> for NafLookupTable8<CachedPoint> {
fn from(point: &'a edwards::EdwardsPoint) -> Self {
let A = ExtendedPoint::from(*point);
let mut Ai = [CachedPoint::from(A); 64];
let A2 = A.double();
for i in 0..63 {
Ai[i + 1] = (&A2 + &Ai[i]).into();
}
// Now Ai = [A, 3A, 5A, 7A, 9A, 11A, 13A, 15A, ..., 127A]
NafLookupTable8(Ai)
}
}
#[cfg(test)]
mod test {
use super::*;
fn addition_test_helper(P: edwards::EdwardsPoint, Q: edwards::EdwardsPoint) {
// Test the serial implementation of the parallel addition formulas
//let R_serial: edwards::EdwardsPoint = serial_add(P.into(), Q.into()).into();
// Test the vector implementation of the parallel readdition formulas
let cached_Q = CachedPoint::from(ExtendedPoint::from(Q));
let R_vector: edwards::EdwardsPoint = (&ExtendedPoint::from(P) + &cached_Q).into();
let S_vector: edwards::EdwardsPoint = (&ExtendedPoint::from(P) - &cached_Q).into();
println!("Testing point addition:");
println!("P = {:?}", P);
println!("Q = {:?}", Q);
println!("cached Q = {:?}", cached_Q);
println!("R = P + Q = {:?}", &P + &Q);
//println!("R_serial = {:?}", R_serial);
println!("R_vector = {:?}", R_vector);
println!("S = P - Q = {:?}", &P - &Q);
println!("S_vector = {:?}", S_vector);
//assert_eq!(R_serial.compress(), (&P + &Q).compress());
assert_eq!(R_vector.compress(), (&P + &Q).compress());
assert_eq!(S_vector.compress(), (&P - &Q).compress());
println!("OK!\n");
}
#[test]
fn vector_addition_vs_serial_addition_vs_edwards_extendedpoint() {
use constants;
use scalar::Scalar;
println!("Testing id +- id");
let P = edwards::EdwardsPoint::identity();
let Q = edwards::EdwardsPoint::identity();
addition_test_helper(P, Q);
println!("Testing id +- B");
let P = edwards::EdwardsPoint::identity();
let Q = constants::ED25519_BASEPOINT_POINT;
addition_test_helper(P, Q);
println!("Testing B +- B");
let P = constants::ED25519_BASEPOINT_POINT;
let Q = constants::ED25519_BASEPOINT_POINT;
addition_test_helper(P, Q);
println!("Testing B +- kB");
let P = constants::ED25519_BASEPOINT_POINT;
let Q = &constants::ED25519_BASEPOINT_TABLE * &Scalar::from(8475983829u64);
addition_test_helper(P, Q);
}
fn doubling_test_helper(P: edwards::EdwardsPoint) {
//let R1: edwards::EdwardsPoint = serial_double(P.into()).into();
let R2: edwards::EdwardsPoint = ExtendedPoint::from(P).double().into();
println!("Testing point doubling:");
println!("P = {:?}", P);
//println!("(serial) R1 = {:?}", R1);
println!("(vector) R2 = {:?}", R2);
println!("P + P = {:?}", &P + &P);
//assert_eq!(R1.compress(), (&P + &P).compress());
assert_eq!(R2.compress(), (&P + &P).compress());
println!("OK!\n");
}
#[test]
fn vector_doubling_vs_serial_doubling_vs_edwards_extendedpoint() {
use constants;
use scalar::Scalar;
println!("Testing [2]id");
let P = edwards::EdwardsPoint::identity();
doubling_test_helper(P);
println!("Testing [2]B");
let P = constants::ED25519_BASEPOINT_POINT;
doubling_test_helper(P);
println!("Testing [2]([k]B)");
let P = &constants::ED25519_BASEPOINT_TABLE * &Scalar::from(8475983829u64);
doubling_test_helper(P);
}
}
| 30.990506 | 91 | 0.563259 |
d6ce46c3b9f845223d846b9a06caf4981ff9aaa2 | 192 | // Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors
// SPDX-License-Identifier: MIT
pub mod potential_peers_get;
mod peer_requests_state;
pub use peer_requests_state::*;
| 24 | 71 | 0.802083 |
ed27a4d0b2a2fdc8438637582446c106a186709d | 779 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that the `Fn` traits require `()` form without a feature gate.
fn bar1(x: &Fn<(), Output=()>) {
//~^ ERROR of `Fn`-family traits' type parameters is subject to change
}
fn bar2<T>(x: &T) where T: Fn<()> {
//~^ ERROR of `Fn`-family traits' type parameters is subject to change
}
fn main() { }
| 33.869565 | 74 | 0.691913 |
28837070356702e2d589116682ebe6dac0e54a64 | 2,195 | #[doc = "Reader of register SEL51"]
pub type R = crate::R<u16, super::SEL51>;
#[doc = "Writer for register SEL51"]
pub type W = crate::W<u16, super::SEL51>;
#[doc = "Register SEL51 `reset()`'s with value 0"]
impl crate::ResetValue for super::SEL51 {
type Type = u16;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `SEL102`"]
pub type SEL102_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SEL102`"]
pub struct SEL102_W<'a> {
w: &'a mut W,
}
impl<'a> SEL102_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x7f) | ((value as u16) & 0x7f);
self.w
}
}
#[doc = "Reader of field `SEL103`"]
pub type SEL103_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `SEL103`"]
pub struct SEL103_W<'a> {
w: &'a mut W,
}
impl<'a> SEL103_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x7f << 8)) | (((value as u16) & 0x7f) << 8);
self.w
}
}
impl R {
#[doc = "Bits 0:6 - Input (XBARA_INn) to be muxed to XBARA_OUT102 (refer to Functional Description section for input/output assignment)"]
#[inline(always)]
pub fn sel102(&self) -> SEL102_R {
SEL102_R::new((self.bits & 0x7f) as u8)
}
#[doc = "Bits 8:14 - Input (XBARA_INn) to be muxed to XBARA_OUT103 (refer to Functional Description section for input/output assignment)"]
#[inline(always)]
pub fn sel103(&self) -> SEL103_R {
SEL103_R::new(((self.bits >> 8) & 0x7f) as u8)
}
}
impl W {
#[doc = "Bits 0:6 - Input (XBARA_INn) to be muxed to XBARA_OUT102 (refer to Functional Description section for input/output assignment)"]
#[inline(always)]
pub fn sel102(&mut self) -> SEL102_W {
SEL102_W { w: self }
}
#[doc = "Bits 8:14 - Input (XBARA_INn) to be muxed to XBARA_OUT103 (refer to Functional Description section for input/output assignment)"]
#[inline(always)]
pub fn sel103(&mut self) -> SEL103_W {
SEL103_W { w: self }
}
}
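
// Editorial usage sketch, not generated from the SVD. It assumes the usual svd2rust
// register API for this crate version, where `super::SEL51` exposes a `write` method
// taking a closure over `W`; the input numbers 5 and 9 are arbitrary examples.
fn route_xbara_outputs(sel51: &super::SEL51) {
    sel51.write(|w| unsafe {
        // SEL102 (bits 0:6) selects the input routed to XBARA_OUT102;
        // SEL103 (bits 8:14) selects the input routed to XBARA_OUT103.
        w.sel102().bits(5).sel103().bits(9)
    });
}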
| 33.769231 | 142 | 0.600911 |
e468de7501ee94032c473245d86b83f699931894 | 212,236 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct AssociateFleetError {
pub kind: AssociateFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum AssociateFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for AssociateFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
AssociateFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
AssociateFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for AssociateFleetError {
fn code(&self) -> Option<&str> {
AssociateFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl AssociateFleetError {
pub fn new(kind: AssociateFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: AssociateFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: AssociateFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::LimitExceededException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
AssociateFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for AssociateFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
AssociateFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
AssociateFleetErrorKind::IncompatibleImageException(_inner) => Some(_inner),
AssociateFleetErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
AssociateFleetErrorKind::LimitExceededException(_inner) => Some(_inner),
AssociateFleetErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
AssociateFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
AssociateFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
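
// Editorial usage sketch, not part of the generated file: inspecting an
// `AssociateFleetError` with only the accessors defined above. How the error is
// obtained (e.g. from an `associate_fleet` call) is outside this sketch.
fn report_associate_fleet_error(err: &AssociateFleetError) {
    if err.is_resource_not_found_exception() {
        // The fleet or stack named in the request does not exist.
        eprintln!("associate_fleet: not found: {:?}", err.message());
    } else if err.is_limit_exceeded_exception() {
        // A service quota was hit; keep the request id for a support case.
        eprintln!("associate_fleet: limit exceeded, request id {:?}", err.request_id());
    } else {
        // Fall back to the generic code/message carried in `smithy_types::Error`.
        eprintln!("associate_fleet failed: code={:?}, message={:?}", err.code(), err.message());
    }
}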
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchAssociateUserStackError {
pub kind: BatchAssociateUserStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchAssociateUserStackErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchAssociateUserStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchAssociateUserStackErrorKind::InvalidParameterCombinationException(_inner) => {
_inner.fmt(f)
}
BatchAssociateUserStackErrorKind::OperationNotPermittedException(_inner) => {
_inner.fmt(f)
}
BatchAssociateUserStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchAssociateUserStackError {
fn code(&self) -> Option<&str> {
BatchAssociateUserStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchAssociateUserStackError {
pub fn new(kind: BatchAssociateUserStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchAssociateUserStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchAssociateUserStackErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateUserStackErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
BatchAssociateUserStackErrorKind::OperationNotPermittedException(_)
)
}
}
impl std::error::Error for BatchAssociateUserStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchAssociateUserStackErrorKind::InvalidParameterCombinationException(_inner) => {
Some(_inner)
}
BatchAssociateUserStackErrorKind::OperationNotPermittedException(_inner) => {
Some(_inner)
}
BatchAssociateUserStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct BatchDisassociateUserStackError {
pub kind: BatchDisassociateUserStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum BatchDisassociateUserStackErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for BatchDisassociateUserStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
BatchDisassociateUserStackErrorKind::InvalidParameterCombinationException(_inner) => {
_inner.fmt(f)
}
BatchDisassociateUserStackErrorKind::OperationNotPermittedException(_inner) => {
_inner.fmt(f)
}
BatchDisassociateUserStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for BatchDisassociateUserStackError {
fn code(&self) -> Option<&str> {
BatchDisassociateUserStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl BatchDisassociateUserStackError {
pub fn new(kind: BatchDisassociateUserStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: BatchDisassociateUserStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: BatchDisassociateUserStackErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateUserStackErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
BatchDisassociateUserStackErrorKind::OperationNotPermittedException(_)
)
}
}
impl std::error::Error for BatchDisassociateUserStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
BatchDisassociateUserStackErrorKind::InvalidParameterCombinationException(_inner) => {
Some(_inner)
}
BatchDisassociateUserStackErrorKind::OperationNotPermittedException(_inner) => {
Some(_inner)
}
BatchDisassociateUserStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CopyImageError {
pub kind: CopyImageErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CopyImageErrorKind {
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
LimitExceededException(crate::error::LimitExceededException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CopyImageError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CopyImageErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
CopyImageErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CopyImageErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CopyImageErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CopyImageErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
CopyImageErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CopyImageErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CopyImageError {
fn code(&self) -> Option<&str> {
CopyImageError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CopyImageError {
pub fn new(kind: CopyImageErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CopyImageErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CopyImageErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
CopyImageErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CopyImageErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, CopyImageErrorKind::LimitExceededException(_))
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CopyImageErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
CopyImageErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(&self.kind, CopyImageErrorKind::ResourceNotFoundException(_))
}
}
impl std::error::Error for CopyImageError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CopyImageErrorKind::IncompatibleImageException(_inner) => Some(_inner),
CopyImageErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CopyImageErrorKind::LimitExceededException(_inner) => Some(_inner),
CopyImageErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CopyImageErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
CopyImageErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CopyImageErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
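
// Editorial sketch, not part of the generated file: exercising the `unhandled`
// constructor and the kind predicates above in a unit test. The error string is
// arbitrary; only APIs defined in this file (plus std) are used.
#[test]
fn copy_image_error_unhandled_has_no_modeled_kind() {
    let err = CopyImageError::unhandled("simulated transport failure");
    assert!(matches!(&err.kind, CopyImageErrorKind::Unhandled(_)));
    assert!(!err.is_limit_exceeded_exception());
    assert!(!err.is_resource_not_found_exception());
}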
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateDirectoryConfigError {
pub kind: CreateDirectoryConfigErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateDirectoryConfigErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateDirectoryConfigError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateDirectoryConfigErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateDirectoryConfigErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateDirectoryConfigError {
fn code(&self) -> Option<&str> {
CreateDirectoryConfigError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateDirectoryConfigError {
pub fn new(kind: CreateDirectoryConfigErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateDirectoryConfigErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateDirectoryConfigErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::InvalidRoleException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::LimitExceededException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateDirectoryConfigErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateDirectoryConfigError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateDirectoryConfigErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::InvalidRoleException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateDirectoryConfigErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateFleetError {
pub kind: CreateFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
RequestLimitExceededException(crate::error::RequestLimitExceededException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::RequestLimitExceededException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateFleetError {
fn code(&self) -> Option<&str> {
CreateFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateFleetError {
pub fn new(kind: CreateFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(&self.kind, CreateFleetErrorKind::InvalidRoleException(_))
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, CreateFleetErrorKind::LimitExceededException(_))
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_request_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::RequestLimitExceededException(_)
)
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
CreateFleetErrorKind::IncompatibleImageException(_inner) => Some(_inner),
CreateFleetErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateFleetErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
CreateFleetErrorKind::InvalidRoleException(_inner) => Some(_inner),
CreateFleetErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateFleetErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateFleetErrorKind::RequestLimitExceededException(_inner) => Some(_inner),
CreateFleetErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateFleetErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
CreateFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateImageBuilderError {
pub kind: CreateImageBuilderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateImageBuilderErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
RequestLimitExceededException(crate::error::RequestLimitExceededException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateImageBuilderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateImageBuilderErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::InvalidParameterCombinationException(_inner) => {
_inner.fmt(f)
}
CreateImageBuilderErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::RequestLimitExceededException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateImageBuilderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateImageBuilderError {
fn code(&self) -> Option<&str> {
CreateImageBuilderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateImageBuilderError {
pub fn new(kind: CreateImageBuilderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateImageBuilderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateImageBuilderErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::InvalidRoleException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::LimitExceededException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_request_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::RequestLimitExceededException(_)
)
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateImageBuilderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateImageBuilderErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::IncompatibleImageException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::InvalidParameterCombinationException(_inner) => {
Some(_inner)
}
CreateImageBuilderErrorKind::InvalidRoleException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::RequestLimitExceededException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateImageBuilderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateImageBuilderStreamingURLError {
pub kind: CreateImageBuilderStreamingURLErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateImageBuilderStreamingURLErrorKind {
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateImageBuilderStreamingURLError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateImageBuilderStreamingURLErrorKind::OperationNotPermittedException(_inner) => {
_inner.fmt(f)
}
CreateImageBuilderStreamingURLErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
CreateImageBuilderStreamingURLErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateImageBuilderStreamingURLError {
fn code(&self) -> Option<&str> {
CreateImageBuilderStreamingURLError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateImageBuilderStreamingURLError {
pub fn new(kind: CreateImageBuilderStreamingURLErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateImageBuilderStreamingURLErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateImageBuilderStreamingURLErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderStreamingURLErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateImageBuilderStreamingURLErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateImageBuilderStreamingURLError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateImageBuilderStreamingURLErrorKind::OperationNotPermittedException(_inner) => {
Some(_inner)
}
CreateImageBuilderStreamingURLErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
CreateImageBuilderStreamingURLErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStackError {
pub kind: CreateStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStackErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateStackErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
CreateStackErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateStackErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
CreateStackErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
CreateStackErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateStackErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateStackErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateStackError {
fn code(&self) -> Option<&str> {
CreateStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateStackError {
pub fn new(kind: CreateStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateStackErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(&self.kind, CreateStackErrorKind::InvalidRoleException(_))
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, CreateStackErrorKind::LimitExceededException(_))
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateStackErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateStackErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
CreateStackErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateStackErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
CreateStackErrorKind::InvalidRoleException(_inner) => Some(_inner),
CreateStackErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateStackErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateStackErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateStreamingURLError {
pub kind: CreateStreamingURLErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateStreamingURLErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateStreamingURLError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateStreamingURLErrorKind::InvalidParameterCombinationException(_inner) => {
_inner.fmt(f)
}
CreateStreamingURLErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateStreamingURLErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
CreateStreamingURLErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateStreamingURLErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateStreamingURLError {
fn code(&self) -> Option<&str> {
CreateStreamingURLError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateStreamingURLError {
pub fn new(kind: CreateStreamingURLErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateStreamingURLErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateStreamingURLErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
CreateStreamingURLErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateStreamingURLErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
CreateStreamingURLErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateStreamingURLErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateStreamingURLError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateStreamingURLErrorKind::InvalidParameterCombinationException(_inner) => {
Some(_inner)
}
CreateStreamingURLErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateStreamingURLErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
CreateStreamingURLErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateStreamingURLErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateUpdatedImageError {
pub kind: CreateUpdatedImageErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateUpdatedImageErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateUpdatedImageError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateUpdatedImageErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
CreateUpdatedImageErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateUpdatedImageError {
fn code(&self) -> Option<&str> {
CreateUpdatedImageError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateUpdatedImageError {
pub fn new(kind: CreateUpdatedImageErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateUpdatedImageErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateUpdatedImageErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::LimitExceededException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::ResourceAlreadyExistsException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
CreateUpdatedImageErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for CreateUpdatedImageError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateUpdatedImageErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::IncompatibleImageException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
CreateUpdatedImageErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateUsageReportSubscriptionError {
pub kind: CreateUsageReportSubscriptionErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateUsageReportSubscriptionErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateUsageReportSubscriptionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_inner) => {
_inner.fmt(f)
}
CreateUsageReportSubscriptionErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
CreateUsageReportSubscriptionErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateUsageReportSubscriptionErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateUsageReportSubscriptionError {
fn code(&self) -> Option<&str> {
CreateUsageReportSubscriptionError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateUsageReportSubscriptionError {
pub fn new(kind: CreateUsageReportSubscriptionErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateUsageReportSubscriptionErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateUsageReportSubscriptionErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(
&self.kind,
CreateUsageReportSubscriptionErrorKind::InvalidRoleException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
CreateUsageReportSubscriptionErrorKind::LimitExceededException(_)
)
}
}
impl std::error::Error for CreateUsageReportSubscriptionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_inner) => {
Some(_inner)
}
CreateUsageReportSubscriptionErrorKind::InvalidRoleException(_inner) => Some(_inner),
CreateUsageReportSubscriptionErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateUsageReportSubscriptionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct CreateUserError {
pub kind: CreateUserErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum CreateUserErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceAlreadyExistsException(crate::error::ResourceAlreadyExistsException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for CreateUserError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
CreateUserErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
CreateUserErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
CreateUserErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
CreateUserErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
CreateUserErrorKind::ResourceAlreadyExistsException(_inner) => _inner.fmt(f),
CreateUserErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for CreateUserError {
fn code(&self) -> Option<&str> {
CreateUserError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl CreateUserError {
pub fn new(kind: CreateUserErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: CreateUserErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: CreateUserErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
CreateUserErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
CreateUserErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, CreateUserErrorKind::LimitExceededException(_))
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
CreateUserErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_already_exists_exception(&self) -> bool {
matches!(
&self.kind,
CreateUserErrorKind::ResourceAlreadyExistsException(_)
)
}
}
impl std::error::Error for CreateUserError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
CreateUserErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
CreateUserErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
CreateUserErrorKind::LimitExceededException(_inner) => Some(_inner),
CreateUserErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
CreateUserErrorKind::ResourceAlreadyExistsException(_inner) => Some(_inner),
CreateUserErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteDirectoryConfigError {
pub kind: DeleteDirectoryConfigErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteDirectoryConfigErrorKind {
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteDirectoryConfigError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteDirectoryConfigErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
DeleteDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteDirectoryConfigErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteDirectoryConfigError {
fn code(&self) -> Option<&str> {
DeleteDirectoryConfigError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteDirectoryConfigError {
pub fn new(kind: DeleteDirectoryConfigErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteDirectoryConfigErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteDirectoryConfigErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl required by `std::error::Error` to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(
&self.kind,
DeleteDirectoryConfigErrorKind::ResourceInUseException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteDirectoryConfigErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteDirectoryConfigError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteDirectoryConfigErrorKind::ResourceInUseException(_inner) => Some(_inner),
DeleteDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteDirectoryConfigErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
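// A minimal sketch (not part of the generated API): because every error type here implements
// `std::error::Error` and reports the wrapped exception through `source()`, a caller can walk
// the chain generically when logging. The function name is illustrative.
#[allow(dead_code)]
fn log_delete_directory_config_error(err: &DeleteDirectoryConfigError) -> Vec<String> {
    // Start with the top-level error's Display output, then append each cause in turn.
    let mut rendered = vec![err.to_string()];
    let mut source = std::error::Error::source(err);
    while let Some(cause) = source {
        rendered.push(cause.to_string());
        source = cause.source();
    }
    rendered
}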
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteFleetError {
pub kind: DeleteFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
DeleteFleetErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
DeleteFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteFleetError {
fn code(&self) -> Option<&str> {
DeleteFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteFleetError {
pub fn new(kind: DeleteFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
DeleteFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(&self.kind, DeleteFleetErrorKind::ResourceInUseException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
DeleteFleetErrorKind::ResourceInUseException(_inner) => Some(_inner),
DeleteFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
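// A small runnable sketch (not emitted by the code generator): `Display` for these error types
// delegates to the wrapped exception, so an unhandled error renders whatever message it wraps.
#[cfg(test)]
mod delete_fleet_error_display_sketch {
    use super::*;

    #[test]
    fn display_delegates_to_the_wrapped_error() {
        let err = DeleteFleetError::unhandled("fleet service unreachable");
        assert_eq!(format!("{}", err), "fleet service unreachable");
    }
}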
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteImageError {
pub kind: DeleteImageErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteImageErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteImageError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteImageErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
DeleteImageErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
DeleteImageErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
DeleteImageErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteImageErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteImageError {
fn code(&self) -> Option<&str> {
DeleteImageError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteImageError {
pub fn new(kind: DeleteImageErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteImageErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteImageErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(&self.kind, DeleteImageErrorKind::ResourceInUseException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteImageError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteImageErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
DeleteImageErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
DeleteImageErrorKind::ResourceInUseException(_inner) => Some(_inner),
DeleteImageErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteImageErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteImageBuilderError {
pub kind: DeleteImageBuilderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteImageBuilderErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteImageBuilderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteImageBuilderErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
DeleteImageBuilderErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
DeleteImageBuilderErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteImageBuilderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteImageBuilderError {
fn code(&self) -> Option<&str> {
DeleteImageBuilderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteImageBuilderError {
pub fn new(kind: DeleteImageBuilderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteImageBuilderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteImageBuilderErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageBuilderErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageBuilderErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImageBuilderErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteImageBuilderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteImageBuilderErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
DeleteImageBuilderErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
DeleteImageBuilderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteImageBuilderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteImagePermissionsError {
pub kind: DeleteImagePermissionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteImagePermissionsErrorKind {
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteImagePermissionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteImagePermissionsErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
DeleteImagePermissionsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteImagePermissionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteImagePermissionsError {
fn code(&self) -> Option<&str> {
DeleteImagePermissionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteImagePermissionsError {
pub fn new(kind: DeleteImagePermissionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteImagePermissionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteImagePermissionsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImagePermissionsErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteImagePermissionsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteImagePermissionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteImagePermissionsErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
DeleteImagePermissionsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteImagePermissionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteStackError {
pub kind: DeleteStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteStackErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteStackErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
DeleteStackErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
DeleteStackErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteStackError {
fn code(&self) -> Option<&str> {
DeleteStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteStackError {
pub fn new(kind: DeleteStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteStackErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
DeleteStackErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(&self.kind, DeleteStackErrorKind::ResourceInUseException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteStackErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteStackErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
DeleteStackErrorKind::ResourceInUseException(_inner) => Some(_inner),
DeleteStackErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteUsageReportSubscriptionError {
pub kind: DeleteUsageReportSubscriptionErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteUsageReportSubscriptionErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteUsageReportSubscriptionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_inner) => {
_inner.fmt(f)
}
DeleteUsageReportSubscriptionErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
DeleteUsageReportSubscriptionErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteUsageReportSubscriptionError {
fn code(&self) -> Option<&str> {
DeleteUsageReportSubscriptionError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteUsageReportSubscriptionError {
pub fn new(kind: DeleteUsageReportSubscriptionErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteUsageReportSubscriptionErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteUsageReportSubscriptionErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
DeleteUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteUsageReportSubscriptionErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteUsageReportSubscriptionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteUsageReportSubscriptionErrorKind::InvalidAccountStatusException(_inner) => {
Some(_inner)
}
DeleteUsageReportSubscriptionErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
DeleteUsageReportSubscriptionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DeleteUserError {
pub kind: DeleteUserErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DeleteUserErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DeleteUserError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DeleteUserErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DeleteUserErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DeleteUserError {
fn code(&self) -> Option<&str> {
DeleteUserError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DeleteUserError {
pub fn new(kind: DeleteUserErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DeleteUserErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DeleteUserErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DeleteUserErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DeleteUserError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DeleteUserErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DeleteUserErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeDirectoryConfigsError {
pub kind: DescribeDirectoryConfigsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeDirectoryConfigsErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeDirectoryConfigsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeDirectoryConfigsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeDirectoryConfigsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeDirectoryConfigsError {
fn code(&self) -> Option<&str> {
DescribeDirectoryConfigsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeDirectoryConfigsError {
pub fn new(kind: DescribeDirectoryConfigsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeDirectoryConfigsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeDirectoryConfigsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeDirectoryConfigsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeDirectoryConfigsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeDirectoryConfigsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeDirectoryConfigsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeFleetsError {
pub kind: DescribeFleetsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeFleetsErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeFleetsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeFleetsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeFleetsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeFleetsError {
fn code(&self) -> Option<&str> {
DescribeFleetsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeFleetsError {
pub fn new(kind: DescribeFleetsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeFleetsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeFleetsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeFleetsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeFleetsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeFleetsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeFleetsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeImageBuildersError {
pub kind: DescribeImageBuildersErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeImageBuildersErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeImageBuildersError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeImageBuildersErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeImageBuildersErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeImageBuildersError {
fn code(&self) -> Option<&str> {
DescribeImageBuildersError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeImageBuildersError {
pub fn new(kind: DescribeImageBuildersErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeImageBuildersErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeImageBuildersErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeImageBuildersErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeImageBuildersError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeImageBuildersErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeImageBuildersErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeImagePermissionsError {
pub kind: DescribeImagePermissionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeImagePermissionsErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeImagePermissionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeImagePermissionsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeImagePermissionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeImagePermissionsError {
fn code(&self) -> Option<&str> {
DescribeImagePermissionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeImagePermissionsError {
pub fn new(kind: DescribeImagePermissionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeImagePermissionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeImagePermissionsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeImagePermissionsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeImagePermissionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeImagePermissionsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeImagePermissionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeImagesError {
pub kind: DescribeImagesErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeImagesErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeImagesError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeImagesErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
DescribeImagesErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeImagesErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeImagesError {
fn code(&self) -> Option<&str> {
DescribeImagesError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeImagesError {
pub fn new(kind: DescribeImagesErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeImagesErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeImagesErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
DescribeImagesErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeImagesErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeImagesError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeImagesErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
DescribeImagesErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeImagesErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeSessionsError {
pub kind: DescribeSessionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeSessionsErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeSessionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeSessionsErrorKind::InvalidParameterCombinationException(_inner) => {
_inner.fmt(f)
}
DescribeSessionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeSessionsError {
fn code(&self) -> Option<&str> {
DescribeSessionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeSessionsError {
pub fn new(kind: DescribeSessionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeSessionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeSessionsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
DescribeSessionsErrorKind::InvalidParameterCombinationException(_)
)
}
}
impl std::error::Error for DescribeSessionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeSessionsErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
DescribeSessionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeStacksError {
pub kind: DescribeStacksErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeStacksErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeStacksError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeStacksErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeStacksErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeStacksError {
fn code(&self) -> Option<&str> {
DescribeStacksError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeStacksError {
pub fn new(kind: DescribeStacksErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeStacksErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeStacksErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeStacksErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeStacksError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeStacksErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeStacksErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeUsageReportSubscriptionsError {
pub kind: DescribeUsageReportSubscriptionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeUsageReportSubscriptionsErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeUsageReportSubscriptionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeUsageReportSubscriptionsErrorKind::InvalidAccountStatusException(_inner) => {
_inner.fmt(f)
}
DescribeUsageReportSubscriptionsErrorKind::ResourceNotFoundException(_inner) => {
_inner.fmt(f)
}
DescribeUsageReportSubscriptionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeUsageReportSubscriptionsError {
fn code(&self) -> Option<&str> {
DescribeUsageReportSubscriptionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeUsageReportSubscriptionsError {
pub fn new(kind: DescribeUsageReportSubscriptionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeUsageReportSubscriptionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeUsageReportSubscriptionsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUsageReportSubscriptionsErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUsageReportSubscriptionsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeUsageReportSubscriptionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeUsageReportSubscriptionsErrorKind::InvalidAccountStatusException(_inner) => {
Some(_inner)
}
DescribeUsageReportSubscriptionsErrorKind::ResourceNotFoundException(_inner) => {
Some(_inner)
}
DescribeUsageReportSubscriptionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeUsersError {
pub kind: DescribeUsersErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeUsersErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeUsersError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeUsersErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
DescribeUsersErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DescribeUsersErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeUsersError {
fn code(&self) -> Option<&str> {
DescribeUsersError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeUsersError {
pub fn new(kind: DescribeUsersErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeUsersErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeUsersErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUsersErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUsersErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DescribeUsersError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeUsersErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
DescribeUsersErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DescribeUsersErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DescribeUserStackAssociationsError {
pub kind: DescribeUserStackAssociationsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DescribeUserStackAssociationsErrorKind {
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DescribeUserStackAssociationsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DescribeUserStackAssociationsErrorKind::InvalidParameterCombinationException(
_inner,
) => _inner.fmt(f),
DescribeUserStackAssociationsErrorKind::OperationNotPermittedException(_inner) => {
_inner.fmt(f)
}
DescribeUserStackAssociationsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DescribeUserStackAssociationsError {
fn code(&self) -> Option<&str> {
DescribeUserStackAssociationsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DescribeUserStackAssociationsError {
pub fn new(kind: DescribeUserStackAssociationsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DescribeUserStackAssociationsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DescribeUserStackAssociationsErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUserStackAssociationsErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
DescribeUserStackAssociationsErrorKind::OperationNotPermittedException(_)
)
}
}
impl std::error::Error for DescribeUserStackAssociationsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DescribeUserStackAssociationsErrorKind::InvalidParameterCombinationException(
_inner,
) => Some(_inner),
DescribeUserStackAssociationsErrorKind::OperationNotPermittedException(_inner) => {
Some(_inner)
}
DescribeUserStackAssociationsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DisableUserError {
pub kind: DisableUserErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DisableUserErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DisableUserError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DisableUserErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DisableUserErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DisableUserError {
fn code(&self) -> Option<&str> {
DisableUserError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DisableUserError {
pub fn new(kind: DisableUserErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DisableUserErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DisableUserErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DisableUserErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DisableUserError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DisableUserErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DisableUserErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct DisassociateFleetError {
pub kind: DisassociateFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum DisassociateFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for DisassociateFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
DisassociateFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
DisassociateFleetErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
DisassociateFleetErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
DisassociateFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
DisassociateFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for DisassociateFleetError {
fn code(&self) -> Option<&str> {
DisassociateFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl DisassociateFleetError {
pub fn new(kind: DisassociateFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: DisassociateFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: DisassociateFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateFleetErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateFleetErrorKind::ResourceInUseException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
DisassociateFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for DisassociateFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
DisassociateFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
DisassociateFleetErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
DisassociateFleetErrorKind::ResourceInUseException(_inner) => Some(_inner),
DisassociateFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
DisassociateFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
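// A minimal sketch (not part of the generated API): when a caller needs the modeled exception
// itself rather than a boolean from the `is_*` helpers, the `source()` of the error can be
// downcast to the concrete exception type. The helper name is illustrative.
#[allow(dead_code)]
fn disassociate_fleet_resource_in_use(
    err: &DisassociateFleetError,
) -> Option<&crate::error::ResourceInUseException> {
    // `source()` exposes the wrapped exception as `&dyn std::error::Error`, which supports downcasting.
    std::error::Error::source(err).and_then(|cause| cause.downcast_ref::<crate::error::ResourceInUseException>())
}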
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct EnableUserError {
pub kind: EnableUserErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum EnableUserErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for EnableUserError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
EnableUserErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
EnableUserErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
EnableUserErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for EnableUserError {
fn code(&self) -> Option<&str> {
EnableUserError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl EnableUserError {
pub fn new(kind: EnableUserErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: EnableUserErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: EnableUserErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
EnableUserErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
EnableUserErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for EnableUserError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
EnableUserErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
EnableUserErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
EnableUserErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ExpireSessionError {
pub kind: ExpireSessionErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ExpireSessionErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ExpireSessionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ExpireSessionErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ExpireSessionError {
fn code(&self) -> Option<&str> {
ExpireSessionError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ExpireSessionError {
pub fn new(kind: ExpireSessionErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ExpireSessionErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ExpireSessionErrorKind::Unhandled(err.into()),
}
}
    // Consider if this should actually be `Option<Cow<&str>>`. This would enable us to use the
    // `Display` impl of the underlying error to generate a message in that case.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ExpireSessionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ExpireSessionErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
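// A minimal runnable sketch (not emitted by the code generator): `ExpireSession` models no
// service-specific errors, so its kind is only ever `Unhandled` and callers inspect failures
// through the metadata accessors instead of `is_*` predicates.
#[cfg(test)]
mod expire_session_error_metadata_sketch {
    use super::*;

    #[test]
    fn unhandled_errors_start_with_empty_metadata() {
        let err = ExpireSessionError::unhandled("stream interrupted");
        // `unhandled` attaches default (empty) metadata; a real response would populate these.
        assert!(err.code().is_none());
        assert!(err.message().is_none());
        assert!(err.request_id().is_none());
    }
}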
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListAssociatedFleetsError {
pub kind: ListAssociatedFleetsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListAssociatedFleetsErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListAssociatedFleetsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListAssociatedFleetsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListAssociatedFleetsError {
fn code(&self) -> Option<&str> {
ListAssociatedFleetsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListAssociatedFleetsError {
pub fn new(kind: ListAssociatedFleetsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListAssociatedFleetsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListAssociatedFleetsErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListAssociatedFleetsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListAssociatedFleetsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListAssociatedStacksError {
pub kind: ListAssociatedStacksErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListAssociatedStacksErrorKind {
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListAssociatedStacksError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListAssociatedStacksErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListAssociatedStacksError {
fn code(&self) -> Option<&str> {
ListAssociatedStacksError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListAssociatedStacksError {
pub fn new(kind: ListAssociatedStacksErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListAssociatedStacksErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListAssociatedStacksErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
}
impl std::error::Error for ListAssociatedStacksError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListAssociatedStacksErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct ListTagsForResourceError {
pub kind: ListTagsForResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum ListTagsForResourceErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for ListTagsForResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
ListTagsForResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for ListTagsForResourceError {
fn code(&self) -> Option<&str> {
ListTagsForResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl ListTagsForResourceError {
pub fn new(kind: ListTagsForResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: ListTagsForResourceErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
ListTagsForResourceErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for ListTagsForResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
ListTagsForResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
ListTagsForResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
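// Hand-written sketch (not generated code): every operation error implements `std::error::Error`
// and exposes the modeled exception through `source()`, so callers can walk the cause chain
// generically, e.g. `print_error_chain(&some_list_tags_for_resource_error)`. The function name
// is illustrative only.
#[allow(dead_code)]
fn print_error_chain(err: &dyn std::error::Error) {
    eprintln!("error: {}", err);
    let mut source = err.source();
    while let Some(cause) = source {
        eprintln!("  caused by: {}", cause);
        source = cause.source();
    }
}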
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StartFleetError {
pub kind: StartFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StartFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
RequestLimitExceededException(crate::error::RequestLimitExceededException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for StartFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
StartFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
StartFleetErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
StartFleetErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
StartFleetErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
StartFleetErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
StartFleetErrorKind::RequestLimitExceededException(_inner) => _inner.fmt(f),
StartFleetErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
StartFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
StartFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for StartFleetError {
fn code(&self) -> Option<&str> {
StartFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl StartFleetError {
pub fn new(kind: StartFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: StartFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: StartFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(&self.kind, StartFleetErrorKind::InvalidRoleException(_))
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, StartFleetErrorKind::LimitExceededException(_))
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_request_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::RequestLimitExceededException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
StartFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for StartFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
StartFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
StartFleetErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
StartFleetErrorKind::InvalidRoleException(_inner) => Some(_inner),
StartFleetErrorKind::LimitExceededException(_inner) => Some(_inner),
StartFleetErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
StartFleetErrorKind::RequestLimitExceededException(_inner) => Some(_inner),
StartFleetErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
StartFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
StartFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
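// Hand-written sketch (not generated code): deciding whether a failed `StartFleet` call is worth
// retrying at the application level, using the generated `is_*` helpers rather than matching on
// the non-exhaustive kind enum. `is_retry_worthy_start_fleet_error` is an illustrative name only,
// and the classification below is an assumption, not service guidance.
#[allow(dead_code)]
fn is_retry_worthy_start_fleet_error(err: &StartFleetError) -> bool {
    // Concurrent modifications and request throttling are typically transient,
    // whereas missing resources or invalid roles are not.
    err.is_concurrent_modification_exception() || err.is_request_limit_exceeded_exception()
}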
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StartImageBuilderError {
pub kind: StartImageBuilderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StartImageBuilderErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for StartImageBuilderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
StartImageBuilderErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
StartImageBuilderErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
StartImageBuilderErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
StartImageBuilderErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
StartImageBuilderErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
StartImageBuilderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for StartImageBuilderError {
fn code(&self) -> Option<&str> {
StartImageBuilderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl StartImageBuilderError {
pub fn new(kind: StartImageBuilderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: StartImageBuilderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: StartImageBuilderErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
StartImageBuilderErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
StartImageBuilderErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
StartImageBuilderErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
StartImageBuilderErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
StartImageBuilderErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for StartImageBuilderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
StartImageBuilderErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
StartImageBuilderErrorKind::IncompatibleImageException(_inner) => Some(_inner),
StartImageBuilderErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
StartImageBuilderErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
StartImageBuilderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
StartImageBuilderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StopFleetError {
pub kind: StopFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StopFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for StopFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
StopFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
StopFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
StopFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for StopFleetError {
fn code(&self) -> Option<&str> {
StopFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl StopFleetError {
pub fn new(kind: StopFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: StopFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: StopFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
StopFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(&self.kind, StopFleetErrorKind::ResourceNotFoundException(_))
}
}
impl std::error::Error for StopFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
StopFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
StopFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
StopFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct StopImageBuilderError {
pub kind: StopImageBuilderErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum StopImageBuilderErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for StopImageBuilderError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
StopImageBuilderErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
StopImageBuilderErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
StopImageBuilderErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
StopImageBuilderErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for StopImageBuilderError {
fn code(&self) -> Option<&str> {
StopImageBuilderError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl StopImageBuilderError {
pub fn new(kind: StopImageBuilderErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: StopImageBuilderErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: StopImageBuilderErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
StopImageBuilderErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
StopImageBuilderErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
StopImageBuilderErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for StopImageBuilderError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
StopImageBuilderErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
StopImageBuilderErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
StopImageBuilderErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
StopImageBuilderErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct TagResourceError {
pub kind: TagResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum TagResourceErrorKind {
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
LimitExceededException(crate::error::LimitExceededException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for TagResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
TagResourceErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
TagResourceErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
TagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
TagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for TagResourceError {
fn code(&self) -> Option<&str> {
TagResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl TagResourceError {
pub fn new(kind: TagResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: TagResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: TagResourceErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
TagResourceErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, TagResourceErrorKind::LimitExceededException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
TagResourceErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for TagResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
TagResourceErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
TagResourceErrorKind::LimitExceededException(_inner) => Some(_inner),
TagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
TagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
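// Hand-written sketch (not generated code): stubbing a `TagResourceError` in a unit test without
// going through the wire format. `unhandled` accepts anything convertible into a boxed error,
// so a plain `&str` works. The module and test names are illustrative only, and the assertion on
// `message()` assumes the default `smithy_types::Error` carries no message.
#[cfg(test)]
mod tag_resource_error_sketch {
    use super::*;

    #[test]
    fn unhandled_errors_carry_no_modeled_kind() {
        let err = TagResourceError::unhandled("synthetic failure");
        assert!(!err.is_resource_not_found_exception());
        assert!(err.message().is_none());
    }
}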
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UntagResourceError {
pub kind: UntagResourceErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UntagResourceErrorKind {
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UntagResourceError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UntagResourceErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UntagResourceErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UntagResourceError {
fn code(&self) -> Option<&str> {
UntagResourceError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UntagResourceError {
pub fn new(kind: UntagResourceErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UntagResourceErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UntagResourceErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UntagResourceErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for UntagResourceError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UntagResourceErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UntagResourceErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateDirectoryConfigError {
pub kind: UpdateDirectoryConfigErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateDirectoryConfigErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
InvalidRoleException(crate::error::InvalidRoleException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateDirectoryConfigError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateDirectoryConfigErrorKind::ConcurrentModificationException(_inner) => {
_inner.fmt(f)
}
UpdateDirectoryConfigErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
UpdateDirectoryConfigErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
UpdateDirectoryConfigErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
UpdateDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateDirectoryConfigErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateDirectoryConfigError {
fn code(&self) -> Option<&str> {
UpdateDirectoryConfigError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateDirectoryConfigError {
pub fn new(kind: UpdateDirectoryConfigErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateDirectoryConfigErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateDirectoryConfigErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
UpdateDirectoryConfigErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(
&self.kind,
UpdateDirectoryConfigErrorKind::InvalidRoleException(_)
)
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
UpdateDirectoryConfigErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(
&self.kind,
UpdateDirectoryConfigErrorKind::ResourceInUseException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateDirectoryConfigErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for UpdateDirectoryConfigError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateDirectoryConfigErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
UpdateDirectoryConfigErrorKind::InvalidRoleException(_inner) => Some(_inner),
UpdateDirectoryConfigErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
UpdateDirectoryConfigErrorKind::ResourceInUseException(_inner) => Some(_inner),
UpdateDirectoryConfigErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateDirectoryConfigErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateFleetError {
pub kind: UpdateFleetErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateFleetErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
RequestLimitExceededException(crate::error::RequestLimitExceededException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateFleetError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateFleetErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::RequestLimitExceededException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateFleetErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateFleetError {
fn code(&self) -> Option<&str> {
UpdateFleetError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateFleetError {
pub fn new(kind: UpdateFleetErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateFleetErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateFleetErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(&self.kind, UpdateFleetErrorKind::InvalidRoleException(_))
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, UpdateFleetErrorKind::LimitExceededException(_))
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_request_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::RequestLimitExceededException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(&self.kind, UpdateFleetErrorKind::ResourceInUseException(_))
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateFleetErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for UpdateFleetError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateFleetErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
UpdateFleetErrorKind::IncompatibleImageException(_inner) => Some(_inner),
UpdateFleetErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
UpdateFleetErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
UpdateFleetErrorKind::InvalidRoleException(_inner) => Some(_inner),
UpdateFleetErrorKind::LimitExceededException(_inner) => Some(_inner),
UpdateFleetErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
UpdateFleetErrorKind::RequestLimitExceededException(_inner) => Some(_inner),
UpdateFleetErrorKind::ResourceInUseException(_inner) => Some(_inner),
UpdateFleetErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
UpdateFleetErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateFleetErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateImagePermissionsError {
pub kind: UpdateImagePermissionsErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateImagePermissionsErrorKind {
LimitExceededException(crate::error::LimitExceededException),
ResourceNotAvailableException(crate::error::ResourceNotAvailableException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateImagePermissionsError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateImagePermissionsErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
UpdateImagePermissionsErrorKind::ResourceNotAvailableException(_inner) => _inner.fmt(f),
UpdateImagePermissionsErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateImagePermissionsErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateImagePermissionsError {
fn code(&self) -> Option<&str> {
UpdateImagePermissionsError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateImagePermissionsError {
pub fn new(kind: UpdateImagePermissionsErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateImagePermissionsErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateImagePermissionsErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(
&self.kind,
UpdateImagePermissionsErrorKind::LimitExceededException(_)
)
}
pub fn is_resource_not_available_exception(&self) -> bool {
matches!(
&self.kind,
UpdateImagePermissionsErrorKind::ResourceNotAvailableException(_)
)
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateImagePermissionsErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for UpdateImagePermissionsError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateImagePermissionsErrorKind::LimitExceededException(_inner) => Some(_inner),
UpdateImagePermissionsErrorKind::ResourceNotAvailableException(_inner) => Some(_inner),
UpdateImagePermissionsErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateImagePermissionsErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub struct UpdateStackError {
pub kind: UpdateStackErrorKind,
pub(crate) meta: smithy_types::Error,
}
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum UpdateStackErrorKind {
ConcurrentModificationException(crate::error::ConcurrentModificationException),
IncompatibleImageException(crate::error::IncompatibleImageException),
InvalidAccountStatusException(crate::error::InvalidAccountStatusException),
InvalidParameterCombinationException(crate::error::InvalidParameterCombinationException),
InvalidRoleException(crate::error::InvalidRoleException),
LimitExceededException(crate::error::LimitExceededException),
OperationNotPermittedException(crate::error::OperationNotPermittedException),
ResourceInUseException(crate::error::ResourceInUseException),
ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unexpected error, e.g. invalid JSON returned by the service or an unknown error code
Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for UpdateStackError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match &self.kind {
UpdateStackErrorKind::ConcurrentModificationException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::IncompatibleImageException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::InvalidAccountStatusException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::InvalidParameterCombinationException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::InvalidRoleException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::LimitExceededException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::OperationNotPermittedException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::ResourceInUseException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::ResourceNotFoundException(_inner) => _inner.fmt(f),
UpdateStackErrorKind::Unhandled(_inner) => _inner.fmt(f),
}
}
}
impl smithy_types::retry::ProvideErrorKind for UpdateStackError {
fn code(&self) -> Option<&str> {
UpdateStackError::code(self)
}
fn retryable_error_kind(&self) -> Option<smithy_types::retry::ErrorKind> {
None
}
}
impl UpdateStackError {
pub fn new(kind: UpdateStackErrorKind, meta: smithy_types::Error) -> Self {
Self { kind, meta }
}
pub fn unhandled(err: impl Into<Box<dyn std::error::Error + Send + Sync + 'static>>) -> Self {
Self {
kind: UpdateStackErrorKind::Unhandled(err.into()),
meta: Default::default(),
}
}
pub fn generic(err: smithy_types::Error) -> Self {
Self {
meta: err.clone(),
kind: UpdateStackErrorKind::Unhandled(err.into()),
}
}
    // Consider whether this should return `Option<Cow<'_, str>>` instead; that would let us use the
    // `Display` impl required by `std::error::Error` to generate a message when none was modeled.
pub fn message(&self) -> Option<&str> {
self.meta.message()
}
pub fn meta(&self) -> &smithy_types::Error {
&self.meta
}
pub fn request_id(&self) -> Option<&str> {
self.meta.request_id()
}
pub fn code(&self) -> Option<&str> {
self.meta.code()
}
pub fn is_concurrent_modification_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::ConcurrentModificationException(_)
)
}
pub fn is_incompatible_image_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::IncompatibleImageException(_)
)
}
pub fn is_invalid_account_status_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::InvalidAccountStatusException(_)
)
}
pub fn is_invalid_parameter_combination_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::InvalidParameterCombinationException(_)
)
}
pub fn is_invalid_role_exception(&self) -> bool {
matches!(&self.kind, UpdateStackErrorKind::InvalidRoleException(_))
}
pub fn is_limit_exceeded_exception(&self) -> bool {
matches!(&self.kind, UpdateStackErrorKind::LimitExceededException(_))
}
pub fn is_operation_not_permitted_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::OperationNotPermittedException(_)
)
}
pub fn is_resource_in_use_exception(&self) -> bool {
matches!(&self.kind, UpdateStackErrorKind::ResourceInUseException(_))
}
pub fn is_resource_not_found_exception(&self) -> bool {
matches!(
&self.kind,
UpdateStackErrorKind::ResourceNotFoundException(_)
)
}
}
impl std::error::Error for UpdateStackError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match &self.kind {
UpdateStackErrorKind::ConcurrentModificationException(_inner) => Some(_inner),
UpdateStackErrorKind::IncompatibleImageException(_inner) => Some(_inner),
UpdateStackErrorKind::InvalidAccountStatusException(_inner) => Some(_inner),
UpdateStackErrorKind::InvalidParameterCombinationException(_inner) => Some(_inner),
UpdateStackErrorKind::InvalidRoleException(_inner) => Some(_inner),
UpdateStackErrorKind::LimitExceededException(_inner) => Some(_inner),
UpdateStackErrorKind::OperationNotPermittedException(_inner) => Some(_inner),
UpdateStackErrorKind::ResourceInUseException(_inner) => Some(_inner),
UpdateStackErrorKind::ResourceNotFoundException(_inner) => Some(_inner),
UpdateStackErrorKind::Unhandled(_inner) => Some(_inner.as_ref()),
}
}
}
/// <p>The specified resource was not found.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceNotFoundException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceNotFoundException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ResourceNotFoundException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ResourceNotFoundException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ResourceNotFoundException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ResourceNotFoundException")?;
if let Some(inner_1) = &self.message {
write!(f, ": {}", inner_1)?;
}
Ok(())
}
}
impl std::error::Error for ResourceNotFoundException {}
/// See [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub mod resource_not_found_exception {
/// A builder for [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub fn build(self) -> crate::error::ResourceNotFoundException {
crate::error::ResourceNotFoundException {
message: self.message,
}
}
}
}
impl ResourceNotFoundException {
/// Creates a new builder-style object to manufacture [`ResourceNotFoundException`](crate::error::ResourceNotFoundException)
pub fn builder() -> crate::error::resource_not_found_exception::Builder {
crate::error::resource_not_found_exception::Builder::default()
}
}
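// Hand-written sketch (not generated code): building a modeled `ResourceNotFoundException` with
// its builder and wrapping it in an operation error, e.g. to simulate a service response in
// tests. `fake_untag_resource_not_found` is an illustrative name only.
#[allow(dead_code)]
fn fake_untag_resource_not_found(resource_arn: &str) -> UntagResourceError {
    let exception = ResourceNotFoundException::builder()
        .message(format!("resource {} was not found", resource_arn))
        .build();
    UntagResourceError::new(
        UntagResourceErrorKind::ResourceNotFoundException(exception),
        // An empty `smithy_types::Error` stands in for the metadata normally parsed from the
        // HTTP response.
        Default::default(),
    )
}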
/// <p>The specified resource is in use.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceInUseException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceInUseException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ResourceInUseException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ResourceInUseException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ResourceInUseException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ResourceInUseException")?;
if let Some(inner_2) = &self.message {
write!(f, ": {}", inner_2)?;
}
Ok(())
}
}
impl std::error::Error for ResourceInUseException {}
/// See [`ResourceInUseException`](crate::error::ResourceInUseException)
pub mod resource_in_use_exception {
/// A builder for [`ResourceInUseException`](crate::error::ResourceInUseException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ResourceInUseException`](crate::error::ResourceInUseException)
pub fn build(self) -> crate::error::ResourceInUseException {
crate::error::ResourceInUseException {
message: self.message,
}
}
}
}
impl ResourceInUseException {
/// Creates a new builder-style object to manufacture [`ResourceInUseException`](crate::error::ResourceInUseException)
pub fn builder() -> crate::error::resource_in_use_exception::Builder {
crate::error::resource_in_use_exception::Builder::default()
}
}
/// <p>The attempted operation is not permitted.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct OperationNotPermittedException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for OperationNotPermittedException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("OperationNotPermittedException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl OperationNotPermittedException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for OperationNotPermittedException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "OperationNotPermittedException")?;
if let Some(inner_3) = &self.message {
write!(f, ": {}", inner_3)?;
}
Ok(())
}
}
impl std::error::Error for OperationNotPermittedException {}
/// See [`OperationNotPermittedException`](crate::error::OperationNotPermittedException)
pub mod operation_not_permitted_exception {
/// A builder for [`OperationNotPermittedException`](crate::error::OperationNotPermittedException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
        /// Consumes the builder and constructs an [`OperationNotPermittedException`](crate::error::OperationNotPermittedException)
pub fn build(self) -> crate::error::OperationNotPermittedException {
crate::error::OperationNotPermittedException {
message: self.message,
}
}
}
}
impl OperationNotPermittedException {
/// Creates a new builder-style object to manufacture [`OperationNotPermittedException`](crate::error::OperationNotPermittedException)
pub fn builder() -> crate::error::operation_not_permitted_exception::Builder {
crate::error::operation_not_permitted_exception::Builder::default()
}
}
/// <p>The requested limit exceeds the permitted limit for an account.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct LimitExceededException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for LimitExceededException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("LimitExceededException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl LimitExceededException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for LimitExceededException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "LimitExceededException")?;
if let Some(inner_4) = &self.message {
write!(f, ": {}", inner_4)?;
}
Ok(())
}
}
impl std::error::Error for LimitExceededException {}
/// See [`LimitExceededException`](crate::error::LimitExceededException)
pub mod limit_exceeded_exception {
/// A builder for [`LimitExceededException`](crate::error::LimitExceededException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`LimitExceededException`](crate::error::LimitExceededException)
pub fn build(self) -> crate::error::LimitExceededException {
crate::error::LimitExceededException {
message: self.message,
}
}
}
}
impl LimitExceededException {
/// Creates a new builder-style object to manufacture [`LimitExceededException`](crate::error::LimitExceededException)
pub fn builder() -> crate::error::limit_exceeded_exception::Builder {
crate::error::limit_exceeded_exception::Builder::default()
}
}
/// <p>The specified role is invalid.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidRoleException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidRoleException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidRoleException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidRoleException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidRoleException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "InvalidRoleException")?;
if let Some(inner_5) = &self.message {
write!(f, ": {}", inner_5)?;
}
Ok(())
}
}
impl std::error::Error for InvalidRoleException {}
/// See [`InvalidRoleException`](crate::error::InvalidRoleException)
pub mod invalid_role_exception {
/// A builder for [`InvalidRoleException`](crate::error::InvalidRoleException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
        /// Consumes the builder and constructs an [`InvalidRoleException`](crate::error::InvalidRoleException)
pub fn build(self) -> crate::error::InvalidRoleException {
crate::error::InvalidRoleException {
message: self.message,
}
}
}
}
impl InvalidRoleException {
/// Creates a new builder-style object to manufacture [`InvalidRoleException`](crate::error::InvalidRoleException)
pub fn builder() -> crate::error::invalid_role_exception::Builder {
crate::error::invalid_role_exception::Builder::default()
}
}
/// <p>Indicates an incorrect combination of parameters, or a missing parameter.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidParameterCombinationException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidParameterCombinationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidParameterCombinationException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidParameterCombinationException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidParameterCombinationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "InvalidParameterCombinationException")?;
if let Some(inner_6) = &self.message {
write!(f, ": {}", inner_6)?;
}
Ok(())
}
}
impl std::error::Error for InvalidParameterCombinationException {}
/// See [`InvalidParameterCombinationException`](crate::error::InvalidParameterCombinationException)
pub mod invalid_parameter_combination_exception {
/// A builder for [`InvalidParameterCombinationException`](crate::error::InvalidParameterCombinationException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
        /// Consumes the builder and constructs an [`InvalidParameterCombinationException`](crate::error::InvalidParameterCombinationException)
pub fn build(self) -> crate::error::InvalidParameterCombinationException {
crate::error::InvalidParameterCombinationException {
message: self.message,
}
}
}
}
impl InvalidParameterCombinationException {
/// Creates a new builder-style object to manufacture [`InvalidParameterCombinationException`](crate::error::InvalidParameterCombinationException)
pub fn builder() -> crate::error::invalid_parameter_combination_exception::Builder {
crate::error::invalid_parameter_combination_exception::Builder::default()
}
}
/// <p>The resource cannot be created because your AWS account is suspended. For assistance, contact AWS Support. </p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct InvalidAccountStatusException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for InvalidAccountStatusException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("InvalidAccountStatusException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl InvalidAccountStatusException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for InvalidAccountStatusException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "InvalidAccountStatusException")?;
if let Some(inner_7) = &self.message {
write!(f, ": {}", inner_7)?;
}
Ok(())
}
}
impl std::error::Error for InvalidAccountStatusException {}
/// See [`InvalidAccountStatusException`](crate::error::InvalidAccountStatusException)
pub mod invalid_account_status_exception {
/// A builder for [`InvalidAccountStatusException`](crate::error::InvalidAccountStatusException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`InvalidAccountStatusException`](crate::error::InvalidAccountStatusException)
pub fn build(self) -> crate::error::InvalidAccountStatusException {
crate::error::InvalidAccountStatusException {
message: self.message,
}
}
}
}
impl InvalidAccountStatusException {
/// Creates a new builder-style object to manufacture [`InvalidAccountStatusException`](crate::error::InvalidAccountStatusException)
pub fn builder() -> crate::error::invalid_account_status_exception::Builder {
crate::error::invalid_account_status_exception::Builder::default()
}
}
/// <p>The image can't be updated because it's not compatible for updates.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct IncompatibleImageException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for IncompatibleImageException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("IncompatibleImageException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl IncompatibleImageException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for IncompatibleImageException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "IncompatibleImageException")?;
if let Some(inner_8) = &self.message {
write!(f, ": {}", inner_8)?;
}
Ok(())
}
}
impl std::error::Error for IncompatibleImageException {}
/// See [`IncompatibleImageException`](crate::error::IncompatibleImageException)
pub mod incompatible_image_exception {
/// A builder for [`IncompatibleImageException`](crate::error::IncompatibleImageException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`IncompatibleImageException`](crate::error::IncompatibleImageException)
pub fn build(self) -> crate::error::IncompatibleImageException {
crate::error::IncompatibleImageException {
message: self.message,
}
}
}
}
impl IncompatibleImageException {
/// Creates a new builder-style object to manufacture [`IncompatibleImageException`](crate::error::IncompatibleImageException)
pub fn builder() -> crate::error::incompatible_image_exception::Builder {
crate::error::incompatible_image_exception::Builder::default()
}
}
/// <p>An API error occurred. Wait a few minutes and try again.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ConcurrentModificationException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ConcurrentModificationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ConcurrentModificationException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ConcurrentModificationException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ConcurrentModificationException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ConcurrentModificationException")?;
if let Some(inner_9) = &self.message {
write!(f, ": {}", inner_9)?;
}
Ok(())
}
}
impl std::error::Error for ConcurrentModificationException {}
/// See [`ConcurrentModificationException`](crate::error::ConcurrentModificationException)
pub mod concurrent_modification_exception {
/// A builder for [`ConcurrentModificationException`](crate::error::ConcurrentModificationException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ConcurrentModificationException`](crate::error::ConcurrentModificationException)
pub fn build(self) -> crate::error::ConcurrentModificationException {
crate::error::ConcurrentModificationException {
message: self.message,
}
}
}
}
impl ConcurrentModificationException {
/// Creates a new builder-style object to manufacture [`ConcurrentModificationException`](crate::error::ConcurrentModificationException)
pub fn builder() -> crate::error::concurrent_modification_exception::Builder {
crate::error::concurrent_modification_exception::Builder::default()
}
}
/// <p>The specified resource exists and is not in use, but isn't available.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceNotAvailableException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceNotAvailableException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ResourceNotAvailableException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ResourceNotAvailableException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ResourceNotAvailableException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ResourceNotAvailableException")?;
if let Some(inner_10) = &self.message {
write!(f, ": {}", inner_10)?;
}
Ok(())
}
}
impl std::error::Error for ResourceNotAvailableException {}
/// See [`ResourceNotAvailableException`](crate::error::ResourceNotAvailableException)
pub mod resource_not_available_exception {
/// A builder for [`ResourceNotAvailableException`](crate::error::ResourceNotAvailableException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ResourceNotAvailableException`](crate::error::ResourceNotAvailableException)
pub fn build(self) -> crate::error::ResourceNotAvailableException {
crate::error::ResourceNotAvailableException {
message: self.message,
}
}
}
}
impl ResourceNotAvailableException {
/// Creates a new builder-style object to manufacture [`ResourceNotAvailableException`](crate::error::ResourceNotAvailableException)
pub fn builder() -> crate::error::resource_not_available_exception::Builder {
crate::error::resource_not_available_exception::Builder::default()
}
}
/// <p>AppStream 2.0 can’t process the request right now because the Describe calls from your AWS account are being throttled by Amazon EC2. Try again later.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct RequestLimitExceededException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for RequestLimitExceededException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("RequestLimitExceededException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl RequestLimitExceededException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for RequestLimitExceededException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "RequestLimitExceededException")?;
if let Some(inner_11) = &self.message {
write!(f, ": {}", inner_11)?;
}
Ok(())
}
}
impl std::error::Error for RequestLimitExceededException {}
/// See [`RequestLimitExceededException`](crate::error::RequestLimitExceededException)
pub mod request_limit_exceeded_exception {
/// A builder for [`RequestLimitExceededException`](crate::error::RequestLimitExceededException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`RequestLimitExceededException`](crate::error::RequestLimitExceededException)
pub fn build(self) -> crate::error::RequestLimitExceededException {
crate::error::RequestLimitExceededException {
message: self.message,
}
}
}
}
impl RequestLimitExceededException {
/// Creates a new builder-style object to manufacture [`RequestLimitExceededException`](crate::error::RequestLimitExceededException)
pub fn builder() -> crate::error::request_limit_exceeded_exception::Builder {
crate::error::request_limit_exceeded_exception::Builder::default()
}
}
/// <p>The specified resource already exists.</p>
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ResourceAlreadyExistsException {
/// <p>The error message in the exception.</p>
pub message: std::option::Option<std::string::String>,
}
impl std::fmt::Debug for ResourceAlreadyExistsException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("ResourceAlreadyExistsException");
formatter.field("message", &self.message);
formatter.finish()
}
}
impl ResourceAlreadyExistsException {
pub fn message(&self) -> Option<&str> {
self.message.as_deref()
}
}
impl std::fmt::Display for ResourceAlreadyExistsException {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "ResourceAlreadyExistsException")?;
if let Some(inner_12) = &self.message {
write!(f, ": {}", inner_12)?;
}
Ok(())
}
}
impl std::error::Error for ResourceAlreadyExistsException {}
/// See [`ResourceAlreadyExistsException`](crate::error::ResourceAlreadyExistsException)
pub mod resource_already_exists_exception {
/// A builder for [`ResourceAlreadyExistsException`](crate::error::ResourceAlreadyExistsException)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) message: std::option::Option<std::string::String>,
}
impl Builder {
/// <p>The error message in the exception.</p>
pub fn message(mut self, input: impl Into<std::string::String>) -> Self {
self.message = Some(input.into());
self
}
pub fn set_message(mut self, input: std::option::Option<std::string::String>) -> Self {
self.message = input;
self
}
/// Consumes the builder and constructs a [`ResourceAlreadyExistsException`](crate::error::ResourceAlreadyExistsException)
pub fn build(self) -> crate::error::ResourceAlreadyExistsException {
crate::error::ResourceAlreadyExistsException {
message: self.message,
}
}
}
}
impl ResourceAlreadyExistsException {
/// Creates a new builder-style object to manufacture [`ResourceAlreadyExistsException`](crate::error::ResourceAlreadyExistsException)
pub fn builder() -> crate::error::resource_already_exists_exception::Builder {
crate::error::resource_already_exists_exception::Builder::default()
}
}
| 37.358916 | 161 | 0.650069 |
67cf8fabf4825f563c611c707a72b101aabb7f0d | 5,279 | // Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use std::convert::AsRef;
use std::env;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::time::SystemTime;
use log::debug;
use strum::VariantNames;
use strum_macros::{AsRefStr, EnumString, EnumVariantNames};
use client::pantsd;
use options::{option_id, render_choice, OptionParser};
// TODO(John Sirois): Maybe consolidate with PythonLogLevel in src/rust/engine/logging/src/lib.rs.
#[derive(AsRefStr, EnumString, EnumVariantNames)]
#[strum(serialize_all = "snake_case")]
enum PythonLogLevel {
Trace,
Debug,
Info,
Warn,
Error,
}
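// With `serialize_all = "snake_case"` the strum derives map each variant to its
// lower-case name, e.g. (illustrative):
//
//   assert_eq!(PythonLogLevel::Info.as_ref(), "info");
//   assert!(PythonLogLevel::from_str("warn").is_ok());
//
// `PythonLogLevel::VARIANTS` lists all five names, which is what `render_choice`
// uses for the error message below.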
async fn execute(start: SystemTime) -> Result<i32, String> {
let options_parser = OptionParser::new()?;
let use_pantsd = options_parser.parse_bool(&option_id!("pantsd"), true)?;
if !use_pantsd.value {
return Err(format!(
"Pantsd has been turned off via {option_source:?}.",
option_source = use_pantsd.source
));
}
let concurrent = options_parser.parse_bool(&option_id!("concurrent"), false)?;
if concurrent.value {
return Err("Pantsd is being turned off since --concurrent is true.".to_owned());
}
let level_option = option_id!(-'l', "level");
let log_level_option_value =
options_parser.parse_string(&level_option, PythonLogLevel::Info.as_ref())?;
let level = PythonLogLevel::from_str(&log_level_option_value.value).map_err(|_| {
format!(
"Not a valid log level {level} from {option_source:?}. Should be one of {levels}.",
level = log_level_option_value.value,
option_source = log_level_option_value.source,
levels = render_choice(PythonLogLevel::VARIANTS)
.expect("We know there is at least one PythonLogLevel enum variant."),
)
})?;
env_logger::init_from_env(env_logger::Env::new().filter_or("__PANTS_LEVEL__", level.as_ref()));
let working_dir = env::current_dir()
.map_err(|e| format!("Could not detect current working directory: {err}", err = e))?;
let pantsd_settings = find_pantsd(&working_dir, &options_parser)?;
let env = env::vars().collect::<Vec<(_, _)>>();
let argv = env::args().collect::<Vec<_>>();
client::execute_command(start, pantsd_settings, env, argv).await
}
fn find_pantsd(
working_dir: &Path,
options_parser: &OptionParser,
) -> Result<client::ConnectionSettings, String> {
let pants_subprocessdir = option_id!("pants", "subprocessdir");
let option_value = options_parser.parse_string(&pants_subprocessdir, ".pids")?;
let metadata_dir = {
let path = PathBuf::from(&option_value.value);
if path.is_absolute() {
path
} else {
match working_dir.join(&path) {
p if p.is_absolute() => p,
p => p.canonicalize().map_err(|e| {
format!(
"Failed to resolve relative pants subprocessdir specified via {:?} as {}: {}",
option_value,
path.display(),
e
)
})?,
}
}
};
debug!(
"\
Looking for pantsd metadata in {metadata_dir} as specified by {option} = {value} via \
{source:?}.\
",
metadata_dir = metadata_dir.display(),
option = pants_subprocessdir,
value = option_value.value,
source = option_value.source
);
let port = pantsd::probe(working_dir, &metadata_dir)?;
let mut pantsd_settings = client::ConnectionSettings::new(port);
pantsd_settings.timeout_limit = options_parser
.parse_float(
&option_id!("pantsd", "timeout", "when", "multiple", "invocations"),
pantsd_settings.timeout_limit,
)?
.value;
pantsd_settings.dynamic_ui = options_parser
.parse_bool(&option_id!("dynamic", "ui"), pantsd_settings.dynamic_ui)?
.value;
Ok(pantsd_settings)
}
// The value is taken from this C precedent:
// ```
// $ grep 75 /usr/include/sysexits.h
// #define EX_TEMPFAIL 75 /* temp failure; user is invited to retry */
// ```
const EX_TEMPFAIL: i32 = 75;
#[tokio::main]
async fn main() {
let start = SystemTime::now();
match execute(start).await {
Err(err) => {
eprintln!("{}", err);
// We use this exit code to indicate an error running pants via the nailgun protocol to
// differentiate from a successful nailgun protocol session.
std::process::exit(EX_TEMPFAIL);
}
Ok(exit_code) => std::process::exit(exit_code),
}
}
| 33.411392 | 214 | 0.686494 |
62ab2d753916664a7a9fb26c1060d8a80044097c | 4,225 | use iced::{button, slider,
canvas::{Cache, Cursor, Fill, Geometry, Program},
Color, Element, Point, Rectangle, Size};
use crate::application::{Chip8EmulatorSettings, Message};
mod main_menu;
mod emulation_screen;
pub struct Gui {
pub current_page: PageModel,
pub screen: Screen,
}
pub struct Screen {
pub pixels: [bool; 2048],
pub bg_red: u32,
pub bg_green: u32,
pub bg_blue: u32,
pub fg_red: u32,
pub fg_green: u32,
pub fg_blue: u32,
pub screen: Cache,
}
#[derive(Debug, Clone)]
pub enum PageModel {
MainMenu {
clock_speed_state: slider::State,
clock_speed_value: u32,
bg_red_state: slider::State,
bg_red_value: u32,
bg_green_state: slider::State,
bg_green_value: u32,
bg_blue_state: slider::State,
bg_blue_value: u32,
fg_red_state: slider::State,
fg_red_value: u32,
fg_green_state: slider::State,
fg_green_value: u32,
fg_blue_state: slider::State,
fg_blue_value: u32,
choose_rom_button: button::State,
launch_button: button::State,
},
EmulationScreen,
}
impl Gui {
pub fn new() -> Self {
Self {
current_page: PageModel::MainMenu {
clock_speed_state: slider::State::new(),
clock_speed_value: 800,
bg_red_state: slider::State::new(),
bg_red_value: 0,
bg_green_state: slider::State::new(),
bg_green_value: 0,
bg_blue_state: slider::State::new(),
bg_blue_value: 0,
fg_red_state: slider::State::new(),
fg_red_value: 0,
fg_green_state: slider::State::new(),
fg_green_value: 0,
fg_blue_state: slider::State::new(),
fg_blue_value: 0,
choose_rom_button: button::State::new(),
launch_button: button::State::new(),
},
screen: Screen {
pixels: [false; 2048],
bg_red: 0,
bg_green: 0,
bg_blue: 0,
fg_red: 0,
fg_green: 0,
fg_blue: 0,
screen: Cache::new(),
}
}
}
pub fn make(&mut self, settings: &Chip8EmulatorSettings) -> Element<Message> {
match &mut self.current_page {
PageModel::MainMenu { clock_speed_state, clock_speed_value: _, bg_red_state, bg_red_value: _, bg_green_state, bg_green_value: _, bg_blue_state, bg_blue_value: _, fg_red_state, fg_red_value: _, fg_green_state, fg_green_value: _, fg_blue_state, fg_blue_value: _, choose_rom_button, launch_button } => {
main_menu::draw(settings.rom_name.to_string() , clock_speed_state, settings.clock_speed as i32, bg_red_state, self.screen.bg_red as i32, bg_green_state, self.screen.bg_green as i32, bg_blue_state, self.screen.bg_blue as i32, fg_red_state, self.screen.fg_red as i32, fg_green_state, self.screen.fg_green as i32, fg_blue_state, self.screen.fg_blue as i32, choose_rom_button, launch_button)
},
PageModel::EmulationScreen => {
emulation_screen::draw(&mut self.screen, settings.rom_name.to_string())
}
}
}
}
impl Program<Message> for Screen {
fn draw(&self, bounds: Rectangle, _cursor: Cursor) -> Vec<Geometry> {
let bg_color = Color::from_rgba(self.bg_red as f32 / 256.0 , self.bg_green as f32 / 256.0, self.bg_blue as f32 / 256.0, 1.0);
let fg_color = Color::from_rgba(self.fg_red as f32 / 256.0 , self.fg_green as f32 / 256.0, self.fg_blue as f32 / 256.0, 1.0);
let geo = self.screen.draw(bounds.size(), |frame| {
for i in 0..2048 {
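                // The CHIP-8 framebuffer is 64x32 = 2048 pixels; index i maps to
                // column i % 64 and row i / 64, and each pixel is drawn as a
                // 10x10 rectangle in the foreground or background color.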
let mut color = bg_color;
if self.pixels[(i%64) + (64 * (i/64))] {
color = fg_color;
}
frame.fill_rectangle(
Point::new((i % 64) as f32 * 10.0, (i / 64) as f32 * 10.0),
Size::new(10.0, 10.0),
Fill::from(color)
);
}
});
vec![geo]
}
} | 37.061404 | 403 | 0.564734 |
0307de8fbc507d9122eaeea42dd8832dbf435b8e | 845 | use std::f64::consts::PI;
use std::fs::File;
use std::io::{BufRead, BufReader, Lines};
use std::path::Path;
// Returns an iterator over the lines of the file, read through a buffered reader.
pub fn read_lines<P>(filename: P) -> Lines<BufReader<File>>
where
P: AsRef<Path>,
{
let file = File::open(filename).unwrap();
BufReader::new(file).lines()
}
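// A minimal usage sketch (the file name is illustrative):
//
//     for line in read_lines("commands.txt") {
//         println!("{}", line.unwrap());
//     }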
pub trait Angle {
fn to_0_2pi(self) -> Self;
fn to_npi_pi(self) -> Self;
}
impl Angle for f64 {
fn to_0_2pi(self) -> Self {
assert!(-PI < self && self <= PI);
if self < 0.0 {
self + 2.0 * PI
} else {
self
}
}
fn to_npi_pi(self) -> Self {
assert!((0.0..2.0 * PI).contains(&self));
if self > PI {
self - 2.0 * PI
} else {
self
}
}
}
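// A small test sketch of how the two range conventions relate (the test name is
// illustrative; values are chosen to satisfy the asserts above).
#[cfg(test)]
mod angle_tests {
    use super::Angle;
    use std::f64::consts::PI;

    #[test]
    fn converts_between_angle_ranges() {
        // -PI/2 in (-PI, PI] maps to 3*PI/2 in [0, 2*PI).
        assert!(((-PI / 2.0).to_0_2pi() - 3.0 * PI / 2.0).abs() < 1e-12);
        // 3*PI/2 in [0, 2*PI) maps back to -PI/2 in (-PI, PI].
        assert!(((3.0 * PI / 2.0).to_npi_pi() + PI / 2.0).abs() < 1e-12);
    }
}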
mod point;
pub use point::Point;
| 20.119048 | 62 | 0.52426 |
d9051733da24dace89662b70f3fd479bdfbfc1a8 | 1,614 | // Do not remove inline: will result in relocation failure
#[inline(always)]
pub(crate) unsafe fn rel_ptr<T>(offset: u64) -> *const T {
(image_base() + offset) as *const T
}
// Do not remove inline: will result in relocation failure
#[inline(always)]
pub(crate) unsafe fn rel_ptr_mut<T>(offset: u64) -> *mut T {
(image_base() + offset) as *mut T
}
extern {
static ENCLAVE_SIZE: usize;
}
// Do not remove inline: will result in relocation failure
// For the same reason we use inline ASM here instead of an extern static to
// locate the base
/// Returns address at which current enclave is loaded.
#[inline(always)]
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn image_base() -> u64 {
let base;
unsafe { asm!("lea IMAGE_BASE(%rip),$0":"=r"(base)) };
base
}
/// Returns `true` if the specified memory range is in the enclave.
///
/// `p + len` must not overflow.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn is_enclave_range(p: *const u8, len: usize) -> bool {
let start = p as u64;
let end = start + (len as u64);
start >= image_base() &&
end <= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant
}
/// Returns `true` if the specified memory range is in userspace.
///
/// `p + len` must not overflow.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn is_user_range(p: *const u8, len: usize) -> bool {
let start = p as u64;
let end = start + (len as u64);
end <= image_base() ||
start >= image_base() + (unsafe { ENCLAVE_SIZE } as u64) // unsafe ok: link-time constant
}
| 32.28 | 97 | 0.648699 |
edc8af93d792f3d93d6ee03d2c9f1e17e17fa094 | 4,490 | use math::{Point2, Point3, Point4};
use noise_fns::NoiseFn;
/// Noise function that uses multiple source functions to displace each coordinate
/// of the input value before returning the output value from the `source` function.
pub struct Displace<'a, Source: 'a, XDisplace: 'a, YDisplace: 'a, ZDisplace: 'a, UDisplace: 'a> {
/// Source function that outputs a value
pub source: &'a Source,
/// Displacement function that displaces the _x_ coordinate of the input
/// value.
pub x_displace: &'a XDisplace,
/// Displacement function that displaces the _y_ coordinate of the input
/// value.
pub y_displace: &'a YDisplace,
/// Displacement function that displaces the _z_ coordinate of the input
/// value. Only needed for 3d or higher noise.
pub z_displace: &'a ZDisplace,
/// Displacement function that displaces the _u_ coordinate of the input
/// value. Only needed for 4d or higher noise.
pub u_displace: &'a UDisplace,
}
impl<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace>
Displace<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace>
{
pub fn new(
source: &'a Source,
x_displace: &'a XDisplace,
y_displace: &'a YDisplace,
z_displace: &'a ZDisplace,
u_displace: &'a UDisplace,
) -> Self {
Displace {
source,
x_displace,
y_displace,
z_displace,
u_displace,
}
}
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace> NoiseFn<Point2<f64>>
for Displace<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace>
where Source: NoiseFn<Point2<f64>>,
XDisplace: NoiseFn<Point2<f64>>,
YDisplace: NoiseFn<Point2<f64>>,
{
fn get(&self, point: Point2<f64>) -> f64 {
// Get the output values from the displacement functions and add them to
// the corresponding coordinate in the input value. Since this is a 2d
// function, we only need the x_displace and y_displace functions.
let x = point[0] + self.x_displace.get(point);
let y = point[1] + self.y_displace.get(point);
// get the output value using the offset input value instead of the
// original input value.
self.source.get([x, y])
}
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace> NoiseFn<Point3<f64>>
for Displace<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace>
where Source: NoiseFn<Point3<f64>>,
XDisplace: NoiseFn<Point3<f64>>,
YDisplace: NoiseFn<Point3<f64>>,
ZDisplace: NoiseFn<Point3<f64>>,
{
fn get(&self, point: Point3<f64>) -> f64 {
// Get the output values from the displacement functions and add them to
// the corresponding coordinate in the input value. Since this is a 3d
// function, we only need the x_displace, y_displace, and z_displace
        // functions; the z_displace function is guaranteed to exist by the trait bounds.
let x = point[0] + self.x_displace.get(point);
let y = point[1] + self.y_displace.get(point);
let z = point[2] + self.z_displace.get(point);
// get the output value using the offset input value instead of the
// original input value.
self.source.get([x, y, z])
}
}
#[cfg_attr(rustfmt, rustfmt_skip)]
impl<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace> NoiseFn<Point4<f64>>
for Displace<'a, Source, XDisplace, YDisplace, ZDisplace, UDisplace>
where Source: NoiseFn<Point4<f64>>,
XDisplace: NoiseFn<Point4<f64>>,
YDisplace: NoiseFn<Point4<f64>>,
ZDisplace: NoiseFn<Point4<f64>>,
UDisplace: NoiseFn<Point4<f64>>,
{
fn get(&self, point: Point4<f64>) -> f64 {
// Get the output values from the displacement functions and add them to
// the corresponding coordinate in the input value. Since this is a 4d
        // function, we need all of the displace functions; the z- and u-displace
        // functions are guaranteed to exist by the trait bounds.
let x = point[0] + self.x_displace.get(point);
let y = point[1] + self.y_displace.get(point);
let z = point[2] + self.z_displace.get(point);
let u = point[3] + self.u_displace.get(point);
// get the output value using the offset input value instead of the
// original input value.
self.source.get([x, y, z, u])
}
}
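// A minimal test sketch (the source and offset types are illustrative): a "ramp"
// source that returns its x coordinate, displaced along x by a constant amount.
#[cfg(test)]
mod tests {
    use super::*;

    struct Ramp;
    impl NoiseFn<Point2<f64>> for Ramp {
        fn get(&self, point: Point2<f64>) -> f64 {
            point[0]
        }
    }

    struct Offset(f64);
    impl NoiseFn<Point2<f64>> for Offset {
        fn get(&self, _point: Point2<f64>) -> f64 {
            self.0
        }
    }

    #[test]
    fn displaces_x_before_sampling_the_source() {
        let source = Ramp;
        let (dx, dy) = (Offset(3.0), Offset(0.0));
        // z and u displacements are unused for 2d input but still required by the type.
        let (dz, du) = (Offset(0.0), Offset(0.0));
        let displace = Displace::new(&source, &dx, &dy, &dz, &du);
        // The source sees [2 + 3, 5 + 0] and returns the displaced x coordinate.
        assert_eq!(displace.get([2.0, 5.0]), 5.0);
    }
}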
| 39.385965 | 97 | 0.645212 |
18bd05abc41e79efe794ea824fabbf5770022e23 | 9,403 | mod types;
use crate::types::{Area, Direction, PenStatus, Position};
use derive_more::From;
use eventmill::dispatch::{Core, CoreError};
use eventmill::event::DomainEventView;
use eventmill::inmemory_store::InMemoryStore;
use eventmill::{
Aggregate, AggregateType, DispatchCommand, DomainCommand, EventType, Generation, HandleCommand,
NewEvent,
};
const EVENT_NAMESPACE: &str = "https://github.com/innoave/eventmill/examples/turtle";
//
// Domain events
//
#[derive(Debug)]
struct TurtleMoved {
amount: u16,
}
#[derive(Debug)]
enum TurtleTurned {
Left,
Right,
}
#[derive(Debug)]
enum PenSwitched {
Up,
Down,
}
#[derive(EventType, From, Debug)]
#[event_source(EVENT_NAMESPACE)]
#[event_type_version("V2")]
enum TurtleEvent {
Moved(TurtleMoved),
Turned(TurtleTurned),
PenSwitched(PenSwitched),
}
//
// Aggregate
//
#[derive(AggregateType, Debug)]
#[id_field(id)]
#[initialize_with_defaults]
struct Turtle {
id: u32,
position: Position,
direction: Direction,
pen_status: PenStatus,
}
//
// Commands
//
#[derive(Debug)]
struct Move {
amount: u16,
}
#[derive(Debug)]
enum Turn {
Left,
Right,
}
#[derive(Debug)]
enum SwitchPen {
Up,
Down,
}
//
// implementations
//
impl Aggregate<TurtleMoved> for Turtle {
fn apply_event(&mut self, event: DomainEventView<'_, TurtleMoved, Self>) {
match self.direction {
Direction::North => self.position.y += i32::from(event.data.amount),
Direction::East => self.position.x += i32::from(event.data.amount),
Direction::South => self.position.y -= i32::from(event.data.amount),
            Direction::West => self.position.x -= i32::from(event.data.amount),
}
}
}
impl Aggregate<TurtleTurned> for Turtle {
fn apply_event(&mut self, event: DomainEventView<'_, TurtleTurned, Self>) {
match self.direction {
Direction::North => match event.data {
TurtleTurned::Left => self.direction = Direction::West,
TurtleTurned::Right => self.direction = Direction::East,
},
Direction::East => match event.data {
TurtleTurned::Left => self.direction = Direction::North,
TurtleTurned::Right => self.direction = Direction::South,
},
Direction::South => match event.data {
TurtleTurned::Left => self.direction = Direction::East,
TurtleTurned::Right => self.direction = Direction::West,
},
Direction::West => match event.data {
TurtleTurned::Left => self.direction = Direction::South,
TurtleTurned::Right => self.direction = Direction::North,
},
}
}
}
impl Aggregate<PenSwitched> for Turtle {
fn apply_event(&mut self, event: DomainEventView<'_, PenSwitched, Self>) {
match event.data {
PenSwitched::Up => self.pen_status = PenStatus::Up,
PenSwitched::Down => self.pen_status = PenStatus::Down,
}
}
}
impl Aggregate<TurtleEvent> for Turtle {
fn apply_event(&mut self, event: DomainEventView<'_, TurtleEvent, Self>) {
match &event.data {
TurtleEvent::Moved(moved) => self.apply_event(event.transmute(moved)),
TurtleEvent::Turned(turned) => self.apply_event(event.transmute(turned)),
TurtleEvent::PenSwitched(pen_switched) => {
self.apply_event(event.transmute(pen_switched))
}
}
}
}
#[derive(thiserror::Error, Debug, PartialEq)]
enum TurtleError {
#[error("{0} border of area reached")]
BorderOfAreaReached(Direction),
}
impl HandleCommand<Move, Turtle> for Turtle {
type Event = TurtleEvent;
type Error = TurtleError;
type Context = Area;
fn handle_command(
&self,
Move { amount }: Move,
area: &Self::Context,
) -> Result<Vec<NewEvent<Self::Event, Turtle>>, Self::Error> {
// validate the command
let new_position = match self.direction {
Direction::North => Position {
x: self.position.x,
y: self.position.y + i32::from(amount),
},
Direction::East => Position {
x: self.position.x + i32::from(amount),
y: self.position.y,
},
Direction::South => Position {
x: self.position.x,
y: self.position.y - i32::from(amount),
},
Direction::West => Position {
x: self.position.x - i32::from(amount),
y: self.position.y,
},
};
if area.contains(new_position) {
Ok(vec![NewEvent {
aggregate_id: self.id,
data: TurtleMoved { amount }.into(),
}])
} else {
Err(TurtleError::BorderOfAreaReached(self.direction))
}
}
}
impl HandleCommand<Turn, Turtle> for Turtle {
type Event = TurtleEvent;
type Error = TurtleError;
type Context = ();
fn handle_command(
&self,
command: Turn,
_context: &Self::Context,
) -> Result<Vec<NewEvent<Self::Event, Turtle>>, Self::Error> {
let turned = match command {
Turn::Left => TurtleTurned::Left,
Turn::Right => TurtleTurned::Right,
};
Ok(vec![NewEvent {
aggregate_id: self.id,
data: turned.into(),
}])
}
}
impl HandleCommand<SwitchPen, Turtle> for Turtle {
type Event = TurtleEvent;
type Error = TurtleError;
type Context = ();
fn handle_command(
&self,
command: SwitchPen,
_context: &Self::Context,
) -> Result<Vec<NewEvent<Self::Event, Turtle>>, Self::Error> {
let event = match command {
SwitchPen::Up => PenSwitched::Up,
SwitchPen::Down => PenSwitched::Down,
};
Ok(vec![NewEvent {
aggregate_id: self.id,
data: event.into(),
}])
}
}
fn main() {
let turtle_id = 1;
let event_store = InMemoryStore::default();
let core = Core::new(event_store);
let area = Area {
top_left: Position { x: -149, y: 105 },
bottom_right: Position { x: 148, y: -105 },
};
let move_turtle: DomainCommand<Move, Turtle> = DomainCommand {
aggregate_id: turtle_id,
aggregate_generation: Generation::default(),
data: Move { amount: 21 },
};
let turtle = core
.dispatch_command(move_turtle, &area)
.expect("turtle moved");
assert_eq!(turtle.state().position, Position { x: 0, y: 21 });
assert_eq!(turtle.state().direction, Direction::North);
assert_eq!(turtle.state().pen_status, PenStatus::Up);
// once we have an aggregate we can construct new domain commands by
// converting the tuple of a command and the current aggregate.
let turn_right = (Turn::Right, &turtle).into();
let turtle = core
.dispatch_command(turn_right, &())
.expect("turtle turned");
assert_eq!(turtle.state().position, Position { x: 0, y: 21 });
assert_eq!(turtle.state().direction, Direction::East);
assert_eq!(turtle.state().pen_status, PenStatus::Up);
let switch_pen_down = (SwitchPen::Down, &turtle).into();
let turtle = core
.dispatch_command(switch_pen_down, &())
.expect("pen is switched down");
assert_eq!(turtle.state().position, Position { x: 0, y: 21 });
assert_eq!(turtle.state().direction, Direction::East);
assert_eq!(turtle.state().pen_status, PenStatus::Down);
let move_10 = (Move { amount: 10 }, &turtle).into();
let turtle = core
.dispatch_command(move_10, &area)
.expect("turtle turned");
assert_eq!(turtle.state().position, Position { x: 10, y: 21 });
assert_eq!(turtle.state().direction, Direction::East);
assert_eq!(turtle.state().pen_status, PenStatus::Down);
let turn_right = (Turn::Right, &turtle).into();
let turtle = core
.dispatch_command(turn_right, &())
.expect("turtle turned right again");
assert_eq!(turtle.state().position, Position { x: 10, y: 21 });
assert_eq!(turtle.state().direction, Direction::South);
assert_eq!(turtle.state().pen_status, PenStatus::Down);
// now we try to move out of the area
let move_200 = (Move { amount: 200 }, &turtle).into();
let error = core
.dispatch_command(move_200, &area)
.expect_err(&format!("turtle can not move out of the area {:?}", area));
assert_eq!(
error,
CoreError::HandleCommandFailed(TurtleError::BorderOfAreaReached(Direction::South))
);
let switch_pen_up = (SwitchPen::Up, &turtle).into();
let turtle = core
.dispatch_command(switch_pen_up, &())
.expect("pen is switched up");
assert_eq!(turtle.state().position, Position { x: 10, y: 21 });
assert_eq!(turtle.state().direction, Direction::South);
assert_eq!(turtle.state().pen_status, PenStatus::Up);
let turn_left = (Turn::Left, &turtle).into();
let turtle = core
.dispatch_command(turn_left, &())
.expect("turtle turned");
assert_eq!(turtle.state().position, Position { x: 10, y: 21 });
assert_eq!(turtle.state().direction, Direction::East);
assert_eq!(turtle.state().pen_status, PenStatus::Up);
}
| 28.667683 | 99 | 0.597469 |
23d56ce7d3b56e73c413f7e8557991c52affac55 | 37,501 | use fnv::FnvHashMap;
use std::f64::consts;
use std::ops::Deref;
use std::rc::Rc;
use std::str::FromStr;
type ContextHashMap<K, V> = FnvHashMap<K, V>;
use extra_math::factorial;
use shunting_yard::to_rpn;
use std;
use std::fmt;
use tokenizer::{tokenize, Token};
use Error;
/// Representation of a parsed expression.
///
/// The expression is internally stored in the [reverse Polish notation (RPN)][RPN] as a sequence
/// of `Token`s.
///
/// Methods `bind`, `bind_with_context`, `bind2`, ... can be used to create closures from
/// the expression that then can be passed around and used as any other `Fn` closures.
///
/// ```rust
/// let func = "x^2".parse::<meval::Expr>().unwrap().bind("x").unwrap();
/// let r = Some(2.).map(func);
/// assert_eq!(r, Some(4.));
/// ```
///
/// [RPN]: https://en.wikipedia.org/wiki/Reverse_Polish_notation
#[derive(Debug, Clone, PartialEq)]
pub struct Expr {
rpn: Vec<Token>,
}
impl Expr {
/// Evaluates the expression.
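    ///
    /// A small example using the built-in constants and functions:
    ///
    /// ```rust
    /// let expr: meval::Expr = "2 + 3".parse().unwrap();
    /// assert_eq!(expr.eval(), Ok(5.));
    /// ```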
pub fn eval(&self) -> Result<f64, Error> {
self.eval_with_context(builtin())
}
/// Evaluates the expression with variables given by the argument.
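    ///
    /// A small example, supplying `x` as a `(name, value)` tuple:
    ///
    /// ```rust
    /// let expr: meval::Expr = "x + 1".parse().unwrap();
    /// assert_eq!(expr.eval_with_context(("x", 2.)), Ok(3.));
    /// ```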
pub fn eval_with_context<C: ContextProvider>(&self, ctx: C) -> Result<f64, Error> {
use tokenizer::Operation::*;
use tokenizer::Token::*;
let mut stack = Vec::with_capacity(16);
for token in &self.rpn {
match *token {
Var(ref n) => {
if let Some(v) = ctx.get_var(n) {
stack.push(v);
} else {
return Err(Error::UnknownVariable(n.clone()));
}
}
Number(f) => stack.push(f),
Binary(op) => {
let right = stack.pop().unwrap();
let left = stack.pop().unwrap();
let r = match op {
Plus => left + right,
Minus => left - right,
Times => left * right,
Div => left / right,
Rem => left % right,
Pow => left.powf(right),
_ => {
return Err(Error::EvalError(format!(
"Unimplemented binary operation: {:?}",
op
)));
}
};
stack.push(r);
}
Unary(op) => {
let x = stack.pop().unwrap();
let r = match op {
Plus => x,
Minus => -x,
Fact => {
// Check to make sure x has no fractional component (can be converted to int without loss)
match factorial(x) {
Ok(res) => res,
Err(e) => return Err(Error::EvalError(String::from(e))),
}
}
_ => {
return Err(Error::EvalError(format!(
"Unimplemented unary operation: {:?}",
op
)));
}
};
stack.push(r);
}
Func(ref n, Some(i)) => {
if stack.len() < i {
return Err(Error::EvalError(format!(
"eval: stack does not have enough arguments for function token \
{:?}",
token
)));
}
match ctx.eval_func(n, &stack[stack.len() - i..]) {
Ok(r) => {
let nl = stack.len() - i;
stack.truncate(nl);
stack.push(r);
}
Err(e) => return Err(Error::Function(n.to_owned(), e)),
}
}
_ => return Err(Error::EvalError(format!("Unrecognized token: {:?}", token))),
}
}
let r = stack.pop().expect("Stack is empty, this is impossible.");
if !stack.is_empty() {
return Err(Error::EvalError(format!(
"There are still {} items on the stack.",
stack.len()
)));
}
Ok(r)
}
/// Creates a function of one variable based on this expression, with default constants and
/// functions.
///
/// Binds the input of the returned closure to `var`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
pub fn bind<'a>(self, var: &str) -> Result<impl Fn(f64) -> f64 + 'a, Error> {
self.bind_with_context(builtin(), var)
}
/// Creates a function of one variable based on this expression.
///
/// Binds the input of the returned closure to `var`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bind_with_context<'a, C>(
self,
ctx: C,
var: &str,
) -> Result<impl Fn(f64) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
try!(self.check_context(((var, 0.), &ctx)));
let var = var.to_owned();
Ok(move |x| {
self.eval_with_context(((&var, x), &ctx))
.expect("Expr::bind")
})
}
/// Creates a function of two variables based on this expression, with default constants and
/// functions.
///
/// Binds the inputs of the returned closure to `var1` and `var2`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
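    ///
    /// # Example
    ///
    /// Binding `x` and `y` and calling the resulting closure:
    ///
    /// ```rust
    /// let f = "x * y".parse::<meval::Expr>().unwrap().bind2("x", "y").unwrap();
    /// assert_eq!(f(2., 3.), 6.);
    /// ```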
pub fn bind2<'a>(self, var1: &str, var2: &str) -> Result<impl Fn(f64, f64) -> f64 + 'a, Error> {
self.bind2_with_context(builtin(), var1, var2)
}
/// Creates a function of two variables based on this expression.
///
/// Binds the inputs of the returned closure to `var1` and `var2`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bind2_with_context<'a, C>(
self,
ctx: C,
var1: &str,
var2: &str,
) -> Result<impl Fn(f64, f64) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
try!(self.check_context(([(var1, 0.), (var2, 0.)], &ctx)));
let var1 = var1.to_owned();
let var2 = var2.to_owned();
Ok(move |x, y| {
self.eval_with_context(([(&var1, x), (&var2, y)], &ctx))
.expect("Expr::bind2")
})
}
/// Creates a function of three variables based on this expression, with default constants and
/// functions.
///
/// Binds the inputs of the returned closure to `var1`, `var2` and `var3`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
pub fn bind3<'a>(
self,
var1: &str,
var2: &str,
var3: &str,
) -> Result<impl Fn(f64, f64, f64) -> f64 + 'a, Error> {
self.bind3_with_context(builtin(), var1, var2, var3)
}
/// Creates a function of three variables based on this expression.
///
/// Binds the inputs of the returned closure to `var1`, `var2` and `var3`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bind3_with_context<'a, C>(
self,
ctx: C,
var1: &str,
var2: &str,
var3: &str,
) -> Result<impl Fn(f64, f64, f64) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
try!(self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.)], &ctx)));
let var1 = var1.to_owned();
let var2 = var2.to_owned();
let var3 = var3.to_owned();
Ok(move |x, y, z| {
self.eval_with_context(([(&var1, x), (&var2, y), (&var3, z)], &ctx))
.expect("Expr::bind3")
})
}
/// Creates a function of four variables based on this expression, with default constants and
/// functions.
///
/// Binds the inputs of the returned closure to `var1`, `var2`, `var3` and `var4`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
pub fn bind4<'a>(
self,
var1: &str,
var2: &str,
var3: &str,
var4: &str,
) -> Result<impl Fn(f64, f64, f64, f64) -> f64 + 'a, Error> {
self.bind4_with_context(builtin(), var1, var2, var3, var4)
}
/// Creates a function of four variables based on this expression.
///
/// Binds the inputs of the returned closure to `var1`, `var2`, `var3` and `var4`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bind4_with_context<'a, C>(
self,
ctx: C,
var1: &str,
var2: &str,
var3: &str,
var4: &str,
) -> Result<impl Fn(f64, f64, f64, f64) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
try!(self.check_context(([(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.)], &ctx)));
let var1 = var1.to_owned();
let var2 = var2.to_owned();
let var3 = var3.to_owned();
let var4 = var4.to_owned();
Ok(move |x1, x2, x3, x4| {
self.eval_with_context(([(&var1, x1), (&var2, x2), (&var3, x3), (&var4, x4)], &ctx))
.expect("Expr::bind4")
})
}
/// Creates a function of five variables based on this expression, with default constants and
/// functions.
///
/// Binds the inputs of the returned closure to `var1`, `var2`, `var3`, `var4` and `var5`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
pub fn bind5<'a>(
self,
var1: &str,
var2: &str,
var3: &str,
var4: &str,
var5: &str,
) -> Result<impl Fn(f64, f64, f64, f64, f64) -> f64 + 'a, Error> {
self.bind5_with_context(builtin(), var1, var2, var3, var4, var5)
}
/// Creates a function of five variables based on this expression.
///
/// Binds the inputs of the returned closure to `var1`, `var2`, `var3`, `var4` and `var5`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bind5_with_context<'a, C>(
self,
ctx: C,
var1: &str,
var2: &str,
var3: &str,
var4: &str,
var5: &str,
) -> Result<impl Fn(f64, f64, f64, f64, f64) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
try!(self.check_context((
[(var1, 0.), (var2, 0.), (var3, 0.), (var4, 0.), (var5, 0.)],
&ctx
)));
let var1 = var1.to_owned();
let var2 = var2.to_owned();
let var3 = var3.to_owned();
let var4 = var4.to_owned();
let var5 = var5.to_owned();
Ok(move |x1, x2, x3, x4, x5| {
self.eval_with_context((
[
(&var1, x1),
(&var2, x2),
(&var3, x3),
(&var4, x4),
(&var5, x5),
],
&ctx,
))
.expect("Expr::bind5")
})
}
/// Binds the input of the returned closure to elements of `vars`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by the default
/// context or `var`.
pub fn bindn<'a>(self, vars: &'a [&str]) -> Result<impl Fn(&[f64]) -> f64 + 'a, Error> {
self.bindn_with_context(builtin(), vars)
}
/// Creates a function of N variables based on this expression.
///
/// Binds the input of the returned closure to the elements of `vars`.
///
/// # Failure
///
/// Returns `Err` if there is a variable in the expression that is not provided by `ctx` or
/// `var`.
pub fn bindn_with_context<'a, C>(
self,
ctx: C,
vars: &'a [&str],
) -> Result<impl Fn(&[f64]) -> f64 + 'a, Error>
where
C: ContextProvider + 'a,
{
let n = vars.len();
try!(self.check_context((
vars.into_iter()
.zip(vec![0.; n].into_iter())
.collect::<Vec<_>>(),
&ctx
)));
let vars = vars.iter().map(|v| v.to_owned()).collect::<Vec<_>>();
Ok(move |x: &[f64]| {
self.eval_with_context((
vars.iter()
.zip(x.into_iter())
.map(|(v, x)| (v, *x))
.collect::<Vec<_>>(),
&ctx,
))
.expect("Expr::bindn")
})
}
/// Checks that the value of every variable in the expression is specified by the context `ctx`.
///
/// # Failure
///
/// Returns `Err` if a missing variable is detected.
fn check_context<C: ContextProvider>(&self, ctx: C) -> Result<(), Error> {
for t in &self.rpn {
match *t {
Token::Var(ref name) => {
if ctx.get_var(name).is_none() {
return Err(Error::UnknownVariable(name.clone()));
}
}
Token::Func(ref name, Some(i)) => {
let v = vec![0.; i];
if let Err(e) = ctx.eval_func(name, &v) {
return Err(Error::Function(name.to_owned(), e));
}
}
Token::Func(_, None) => {
return Err(Error::EvalError(format!(
"expr::check_context: Unexpected token: {:?}",
*t
)));
}
Token::LParen
| Token::RParen
| Token::Binary(_)
| Token::Unary(_)
| Token::Comma
| Token::Number(_) => {}
}
}
Ok(())
}
}
/// Evaluates a string with built-in constants and functions.
pub fn eval_str<S: AsRef<str>>(expr: S) -> Result<f64, Error> {
let expr = try!(Expr::from_str(expr.as_ref()));
expr.eval_with_context(builtin())
}
impl FromStr for Expr {
type Err = Error;
/// Constructs an expression by parsing a string.
fn from_str(s: &str) -> Result<Self, Self::Err> {
let tokens = try!(tokenize(s));
let rpn = try!(to_rpn(&tokens));
Ok(Expr { rpn: rpn })
}
}
/// Evaluates a string with the given context.
///
/// No built-ins are defined in this case.
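///
/// A small example with a single custom variable:
///
/// ```rust
/// assert_eq!(meval::eval_str_with_context("x + 1", ("x", 4.)), Ok(5.));
/// ```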
pub fn eval_str_with_context<S: AsRef<str>, C: ContextProvider>(
expr: S,
ctx: C,
) -> Result<f64, Error> {
let expr = try!(Expr::from_str(expr.as_ref()));
expr.eval_with_context(ctx)
}
impl Deref for Expr {
type Target = [Token];
fn deref(&self) -> &[Token] {
&self.rpn
}
}
/// A trait of a source of variables (and constants) and functions for substitution into an
/// evaluated expression.
///
/// The simplest way to create a custom context provider is to use [`Context`](struct.Context.html).
///
/// ## Advanced usage
///
/// Alternatively, values of variables/constants can be specified by tuples `(name, value)`,
/// `std::collections::HashMap` or `std::collections::BTreeMap`.
///
/// ```rust
/// use meval::{ContextProvider, Context};
///
/// let mut ctx = Context::new(); // built-ins
/// ctx.var("x", 2.); // insert a new variable
/// assert_eq!(ctx.get_var("pi"), Some(std::f64::consts::PI));
///
/// let myvars = ("x", 2.); // tuple as a ContextProvider
/// assert_eq!(myvars.get_var("x"), Some(2f64));
///
/// // HashMap as a ContextProvider
/// let mut varmap = std::collections::HashMap::new();
/// varmap.insert("x", 2.);
/// varmap.insert("y", 3.);
/// assert_eq!(varmap.get_var("x"), Some(2f64));
/// assert_eq!(varmap.get_var("z"), None);
/// ```
///
/// Custom functions can be also defined.
///
/// ```rust
/// use meval::{ContextProvider, Context};
///
/// let mut ctx = Context::new(); // built-ins
/// ctx.func2("phi", |x, y| x / (y * y));
///
/// assert_eq!(ctx.eval_func("phi", &[2., 3.]), Ok(2. / (3. * 3.)));
/// ```
///
/// A `ContextProvider` can be built by combining other contexts:
///
/// ```rust
/// use meval::Context;
///
/// let bins = Context::new(); // built-ins
/// let mut funcs = Context::empty(); // empty context
/// funcs.func2("phi", |x, y| x / (y * y));
/// let myvars = ("x", 2.);
///
/// // contexts can be combined using tuples
/// let ctx = ((myvars, bins), funcs); // the first context takes precedence on duplicate names
///
/// assert_eq!(meval::eval_str_with_context("x * pi + phi(1., 2.)", ctx).unwrap(), 2. *
/// std::f64::consts::PI + 1. / (2. * 2.));
/// ```
///
pub trait ContextProvider {
fn get_var(&self, _: &str) -> Option<f64> {
None
}
fn eval_func(&self, _: &str, _: &[f64]) -> Result<f64, FuncEvalError> {
Err(FuncEvalError::UnknownFunction)
}
}
/// Function evaluation error.
#[derive(Debug, Clone, PartialEq)]
pub enum FuncEvalError {
TooFewArguments,
TooManyArguments,
NumberArgs(usize),
UnknownFunction,
}
impl fmt::Display for FuncEvalError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
FuncEvalError::UnknownFunction => write!(f, "Unknown function"),
FuncEvalError::NumberArgs(i) => write!(f, "Expected {} arguments", i),
FuncEvalError::TooFewArguments => write!(f, "Too few arguments"),
FuncEvalError::TooManyArguments => write!(f, "Too many arguments"),
}
}
}
impl std::error::Error for FuncEvalError {
fn description(&self) -> &str {
match *self {
FuncEvalError::UnknownFunction => "unknown function",
FuncEvalError::NumberArgs(_) => "wrong number of function arguments",
FuncEvalError::TooFewArguments => "too few function arguments",
FuncEvalError::TooManyArguments => "too many function arguments",
}
}
}
#[doc(hidden)]
pub fn max_array(xs: &[f64]) -> f64 {
xs.iter().fold(::std::f64::NEG_INFINITY, |m, &x| m.max(x))
}
#[doc(hidden)]
pub fn min_array(xs: &[f64]) -> f64 {
xs.iter().fold(::std::f64::INFINITY, |m, &x| m.min(x))
}
/// Returns the built-in constants and functions in a form that can be used as a `ContextProvider`.
#[doc(hidden)]
pub fn builtin<'a>() -> Context<'a> {
// TODO: cache this (lazy_static)
Context::new()
}
impl<'a, T: ContextProvider> ContextProvider for &'a T {
fn get_var(&self, name: &str) -> Option<f64> {
(&**self).get_var(name)
}
fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
(&**self).eval_func(name, args)
}
}
impl<'a, T: ContextProvider> ContextProvider for &'a mut T {
fn get_var(&self, name: &str) -> Option<f64> {
(&**self).get_var(name)
}
fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
(&**self).eval_func(name, args)
}
}
impl<T: ContextProvider, S: ContextProvider> ContextProvider for (T, S) {
fn get_var(&self, name: &str) -> Option<f64> {
self.0.get_var(name).or_else(|| self.1.get_var(name))
}
fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
match self.0.eval_func(name, args) {
Err(FuncEvalError::UnknownFunction) => self.1.eval_func(name, args),
e => e,
}
}
}
impl<S: AsRef<str>> ContextProvider for (S, f64) {
fn get_var(&self, name: &str) -> Option<f64> {
if self.0.as_ref() == name {
Some(self.1)
} else {
None
}
}
}
/// `std::collections::HashMap` of variables.
impl<S> ContextProvider for std::collections::HashMap<S, f64>
where
S: std::hash::Hash + std::cmp::Eq + std::borrow::Borrow<str>,
{
fn get_var(&self, name: &str) -> Option<f64> {
self.get(name).cloned()
}
}
/// `std::collections::BTreeMap` of variables.
impl<S> ContextProvider for std::collections::BTreeMap<S, f64>
where
S: std::cmp::Ord + std::borrow::Borrow<str>,
{
fn get_var(&self, name: &str) -> Option<f64> {
self.get(name).cloned()
}
}
impl<S: AsRef<str>> ContextProvider for Vec<(S, f64)> {
fn get_var(&self, name: &str) -> Option<f64> {
for &(ref n, v) in self.iter() {
if n.as_ref() == name {
return Some(v);
}
}
None
}
}
// macro for implementing ContextProvider for arrays
macro_rules! array_impls {
($($N:expr)+) => {
$(
impl<S: AsRef<str>> ContextProvider for [(S, f64); $N] {
fn get_var(&self, name: &str) -> Option<f64> {
for &(ref n, v) in self.iter() {
if n.as_ref() == name {
return Some(v);
}
}
None
}
}
)+
}
}
array_impls! {
0 1 2 3 4 5 6 7 8
}
/// A structure for storing variables/constants and functions to be used in an expression.
///
/// # Example
///
/// ```rust
/// use meval::{eval_str_with_context, Context};
///
/// let mut ctx = Context::new(); // builtins
/// ctx.var("x", 3.)
/// .func("f", |x| 2. * x)
/// .funcn("sum", |xs| xs.iter().sum(), ..);
///
/// assert_eq!(eval_str_with_context("pi + sum(1., 2.) + f(x)", &ctx),
/// Ok(std::f64::consts::PI + 1. + 2. + 2. * 3.));
/// ```
#[derive(Clone)]
pub struct Context<'a> {
vars: ContextHashMap<String, f64>,
funcs: ContextHashMap<String, GuardedFunc<'a>>,
}
impl<'a> Context<'a> {
/// Creates a context with built-in constants and functions.
pub fn new() -> Context<'a> {
thread_local!(static DEFAULT_CONTEXT: Context<'static> = {
let mut ctx = Context::empty();
ctx.var("pi", consts::PI);
ctx.var("e", consts::E);
ctx.func("sqrt", f64::sqrt);
ctx.func("exp", f64::exp);
ctx.func("ln", f64::ln);
ctx.func("log10", f64::log10);
ctx.func("abs", f64::abs);
ctx.func("sin", f64::sin);
ctx.func("cos", f64::cos);
ctx.func("tan", f64::tan);
ctx.func("asin", f64::asin);
ctx.func("acos", f64::acos);
ctx.func("atan", f64::atan);
ctx.func("sinh", f64::sinh);
ctx.func("cosh", f64::cosh);
ctx.func("tanh", f64::tanh);
ctx.func("asinh", f64::asinh);
ctx.func("acosh", f64::acosh);
ctx.func("atanh", f64::atanh);
ctx.func("floor", f64::floor);
ctx.func("ceil", f64::ceil);
ctx.func("round", f64::round);
ctx.func("signum", f64::signum);
ctx.func2("atan2", f64::atan2);
ctx.funcn("max", max_array, 1..);
ctx.funcn("min", min_array, 1..);
ctx
});
DEFAULT_CONTEXT.with(|ctx| ctx.clone())
}
    /// Creates an empty context.
pub fn empty() -> Context<'a> {
Context {
vars: ContextHashMap::default(),
funcs: ContextHashMap::default(),
}
}
/// Adds a new variable/constant.
pub fn var<S: Into<String>>(&mut self, var: S, value: f64) -> &mut Self {
self.vars.insert(var.into(), value);
self
}
/// Adds a new function of one argument.
pub fn func<S, F>(&mut self, name: S, func: F) -> &mut Self
where
S: Into<String>,
F: Fn(f64) -> f64 + 'a,
{
self.funcs.insert(
name.into(),
Rc::new(move |args: &[f64]| {
if args.len() == 1 {
Ok(func(args[0]))
} else {
Err(FuncEvalError::NumberArgs(1))
}
}),
);
self
}
/// Adds a new function of two arguments.
pub fn func2<S, F>(&mut self, name: S, func: F) -> &mut Self
where
S: Into<String>,
F: Fn(f64, f64) -> f64 + 'a,
{
self.funcs.insert(
name.into(),
Rc::new(move |args: &[f64]| {
if args.len() == 2 {
Ok(func(args[0], args[1]))
} else {
Err(FuncEvalError::NumberArgs(2))
}
}),
);
self
}
/// Adds a new function of three arguments.
pub fn func3<S, F>(&mut self, name: S, func: F) -> &mut Self
where
S: Into<String>,
F: Fn(f64, f64, f64) -> f64 + 'a,
{
self.funcs.insert(
name.into(),
Rc::new(move |args: &[f64]| {
if args.len() == 3 {
Ok(func(args[0], args[1], args[2]))
} else {
Err(FuncEvalError::NumberArgs(3))
}
}),
);
self
}
/// Adds a new function of a variable number of arguments.
///
    /// `n_args` specifies the allowed number of arguments by giving an exact number `n` or a range
    /// `n..m`, `..`, `n..`, `..m`. The range is half-open, exclusive on the right, as is common in
    /// the Rust standard library.
///
/// # Example
///
/// ```rust
/// let mut ctx = meval::Context::empty();
///
/// // require exactly 2 arguments
/// ctx.funcn("sum_two", |xs| xs[0] + xs[1], 2);
///
/// // allow an arbitrary number of arguments
/// ctx.funcn("sum", |xs| xs.iter().sum(), ..);
/// ```
pub fn funcn<S, F, N>(&mut self, name: S, func: F, n_args: N) -> &mut Self
where
S: Into<String>,
F: Fn(&[f64]) -> f64 + 'a,
N: ArgGuard,
{
self.funcs.insert(name.into(), n_args.to_arg_guard(func));
self
}
}
impl<'a> Default for Context<'a> {
fn default() -> Self {
Context::new()
}
}
type GuardedFunc<'a> = Rc<Fn(&[f64]) -> Result<f64, FuncEvalError> + 'a>;
/// Trait for types that can specify the number of required arguments for a function with a
/// variable number of arguments.
///
/// # Example
///
/// ```rust
/// let mut ctx = meval::Context::empty();
///
/// // require exactly 2 arguments
/// ctx.funcn("sum_two", |xs| xs[0] + xs[1], 2);
///
/// // allow an arbitrary number of arguments
/// ctx.funcn("sum", |xs| xs.iter().sum(), ..);
/// ```
pub trait ArgGuard {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a>;
}
impl ArgGuard for usize {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a> {
Rc::new(move |args: &[f64]| {
if args.len() == self {
Ok(func(args))
} else {
                Err(FuncEvalError::NumberArgs(self))
}
})
}
}
impl ArgGuard for std::ops::RangeFrom<usize> {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a> {
Rc::new(move |args: &[f64]| {
if args.len() >= self.start {
Ok(func(args))
} else {
Err(FuncEvalError::TooFewArguments)
}
})
}
}
impl ArgGuard for std::ops::RangeTo<usize> {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a> {
Rc::new(move |args: &[f64]| {
if args.len() < self.end {
Ok(func(args))
} else {
Err(FuncEvalError::TooManyArguments)
}
})
}
}
impl ArgGuard for std::ops::Range<usize> {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a> {
Rc::new(move |args: &[f64]| {
if args.len() >= self.start && args.len() < self.end {
Ok(func(args))
} else if args.len() < self.start {
Err(FuncEvalError::TooFewArguments)
} else {
Err(FuncEvalError::TooManyArguments)
}
})
}
}
impl ArgGuard for std::ops::RangeFull {
fn to_arg_guard<'a, F: Fn(&[f64]) -> f64 + 'a>(self, func: F) -> GuardedFunc<'a> {
Rc::new(move |args: &[f64]| Ok(func(args)))
}
}
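// How the different `n_args` forms map onto guards (illustrative summary of the
// impls above):
//   2     -> exactly two arguments
//   1..   -> at least one argument
//   ..3   -> fewer than three arguments (exclusive on the right)
//   1..3  -> one or two arguments
//   ..    -> any number of arguments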
impl<'a> ContextProvider for Context<'a> {
fn get_var(&self, name: &str) -> Option<f64> {
self.vars.get(name).cloned()
}
fn eval_func(&self, name: &str, args: &[f64]) -> Result<f64, FuncEvalError> {
self.funcs
.get(name)
.map_or(Err(FuncEvalError::UnknownFunction), |f| f(args))
}
}
#[cfg(feature = "serde")]
pub mod de {
use super::Expr;
use serde;
use std::fmt;
use std::str::FromStr;
use tokenizer::Token;
impl<'de> serde::Deserialize<'de> for Expr {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct ExprVisitor;
impl<'de> serde::de::Visitor<'de> for ExprVisitor {
type Value = Expr;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a math expression")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Expr::from_str(v).map_err(serde::de::Error::custom)
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Expr {
rpn: vec![Token::Number(v)],
})
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Expr {
rpn: vec![Token::Number(v as f64)],
})
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(Expr {
rpn: vec![Token::Number(v as f64)],
})
}
}
deserializer.deserialize_any(ExprVisitor)
}
}
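    // The visitor above accepts either a string or a bare number, so all of the
    // following JSON values deserialize to an `Expr` (illustrative):
    //   "sin(x) + 1"  -> parsed as an expression
    //   2.5           -> becomes the constant expression 2.5
    //   5             -> integer literals are widened to f64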
#[cfg(test)]
mod tests {
use super::*;
use de::as_f64;
use serde_json;
use serde_test;
#[test]
fn test_deserialization() {
use serde_test::Token;
let expr = Expr::from_str("sin(x)").unwrap();
serde_test::assert_de_tokens(&expr, &[Token::Str("sin(x)")]);
serde_test::assert_de_tokens(&expr, &[Token::String("sin(x)")]);
let expr = Expr::from_str("5").unwrap();
serde_test::assert_de_tokens(&expr, &[Token::F64(5.)]);
serde_test::assert_de_tokens(&expr, &[Token::U8(5)]);
serde_test::assert_de_tokens(&expr, &[Token::I8(5)]);
}
#[test]
fn test_json_deserialization() {
#[derive(Deserialize)]
struct Ode {
#[serde(deserialize_with = "as_f64")]
x0: f64,
#[serde(deserialize_with = "as_f64")]
t0: f64,
f: Expr,
g: Expr,
h: Expr,
}
let config = r#"
{
"x0": "cos(1.)",
"t0": 2,
"f": "sin(x)",
"g": 2.5,
"h": 5
}
"#;
let ode: Ode = serde_json::from_str(config).unwrap();
assert_eq!(ode.x0, 1f64.cos());
assert_eq!(ode.t0, 2f64);
assert_eq!(ode.f.bind("x").unwrap()(2.), 2f64.sin());
assert_eq!(ode.g.eval().unwrap(), 2.5f64);
assert_eq!(ode.h.eval().unwrap(), 5f64);
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::str::FromStr;
use Error;
#[test]
fn test_eval() {
assert_eq!(eval_str("2 + 3"), Ok(5.));
assert_eq!(eval_str("2 + (3 + 4)"), Ok(9.));
assert_eq!(eval_str("-2^(4 - 3) * (3 + 4)"), Ok(-14.));
assert_eq!(eval_str("-2*3! + 1"), Ok(-11.));
assert_eq!(eval_str("-171!"), Ok(std::f64::NEG_INFINITY));
assert_eq!(eval_str("150!/148!"), Ok(22350.));
assert_eq!(eval_str("a + 3"), Err(Error::UnknownVariable("a".into())));
assert_eq!(eval_str("round(sin (pi) * cos(0))"), Ok(0.));
assert_eq!(eval_str("round( sqrt(3^2 + 4^2)) "), Ok(5.));
assert_eq!(eval_str("max(1.)"), Ok(1.));
assert_eq!(eval_str("max(1., 2., -1)"), Ok(2.));
assert_eq!(eval_str("min(1., 2., -1)"), Ok(-1.));
assert_eq!(
eval_str("sin(1.) + cos(2.)"),
Ok((1f64).sin() + (2f64).cos())
);
assert_eq!(eval_str("10 % 9"), Ok(10f64 % 9f64));
match eval_str("0.5!") {
Err(Error::EvalError(_)) => {}
_ => panic!("Cannot evaluate factorial of non-integer"),
}
}
#[test]
fn test_builtins() {
assert_eq!(eval_str("atan2(1.,2.)"), Ok((1f64).atan2(2.)));
}
#[test]
fn test_eval_func_ctx() {
use std::collections::{BTreeMap, HashMap};
let y = 5.;
assert_eq!(
eval_str_with_context("phi(2.)", Context::new().func("phi", |x| x + y + 3.)),
Ok(2. + y + 3.)
);
assert_eq!(
eval_str_with_context(
"phi(2., 3.)",
Context::new().func2("phi", |x, y| x + y + 3.)
),
Ok(2. + 3. + 3.)
);
assert_eq!(
eval_str_with_context(
"phi(2., 3., 4.)",
Context::new().func3("phi", |x, y, z| x + y * z)
),
Ok(2. + 3. * 4.)
);
assert_eq!(
eval_str_with_context(
"phi(2., 3.)",
Context::new().funcn("phi", |xs: &[f64]| xs[0] + xs[1], 2)
),
Ok(2. + 3.)
);
let mut m = HashMap::new();
m.insert("x", 2.);
m.insert("y", 3.);
assert_eq!(eval_str_with_context("x + y", &m), Ok(2. + 3.));
assert_eq!(
eval_str_with_context("x + z", m),
Err(Error::UnknownVariable("z".into()))
);
let mut m = BTreeMap::new();
m.insert("x", 2.);
m.insert("y", 3.);
assert_eq!(eval_str_with_context("x + y", &m), Ok(2. + 3.));
assert_eq!(
eval_str_with_context("x + z", m),
Err(Error::UnknownVariable("z".into()))
);
}
#[test]
fn test_bind() {
let expr = Expr::from_str("x + 3").unwrap();
let func = expr.clone().bind("x").unwrap();
assert_eq!(func(1.), 4.);
assert_eq!(
expr.clone().bind("y").err(),
Some(Error::UnknownVariable("x".into()))
);
let ctx = (("x", 2.), builtin());
let func = expr.bind_with_context(&ctx, "y").unwrap();
assert_eq!(func(1.), 5.);
let expr = Expr::from_str("x + y + 2.").unwrap();
let func = expr.clone().bind2("x", "y").unwrap();
assert_eq!(func(1., 2.), 5.);
assert_eq!(
expr.clone().bind2("z", "y").err(),
Some(Error::UnknownVariable("x".into()))
);
assert_eq!(
expr.bind2("x", "z").err(),
Some(Error::UnknownVariable("y".into()))
);
let expr = Expr::from_str("x + y^2 + z^3").unwrap();
let func = expr.clone().bind3("x", "y", "z").unwrap();
assert_eq!(func(1., 2., 3.), 32.);
let expr = Expr::from_str("sin(x)").unwrap();
let func = expr.clone().bind("x").unwrap();
assert_eq!(func(1.), (1f64).sin());
let expr = Expr::from_str("sin(x,2)").unwrap();
match expr.clone().bind("x") {
Err(Error::Function(_, FuncEvalError::NumberArgs(1))) => {}
_ => panic!("bind did not error"),
}
let expr = Expr::from_str("hey(x,2)").unwrap();
match expr.clone().bind("x") {
Err(Error::Function(_, FuncEvalError::UnknownFunction)) => {}
_ => panic!("bind did not error"),
}
}
#[test]
fn hash_context() {
let y = 0.;
{
let z = 0.;
let mut ctx = Context::new();
ctx.var("x", 1.).func("f", |x| x + y).func("g", |x| x + z);
ctx.func2("g", |x, y| x + y);
}
}
}
| 31.14701 | 118 | 0.479427 |
fb29aca76b5fd50d1bc360e553db7eec3175bd54 | 27,282 | use std::cell::RefCell;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::{collections::HashMap, iter, rc::Rc};
use actix::prelude::*;
use actix_service::{Service, Transform};
use actix_session::{Session, SessionStatus};
use actix_web::cookie::{Cookie, CookieJar, Key, SameSite};
use actix_web::dev::{ServiceRequest, ServiceResponse};
use actix_web::http::header::{self, HeaderValue};
use actix_web::{error, Error, HttpMessage};
use futures::future::{ok, Future, Ready};
use rand::{distributions::Alphanumeric, rngs::OsRng, Rng};
use time::{self, Duration, OffsetDateTime};
use crate::cluster::RedisClusterActor;
use crate::command::{del, get, set};
use crate::redis::RedisActor;
/// Use redis as session storage.
///
/// You need to pass the address of the redis server and a random value to the
/// constructor of `RedisSession`. The random value is the private key used to
/// sign the session cookie; when this value is changed, all session data is lost.
///
/// The constructor panics if the key is shorter than 32 bytes.
pub struct RedisSession<R: Actor = RedisActor>(Rc<Inner<R>>);
impl RedisSession<RedisActor> {
/// Create new redis session backend
///
/// * `addr` - address of the redis server
pub fn new<S: Into<String>>(addr: S, key: &[u8]) -> Self {
RedisSession(Rc::new(Inner {
key: Key::from_master(key),
cache_keygen: Box::new(|key: &str| format!("session:{}", &key)),
ttl: 7200,
addr: RedisActor::start(addr),
name: "actix-session".to_owned(),
path: "/".to_owned(),
domain: None,
secure: false,
max_age: Some(Duration::days(7)),
same_site: None,
http_only: Some(true),
}))
}
}
impl RedisSession<RedisClusterActor> {
/// Create new redis session backend with Redis Cluster
///
/// * `addr` - address of the redis server
pub fn new_cluster<S: Into<String>>(addr: S, key: &[u8]) -> Self {
RedisSession(Rc::new(Inner {
key: Key::from_master(key),
cache_keygen: Box::new(|key: &str| format!("session:{}", &key)),
ttl: 7200,
addr: RedisClusterActor::start(addr),
name: "actix-session".to_owned(),
path: "/".to_owned(),
domain: None,
secure: false,
max_age: Some(Duration::days(7)),
same_site: None,
http_only: Some(true),
}))
}
}
impl<R: Actor> RedisSession<R> {
/// Set time to live in seconds for session value
pub fn ttl(mut self, ttl: i64) -> Self {
Rc::get_mut(&mut self.0).unwrap().ttl = ttl;
self
}
/// Set custom cookie name for session id
pub fn cookie_name(mut self, name: &str) -> Self {
Rc::get_mut(&mut self.0).unwrap().name = name.to_owned();
self
}
/// Set custom cookie path
pub fn cookie_path(mut self, path: &str) -> Self {
Rc::get_mut(&mut self.0).unwrap().path = path.to_owned();
self
}
/// Set custom cookie domain
pub fn cookie_domain(mut self, domain: &str) -> Self {
Rc::get_mut(&mut self.0).unwrap().domain = Some(domain.to_owned());
self
}
    /// Set custom cookie secure.
    ///
    /// If `secure` is set, the cookie will only be transmitted when the
    /// connection is secure, i.e. over `https`.
pub fn cookie_secure(mut self, secure: bool) -> Self {
Rc::get_mut(&mut self.0).unwrap().secure = secure;
self
}
/// Set custom cookie max-age
pub fn cookie_max_age(mut self, max_age: Duration) -> Self {
Rc::get_mut(&mut self.0).unwrap().max_age = Some(max_age);
self
}
/// Set custom cookie SameSite
pub fn cookie_same_site(mut self, same_site: SameSite) -> Self {
Rc::get_mut(&mut self.0).unwrap().same_site = Some(same_site);
self
}
/// Set custom cookie HttpOnly policy
pub fn cookie_http_only(mut self, http_only: bool) -> Self {
Rc::get_mut(&mut self.0).unwrap().http_only = Some(http_only);
self
}
/// Set a custom cache key generation strategy, expecting session key as input
pub fn cache_keygen(mut self, keygen: Box<dyn Fn(&str) -> String>) -> Self {
Rc::get_mut(&mut self.0).unwrap().cache_keygen = keygen;
self
}
}
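// Illustrative sketch of wiring the middleware into an actix-web `App`, based on
// the tests at the bottom of this file; the address, key, and cookie settings are
// placeholders, not recommendations.
//
//     App::new().wrap(
//         RedisSession::new("127.0.0.1:6379", &[0; 32])
//             .cookie_name("my-session")
//             .cookie_secure(true)
//             .ttl(3600),
//     )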
/// Redis-backed session middleware
pub struct RedisSessionMiddleware<S: 'static, R: Actor> {
service: Rc<RefCell<S>>,
inner: Rc<Inner<R>>,
}
struct Inner<R: Actor> {
key: Key,
cache_keygen: Box<dyn Fn(&str) -> String>,
ttl: i64,
addr: Addr<R>,
name: String,
path: String,
domain: Option<String>,
secure: bool,
max_age: Option<Duration>,
same_site: Option<SameSite>,
http_only: Option<bool>,
}
macro_rules! impl_methods {
($R:ty) => {
impl<S, B> Transform<S> for RedisSession<$R>
where
S: Service<
Request = ServiceRequest,
Response = ServiceResponse<B>,
Error = Error,
> + 'static,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
type Error = S::Error;
type InitError = ();
type Transform = RedisSessionMiddleware<S, $R>;
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ok(RedisSessionMiddleware {
service: Rc::new(RefCell::new(service)),
inner: self.0.clone(),
})
}
}
impl<S, B> Service for RedisSessionMiddleware<S, $R>
where
S: Service<
Request = ServiceRequest,
Response = ServiceResponse<B>,
Error = Error,
> + 'static,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
type Error = Error;
#[allow(clippy::type_complexity)]
type Future =
Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>;
fn poll_ready(
&mut self,
cx: &mut Context<'_>,
) -> Poll<Result<(), Self::Error>> {
self.service.borrow_mut().poll_ready(cx)
}
fn call(&mut self, mut req: ServiceRequest) -> Self::Future {
let mut srv = self.service.clone();
let inner = self.inner.clone();
Box::pin(async move {
let state = inner.load(&req).await?;
let value = if let Some((state, value)) = state {
Session::set_session(state.into_iter(), &mut req);
Some(value)
} else {
None
};
let mut res = srv.call(req).await?;
match Session::get_changes(&mut res) {
(SessionStatus::Unchanged, None) => Ok(res),
(SessionStatus::Unchanged, Some(state)) => {
if value.is_none() {
// implies the session is new
inner.update(res, state, value).await
} else {
Ok(res)
}
}
(SessionStatus::Changed, Some(state)) => {
inner.update(res, state, value).await
}
(SessionStatus::Purged, Some(_)) => {
if let Some(val) = value {
inner.clear_cache(val).await?;
match inner.remove_cookie(&mut res) {
Ok(_) => Ok(res),
Err(_err) => {
Err(error::ErrorInternalServerError(_err))
}
}
} else {
Err(error::ErrorInternalServerError("unexpected"))
}
}
(SessionStatus::Renewed, Some(state)) => {
if let Some(val) = value {
inner.clear_cache(val).await?;
inner.update(res, state, None).await
} else {
inner.update(res, state, None).await
}
}
(_, None) => unreachable!(),
}
})
}
}
impl Inner<$R> {
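                // Looks up the session for a request: find the session cookie by
                // name, verify its signature with the configured key, derive the
                // cache key from the cookie value, then fetch and JSON-decode the
                // stored state from redis. Returns `Ok(None)` whenever any step
                // finds no usable session.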
async fn load(
&self,
req: &ServiceRequest,
) -> Result<Option<(HashMap<String, String>, String)>, Error> {
if let Ok(cookies) = req.cookies() {
for cookie in cookies.iter() {
if cookie.name() == self.name {
let mut jar = CookieJar::new();
jar.add_original(cookie.clone());
if let Some(cookie) = jar.signed(&self.key).get(&self.name) {
let value = cookie.value().to_owned();
let cachekey = (self.cache_keygen)(&cookie.value());
return match self.addr.send(get(cachekey)).await {
Err(e) => Err(Error::from(e)),
Ok(res) => match res {
Ok(val) => {
if let Some(val) = val {
if let Ok(val) =
serde_json::from_slice(&val)
{
return Ok(Some((val, value)));
}
}
Ok(None)
}
Err(err) => {
Err(error::ErrorInternalServerError(err))
}
},
};
} else {
return Ok(None);
}
}
}
}
Ok(None)
}
async fn update<B>(
&self,
mut res: ServiceResponse<B>,
state: impl Iterator<Item = (String, String)>,
value: Option<String>,
) -> Result<ServiceResponse<B>, Error> {
let (value, jar) = if let Some(value) = value {
(value, None)
} else {
let value: String = iter::repeat(())
.map(|()| OsRng.sample(Alphanumeric))
.take(32)
.collect();
// prepare session id cookie
let mut cookie = Cookie::new(self.name.clone(), value.clone());
cookie.set_path(self.path.clone());
cookie.set_secure(self.secure);
cookie.set_http_only(self.http_only.unwrap_or(true));
if let Some(ref domain) = self.domain {
cookie.set_domain(domain.clone());
}
if let Some(max_age) = self.max_age {
cookie.set_max_age(max_age);
}
if let Some(same_site) = self.same_site {
cookie.set_same_site(same_site);
}
// set cookie
let mut jar = CookieJar::new();
jar.signed(&self.key).add(cookie);
(value, Some(jar))
};
let cachekey = (self.cache_keygen)(&value);
let state: HashMap<_, _> = state.collect();
match serde_json::to_string(&state) {
Err(e) => Err(e.into()),
Ok(body) => {
match self.addr.send(set(cachekey, body).ex(self.ttl)).await {
Err(e) => Err(Error::from(e)),
Ok(redis_result) => match redis_result {
Ok(_) => {
if let Some(jar) = jar {
for cookie in jar.delta() {
let val = HeaderValue::from_str(
&cookie.to_string(),
)?;
res.headers_mut()
.append(header::SET_COOKIE, val);
}
}
Ok(res)
}
Err(err) => Err(error::ErrorInternalServerError(err)),
},
}
}
}
}
/// removes cache entry
async fn clear_cache(&self, key: String) -> Result<(), Error> {
let cachekey = (self.cache_keygen)(&key);
match self.addr.send(del(cachekey)).await {
Err(e) => Err(Error::from(e)),
Ok(res) => match res {
Ok(x) if x > 0 => Ok(()),
_ => Err(error::ErrorInternalServerError(
"failed to remove session from cache",
)),
},
}
}
/// invalidates session cookie
fn remove_cookie<B>(
&self,
res: &mut ServiceResponse<B>,
) -> Result<(), Error> {
let mut cookie = Cookie::named(self.name.clone());
cookie.set_value("");
cookie.set_max_age(Duration::zero());
cookie.set_expires(OffsetDateTime::now() - Duration::days(365));
let val = HeaderValue::from_str(&cookie.to_string())
.map_err(error::ErrorInternalServerError)?;
res.headers_mut().append(header::SET_COOKIE, val);
Ok(())
}
}
};
}
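// The same `Transform`/`Service` implementations are stamped out for both the
// single-node actor and the cluster actor; the macro lets the body be written
// once and instantiated for each concrete actor type below.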
impl_methods!(RedisActor);
impl_methods!(RedisClusterActor);
#[cfg(test)]
mod test {
use super::*;
use actix_session::Session;
use actix_web::{
middleware, test, web,
web::{get, post, resource},
App, HttpResponse, Result,
};
use serde::{Deserialize, Serialize};
use serde_json::json;
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub struct IndexResponse {
user_id: Option<String>,
counter: i32,
}
async fn index(session: Session) -> Result<HttpResponse> {
let user_id: Option<String> = session.get::<String>("user_id").unwrap();
let counter: i32 = session
.get::<i32>("counter")
.unwrap_or(Some(0))
.unwrap_or(0);
Ok(HttpResponse::Ok().json(IndexResponse { user_id, counter }))
}
async fn do_something(session: Session) -> Result<HttpResponse> {
let user_id: Option<String> = session.get::<String>("user_id").unwrap();
let counter: i32 = session
.get::<i32>("counter")
.unwrap_or(Some(0))
.map_or(1, |inner| inner + 1);
session.set("counter", counter)?;
Ok(HttpResponse::Ok().json(IndexResponse { user_id, counter }))
}
#[derive(Deserialize)]
struct Identity {
user_id: String,
}
async fn login(
user_id: web::Json<Identity>,
session: Session,
) -> Result<HttpResponse> {
let id = user_id.into_inner().user_id;
session.set("user_id", &id)?;
session.renew();
let counter: i32 = session
.get::<i32>("counter")
.unwrap_or(Some(0))
.unwrap_or(0);
Ok(HttpResponse::Ok().json(IndexResponse {
user_id: Some(id),
counter,
}))
}
async fn logout(session: Session) -> Result<HttpResponse> {
let id: Option<String> = session.get("user_id")?;
if let Some(x) = id {
session.purge();
Ok(format!("Logged out: {}", x).into())
} else {
Ok("Could not log out anonymous user".into())
}
}
#[actix_rt::test]
async fn test_workflow() {
let srv = test::start(|| {
App::new()
.wrap(
RedisSession::new("127.0.0.1:6379", &[0; 32])
.cookie_name("test-session"),
)
.wrap(middleware::Logger::default())
.service(resource("/").route(get().to(index)))
.service(resource("/do_something").route(post().to(do_something)))
.service(resource("/login").route(post().to(login)))
.service(resource("/logout").route(post().to(logout)))
});
test_workflow_helper(srv).await;
}
#[actix_rt::test]
async fn test_workflow_cluster() {
let srv_cluster = test::start(|| {
App::new()
.wrap(
RedisSession::new_cluster("127.0.0.1:7000", &[0; 32])
.cookie_name("test-session"),
)
.wrap(middleware::Logger::default())
.service(resource("/").route(get().to(index)))
.service(resource("/do_something").route(post().to(do_something)))
.service(resource("/login").route(post().to(login)))
.service(resource("/logout").route(post().to(logout)))
});
test_workflow_helper(srv_cluster).await;
}
async fn test_workflow_helper(srv: test::TestServer) {
// Step 1: GET index
// - set-cookie actix-session will be in response (session cookie #1)
// - response should be: {"counter": 0, "user_id": None}
// Step 2: GET index, including session cookie #1 in request
// - set-cookie will *not* be in response
// - response should be: {"counter": 0, "user_id": None}
// Step 3: POST to do_something, including session cookie #1 in request
// - adds new session state in redis: {"counter": 1}
// - response should be: {"counter": 1, "user_id": None}
// Step 4: POST again to do_something, including session cookie #1 in request
// - updates session state in redis: {"counter": 2}
// - response should be: {"counter": 2, "user_id": None}
// Step 5: POST to login, including session cookie #1 in request
// - set-cookie actix-session will be in response (session cookie #2)
// - updates session state in redis: {"counter": 2, "user_id": "ferris"}
// Step 6: GET index, including session cookie #2 in request
// - response should be: {"counter": 2, "user_id": "ferris"}
// Step 7: POST again to do_something, including session cookie #2 in request
// - updates session state in redis: {"counter": 3, "user_id": "ferris"}
// - response should be: {"counter": 2, "user_id": None}
// Step 8: GET index, including session cookie #1 in request
// - set-cookie actix-session will be in response (session cookie #3)
// - response should be: {"counter": 0, "user_id": None}
// Step 9: POST to logout, including session cookie #2
// - set-cookie actix-session will be in response with session cookie #2
// invalidation logic
// Step 10: GET index, including session cookie #2 in request
// - set-cookie actix-session will be in response (session cookie #3)
// - response should be: {"counter": 0, "user_id": None}
// Step 1: GET index
// - set-cookie actix-session will be in response (session cookie #1)
// - response should be: {"counter": 0, "user_id": None}
let req_1a = srv.get("/").send();
let mut resp_1 = req_1a.await.unwrap();
let cookie_1 = resp_1
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session")
.unwrap();
let result_1 = resp_1.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_1,
IndexResponse {
user_id: None,
counter: 0
}
);
// Step 2: GET index, including session cookie #1 in request
// - set-cookie will *not* be in response
// - response should be: {"counter": 0, "user_id": None}
let req_2 = srv.get("/").cookie(cookie_1.clone()).send();
let resp_2 = req_2.await.unwrap();
let cookie_2 = resp_2
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session");
assert_eq!(cookie_2, None);
// Step 3: POST to do_something, including session cookie #1 in request
// - adds new session state in redis: {"counter": 1}
// - response should be: {"counter": 1, "user_id": None}
let req_3 = srv.post("/do_something").cookie(cookie_1.clone()).send();
let mut resp_3 = req_3.await.unwrap();
let result_3 = resp_3.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_3,
IndexResponse {
user_id: None,
counter: 1
}
);
// Step 4: POST again to do_something, including session cookie #1 in request
// - updates session state in redis: {"counter": 2}
// - response should be: {"counter": 2, "user_id": None}
let req_4 = srv.post("/do_something").cookie(cookie_1.clone()).send();
let mut resp_4 = req_4.await.unwrap();
let result_4 = resp_4.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_4,
IndexResponse {
user_id: None,
counter: 2
}
);
// Step 5: POST to login, including session cookie #1 in request
// - set-cookie actix-session will be in response (session cookie #2)
// - updates session state in redis: {"counter": 2, "user_id": "ferris"}
let req_5 = srv
.post("/login")
.cookie(cookie_1.clone())
.send_json(&json!({"user_id": "ferris"}));
let mut resp_5 = req_5.await.unwrap();
let cookie_2 = resp_5
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session")
.unwrap();
assert_ne!(cookie_1.value(), cookie_2.value());
let result_5 = resp_5.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_5,
IndexResponse {
user_id: Some("ferris".into()),
counter: 2
}
);
// Step 6: GET index, including session cookie #2 in request
// - response should be: {"counter": 2, "user_id": "ferris"}
let req_6 = srv.get("/").cookie(cookie_2.clone()).send();
let mut resp_6 = req_6.await.unwrap();
let result_6 = resp_6.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_6,
IndexResponse {
user_id: Some("ferris".into()),
counter: 2
}
);
// Step 7: POST again to do_something, including session cookie #2 in request
// - updates session state in redis: {"counter": 3, "user_id": "ferris"}
// - response should be: {"counter": 2, "user_id": None}
let req_7 = srv.post("/do_something").cookie(cookie_2.clone()).send();
let mut resp_7 = req_7.await.unwrap();
let result_7 = resp_7.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_7,
IndexResponse {
user_id: Some("ferris".into()),
counter: 3
}
);
// Step 8: GET index, including session cookie #1 in request
// - set-cookie actix-session will be in response (session cookie #3)
// - response should be: {"counter": 0, "user_id": None}
let req_8 = srv.get("/").cookie(cookie_1.clone()).send();
let mut resp_8 = req_8.await.unwrap();
let cookie_3 = resp_8
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session")
.unwrap();
let result_8 = resp_8.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_8,
IndexResponse {
user_id: None,
counter: 0
}
);
assert_ne!(cookie_3.value(), cookie_2.value());
// Step 9: POST to logout, including session cookie #2
// - set-cookie actix-session will be in response with session cookie #2
// invalidation logic
let req_9 = srv.post("/logout").cookie(cookie_2.clone()).send();
let resp_9 = req_9.await.unwrap();
let cookie_4 = resp_9
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session")
.unwrap();
assert_ne!(
OffsetDateTime::now().year(),
cookie_4.expires().map(|t| t.year()).unwrap()
);
// Step 10: GET index, including session cookie #2 in request
// - set-cookie actix-session will be in response (session cookie #3)
// - response should be: {"counter": 0, "user_id": None}
let req_10 = srv.get("/").cookie(cookie_2.clone()).send();
let mut resp_10 = req_10.await.unwrap();
let result_10 = resp_10.json::<IndexResponse>().await.unwrap();
assert_eq!(
result_10,
IndexResponse {
user_id: None,
counter: 0
}
);
let cookie_5 = resp_10
.cookies()
.unwrap()
.clone()
.into_iter()
.find(|c| c.name() == "test-session")
.unwrap();
assert_ne!(cookie_5.value(), cookie_2.value());
}
}
| 38.050209 | 89 | 0.473829 |
50b0ede09c22f05c124413f11b80c623441c8405 | 446 | // Check that when making a ref mut binding with type `&mut T`, the
// type `T` must match precisely the type `U` of the value being
// matched, and in particular cannot be some supertype of `U`. Issue
// #23116. This test focuses on a `match`.
#![allow(dead_code)]
struct S<'b>(&'b i32);
impl<'b> S<'b> {
fn bar<'a>(&'a mut self) -> &'a mut &'a i32 {
match self.0 { ref mut x => x } //~ ERROR mismatched types
}
}
fn main() {}
| 29.733333 | 68 | 0.609865 |
7a86379786ff57a49791e4e780a8a15bc6a074af | 1,940 | use log::*;
use std::io::Write;
use crate::cli;
use env_logger::fmt::{Color, Style, StyledValue};
// Change style based on the message log level
fn colored_level<'a>(style: &'a mut Style, level: Level) -> StyledValue<'a, &'static str> {
match level {
Level::Trace => style.set_color(Color::Magenta).value("TRACE"),
Level::Debug => style.set_color(Color::Blue).value("DEBUG"),
Level::Info => style.set_color(Color::Green).value("INFO "),
Level::Warn => style.set_color(Color::Yellow).value("WARN "),
Level::Error => style.set_color(Color::Red).value("ERROR"),
}
}
// Start the logger; this should be called once, inside `main`
pub fn init() {
let default_filter = if cli::manager::is_verbose() {
"debug"
} else {
"info"
};
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(default_filter))
.format(|buf, record| {
let mut style = buf.style();
let level = colored_level(&mut style, record.level());
let mut style = buf.style();
let message = style.set_bold(true).value(record.args());
writeln!(
buf,
"{} {} {}:{}: {}",
level,
chrono::Local::now().format("%H:%M:%S.%3f"),
record.file().unwrap_or("unknown"),
record.line().unwrap_or(0),
message,
)
})
.init();
info!(
"{}, version: {}-{}, build date: {}",
env!("CARGO_PKG_NAME"),
env!("CARGO_PKG_VERSION"),
env!("VERGEN_GIT_SHA_SHORT"),
env!("VERGEN_BUILD_DATE")
);
info!(
"Starting at {}",
chrono::Local::now().format("%Y-%m-%dT%H:%M:%S"),
);
debug!("Command line call: {}", cli::manager::command_line_string());
debug!(
"Command line input struct call: {:#?}",
cli::manager::matches().args
);
}
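// Illustrative usage sketch (the module path is an assumption, not from this file):
// call `init()` once near the top of `main`, before anything else logs, and then
// use the standard `log` macros as usual.
//
//     fn main() {
//         crate::logger::init(); // hypothetical path; depends on how this module is mounted
//         log::info!("service started");
//     }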
| 32.333333 | 95 | 0.53299 |
e62eadaa4387dd962418d043c4ecbae4a58d3e73 | 40,289 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// The Rust HIR.
pub use self::BindingMode::*;
pub use self::BinOp_::*;
pub use self::BlockCheckMode::*;
pub use self::CaptureClause::*;
pub use self::Decl_::*;
pub use self::ExplicitSelf_::*;
pub use self::Expr_::*;
pub use self::FunctionRetTy::*;
pub use self::ForeignItem_::*;
pub use self::ImplItem_::*;
pub use self::Item_::*;
pub use self::Mutability::*;
pub use self::Pat_::*;
pub use self::PathListItem_::*;
pub use self::PatWildKind::*;
pub use self::PrimTy::*;
pub use self::Stmt_::*;
pub use self::StructFieldKind::*;
pub use self::TraitItem_::*;
pub use self::Ty_::*;
pub use self::TyParamBound::*;
pub use self::UnOp::*;
pub use self::UnsafeSource::*;
pub use self::ViewPath_::*;
pub use self::Visibility::*;
pub use self::PathParameters::*;
use syntax::codemap::{self, Span, Spanned, DUMMY_SP, ExpnId};
use syntax::abi::Abi;
use syntax::ast::{Name, Ident, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, CrateConfig};
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
use print::pprust;
use util;
use std::fmt;
use std::{iter, option, slice};
use serialize::{Encodable, Encoder, Decoder};
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
pub span: Span,
pub name: Name,
}
impl fmt::Debug for Lifetime {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"lifetime({}: {})",
self.id,
pprust::lifetime_to_string(self))
}
}
/// A lifetime definition, eg `'a: 'b+'c+'d`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct LifetimeDef {
pub lifetime: Lifetime,
pub bounds: Vec<Lifetime>,
}
/// A "Path" is essentially Rust's notion of a name; for instance:
/// `std::cmp::PartialEq`. It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Path {
pub span: Span,
/// A `::foo` path, is relative to the crate root rather than current
/// module (like paths in an import).
pub global: bool,
/// The segments in the path: the things separated by `::`.
pub segments: Vec<PathSegment>,
}
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})", pprust::path_to_string(self))
}
}
impl fmt::Display for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", pprust::path_to_string(self))
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of
/// types.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
pub identifier: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
/// this is more than just simple syntactic sugar; the use of
/// parens affects the region binding rules, so we preserve the
/// distinction.
pub parameters: PathParameters,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PathParameters {
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
AngleBracketedParameters(AngleBracketedParameterData),
/// The `(A,B)` and `C` in `Foo(A,B) -> C`
ParenthesizedParameters(ParenthesizedParameterData),
}
impl PathParameters {
pub fn none() -> PathParameters {
AngleBracketedParameters(AngleBracketedParameterData {
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
bindings: OwnedSlice::empty(),
})
}
pub fn is_empty(&self) -> bool {
match *self {
AngleBracketedParameters(ref data) => data.is_empty(),
// Even if the user supplied no types, something like
// `X()` is equivalent to `X<(),()>`.
ParenthesizedParameters(..) => false,
}
}
pub fn has_lifetimes(&self) -> bool {
match *self {
AngleBracketedParameters(ref data) => !data.lifetimes.is_empty(),
ParenthesizedParameters(_) => false,
}
}
pub fn has_types(&self) -> bool {
match *self {
AngleBracketedParameters(ref data) => !data.types.is_empty(),
ParenthesizedParameters(..) => true,
}
}
/// Returns the types that the user wrote. Note that these do not necessarily map to the type
/// parameters in the parenthesized case.
pub fn types(&self) -> Vec<&P<Ty>> {
match *self {
AngleBracketedParameters(ref data) => {
data.types.iter().collect()
}
ParenthesizedParameters(ref data) => {
data.inputs
.iter()
.chain(data.output.iter())
.collect()
}
}
}
pub fn lifetimes(&self) -> Vec<&Lifetime> {
match *self {
AngleBracketedParameters(ref data) => {
data.lifetimes.iter().collect()
}
ParenthesizedParameters(_) => {
Vec::new()
}
}
}
pub fn bindings(&self) -> Vec<&P<TypeBinding>> {
match *self {
AngleBracketedParameters(ref data) => {
data.bindings.iter().collect()
}
ParenthesizedParameters(_) => {
Vec::new()
}
}
}
}
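// Illustrative mapping (not from the original source): a path segment like
// `Iterator<Item = u32>` carries `AngleBracketedParameters` with one type binding,
// while `Fn(u32) -> bool` carries `ParenthesizedParameters` with one input type
// and an output type.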
/// A path like `Foo<'a, T>`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct AngleBracketedParameterData {
/// The lifetime parameters for this path segment.
pub lifetimes: Vec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: OwnedSlice<P<Ty>>,
/// Bindings (equality constraints) on associated types, if present.
/// E.g., `Foo<A=Bar>`.
pub bindings: OwnedSlice<P<TypeBinding>>,
}
impl AngleBracketedParameterData {
fn is_empty(&self) -> bool {
self.lifetimes.is_empty() && self.types.is_empty() && self.bindings.is_empty()
}
}
/// A path like `Foo(A,B) -> C`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ParenthesizedParameterData {
/// Overall span
pub span: Span,
/// `(A,B)`
pub inputs: Vec<P<Ty>>,
/// `C`
pub output: Option<P<Ty>>,
}
/// The AST represents all type param bounds as types.
/// typeck::collect::compute_bounds matches these against
/// the "special" built-in traits (see middle::lang_items) and
/// detects Copy, Send and Sync.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TyParamBound {
TraitTyParamBound(PolyTraitRef, TraitBoundModifier),
RegionTyParamBound(Lifetime),
}
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitBoundModifier {
None,
Maybe,
}
pub type TyParamBounds = OwnedSlice<TyParamBound>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TyParam {
pub name: Name,
pub id: NodeId,
pub bounds: TyParamBounds,
pub default: Option<P<Ty>>,
pub span: Span,
}
/// Represents lifetimes and type parameters attached to a declaration
/// of a function, enum, trait, etc.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Generics {
pub lifetimes: Vec<LifetimeDef>,
pub ty_params: OwnedSlice<TyParam>,
pub where_clause: WhereClause,
}
impl Generics {
pub fn is_lt_parameterized(&self) -> bool {
!self.lifetimes.is_empty()
}
pub fn is_type_parameterized(&self) -> bool {
!self.ty_params.is_empty()
}
pub fn is_parameterized(&self) -> bool {
self.is_lt_parameterized() || self.is_type_parameterized()
}
}
/// A `where` clause in a definition
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereClause {
pub id: NodeId,
pub predicates: Vec<WherePredicate>,
}
/// A single predicate in a `where` clause
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum WherePredicate {
/// A type binding, eg `for<'c> Foo: Send+Clone+'c`
BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate),
/// An equality predicate (unsupported)
EqPredicate(WhereEqPredicate),
}
/// A type bound, eg `for<'c> Foo: Send+Clone+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
/// Any lifetimes from a `for` binding
pub bound_lifetimes: Vec<LifetimeDef>,
/// The type being bounded
pub bounded_ty: P<Ty>,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub bounds: OwnedSlice<TyParamBound>,
}
/// A lifetime predicate, e.g. `'a: 'b+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereRegionPredicate {
pub span: Span,
pub lifetime: Lifetime,
pub bounds: Vec<Lifetime>,
}
/// An equality predicate (unsupported), e.g. `T=int`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereEqPredicate {
pub id: NodeId,
pub span: Span,
pub path: Path,
pub ty: P<Ty>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Crate {
pub module: Mod,
pub attrs: Vec<Attribute>,
pub config: CrateConfig,
pub span: Span,
pub exported_macros: Vec<MacroDef>,
}
/// A macro definition, in this crate or imported from another.
///
/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MacroDef {
pub name: Name,
pub attrs: Vec<Attribute>,
pub id: NodeId,
pub span: Span,
pub imported_from: Option<Name>,
pub export: bool,
pub use_locally: bool,
pub allow_internal_unstable: bool,
pub body: Vec<TokenTree>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Block {
/// Statements in a block
pub stmts: Vec<P<Stmt>>,
/// An expression at the end of the block
/// without a semicolon, if any
pub expr: Option<P<Expr>>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
pub rules: BlockCheckMode,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Pat {
pub id: NodeId,
pub node: Pat_,
pub span: Span,
}
impl fmt::Debug for Pat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "pat({}: {})", self.id, pprust::pat_to_string(self))
}
}
/// A single field in a struct pattern
///
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
/// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except `is_shorthand` is true.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FieldPat {
/// The identifier for the field
pub name: Name,
/// The pattern the field is destructured to
pub pat: P<Pat>,
pub is_shorthand: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BindingMode {
BindByRef(Mutability),
BindByValue(Mutability),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PatWildKind {
/// Represents the wildcard pattern `_`
PatWildSingle,
/// Represents the wildcard pattern `..`
PatWildMulti,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Pat_ {
/// Represents a wildcard pattern (either `_` or `..`)
PatWild(PatWildKind),
/// A PatIdent may either be a new bound variable,
/// or a nullary enum (in which case the third field
/// is None).
///
/// In the nullary enum case, the parser can't determine
/// which it is. The resolver determines this, and
/// records this pattern's NodeId in an auxiliary
/// set (of "PatIdents that refer to nullary enums")
PatIdent(BindingMode, Spanned<Ident>, Option<P<Pat>>),
/// "None" means a * pattern where we don't bind the fields to names.
PatEnum(Path, Option<Vec<P<Pat>>>),
/// An associated const named using the qualified path `<T>::CONST` or
/// `<T as Trait>::CONST`. Associated consts from inherent impls can be
/// referred to as simply `T::CONST`, in which case they will end up as
/// PatEnum, and the resolver will have to sort that out.
PatQPath(QSelf, Path),
/// Destructuring of a struct, e.g. `Foo {x, y, ..}`
/// The `bool` is `true` in the presence of a `..`
PatStruct(Path, Vec<Spanned<FieldPat>>, bool),
/// A tuple pattern `(a, b)`
PatTup(Vec<P<Pat>>),
/// A `box` pattern
PatBox(P<Pat>),
/// A reference pattern, e.g. `&mut (a, b)`
PatRegion(P<Pat>, Mutability),
/// A literal
PatLit(P<Expr>),
/// A range pattern, e.g. `1...2`
PatRange(P<Expr>, P<Expr>),
/// [a, b, ..i, y, z] is represented as:
/// PatVec(box [a, b], Some(i), box [y, z])
PatVec(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
}
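// Illustrative mapping (not from the original source): the pattern `&(x, _)` is
// represented as `PatRegion(PatTup([PatIdent(BindByValue(MutImmutable), x, None),
// PatWild(PatWildSingle)]), MutImmutable)`, modulo the `P<Pat>`/span wrappers.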
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum Mutability {
MutMutable,
MutImmutable,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BinOp_ {
/// The `+` operator (addition)
BiAdd,
/// The `-` operator (subtraction)
BiSub,
/// The `*` operator (multiplication)
BiMul,
/// The `/` operator (division)
BiDiv,
/// The `%` operator (modulus)
BiRem,
/// The `&&` operator (logical and)
BiAnd,
/// The `||` operator (logical or)
BiOr,
/// The `^` operator (bitwise xor)
BiBitXor,
/// The `&` operator (bitwise and)
BiBitAnd,
/// The `|` operator (bitwise or)
BiBitOr,
/// The `<<` operator (shift left)
BiShl,
/// The `>>` operator (shift right)
BiShr,
/// The `==` operator (equality)
BiEq,
/// The `<` operator (less than)
BiLt,
/// The `<=` operator (less than or equal to)
BiLe,
/// The `!=` operator (not equal to)
BiNe,
/// The `>=` operator (greater than or equal to)
BiGe,
/// The `>` operator (greater than)
BiGt,
}
pub type BinOp = Spanned<BinOp_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnOp {
/// The `*` operator for dereferencing
UnDeref,
/// The `!` operator for logical inversion
UnNot,
/// The `-` operator for negation
UnNeg,
}
/// A statement
pub type Stmt = Spanned<Stmt_>;
impl fmt::Debug for Stmt_ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Sadness.
let spanned = codemap::dummy_spanned(self.clone());
write!(f,
"stmt({}: {})",
util::stmt_id(&spanned),
pprust::stmt_to_string(&spanned))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum Stmt_ {
/// Could be an item or a local (let) binding:
StmtDecl(P<Decl>, NodeId),
/// Expr without trailing semi-colon (must have unit type):
StmtExpr(P<Expr>, NodeId),
/// Expr with trailing semi-colon (may have any type):
StmtSemi(P<Expr>, NodeId),
}
// FIXME (pending discussion of #1697, #2178...): local should really be
// a refinement on pat.
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Local {
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
/// Initializer expression to set the value, if any
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
}
pub type Decl = Spanned<Decl_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Decl_ {
/// A local (let) binding:
DeclLocal(P<Local>),
/// An item binding:
DeclItem(P<Item>),
}
/// represents one arm of a 'match'
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arm {
pub attrs: Vec<Attribute>,
pub pats: Vec<P<Pat>>,
pub guard: Option<P<Expr>>,
pub body: P<Expr>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Field {
pub name: Spanned<Name>,
pub expr: P<Expr>,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BlockCheckMode {
DefaultBlock,
UnsafeBlock(UnsafeSource),
PushUnsafeBlock(UnsafeSource),
PopUnsafeBlock(UnsafeSource),
// Within this block (but outside a PopUnstableBlock), we suspend checking of stability.
PushUnstableBlock,
PopUnstableBlock,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnsafeSource {
CompilerGenerated,
UserProvided,
}
/// An expression
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Expr {
pub id: NodeId,
pub node: Expr_,
pub span: Span,
}
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "expr({}: {})", self.id, pprust::expr_to_string(self))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Expr_ {
/// A `box x` expression.
ExprBox(P<Expr>),
/// An array (`[a, b, c, d]`)
ExprVec(Vec<P<Expr>>),
/// A function call
///
/// The first field resolves to the function itself,
/// and the second field is the list of arguments
ExprCall(P<Expr>, Vec<P<Expr>>),
/// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
///
/// The `Spanned<Name>` is the identifier for the method name.
/// The vector of `Ty`s are the ascripted type parameters for the method
/// (within the angle brackets).
///
/// The first element of the vector of `Expr`s is the expression that evaluates
/// to the object on which the method is being called on (the receiver),
/// and the remaining elements are the rest of the arguments.
///
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
ExprMethodCall(Spanned<Name>, Vec<P<Ty>>, Vec<P<Expr>>),
/// A tuple (`(a, b, c ,d)`)
ExprTup(Vec<P<Expr>>),
/// A binary operation (For example: `a + b`, `a * b`)
ExprBinary(BinOp, P<Expr>, P<Expr>),
/// A unary operation (For example: `!x`, `*x`)
ExprUnary(UnOp, P<Expr>),
/// A literal (For example: `1u8`, `"foo"`)
ExprLit(P<Lit>),
/// A cast (`foo as f64`)
ExprCast(P<Expr>, P<Ty>),
/// An `if` block, with an optional else block
///
/// `if expr { block } else { expr }`
ExprIf(P<Expr>, P<Block>, Option<P<Expr>>),
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Ident>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
ExprMatch(P<Expr>, Vec<Arm>, MatchSource),
/// A closure (for example, `move |a, b, c| {a + b + c}`)
ExprClosure(CaptureClause, P<FnDecl>, P<Block>),
/// A block (`{ ... }`)
ExprBlock(P<Block>),
/// An assignment (`a = foo()`)
ExprAssign(P<Expr>, P<Expr>),
/// An assignment with an operator
///
/// For example, `a += 1`.
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
/// Access of a named struct field (`obj.foo`)
ExprField(P<Expr>, Spanned<Name>),
/// Access of an unnamed field of a struct or tuple-struct
///
/// For example, `foo.0`.
ExprTupField(P<Expr>, Spanned<usize>),
/// An indexing operation (`foo[2]`)
ExprIndex(P<Expr>, P<Expr>),
/// A range (`1..2`, `1..`, or `..2`)
ExprRange(Option<P<Expr>>, Option<P<Expr>>),
/// Variable reference, possibly containing `::` and/or type
/// parameters, e.g. foo::bar::<baz>.
///
/// Optionally "qualified",
/// e.g. `<Vec<T> as SomeTrait>::SomeType`.
ExprPath(Option<QSelf>, Path),
/// A referencing operation (`&a` or `&mut a`)
ExprAddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break
ExprBreak(Option<Spanned<Ident>>),
/// A `continue`, with an optional label
ExprAgain(Option<Spanned<Ident>>),
/// A `return`, with an optional value to be returned
ExprRet(Option<P<Expr>>),
/// Output of the `asm!()` macro
ExprInlineAsm(InlineAsm),
/// A struct literal expression.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
ExprStruct(Path, Vec<Field>, Option<P<Expr>>),
/// A vector literal constructed from one repeated element.
///
/// For example, `[1u8; 5]`. The first expression is the element
/// to be repeated; the second is the number of times to repeat it.
ExprRepeat(P<Expr>, P<Expr>),
}
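// Illustrative mapping (not from the original source): `a + b * c` is represented
// as `ExprBinary(BiAdd, a, ExprBinary(BiMul, b, c))`, and `foo(x)[0]` as
// `ExprIndex(ExprCall(foo, [x]), ExprLit(0))`, modulo the `P<Expr>`/span wrappers.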
/// The explicit Self type in a "qualified path". The actual
/// path, including the trait and the associated item, is stored
/// separately. `position` represents the index of the associated
/// item qualified with this Self type.
///
/// <Vec<T> as a::b::Trait>::AssociatedItem
/// ^~~~~ ~~~~~~~~~~~~~~^
/// ty position = 3
///
/// <Vec<T>>::AssociatedItem
/// ^~~~~ ^
/// ty position = 0
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct QSelf {
pub ty: P<Ty>,
pub position: usize,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum MatchSource {
Normal,
IfLetDesugar {
contains_else_clause: bool,
},
WhileLetDesugar,
ForLoopDesugar,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum CaptureClause {
CaptureByValue,
CaptureByRef,
}
// NB: If you change this, you'll probably want to change the corresponding
// type structure in middle/ty.rs as well.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MutTy {
pub ty: P<Ty>,
pub mutbl: Mutability,
}
/// Represents a method's signature in a trait declaration,
/// or in an implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MethodSig {
pub unsafety: Unsafety,
pub constness: Constness,
pub abi: Abi,
pub decl: P<FnDecl>,
pub generics: Generics,
pub explicit_self: ExplicitSelf,
}
/// Represents a method declaration in a trait declaration, possibly including
/// a default implementation A trait method is either required (meaning it
/// doesn't have an implementation, just a signature) or provided (meaning it
/// has a default implementation).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItem {
pub id: NodeId,
pub name: Name,
pub attrs: Vec<Attribute>,
pub node: TraitItem_,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitItem_ {
ConstTraitItem(P<Ty>, Option<P<Expr>>),
MethodTraitItem(MethodSig, Option<P<Block>>),
TypeTraitItem(TyParamBounds, Option<P<Ty>>),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItem {
pub id: NodeId,
pub name: Name,
pub vis: Visibility,
pub attrs: Vec<Attribute>,
pub node: ImplItem_,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ImplItem_ {
ConstImplItem(P<Ty>, P<Expr>),
MethodImplItem(MethodSig, P<Block>),
TypeImplItem(P<Ty>),
}
// Bind a type to an associated type: `A=Foo`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeBinding {
pub id: NodeId,
pub name: Name,
pub ty: P<Ty>,
pub span: Span,
}
// NB PartialEq method appears below.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Ty {
pub id: NodeId,
pub node: Ty_,
pub span: Span,
}
impl fmt::Debug for Ty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "type({})", pprust::ty_to_string(self))
}
}
/// Not represented directly in the AST, referred to by name through a ty_path.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PrimTy {
TyInt(IntTy),
TyUint(UintTy),
TyFloat(FloatTy),
TyStr,
TyBool,
TyChar,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Abi,
pub lifetimes: Vec<LifetimeDef>,
pub decl: P<FnDecl>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The different kinds of types recognized by the compiler
pub enum Ty_ {
TyVec(P<Ty>),
/// A fixed length array (`[T; n]`)
TyFixedLengthVec(P<Ty>, P<Expr>),
/// A raw pointer (`*const T` or `*mut T`)
TyPtr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
TyRptr(Option<Lifetime>, MutTy),
/// A bare function (e.g. `fn(usize) -> bool`)
TyBareFn(P<BareFnTy>),
/// A tuple (`(A, B, C, D,...)`)
TyTup(Vec<P<Ty>>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`.
///
/// Type parameters are stored in the Path itself
TyPath(Option<QSelf>, Path),
/// Something like `A+B`. Note that `B` must always be a path.
TyObjectSum(P<Ty>, TyParamBounds),
/// A type like `for<'a> Foo<&'a Bar>`
TyPolyTraitRef(TyParamBounds),
/// No-op; kept solely so that we can pretty-print faithfully
TyParen(P<Ty>),
/// Unused for now
TyTypeof(P<Expr>),
/// TyInfer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
TyInfer,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct InlineAsm {
pub asm: InternedString,
pub asm_str_style: StrStyle,
pub outputs: Vec<(InternedString, P<Expr>, bool)>,
pub inputs: Vec<(InternedString, P<Expr>)>,
pub clobbers: Vec<InternedString>,
pub volatile: bool,
pub alignstack: bool,
pub dialect: AsmDialect,
pub expn_id: ExpnId,
}
/// represents an argument in a function header
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arg {
pub ty: P<Ty>,
pub pat: P<Pat>,
pub id: NodeId,
}
impl Arg {
pub fn new_self(span: Span, mutability: Mutability, self_ident: Ident) -> Arg {
let path = Spanned {
span: span,
node: self_ident,
};
Arg {
// HACK(eddyb) fake type for the self argument.
ty: P(Ty {
id: DUMMY_NODE_ID,
node: TyInfer,
span: DUMMY_SP,
}),
pat: P(Pat {
id: DUMMY_NODE_ID,
node: PatIdent(BindByValue(mutability), path, None),
span: span,
}),
id: DUMMY_NODE_ID,
}
}
}
/// Represents the header (not the body) of a function declaration
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FnDecl {
pub inputs: Vec<Arg>,
pub output: FunctionRetTy,
pub variadic: bool,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Unsafety {
Unsafe,
Normal,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Constness {
Const,
NotConst,
}
impl fmt::Display for Unsafety {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(match *self {
Unsafety::Normal => "normal",
Unsafety::Unsafe => "unsafe",
},
f)
}
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum ImplPolarity {
/// `impl Trait for Type`
Positive,
/// `impl !Trait for Type`
Negative,
}
impl fmt::Debug for ImplPolarity {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ImplPolarity::Positive => "positive".fmt(f),
ImplPolarity::Negative => "negative".fmt(f),
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum FunctionRetTy {
    /// Functions with return type `!` that always
/// raise an error or exit (i.e. never return to the caller)
NoReturn(Span),
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
DefaultReturn(Span),
/// Everything else
Return(P<Ty>),
}
impl FunctionRetTy {
pub fn span(&self) -> Span {
match *self {
NoReturn(span) => span,
DefaultReturn(span) => span,
Return(ref ty) => ty.span,
}
}
}
/// Represents the kind of 'self' associated with a method
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ExplicitSelf_ {
/// No self
SelfStatic,
/// `self`
SelfValue(Name),
/// `&'lt self`, `&'lt mut self`
SelfRegion(Option<Lifetime>, Mutability, Name),
/// `self: TYPE`
SelfExplicit(P<Ty>, Name),
}
pub type ExplicitSelf = Spanned<ExplicitSelf_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Mod {
/// A span from the first token past `{` to the last token until `}`.
/// For `mod foo;`, the inner span ranges from the first token
/// to the last token in the external file.
pub inner: Span,
pub items: Vec<P<Item>>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignMod {
pub abi: Abi,
pub items: Vec<P<ForeignItem>>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct EnumDef {
pub variants: Vec<P<Variant>>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Variant_ {
pub name: Name,
pub attrs: Vec<Attribute>,
pub data: P<VariantData>,
/// Explicit discriminant, eg `Foo = 1`
pub disr_expr: Option<P<Expr>>,
}
pub type Variant = Spanned<Variant_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PathListItem_ {
PathListIdent {
name: Name,
/// renamed in list, eg `use foo::{bar as baz};`
rename: Option<Name>,
id: NodeId,
},
PathListMod {
/// renamed in list, eg `use foo::{self as baz};`
rename: Option<Name>,
id: NodeId,
},
}
impl PathListItem_ {
pub fn id(&self) -> NodeId {
match *self {
PathListIdent { id, .. } | PathListMod { id, .. } => id,
}
}
pub fn name(&self) -> Option<Name> {
match *self {
PathListIdent { name, .. } => Some(name),
PathListMod { .. } => None,
}
}
pub fn rename(&self) -> Option<Name> {
match *self {
PathListIdent { rename, .. } | PathListMod { rename, .. } => rename,
}
}
}
pub type PathListItem = Spanned<PathListItem_>;
pub type ViewPath = Spanned<ViewPath_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ViewPath_ {
/// `foo::bar::baz as quux`
///
/// or just
///
/// `foo::bar::baz` (with `as baz` implicitly on the right)
ViewPathSimple(Name, Path),
/// `foo::bar::*`
ViewPathGlob(Path),
/// `foo::bar::{a,b,c}`
ViewPathList(Path, Vec<PathListItem>),
}
/// TraitRef's appear in impls.
///
/// resolve maps each TraitRef's ref_id to its defining trait; that's all
/// that the ref_id is for. The impl_id maps to the "self type" of this impl.
/// If this impl is an ItemImpl, the impl_id is redundant (it could be the
/// same as the impl's node id).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitRef {
pub path: Path,
pub ref_id: NodeId,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_lifetimes: Vec<LifetimeDef>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: TraitRef,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum Visibility {
Public,
Inherited,
}
impl Visibility {
pub fn inherit_from(&self, parent_visibility: Visibility) -> Visibility {
match self {
&Inherited => parent_visibility,
&Public => *self,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct StructField_ {
pub kind: StructFieldKind,
pub id: NodeId,
pub ty: P<Ty>,
pub attrs: Vec<Attribute>,
}
impl StructField_ {
pub fn name(&self) -> Option<Name> {
match self.kind {
NamedField(name, _) => Some(name),
UnnamedField(_) => None,
}
}
}
pub type StructField = Spanned<StructField_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum StructFieldKind {
NamedField(Name, Visibility),
/// Element of a tuple-like struct
UnnamedField(Visibility),
}
impl StructFieldKind {
pub fn is_unnamed(&self) -> bool {
match *self {
UnnamedField(..) => true,
NamedField(..) => false,
}
}
}
/// Fields and Ids of enum variants and structs
///
/// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all
/// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants).
/// One shared Id can be successfully used for these two purposes.
/// Id of the whole enum lives in `Item`.
///
/// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually
/// used for `Struct`-structs (but is still present). Structures don't have an analogue of "Id of
/// the variant itself" from enum variants.
/// Id of the whole struct lives in `Item`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum VariantData {
Struct(Vec<StructField>, NodeId),
Tuple(Vec<StructField>, NodeId),
Unit(NodeId),
}
pub type FieldIter<'a> = iter::FlatMap<option::IntoIter<&'a Vec<StructField>>,
slice::Iter<'a, StructField>,
fn(&Vec<StructField>) -> slice::Iter<StructField>>;
impl VariantData {
pub fn fields(&self) -> FieldIter {
fn vec_iter<T>(v: &Vec<T>) -> slice::Iter<T> { v.iter() }
match *self {
VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => Some(fields),
_ => None,
}.into_iter().flat_map(vec_iter)
}
pub fn id(&self) -> NodeId {
match *self {
VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id
}
}
pub fn is_struct(&self) -> bool {
if let VariantData::Struct(..) = *self { true } else { false }
}
pub fn is_tuple(&self) -> bool {
if let VariantData::Tuple(..) = *self { true } else { false }
}
pub fn is_unit(&self) -> bool {
if let VariantData::Unit(..) = *self { true } else { false }
}
}
/*
FIXME (#3300): Should allow items to be anonymous. Right now
we just use dummy names for anon items.
*/
/// An item
///
/// The name might be a dummy name in case of anonymous items
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Item {
pub name: Name,
pub attrs: Vec<Attribute>,
pub id: NodeId,
pub node: Item_,
pub vis: Visibility,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Item_ {
    /// An `extern crate` item, with optional original crate name,
///
/// e.g. `extern crate foo` or `extern crate foo_bar as foo`
ItemExternCrate(Option<Name>),
/// A `use` or `pub use` item
ItemUse(P<ViewPath>),
/// A `static` item
ItemStatic(P<Ty>, Mutability, P<Expr>),
/// A `const` item
ItemConst(P<Ty>, P<Expr>),
/// A function declaration
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, P<Block>),
/// A module
ItemMod(Mod),
/// An external module
ItemForeignMod(ForeignMod),
/// A type alias, e.g. `type Foo = Bar<u8>`
ItemTy(P<Ty>, Generics),
/// An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}`
ItemEnum(EnumDef, Generics),
/// A struct definition, e.g. `struct Foo<A> {x: A}`
ItemStruct(P<VariantData>, Generics),
/// Represents a Trait Declaration
ItemTrait(Unsafety, Generics, TyParamBounds, Vec<P<TraitItem>>),
    /// Default trait implementations
    ///
    /// `impl Trait for .. {}`
ItemDefaultImpl(Unsafety, TraitRef),
/// An implementation, eg `impl<A> Trait for Foo { .. }`
ItemImpl(Unsafety,
ImplPolarity,
Generics,
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
Vec<P<ImplItem>>),
}
impl Item_ {
pub fn descriptive_variant(&self) -> &str {
match *self {
ItemExternCrate(..) => "extern crate",
ItemUse(..) => "use",
ItemStatic(..) => "static item",
ItemConst(..) => "constant item",
ItemFn(..) => "function",
ItemMod(..) => "module",
ItemForeignMod(..) => "foreign module",
ItemTy(..) => "type alias",
ItemEnum(..) => "enum",
ItemStruct(..) => "struct",
ItemTrait(..) => "trait",
ItemImpl(..) |
ItemDefaultImpl(..) => "item",
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignItem {
pub name: Name,
pub attrs: Vec<Attribute>,
pub node: ForeignItem_,
pub id: NodeId,
pub span: Span,
pub vis: Visibility,
}
/// An item within an `extern` block
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ForeignItem_ {
/// A foreign function
ForeignItemFn(P<FnDecl>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
ForeignItemStatic(P<Ty>, bool),
}
impl ForeignItem_ {
pub fn descriptive_variant(&self) -> &str {
match *self {
ForeignItemFn(..) => "foreign function",
ForeignItemStatic(..) => "foreign static item",
}
}
}
| 30.614742 | 99 | 0.619946 |
7a28c93ecfcaa13fa2d107731fe2155211a08c54 | 4,906 | #![no_main]
extern crate libc;
extern crate rand;
use libfuzzer_sys::fuzz_target;
use libfuzzer_sys::fuzz_mutate;
use std::process::Command;
use std::ffi::CString;
use std::fs::{File, remove_file};
use std::fs::OpenOptions;
use std::io::prelude::*;
use rand::RngCore;
use evmfuzz::execute_proto;
use protobuf::Message;
static mut FIRST_TIME: bool = true;
static mut WRITE_TO: String = String::new();
static mut READ_FROM: String = String::new();
fn get_absolute_path_string(path_from_workspace_root: String) -> String {
let mut cur_dir = std::env::current_dir().unwrap();
cur_dir.pop();
cur_dir.pop();
println!("{:?}", cur_dir);
cur_dir.push(std::path::PathBuf::from(path_from_workspace_root));
return cur_dir.to_str().unwrap().into();
}
fn run_geth(data: &[u8]) -> Vec<u8> {
unsafe {
        let mut write_to_file = OpenOptions::new().write(true).open(WRITE_TO.clone()).unwrap();
        write_to_file.write_all(data).unwrap();
}
let mut response = Vec::new();
unsafe {
let mut readfrom_file = OpenOptions::new().read(true).open(READ_FROM.clone()).unwrap();
readfrom_file.read_to_end(&mut response).unwrap();
}
return response;
}
fn fuzz_main(data: &[u8]) {
unsafe {
        if FIRST_TIME {
WRITE_TO = get_absolute_path_string(format!("fifos/{}", rand::thread_rng().next_u64().to_string()));
READ_FROM = get_absolute_path_string(format!("fifos/{}", rand::thread_rng().next_u64().to_string()));
libc::mkfifo(CString::new(WRITE_TO.clone()).unwrap().as_ptr(), 0o644);
libc::mkfifo(CString::new(READ_FROM.clone()).unwrap().as_ptr(), 0o644);
Command::new(get_absolute_path_string("geth/src/github.com/ethereum/go-ethereum/build/bin/evm".into()))
.arg(WRITE_TO.as_str())
.arg(READ_FROM.as_str())
.spawn()
.unwrap();
FIRST_TIME = false;
}
}
match evmfuzz::convert_to_proto(data) {
Some(proto) => {
let parity_results = execute_proto(&proto);
let geth_result_bytes = run_geth(data);
let geth_results = evmfuzz::get_fuzz_result(geth_result_bytes.as_slice());
assert_eq!(parity_results.len(), geth_results.get_roots().len());
assert_eq!(parity_results.len(), geth_results.get_dumps().len());
for i in 0..parity_results.len() {
let geth_result = geth_results.get_roots()[i].clone();
let parity_result = parity_results[i].clone();
if geth_result != parity_result.0 {
let mut proto_for_debug = proto.clone();
proto_for_debug.set_is_debug_mode(true);
let parity_debug_results = execute_proto(&proto_for_debug);
let bug_tx = evmfuzz::get_nth_tx(&proto, i);
let geth_debug_results_bytes =
run_geth(proto_for_debug.write_to_bytes().unwrap().as_slice());
let geth_debug_results_proto = evmfuzz::get_fuzz_result(geth_debug_results_bytes.as_slice());
let geth_debug_results = geth_debug_results_proto.get_dumps().clone();
if i == 0 {
println!("FIRST TX is the problem (very low chance but hey it happened...)");
println!("Bug Tx: {:?}", bug_tx);
} else {
let parity_state_before_bug = parity_debug_results[i-1].clone().1.unwrap();
let parity_state_after_bug = parity_debug_results[i].clone().1.unwrap();
let geth_state_before_bug = geth_debug_results[i-1].clone();
let geth_state_after_bug = geth_debug_results[i].clone();
println!("===================BUG SUMMARY=================");
println!("Bug Tx: {:?}", bug_tx);
println!("Parity before tx: {:?}", parity_state_before_bug);
println!("Parity after tx: {:?}", parity_state_after_bug);
println!("Geth before tx: {}", geth_state_before_bug);
println!("Geth after tx: {}", geth_state_after_bug);
println!("===================BUGs SUMMARY=================");
println!("Geth trace {}", geth_debug_results_proto.get_traces()[i].clone());
println!("Parity CALL/CREATE trace");
for t in parity_debug_results[i].clone().2 {
println!("{:?}", t);
}
}
}
assert_eq!(geth_result, parity_result.0);
}
},
None => (),
}
}
fuzz_target!(|data: &[u8]| {
fuzz_main(data);
});
fn fuzz_mutate(bytes: &mut Vec<u8>, max_size: usize, seed: u32) {
evmfuzz::do_fuzz_mutate(bytes, max_size, seed);
}
fuzz_mutate!(|bytes: &mut Vec<u8>, max_size: usize, seed: u32| {
fuzz_mutate(bytes, max_size, seed);
});
| 32.926174 | 113 | 0.584794 |
b9fb5f4a3114ac6a64d3f14303efcb81fcbfcdfe | 2,254 | use std::{collections::BTreeMap, str::FromStr};
use serde::{Deserialize, Serialize};
use chain_core::init::{
address::RedeemAddress,
coin::Coin,
config::{JailingParameters, RewardsParameters, SlashRatio, SlashingParameters},
};
use chain_core::state::account::{ConfidentialInit, ValidatorName, ValidatorSecurityContact};
use chain_core::state::tendermint::TendermintValidatorPubKey;
#[derive(Deserialize, Debug)]
pub struct GenesisDevConfig {
pub distribution: BTreeMap<RedeemAddress, Coin>,
pub required_council_node_stake: Coin,
pub jailing_config: JailingParameters,
pub slashing_config: SlashingParameters,
pub rewards_config: RewardsParameters,
pub initial_fee_policy: InitialFeePolicy,
pub council_nodes: BTreeMap<
RedeemAddress,
(
ValidatorName,
ValidatorSecurityContact,
TendermintValidatorPubKey,
ConfidentialInit,
),
>,
}
impl GenesisDevConfig {
pub fn new(expansion_cap: Coin) -> Self {
GenesisDevConfig {
distribution: BTreeMap::new(),
required_council_node_stake: Coin::new(1_250_000_000_000_000_000).unwrap(),
jailing_config: JailingParameters {
block_signing_window: 100,
missed_block_threshold: 50,
},
slashing_config: SlashingParameters {
liveness_slash_percent: SlashRatio::from_str("0.1").unwrap(),
byzantine_slash_percent: SlashRatio::from_str("0.2").unwrap(),
},
rewards_config: RewardsParameters {
monetary_expansion_cap: expansion_cap,
reward_period_seconds: 24 * 60 * 60,
monetary_expansion_r0: "0.45".parse().unwrap(),
monetary_expansion_tau: 1_4500_0000_0000_0000,
monetary_expansion_decay: 999_860,
},
initial_fee_policy: InitialFeePolicy {
base_fee: "1.1".to_string(),
per_byte_fee: "1.25".to_string(),
},
council_nodes: BTreeMap::new(),
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct InitialFeePolicy {
pub base_fee: String,
pub per_byte_fee: String,
}
| 34.151515 | 92 | 0.639752 |
01aedbc61dbe6043ebc88ef55c13dca615f10937 | 15,126 | use crate::{
pass::Pass,
util::{alias_ident_for, is_literal, prepend, undefined, ExprFactory, StmtLike},
};
use ast::*;
use serde::Deserialize;
use std::mem;
use swc_atoms::js_word;
use swc_common::{util::move_map::MoveMap, Fold, FoldWith, Span, Spanned, DUMMY_SP};
#[cfg(test)]
mod tests;
pub fn spread(c: Config) -> impl Pass {
Spread { c }
}
#[derive(Debug, Clone, Copy, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Config {
pub loose: bool,
}
/// es2015 - `SpreadElement`
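///
/// A rough sketch of the rewrites this pass performs (output shapes are
/// approximate; the exact helper names come from the `helper!` invocations below):
///
/// - `foo(...args)` -> `foo.apply(void 0, _toConsumableArray(args))`
/// - `[1, ...xs, 2]` -> `[1].concat(_toConsumableArray(xs), [2])`
/// - `new Foo(...args)` -> `_construct(Foo, _toConsumableArray(args))`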
#[derive(Default)]
struct Spread {
c: Config,
}
#[derive(Default)]
struct ActualFolder {
c: Config,
vars: Vec<VarDeclarator>,
}
impl<T> Fold<Vec<T>> for Spread
where
T: StmtLike + FoldWith<ActualFolder> + FoldWith<Self>,
{
fn fold(&mut self, items: Vec<T>) -> Vec<T> {
let mut folder = ActualFolder {
c: self.c,
vars: vec![],
};
let mut items = items.move_map(|item| item.fold_with(&mut folder));
if !folder.vars.is_empty() {
prepend(
&mut items,
T::from_stmt(Stmt::Decl(Decl::Var(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
declare: false,
decls: mem::replace(&mut folder.vars, vec![]),
}))),
);
}
items
}
}
impl Fold<Expr> for ActualFolder {
fn fold(&mut self, e: Expr) -> Expr {
let e = validate!(e.fold_children(self));
match e {
Expr::Array(ArrayLit { span, elems }) => {
if !elems.iter().any(|e| match e {
Some(ExprOrSpread {
spread: Some(_), ..
}) => true,
_ => false,
}) {
return Expr::Array(ArrayLit { span, elems });
}
self.concat_args(span, elems.into_iter(), true)
}
// super(...spread) should be removed by es2015::classes pass
Expr::Call(CallExpr {
callee: ExprOrSuper::Expr(callee),
args,
span,
type_args,
}) => {
let has_spread = args
.iter()
.any(|ExprOrSpread { spread, .. }| spread.is_some());
if !has_spread {
return Expr::Call(CallExpr {
callee: ExprOrSuper::Expr(callee),
args,
span,
type_args,
});
}
let (this, callee) = match *callee {
Expr::Member(MemberExpr {
obj: ExprOrSuper::Super(Super { span, .. }),
..
}) => (box Expr::This(ThisExpr { span }), callee),
Expr::Member(MemberExpr {
obj: ExprOrSuper::Expr(ref expr @ box Expr::This(..)),
..
}) => (expr.clone(), callee),
// Injected variables can be accessed without any side effect
Expr::Member(MemberExpr {
obj: ExprOrSuper::Expr(box Expr::Ident(ref i)),
..
}) if i.span.is_dummy() => (box Expr::Ident(i.clone()), callee),
Expr::Ident(Ident { span, .. }) => (undefined(span), callee),
Expr::Member(MemberExpr {
span,
obj: ExprOrSuper::Expr(expr),
prop,
computed,
}) => {
let ident = alias_ident_for(&expr, "_instance");
self.vars.push(VarDeclarator {
span: DUMMY_SP,
definite: false,
name: Pat::Ident(ident.clone()),
                            // Initialized by paren expression.
init: None,
});
let this = box Expr::Ident(ident.clone());
let callee = Expr::Assign(AssignExpr {
span: DUMMY_SP,
left: PatOrExpr::Pat(box Pat::Ident(ident)),
op: op!("="),
right: expr,
});
(
this,
box Expr::Member(MemberExpr {
span,
obj: callee.as_obj(),
prop,
computed,
}),
)
}
// https://github.com/swc-project/swc/issues/400
// _ => (undefined(callee.span()), callee),
_ => (
box Expr::This(ThisExpr {
span: callee.span(),
}),
callee,
),
};
let args_array = if is_literal(&args) {
Expr::Array(ArrayLit {
span,
elems: expand_literal_args(args.into_iter().map(Some)),
})
} else {
self.concat_args(span, args.into_iter().map(Some), false)
};
let apply = MemberExpr {
span: DUMMY_SP,
obj: callee.as_callee(),
prop: box Ident::new(js_word!("apply"), span).into(),
computed: false,
};
Expr::Call(CallExpr {
span,
callee: apply.as_callee(),
args: vec![this.as_arg(), args_array.as_arg()],
type_args: None,
})
}
Expr::New(NewExpr {
callee,
args: Some(args),
span,
type_args,
}) => {
let has_spread = args
.iter()
.any(|ExprOrSpread { spread, .. }| spread.is_some());
if !has_spread {
return Expr::New(NewExpr {
span,
callee,
args: Some(args),
type_args,
});
}
let args = self.concat_args(span, args.into_iter().map(Some), true);
Expr::Call(CallExpr {
span,
callee: helper!(construct, "construct"),
args: vec![callee.as_arg(), args.as_arg()],
type_args: Default::default(),
})
}
_ => e,
}
}
}
impl ActualFolder {
fn concat_args(
&self,
span: Span,
args: impl ExactSizeIterator + Iterator<Item = Option<ExprOrSpread>>,
need_array: bool,
) -> Expr {
//
// []
//
let mut first_arr = None;
let mut tmp_arr = vec![];
let mut buf = vec![];
let args_len = args.len();
macro_rules! make_arr {
() => {
let elems = mem::replace(&mut tmp_arr, vec![]);
match first_arr {
Some(_) => {
if !elems.is_empty() {
buf.push(Expr::Array(ArrayLit { span, elems }).as_arg());
}
}
None => {
first_arr = Some(Expr::Array(ArrayLit { span, elems }));
}
}
};
}
for arg in args {
if let Some(arg) = arg {
let ExprOrSpread { expr, spread } = arg;
match spread {
// ...b -> toConsumableArray(b)
Some(span) => {
//
make_arr!();
buf.push(match *expr {
Expr::Ident(Ident {
sym: js_word!("arguments"),
..
}) => {
if args_len == 1 {
if need_array {
return Expr::Call(CallExpr {
span,
callee: member_expr!(
DUMMY_SP,
Array.prototype.slice.call
)
.as_callee(),
args: vec![expr.as_arg()],
type_args: Default::default(),
});
} else {
return *expr;
}
} else {
Expr::Call(CallExpr {
span,
callee: member_expr!(DUMMY_SP, Array.prototype.slice.call)
.as_callee(),
args: vec![expr.as_arg()],
type_args: Default::default(),
})
.as_arg()
}
}
_ => {
if args_len == 1 && !need_array {
return if self.c.loose {
*expr
} else {
Expr::Call(CallExpr {
span,
callee: helper!(
to_consumable_array,
"toConsumableArray"
),
args: vec![expr.as_arg()],
type_args: Default::default(),
})
};
}
// [].concat(arr) is shorter than _toConsumableArray(arr)
if args_len == 1 {
return if self.c.loose {
Expr::Call(CallExpr {
span: DUMMY_SP,
callee: ArrayLit {
span: DUMMY_SP,
elems: vec![],
}
.member(quote_ident!("concat"))
.as_callee(),
args: vec![expr.as_arg()],
type_args: Default::default(),
})
} else {
Expr::Call(CallExpr {
span,
callee: helper!(
to_consumable_array,
"toConsumableArray"
),
args: vec![expr.as_arg()],
type_args: Default::default(),
})
};
}
Expr::Call(CallExpr {
span,
callee: helper!(to_consumable_array, "toConsumableArray"),
args: vec![expr.as_arg()],
type_args: Default::default(),
})
.as_arg()
}
});
}
None => tmp_arr.push(Some(expr.as_arg())),
}
} else {
tmp_arr.push(None);
}
}
make_arr!();
if !buf.is_empty()
&& match first_arr {
None => true,
Some(Expr::Array(ref arr)) if arr.elems.is_empty() => true,
_ => false,
}
{
let callee = buf
.remove(0)
.expr
.member(Ident::new(js_word!("concat"), DUMMY_SP))
.as_callee();
            return Expr::Call(CallExpr {
                span,
                callee,
                args: buf,
                type_args: Default::default(),
            });
}
Expr::Call(CallExpr {
// TODO
span,
callee: first_arr
.take()
.unwrap_or_else(|| {
// No arg
// assert!(args.is_empty());
Expr::Array(ArrayLit {
span,
elems: vec![],
})
})
.member(Ident::new(js_word!("concat"), span))
.as_callee(),
args: buf,
type_args: Default::default(),
})
}
}
fn expand_literal_args(
args: impl ExactSizeIterator + Iterator<Item = Option<ExprOrSpread>>,
) -> Vec<Option<ExprOrSpread>> {
fn expand(
buf: &mut Vec<Option<ExprOrSpread>>,
args: impl ExactSizeIterator + Iterator<Item = Option<ExprOrSpread>>,
) {
for arg in args {
match arg {
Some(ExprOrSpread {
spread: Some(..),
expr: box Expr::Array(arr),
}) => expand(buf, arr.elems.into_iter()),
_ => buf.push(arg),
}
}
}
let mut buf = Vec::with_capacity(args.len() + 4);
expand(&mut buf, args);
buf
}
| 35.674528 | 98 | 0.327582 |
ed92bdf36d7673c61669af0114c2115b1f389e5f | 3,906 | use anyhow::Result;
use delay_timer::prelude::*;
#[allow(deprecated)]
use delay_timer::utils::convenience::functions::unblock_process_task_fn;
use hyper::{Client, Uri};
use std::time::Duration;
use tokio::time::sleep;
use tracing::Level;
use tracing_subscriber::FmtSubscriber;
// You can replace line 66 with the command you expect to execute.
#[tokio::main]
async fn main() -> Result<()> {
// a builder for `FmtSubscriber`.
FmtSubscriber::builder()
        // all spans/events with a level higher than TRACE (e.g., debug, info, warn, etc.)
// will be written to stdout.
.with_max_level(Level::DEBUG)
// completes the builder.
.init();
// In addition to the mixed (smol & tokio) runtime
// You can also share a tokio runtime with delayTimer, please see api `DelayTimerBuilder::tokio_runtime` for details.
// Build an DelayTimer that uses the default configuration of the Smol runtime internally.
let delay_timer = DelayTimerBuilder::default().build();
// Develop a print job that runs in an asynchronous cycle.
let task_instance_chain = delay_timer.insert_task(build_task_async_print()?)?;
// Develop a php script shell-task that runs in an asynchronous cycle.
let shell_task_instance_chain = delay_timer.insert_task(build_task_async_execute_process()?)?;
// Get the running instance of task 1.
let task_instance = task_instance_chain.next_with_async_wait().await?;
    // Wait for the request to finish, then cancel it (it's just custom logic).
sleep(Duration::from_secs(1)).await;
// Cancel running task instances.
task_instance.cancel_with_async_wait().await?;
// Cancel running shell-task instances.
// Probably already finished running, no need to cancel.
let _ = shell_task_instance_chain
.next_with_async_wait()
.await?
.cancel_with_async_wait()
.await?;
// Remove task which id is 1.
delay_timer.remove_task(1)?;
// No new tasks are accepted; running tasks are not affected.
Ok(delay_timer.stop_delay_timer()?)
}
fn build_task_async_print() -> Result<Task, TaskError> {
let id = 1;
let name = String::from("someting");
let mut task_builder = TaskBuilder::default();
let body = move || {
let name_ref = name.clone();
async move {
async_template(id, name_ref).await.expect("Request failed.");
sleep(Duration::from_secs(3)).await;
println!("create_async_fn_body:i'success");
}
};
task_builder
.set_task_id(1)
.set_frequency_repeated_by_seconds(6)
.set_maximum_parallel_runnable_num(2)
.spawn_async_routine(body)
}
fn build_task_async_execute_process() -> Result<Task, TaskError> {
let task_id = 3;
let mut task_builder = TaskBuilder::default();
let body = move || {
#[allow(deprecated)]
unblock_process_task_fn("/opt/homebrew/bin/php /Users/bincheng_paopao/project/repo/rust/myself/delay-timer/examples/try_spawn.php >> ./try_spawn.txt".into(), task_id)
};
task_builder
.set_frequency_repeated_by_seconds(1)
.set_task_id(task_id)
.set_maximum_running_time(10)
.set_maximum_parallel_runnable_num(1)
.spawn_async_routine(body)
}
pub async fn async_template(id: i32, name: String) -> Result<()> {
let client = Client::new();
// The default connector does not handle TLS.
// Speaking to https destinations will require configuring a connector that implements TLS.
// So use http for test.
let url = format!("http://httpbin.org/get?id={}&name={}", id, name);
let uri: Uri = url.parse()?;
let res = client.get(uri).await?;
println!("Response: {}", res.status());
// Concatenate the body stream into a single buffer...
let buf = hyper::body::to_bytes(res).await?;
println!("body: {:?}", buf);
Ok(())
}
| 34.566372 | 174 | 0.673067 |
6af27f6ca26933702ab37bdc6f4d657e440b2aa6 | 7,506 | use crate::util::error::DynResult;
use log::{debug, error};
use std::future::Future;
use std::io;
use tokio::io::Interest;
use tokio::net::windows::named_pipe::{
ClientOptions, NamedPipeClient, NamedPipeServer, ServerOptions,
};
use tokio::sync::mpsc::{self, Receiver, Sender};
use tokio::time::{self, Duration};
use winapi::shared::winerror;
pub struct ConnectionWindows {
send: Sender<Vec<u8>>,
recv: Receiver<Vec<u8>>,
}
fn find_eol(a: &[u8], start: usize) -> Option<usize> {
for p in start..a.len() {
let c = a[p];
if c == b'\r' || c == b'\n' {
return Some(p);
}
}
None
}
macro_rules! rw_pipe_def {
($name: ident, $P: ident) => {
async fn $name(
pipe: $P,
recv: Sender<Vec<u8>>,
mut send: Receiver<Vec<u8>>,
) -> DynResult<()> {
let mut write_buffer: Option<Vec<u8>> = None;
let mut read_buffer = Vec::with_capacity(200);
loop {
let interest = if write_buffer.is_some() {
Interest::READABLE | Interest::WRITABLE
} else {
Interest::READABLE
};
tokio::select! {
ready = pipe.ready(interest) => {
match ready {
Ok(ready) => {
if ready.is_readable() {
let mut pos = read_buffer.len();
// Make room for more data
read_buffer.resize(pos + 100, 0);
match pipe.try_read(&mut read_buffer[pos..]) {
Ok(n) => {
read_buffer.truncate(pos + n);
let mut start = 0;
loop {
if let Some(end) = find_eol(&read_buffer, pos) {
let line = &read_buffer[start .. end];
if !line.is_empty() {
if recv.send(line.to_vec()).await.is_err() {
return Ok(())
}
}
start = end +1;
if start == read_buffer.len() {
read_buffer.clear();
} else {
read_buffer.drain(0..start);
}
start =0;
pos = 0;
} else {
break;
}
}
},
Err(e) => {
read_buffer.truncate(pos);
if e.kind() != io::ErrorKind::WouldBlock {
return Err(e.into())
}
}
}
}
if ready.is_writable() {
if let Some(buffer) = &write_buffer {
match pipe.try_write(&buffer) {
Ok(_) => {
write_buffer = None;
},
Err(e) => {
if e.kind() != io::ErrorKind::WouldBlock {
return Err(e.into())
}
}
}
}
}
},
Err(e) => return Err(e.into())
}
},
res = (&mut send).recv() => {
match res {
Some(data) => write_buffer = Some(data),
None => return Ok(())
}
}
}
}
}
};
}
rw_pipe_def! {rw_pipe_client, NamedPipeClient}
rw_pipe_def! {rw_pipe_server, NamedPipeServer}
impl ConnectionWindows {
pub async fn server<H, F, S>(path: &str, handler: H, shutdown: S) -> DynResult<()>
where
H: Fn(ConnectionWindows) -> F,
F: Future<Output = ()> + Send + 'static,
S: Future<Output = ()> + Send + 'static,
{
let mut server = ServerOptions::new()
.first_pipe_instance(true)
.create(path)?;
loop {
server.connect().await?;
let connected = server;
server = ServerOptions::new().create(path)?;
let (send_tx, send_rx) = mpsc::channel(3);
let (recv_tx, recv_rx) = mpsc::channel(3);
tokio::spawn(async move {
if let Err(e) = rw_pipe_server(connected, recv_tx, send_rx).await {
error!("Server thread failed: {}", e);
}
});
let conn = ConnectionWindows {
send: send_tx,
recv: recv_rx,
};
tokio::spawn(handler(conn));
}
Ok(())
}
pub async fn client(path: &str) -> DynResult<ConnectionWindows> {
let mut retries = 5;
let client = loop {
let client = match ClientOptions::new().open(path) {
Ok(client) => break client,
Err(e) if e.raw_os_error() == Some(winerror::ERROR_PIPE_BUSY as i32) => {
// Try again
if retries == 0 {
return Err("Named pipe busy, too many retries".into());
}
retries -= 1;
}
Err(e) => return Err(e.into()),
};
time::sleep(Duration::from_millis(50)).await
};
let (send_tx, send_rx) = mpsc::channel(3);
let (recv_tx, recv_rx) = mpsc::channel(3);
tokio::spawn(async move {
if let Err(e) = rw_pipe_client(client, recv_tx, send_rx).await {
error!("Client thread failed: {}", e);
}
});
let conn = ConnectionWindows {
send: send_tx,
recv: recv_rx,
};
Ok(conn)
}
pub async fn send_data(&mut self, data: &[u8]) -> DynResult<()> {
self.send.send(data.to_vec()).await?;
Ok(())
}
pub async fn recv_data(&mut self) -> DynResult<Vec<u8>> {
self.recv.recv().await.ok_or("Receiver queue closed".into())
}
}
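// A minimal usage sketch (not part of the original file): it assumes a matching
// server is already listening on the named pipe below and that messages are
// newline-terminated, since `rw_pipe_*` splits incoming data on '\r' / '\n'.
// The pipe name is only an illustrative placeholder.
#[allow(dead_code)]
async fn example_client_roundtrip() -> DynResult<()> {
    // Connect to an already-running server on the named pipe.
    let mut conn = ConnectionWindows::client(r"\\.\pipe\example-pipe").await?;
    // Send one line; the trailing '\n' marks the end of the message.
    conn.send_data(b"hello\n").await?;
    // Receive one line back (returned without its line terminator).
    let reply = conn.recv_data().await?;
    debug!("got reply: {:?}", reply);
    Ok(())
}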
| 39.09375 | 100 | 0.343059 |
abcd5c66f2e57bfe259de15c380125e3af421504 | 9,724 | // automatically generated by the FlatBuffers compiler, do not modify
#![allow(dead_code)]
#![allow(unused_imports)]
extern crate flatbuffers;
pub mod namespace_a {
#![allow(dead_code)]
#![allow(unused_imports)]
use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::EndianScalar;
pub enum TableInFirstNSOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct TableInFirstNS<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for TableInFirstNS<'a> {
type Inner = TableInFirstNS<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> TableInFirstNS<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
TableInFirstNS {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args TableInFirstNSArgs) -> flatbuffers::WIPOffset<TableInFirstNS<'bldr>> {
let mut builder = TableInFirstNSBuilder::new(_fbb);
if let Some(x) = args.foo_struct { builder.add_foo_struct(x); }
if let Some(x) = args.foo_table { builder.add_foo_table(x); }
builder.add_foo_enum(args.foo_enum);
builder.finish()
}
pub const VT_FOO_TABLE: flatbuffers::VOffsetT = 4;
pub const VT_FOO_ENUM: flatbuffers::VOffsetT = 6;
pub const VT_FOO_STRUCT: flatbuffers::VOffsetT = 8;
#[inline]
pub fn foo_table(&'a self) -> Option<namespace_b::TableInNestedNS<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<namespace_b::TableInNestedNS<'a>>>(TableInFirstNS::VT_FOO_TABLE, None)
}
#[inline]
pub fn foo_enum(&'a self) -> namespace_b::EnumInNestedNS {
self._tab.get::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, Some(namespace_b::EnumInNestedNS::A)).unwrap()
}
#[inline]
pub fn foo_struct(&'a self) -> Option<&'a namespace_b::StructInNestedNS> {
self._tab.get::<namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, None)
}
}
pub struct TableInFirstNSArgs<'a> {
pub foo_table: Option<flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'a >>>,
pub foo_enum: namespace_b::EnumInNestedNS,
pub foo_struct: Option<&'a namespace_b::StructInNestedNS>,
}
impl<'a> Default for TableInFirstNSArgs<'a> {
#[inline]
fn default() -> Self {
TableInFirstNSArgs {
foo_table: None,
foo_enum: namespace_b::EnumInNestedNS::A,
foo_struct: None,
}
}
}
pub struct TableInFirstNSBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> TableInFirstNSBuilder<'a, 'b> {
#[inline]
pub fn add_foo_table(&mut self, foo_table: flatbuffers::WIPOffset<namespace_b::TableInNestedNS<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<namespace_b::TableInNestedNS>>(TableInFirstNS::VT_FOO_TABLE, foo_table);
}
#[inline]
pub fn add_foo_enum(&mut self, foo_enum: namespace_b::EnumInNestedNS) {
self.fbb_.push_slot::<namespace_b::EnumInNestedNS>(TableInFirstNS::VT_FOO_ENUM, foo_enum, namespace_b::EnumInNestedNS::A);
}
#[inline]
pub fn add_foo_struct(&mut self, foo_struct: &'b namespace_b::StructInNestedNS) {
self.fbb_.push_slot_always::<&namespace_b::StructInNestedNS>(TableInFirstNS::VT_FOO_STRUCT, foo_struct);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInFirstNSBuilder<'a, 'b> {
let start = _fbb.start_table();
TableInFirstNSBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<TableInFirstNS<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
pub enum SecondTableInAOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct SecondTableInA<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for SecondTableInA<'a> {
type Inner = SecondTableInA<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> SecondTableInA<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
SecondTableInA {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args SecondTableInAArgs) -> flatbuffers::WIPOffset<SecondTableInA<'bldr>> {
let mut builder = SecondTableInABuilder::new(_fbb);
if let Some(x) = args.refer_to_c { builder.add_refer_to_c(x); }
builder.finish()
}
pub const VT_REFER_TO_C: flatbuffers::VOffsetT = 4;
#[inline]
pub fn refer_to_c(&'a self) -> Option<super::namespace_c::TableInC<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_c::TableInC<'a>>>(SecondTableInA::VT_REFER_TO_C, None)
}
}
pub struct SecondTableInAArgs<'a> {
pub refer_to_c: Option<flatbuffers::WIPOffset<super::namespace_c::TableInC<'a >>>,
}
impl<'a> Default for SecondTableInAArgs<'a> {
#[inline]
fn default() -> Self {
SecondTableInAArgs {
refer_to_c: None,
}
}
}
pub struct SecondTableInABuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> SecondTableInABuilder<'a, 'b> {
#[inline]
pub fn add_refer_to_c(&mut self, refer_to_c: flatbuffers::WIPOffset<super::namespace_c::TableInC<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_c::TableInC>>(SecondTableInA::VT_REFER_TO_C, refer_to_c);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> SecondTableInABuilder<'a, 'b> {
let start = _fbb.start_table();
SecondTableInABuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<SecondTableInA<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
} // pub mod NamespaceA
pub mod namespace_c {
#![allow(dead_code)]
#![allow(unused_imports)]
use std::mem;
use std::cmp::Ordering;
extern crate flatbuffers;
use self::flatbuffers::EndianScalar;
pub enum TableInCOffset {}
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct TableInC<'a> {
pub _tab: flatbuffers::Table<'a>,
}
impl<'a> flatbuffers::Follow<'a> for TableInC<'a> {
type Inner = TableInC<'a>;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Self {
_tab: flatbuffers::Table { buf: buf, loc: loc },
}
}
}
impl<'a> TableInC<'a> {
#[inline]
pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
TableInC {
_tab: table,
}
}
#[allow(unused_mut)]
pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
_fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
args: &'args TableInCArgs) -> flatbuffers::WIPOffset<TableInC<'bldr>> {
let mut builder = TableInCBuilder::new(_fbb);
if let Some(x) = args.refer_to_a2 { builder.add_refer_to_a2(x); }
if let Some(x) = args.refer_to_a1 { builder.add_refer_to_a1(x); }
builder.finish()
}
pub const VT_REFER_TO_A1: flatbuffers::VOffsetT = 4;
pub const VT_REFER_TO_A2: flatbuffers::VOffsetT = 6;
#[inline]
pub fn refer_to_a1(&'a self) -> Option<super::namespace_a::TableInFirstNS<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::TableInFirstNS<'a>>>(TableInC::VT_REFER_TO_A1, None)
}
#[inline]
pub fn refer_to_a2(&'a self) -> Option<super::namespace_a::SecondTableInA<'a>> {
self._tab.get::<flatbuffers::ForwardsUOffset<super::namespace_a::SecondTableInA<'a>>>(TableInC::VT_REFER_TO_A2, None)
}
}
pub struct TableInCArgs<'a> {
pub refer_to_a1: Option<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'a >>>,
pub refer_to_a2: Option<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'a >>>,
}
impl<'a> Default for TableInCArgs<'a> {
#[inline]
fn default() -> Self {
TableInCArgs {
refer_to_a1: None,
refer_to_a2: None,
}
}
}
pub struct TableInCBuilder<'a: 'b, 'b> {
fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> TableInCBuilder<'a, 'b> {
#[inline]
pub fn add_refer_to_a1(&mut self, refer_to_a1: flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::TableInFirstNS>>(TableInC::VT_REFER_TO_A1, refer_to_a1);
}
#[inline]
pub fn add_refer_to_a2(&mut self, refer_to_a2: flatbuffers::WIPOffset<super::namespace_a::SecondTableInA<'b >>) {
self.fbb_.push_slot_always::<flatbuffers::WIPOffset<super::namespace_a::SecondTableInA>>(TableInC::VT_REFER_TO_A2, refer_to_a2);
}
#[inline]
pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> TableInCBuilder<'a, 'b> {
let start = _fbb.start_table();
TableInCBuilder {
fbb_: _fbb,
start_: start,
}
}
#[inline]
pub fn finish(self) -> flatbuffers::WIPOffset<TableInC<'a>> {
let o = self.fbb_.end_table(self.start_);
flatbuffers::WIPOffset::new(o.value())
}
}
} // pub mod NamespaceC
| 32.851351 | 132 | 0.665261 |
096ee0b00e43ed89ffdb8df76bda0bc42a20b38d | 3,198 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
extern "C" {
# [wasm_bindgen (extends = :: js_sys :: Object , js_name = GamepadEventInit)]
#[derive(Debug, Clone, PartialEq, Eq)]
#[doc = "The `GamepadEventInit` dictionary."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GamepadEventInit`*"]
pub type GamepadEventInit;
}
impl GamepadEventInit {
#[doc = "Construct a new `GamepadEventInit`."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GamepadEventInit`*"]
pub fn new() -> Self {
#[allow(unused_mut)]
let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new());
ret
}
#[doc = "Change the `bubbles` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GamepadEventInit`*"]
pub fn bubbles(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("bubbles"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `cancelable` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GamepadEventInit`*"]
pub fn cancelable(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("cancelable"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `composed` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `GamepadEventInit`*"]
pub fn composed(&mut self, val: bool) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("composed"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[cfg(feature = "Gamepad")]
#[doc = "Change the `gamepad` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `Gamepad`, `GamepadEventInit`*"]
pub fn gamepad(&mut self, val: Option<&Gamepad>) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(
self.as_ref(),
&JsValue::from("gamepad"),
&JsValue::from(val),
);
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
}
| 34.76087 | 110 | 0.55566 |
11c67d421c27a42dc668ff670213efdde53611fb | 902 | use super::*;
use proptest::strategy::Strategy;
#[test]
fn without_empty_list_returns_true() {
with_process_arc(|arc_process| {
TestRunner::new(Config::with_source_file(file!()))
.run(
&(
Just(Term::NIL),
strategy::term(arc_process.clone())
.prop_filter("Right must not be empty list", |v| !v.is_nil()),
),
|(left, right)| {
prop_assert_eq!(
erlang::are_not_equal_after_conversion_2(left, right),
true.into()
);
Ok(())
},
)
.unwrap();
});
}
#[test]
fn with_empty_list_right_returns_false() {
assert_eq!(
erlang::are_not_equal_after_conversion_2(Term::NIL, Term::NIL),
false.into()
);
}
| 25.771429 | 86 | 0.464523 |
383eab758a03bae3565c787454621ca666a35b53 | 2,236 | use bracket_color::prelude::{RGB, RGBA};
use byteorder::{ReadBytesExt, WriteBytesExt};
use std::io;
/// Structure representing the components of one color
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct XpColor {
/// Red component 0..255
pub r: u8,
/// Green component 0..255
pub g: u8,
/// Blue component 0..255
pub b: u8,
}
impl From<RGB> for XpColor {
fn from(rgb: RGB) -> Self {
XpColor::new(
(rgb.r * 255.0) as u8,
(rgb.g * 255.0) as u8,
(rgb.b * 255.0) as u8,
)
}
}
impl From<RGBA> for XpColor {
fn from(rgb: RGBA) -> Self {
XpColor::new(
(rgb.r * 255.0) as u8,
(rgb.g * 255.0) as u8,
(rgb.b * 255.0) as u8,
)
}
}
impl XpColor {
/// deepest black
pub const BLACK: Self = Self { r: 0, g: 0, b: 0 };
/// color 0xff00ff (hot pink) is regarded as transparent
pub const TRANSPARENT: Self = Self {
r: 255,
g: 0,
b: 255,
};
/// Construct a new color from r,g,b values
#[inline]
#[must_use]
pub const fn new(r: u8, g: u8, b: u8) -> Self {
Self { r, g, b }
}
/// Return whether this color is considered transparent (if this is the background color of a
/// cell, the layer below it will see through)
#[inline]
#[must_use]
pub fn is_transparent(self) -> bool {
self == Self::TRANSPARENT
}
    /// Read an RGB color from a `ReadBytesExt`
    ///
    /// # Errors
    ///
    /// Returns any I/O error raised by the underlying reader.
#[inline]
pub fn read<T: ReadBytesExt>(rdr: &mut T) -> io::Result<Self> {
let r = rdr.read_u8()?;
let g = rdr.read_u8()?;
let b = rdr.read_u8()?;
Ok(Self { r, g, b })
}
    /// Write an RGB color to a `WriteBytesExt`
    ///
    /// # Errors
    ///
    /// Returns any I/O error raised by the underlying writer.
#[inline]
pub fn write<T: WriteBytesExt>(self, wr: &mut T) -> io::Result<()> {
wr.write_u8(self.r)?;
wr.write_u8(self.g)?;
wr.write_u8(self.b)?;
Ok(())
}
}
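// A small round-trip sketch (not part of the original file): `std::io::Cursor`
// implements the `ReadBytesExt` / `WriteBytesExt` traits used above, so it can
// stand in for a real file stream.
#[cfg(test)]
mod roundtrip_example {
    use super::XpColor;
    use std::io::Cursor;
    #[test]
    fn read_back_what_was_written() {
        let color = XpColor::new(10, 20, 30);
        // Write the three color bytes into an in-memory buffer...
        let mut out = Cursor::new(Vec::new());
        color.write(&mut out).unwrap();
        // ...and read them back from the same bytes.
        let mut input = Cursor::new(out.into_inner());
        assert_eq!(XpColor::read(&mut input).unwrap(), color);
    }
}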
impl From<XpColor> for RGB {
fn from(xp: XpColor) -> Self {
RGB::from_u8(xp.r, xp.g, xp.b)
}
}
impl From<XpColor> for RGBA {
fn from(xp: XpColor) -> Self {
RGBA::from_u8(xp.r, xp.g, xp.b, 255)
}
} | 23.787234 | 97 | 0.521467 |
ed07903c5de277ce2188a8751fd3ee34fe854c80 | 24,328 | /// Project: Audio filters in Rust
/// Date: 2021.12.05
/// Author of the port: João Nuno Carvalho
///
/// Description: Audio or DSP filters allow you to attenuate or accentuate some frequencies
///              or ranges of frequencies in a signal. The signal can be of any kind, but
///              here we will focus on 1D signals, like audio signals.
///              Phase differences can also appear in the signal; they vary with the
///              filter and with the frequency components of the signal.
///              This is a port of the audio filters from Python to Rust, based on the
///              audio_filters module of TheAlgorithms GitHub repository in Python, which
///              is itself a port of the WebAudio API implementation of the same common
///              filters found in browsers.
///
/// The following filters are implemented over a BiQuad IIR filter:
/// -low-pass
/// -high-pass
/// -band-pass
/// -all-pass
/// -peak
/// -low-shelf
/// -high-shelf
/// -notch
/// -10 band equalizer
///
/// License: MIT Open Source License, like the original license from
/// GitHub - TheAlgorithms / Python / audio_filters
/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters
///
/// How to run the code.
///
///  To make a project for these files do:
///     -Install Rust on your computer (Linux, Win, Mac, Raspberry Pi).
///
/// cargo new audio_filters_in_rust
/// cd audio_filters_in_rust
///
///     -Copy the repository files into this directory, overwriting the existing ones.
///
/// To compile do:
/// cargo build --release
///
/// To run do:
/// cargo run --release
///
///  To run the tests do:
/// cargo test
///
/// References:
/// 1. GitHub - TheAlgorithms / Python / audio_filters
/// https://github.com/TheAlgorithms/Python/tree/master/audio_filters
///
/// 2. WebAudio - Cookbook formulae for audio equalizer biquad filter coefficients
/// https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html
///
/// 3. Good resources on DSP – Digital Signal Programming
/// https://github.com/joaocarvalhoopen/How_to_learn_modern_electronics#dsp--digital-signal-programming
///
/// 4. Biquads - EarLevel
/// http://www.earlevel.com/main/2003/02/28/biquads/
///
/// 5. Biquad C++ source code - EarLevel
/// https://www.earlevel.com/main/2012/11/26/biquad-c-source-code/
///
/// 6. A biquad calculator V3 - EarLevel
/// https://www.earlevel.com/main/2021/09/02/biquad-calculator-v3/
///
/// 7. WebAudio API - Mozilla Docs
/// https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API
///
/// 8. Audio Filters - Theory and Practice
/// by Ethan Winer
/// http://ethanwiner.com/filters.html
///
/// 9. Audio filter - Wikipedia
/// https://en.wikipedia.org/wiki/Audio_filter
///
/// 10. Electronic filter - Wikipedia
/// https://en.wikipedia.org/wiki/Electronic_filter
///
/// 11. How to learn modern Rust
/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust
///
///
/// 10 Band Equalizer
///
/// 12. Making an EQ from cascading filters
/// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters
///
/// 13. PEAK/NOTCH FILTER DESIGN
/// https://www.dsprelated.com/showcode/169.php
///
/// 14. The Equivalence of Various Methods of Computing
/// Biquad Coefficients for Audio Parametric Equalizers
/// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf
///
/// 15. How to learn modern Rust
/// https://github.com/joaocarvalhoopen/How_to_learn_modern_Rust
///
use crate::iir_filter::IIRFilter;
use std::f64::consts::TAU;
use std::f64::consts::PI;
/// Create 2nd-order IIR filters with Butterworth design.
///
/// Code based on https://webaudio.github.io/Audio-EQ-Cookbook/audio-eq-cookbook.html
/// Alternatively you can use scipy.signal.butter, which should yield the same results.
///
/// Creates a low-pass filter
///
/// In Python:
/// >>> filter = make_lowpass(1000, 48000)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,
/// 0.008555138626189618, 0.004277569313094809]
///
/// In Rust:
/// >>> let filter = make_lowpass(1000.0, 48000, None);
/// >>> let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
/// >>> println!("{:?}", res_coeffs);
/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,
/// 0.008555138626189618, 0.004277569313094809]
///
pub fn make_lowpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let b0 = (1.0 - _cos) / 2.0;
let b1 = 1.0 - _cos;
let a0 = 1.0 + alpha;
let a1 = -2.0 * _cos;
let a2 = 1.0 - alpha;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);
filter
}
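// A short usage sketch (not in the original file): passing `None` for `q_factor`
// keeps the Butterworth default (Q = 1 / sqrt(2)); pass `Some(q)` for a custom Q.
#[allow(dead_code)]
fn example_lowpass_usage() -> (IIRFilter, IIRFilter) {
    // Butterworth low-pass at 1 kHz for a 48 kHz sample rate.
    let butterworth = make_lowpass(1_000.0, 48_000, None);
    // Same cutoff, but with a stronger resonance around the cutoff frequency.
    let resonant = make_lowpass(1_000.0, 48_000, Some(2.0));
    (butterworth, resonant)
}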
/// Creates a high-pass filter
///
/// In Python:
/// >>> filter = make_highpass(1000, 48000)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9957224306869052,
/// -1.9914448613738105, 0.9957224306869052]
///
pub fn make_highpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let b0 = (1.0 + _cos) / 2.0;
let b1 = -1.0 - _cos;
let a0 = 1.0 + alpha;
let a1 = -2.0 * _cos;
let a2 = 1.0 - alpha;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);
filter
}
/// Creates a band-pass filter
///
/// In Python:
/// >>> filter = make_bandpass(1000, 48000)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.06526309611002579,
/// 0, -0.06526309611002579]
///
pub fn make_bandpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let b0 = _sin / 2.0;
let b1 = 0.0;
let b2 = -b0;
let a0 = 1.0 + alpha;
let a1 = -2.0 * _cos;
let a2 = 1.0 - alpha;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);
filter
}
/// Creates an all-pass filter
///
/// In Python:
/// >>> filter = make_allpass(1000, 48000)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9077040443587427,
/// -1.9828897227476208, 1.0922959556412573]
///
pub fn make_allpass(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let b0 = 1.0 - alpha;
let b1 = -2.0 * _cos;
let b2 = 1.0 + alpha;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [b2, b1, b0], & [b0, b1, b2]);
filter
}
/// Creates a peak filter
///
/// In Python:
/// >>> filter = make_peak(1000, 48000, 6)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [1.0653405327119334, -1.9828897227476208, 0.9346594672880666, 1.1303715025601122,
/// -1.9828897227476208, 0.8696284974398878]
///
pub fn make_peak(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let big_a = 10.0_f64.powf(gain_db / 40.0);
let b0 = 1.0 + alpha * big_a;
let b1 = -2.0 * _cos;
let b2 = 1.0 - alpha * big_a;
let a0 = 1.0 + alpha / big_a;
let a1 = -2.0 * _cos;
let a2 = 1.0 - alpha / big_a;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);
filter
}
// This is a peak_eq filter similar to the above peak filter but with constant Q and the gain
// is taken at -3dB like a analog peak_eq filter would be.
// This filter is ideal to make equalizers, like a 10 band parametric equalizer.
//
// See:
// 1. Peak / notch filter design
// https://www.dsprelated.com/showcode/169.php#commax_container
//
// and
//
// 2. Making an EQ from cascading filters
// https://dsp.stackexchange.com/questions/10309/making-an-eq-from-cascading-filters
//
// and
//
// 3. The Equivalence of Various Methods of Computing
// Biquad Coefficients for Audio Parametric Equalizers
// http://www.thesounddesign.com/MIO/EQ-Coefficients.pdf
//
pub fn make_peak_eq_constant_q(frequency_center: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {
// This specific filter is a port to Rust with modifications from the following example code:
// PEAK/NOTCH FILTER DESIGN
// https://www.dsprelated.com/showcode/169.php#commax_container
//
// Derive coefficients for a peaking filter with a given amplitude and
// bandwidth. All coefficients are calculated as described in Zolzer's
// DAFX book (p. 50 - 55). This algorithm assumes a constant Q-term
// is used through the equation.
//
// Original Author: sparafucile17 08/22/05
//
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let q = q_factor;
let k = f64::tan((PI * frequency_center) / sample_rate as f64);
let mut v0 = 10.0_f64.powf(gain_db / 20.0);
// Invert gain if a cut
if v0 < 1.0 {
v0 = 1.0 / v0;
}
let b0: f64;
let b1: f64;
let b2: f64;
let a1: f64;
let a2: f64;
let _k_sqr = k.powf(2.0);
//***********
// BOOST
//***********
if gain_db > 0.0 {
b0 = (1.0 + ((v0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);
b1 = (2.0 * (_k_sqr - 1.0)) / (1.0 + ((1.0 / q) * k) + _k_sqr);
b2 = (1.0 - ((v0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);
a1 = b1;
a2 = (1.0 - ((1.0 / q) * k) + _k_sqr) / (1.0 + ((1.0 / q) * k) + _k_sqr);
//***********
// CUT
//***********
} else {
b0 = (1.0 + ((1.0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);
b1 = (2.0 * (_k_sqr - 1.0)) / (1.0 + ((v0 / q) * k) + _k_sqr);
b2 = (1.0 - ((1.0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);
a1 = b1;
a2 = (1.0 - ((v0 / q) * k) + _k_sqr) / (1.0 + ((v0 / q) * k) + _k_sqr);
}
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
    // Note: The BiQuad filter fills in a0 with 1.0 automatically.
let _ = filter.set_coefficients(& [a1, a2], & [b0, b1, b2]);
filter
}
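// A minimal sketch (not in the original file) of the 10 band equalizer mentioned
// in the header: one constant-Q peak filter per band, cascaded in series as in
// reference 12. The ISO-style center frequencies and the Q value below are
// assumptions, and feeding audio through the chain (one sample at a time through
// each filter in order) is assumed to be done elsewhere.
#[allow(dead_code)]
pub fn make_10_band_equalizer(sample_rate: u32, gains_db: &[f64; 10]) -> Vec<IIRFilter> {
    let center_frequencies = [
        31.25, 62.5, 125.0, 250.0, 500.0, 1_000.0, 2_000.0, 4_000.0, 8_000.0, 16_000.0,
    ];
    center_frequencies
        .iter()
        .zip(gains_db.iter())
        .map(|(&freq, &gain_db)| {
            // A Q of sqrt(2) keeps roughly octave-wide bands; this is a design choice,
            // not something fixed by the filter derivation above.
            make_peak_eq_constant_q(freq, sample_rate, gain_db, Some(f64::sqrt(2.0)))
        })
        .collect()
}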
/// Creates a low-shelf filter
///
/// In Python:
/// >>> filter = make_lowshelf(1000, 48000, 6)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [3.0409336710888786, -5.608870992220748, 2.602157875636628, 3.139954022810743,
/// -5.591841778072785, 2.5201667380627257]
///
pub fn make_lowshelf(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let big_a = 10.0_f64.powf(gain_db / 40.0);
let pmc = (big_a + 1.0) - (big_a - 1.0) * _cos;
let ppmc = (big_a + 1.0) + (big_a - 1.0) * _cos;
let mpc = (big_a - 1.0) - (big_a + 1.0) * _cos;
let pmpc = (big_a - 1.0) + (big_a + 1.0) * _cos;
let aa2 = 2.0 * f64::sqrt(big_a) * alpha;
let b0 = big_a * (pmc + aa2);
let b1 = 2.0 * big_a * mpc;
let b2 = big_a * (pmc - aa2);
let a0 = ppmc + aa2;
let a1 = -2.0 * pmpc;
let a2 = ppmc - aa2;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);
filter
}
/// Creates a high-shelf filter
///
/// In Python:
/// >>> filter = make_highshelf(1000, 48000, 6)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [2.2229172136088806, -3.9587208137297303, 1.7841414181566304, 4.295432981120543,
/// -7.922740859457287, 3.6756456963725253]
///
pub fn make_highshelf(frequency: f64, sample_rate: u32, gain_db: f64, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
let alpha = _sin / (2.0 * q_factor);
let big_a = 10.0_f64.powf(gain_db / 40.0);
let pmc = (big_a + 1.0) - (big_a - 1.0) * _cos;
let ppmc = (big_a + 1.0) + (big_a - 1.0) * _cos;
let mpc = (big_a - 1.0) - (big_a + 1.0) * _cos;
let pmpc = (big_a - 1.0) + (big_a + 1.0) * _cos;
let aa2 = 2.0 * f64::sqrt(big_a) * alpha;
let b0 = big_a * (ppmc + aa2);
let b1 = -2.0 * big_a * pmpc;
let b2 = big_a * (ppmc - aa2);
let a0 = pmc + aa2;
let a1 = 2.0 * mpc;
let a2 = pmc - aa2;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b2]);
filter
}
/// Creates a notch filter
///
/// In Python:
/// >>> filter = make_notch(1000, 48000, 10)
/// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
/// [, , , ,
/// , ]
///
pub fn make_notch(frequency: f64, sample_rate: u32, q_factor: Option<f64>) -> IIRFilter {
let q_factor: f64 = if q_factor.is_none() {
1.0 / f64::sqrt(2.0)
} else {
q_factor.unwrap()
};
let w0 = TAU * frequency / sample_rate as f64;
let _sin = f64::sin(w0);
let _cos = f64::cos(w0);
use std::f64::consts::E;
let alpha = _sin * f64::sinh((f64::log(2.0,E) / 2.0) * q_factor * (w0 /_sin ));
let b0 = 1.0;
let b1 = -2.0 * _cos;
let a0 = 1.0 + alpha;
let a1 = -2.0 * _cos;
let a2 = 1.0 - alpha;
let filter_order = 2;
let mut filter = IIRFilter::new(filter_order);
let _ = filter.set_coefficients(& [a0, a1, a2], & [b0, b1, b0]);
filter
}
#[cfg(test)]
mod tests {
use super::*;
fn print_values(target_vec: & Vec<f64>, res_coeffs: & Vec<&f64>) {
println!("\n >>>> target_coefficents");
for str_t in target_vec {
print!("{}, ", str_t);
}
println!("\n >>>> res_coefficents");
for str_t in res_coeffs {
print!("{}, ", str_t);
}
println!("");
}
#[test]
fn test_make_lowpass() {
// >>> filter = make_lowpass(1000, 48000)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.004277569313094809,
// 0.008555138626189618, 0.004277569313094809]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let filter = make_lowpass(frequency, sample_rate, None);
let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,
0.004277569313094809, 0.008555138626189618, 0.004277569313094809];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_highpass() {
// >>> filter = make_highpass(1000, 48000)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9957224306869052,
// -1.9914448613738105, 0.9957224306869052]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let filter = make_highpass(frequency, sample_rate, None);
let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,
0.9957224306869052, -1.9914448613738105, 0.9957224306869052];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_bandpass() {
// >>> filter = make_bandpass(1000, 48000)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.06526309611002579,
// 0, -0.06526309611002579]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let filter = make_bandpass(frequency, sample_rate, None);
let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,
0.06526309611002579, 0.0, -0.06526309611002579];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_allpass() {
// >>> filter = make_allpass(1000, 48000)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [1.0922959556412573, -1.9828897227476208, 0.9077040443587427, 0.9077040443587427,
// -1.9828897227476208, 1.0922959556412573]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let filter = make_allpass(frequency, sample_rate, None);
let target_vec = vec![1.0922959556412573, -1.9828897227476208, 0.9077040443587427,
0.9077040443587427, -1.9828897227476208, 1.0922959556412573];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_peak() {
// >>> filter = make_peak(1000, 48000, 6)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [1.0653405327119334, -1.9828897227476208, 0.9346594672880666, 1.1303715025601122,
// -1.9828897227476208, 0.8696284974398878]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let gain_db = 6.0; // dB
let filter = make_peak(frequency, sample_rate, gain_db, None);
let target_vec = vec![1.0653405327119334, -1.9828897227476208, 0.9346594672880666,
1.1303715025601122, -1.9828897227476208, 0.8696284974398878];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_lowshelf() {
// >>> filter = make_lowshelf(1000, 48000, 6)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [3.0409336710888786, -5.608870992220748, 2.602157875636628, 3.139954022810743,
// -5.591841778072785, 2.5201667380627257]
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let gain_db = 6.0; // dB
let filter = make_lowshelf(frequency, sample_rate, gain_db, None);
let target_vec = vec![3.0409336710888786, -5.608870992220748, 2.602157875636628,
3.139954022810743, -5.591841778072785, 2.5201667380627257];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
#[test]
fn test_make_highshelf() {
// >>> filter = make_highshelf(1000, 48000, 6)
// >>> filter.a_coeffs + filter.b_coeffs # doctest: +NORMALIZE_WHITESPACE
// [2.2229172136088806, -3.9587208137297303, 1.7841414181566304, 4.295432981120543,
// -7.922740859457287, 3.6756456963725253]
//
let frequency = 1_000.0; // Hz
let sample_rate = 48_000; // Samples
let gain_db = 6.0; // dB
let filter = make_highshelf(frequency, sample_rate, gain_db, None);
let target_vec = vec![2.2229172136088806, -3.9587208137297303, 1.7841414181566304,
4.295432981120543, -7.922740859457287, 3.6756456963725253];
let res_coeffs: Vec<&f64> = filter.a_coeffs.iter().chain(filter.b_coeffs.iter()).collect();
print_values(& target_vec, & res_coeffs);
for i in 0..target_vec.len() {
assert_eq!(*(res_coeffs[i]), target_vec[i]);
}
// assert_eq!(true, false);
}
}
| 36.148588 | 123 | 0.562644 |
0a4141e698b44b66be923180755f01913d6e8904 | 12,462 | mod never_type;
mod coercion;
mod regression;
mod simple;
mod patterns;
mod traits;
mod method_resolution;
mod macros;
mod display_source_code;
use std::{env, sync::Arc};
use base_db::{fixture::WithFixture, FileRange, SourceDatabase, SourceDatabaseExt};
use expect_test::Expect;
use hir_def::{
body::{Body, BodySourceMap, SyntheticSyntax},
child_by_source::ChildBySource,
db::DefDatabase,
item_scope::ItemScope,
keys,
nameres::DefMap,
src::HasSource,
AssocItemId, DefWithBodyId, LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::{db::AstDatabase, InFile};
use once_cell::race::OnceBool;
use stdx::format_to;
use syntax::{
algo,
ast::{self, AstNode, NameOwner},
SyntaxNode,
};
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry};
use tracing_tree::HierarchicalLayer;
use crate::{
db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty,
};
// These tests compare the inference results for all expressions in a file
// against snapshots of the expected results using expect. Use
// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
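// Illustrative shape of a `check_types` fixture (added; not a test in this file):
// the caret annotation names the type that inference must produce for the
// expression on the line above it.
//
// check_types(
// r#"
// fn f() {
// let x = 92;
// x;
// //^ i32
// }
// "#,
// );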
fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
static ENABLE: OnceBool = OnceBool::new();
if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
return None;
}
let filter = EnvFilter::from_env("CHALK_DEBUG");
let layer = HierarchicalLayer::default()
.with_indent_lines(true)
.with_ansi(false)
.with_indent_amount(2)
.with_writer(std::io::stderr);
let subscriber = Registry::default().with(filter).with(layer);
Some(tracing::subscriber::set_default(subscriber))
}
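// For example (added note), running `CHALK_DEBUG=info cargo test -p hir_ty` both
// enables the hierarchical layer above and feeds `info` to the EnvFilter as its
// filtering directive.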
fn check_types(ra_fixture: &str) {
check_types_impl(ra_fixture, false)
}
fn check_types_source_code(ra_fixture: &str) {
check_types_impl(ra_fixture, true)
}
fn check_types_impl(ra_fixture: &str, display_source: bool) {
let _tracing = setup_tracing();
let db = TestDB::with_files(ra_fixture);
let mut checked_one = false;
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let ty = type_at_range(&db, FileRange { file_id, range });
let actual = if display_source {
let module = db.module_for_file(file_id);
ty.display_source_code(&db, module).unwrap()
} else {
ty.display_test(&db).to_string()
};
assert_eq!(expected, actual);
checked_one = true;
}
}
assert!(checked_one, "no `//^` annotations found");
}
fn type_at_range(db: &TestDB, pos: FileRange) -> Ty {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_range::<ast::Expr>(file.syntax(), pos.range).unwrap();
let fn_def = expr.syntax().ancestors().find_map(ast::Fn::cast).unwrap();
let module = db.module_for_file(pos.file_id);
let func = *module.child_by_source(db)[keys::FUNCTION]
.get(&InFile::new(pos.file_id.into(), fn_def))
.unwrap();
let (_body, source_map) = db.body_with_source_map(func.into());
if let Some(expr_id) = source_map.node_expr(InFile::new(pos.file_id.into(), &expr)) {
let infer = db.infer(func.into());
return infer[expr_id].clone();
}
panic!("Can't find expression")
}
fn infer(ra_fixture: &str) -> String {
infer_with_mismatches(ra_fixture, false)
}
fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
let _tracing = setup_tracing();
let (db, file_id) = TestDB::with_single_file(content);
let mut buf = String::new();
let mut infer_def = |inference_result: Arc<InferenceResult>,
body_source_map: Arc<BodySourceMap>| {
let mut types: Vec<(InFile<SyntaxNode>, &Ty)> = Vec::new();
let mut mismatches: Vec<(InFile<SyntaxNode>, &TypeMismatch)> = Vec::new();
for (pat, ty) in inference_result.type_of_pat.iter() {
let syntax_ptr = match body_source_map.pat_syntax(pat) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
sp.map(|ptr| {
ptr.either(
|it| it.to_node(&root).syntax().clone(),
|it| it.to_node(&root).syntax().clone(),
)
})
}
Err(SyntheticSyntax) => continue,
};
types.push((syntax_ptr, ty));
}
for (expr, ty) in inference_result.type_of_expr.iter() {
let node = match body_source_map.expr_syntax(expr) {
Ok(sp) => {
let root = db.parse_or_expand(sp.file_id).unwrap();
sp.map(|ptr| ptr.to_node(&root).syntax().clone())
}
Err(SyntheticSyntax) => continue,
};
types.push((node.clone(), ty));
if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr) {
mismatches.push((node, mismatch));
}
}
// sort ranges for consistency
types.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (node, ty) in &types {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.name().unwrap().syntax().text_range(), "self".to_string())
} else {
(node.value.text_range(), node.value.text().to_string().replace("\n", " "))
};
let macro_prefix = if node.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
"{}{:?} '{}': {}\n",
macro_prefix,
range,
ellipsize(text, 15),
ty.display_test(&db)
);
}
if include_mismatches {
mismatches.sort_by_key(|(node, _)| {
let range = node.value.text_range();
(range.start(), range.end())
});
for (src_ptr, mismatch) in &mismatches {
let range = src_ptr.value.text_range();
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
format_to!(
buf,
"{}{:?}: expected {}, got {}\n",
macro_prefix,
range,
mismatch.expected.display_test(&db),
mismatch.actual.display_test(&db),
);
}
}
};
let module = db.module_for_file(file_id);
let def_map = module.def_map(&db);
let mut defs: Vec<DefWithBodyId> = Vec::new();
visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
defs.sort_by_key(|def| match def {
DefWithBodyId::FunctionId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::ConstId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
DefWithBodyId::StaticId(it) => {
let loc = it.lookup(&db);
loc.source(&db).value.syntax().text_range().start()
}
});
for def in defs {
let (_body, source_map) = db.body_with_source_map(def);
let infer = db.infer(def);
infer_def(infer, source_map);
}
buf.truncate(buf.trim_end().len());
buf
}
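// Format note (added): each line pushed into the buffer above has the shape
// `<range> '<ellipsized text>': <type>`, e.g. `10..11 'x': i32`, with a leading `!`
// when the node comes from a macro expansion; when `include_mismatches` is set,
// lines like `<range>: expected <ty>, got <ty>` are appended after the type listing.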
fn visit_module(
db: &TestDB,
crate_def_map: &DefMap,
module_id: LocalModuleId,
cb: &mut dyn FnMut(DefWithBodyId),
) {
visit_scope(db, crate_def_map, &crate_def_map[module_id].scope, cb);
for impl_id in crate_def_map[module_id].scope.impls() {
let impl_data = db.impl_data(impl_id);
for &item in impl_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
}
AssocItemId::ConstId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
}
AssocItemId::TypeAliasId(_) => (),
}
}
}
fn visit_scope(
db: &TestDB,
crate_def_map: &DefMap,
scope: &ItemScope,
cb: &mut dyn FnMut(DefWithBodyId),
) {
for decl in scope.declarations() {
match decl {
ModuleDefId::FunctionId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
}
ModuleDefId::ConstId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
}
ModuleDefId::StaticId(it) => {
let def = it.into();
cb(def);
let body = db.body(def);
visit_body(db, &body, cb);
}
ModuleDefId::TraitId(it) => {
let trait_data = db.trait_data(it);
for &(_, item) in trait_data.items.iter() {
match item {
AssocItemId::FunctionId(it) => cb(it.into()),
AssocItemId::ConstId(it) => cb(it.into()),
AssocItemId::TypeAliasId(_) => (),
}
}
}
ModuleDefId::ModuleId(it) => visit_module(db, crate_def_map, it.local_id, cb),
_ => (),
}
}
}
fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(DefWithBodyId)) {
for def_map in body.block_scopes.iter().filter_map(|block| db.block_def_map(*block)) {
for (mod_id, _) in def_map.modules() {
visit_module(db, &def_map, mod_id, cb);
}
}
}
}
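// Worked example (added): ellipsize("hello world".to_string(), 5) keeps one character
// from each end and yields "h...d"; the boundary loops below only shift the split
// when it would otherwise land inside a multi-byte character.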
fn ellipsize(mut text: String, max_len: usize) -> String {
if text.len() <= max_len {
return text;
}
let ellipsis = "...";
let e_len = ellipsis.len();
let mut prefix_len = (max_len - e_len) / 2;
while !text.is_char_boundary(prefix_len) {
prefix_len += 1;
}
let mut suffix_len = max_len - e_len - prefix_len;
while !text.is_char_boundary(text.len() - suffix_len) {
suffix_len += 1;
}
text.replace_range(prefix_len..text.len() - suffix_len, ellipsis);
text
}
#[test]
fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let (mut db, pos) = TestDB::with_position(
"
//- /lib.rs
fn foo() -> i32 {
$01 + 1
}
",
);
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
});
});
assert!(format!("{:?}", events).contains("infer"))
}
let new_text = "
fn foo() -> i32 {
1
+
1
}
"
.to_string();
db.set_file_text(pos.file_id, Arc::new(new_text));
{
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id);
let crate_def_map = module.def_map(&db);
visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
db.infer(def);
});
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}
}
fn check_infer(ra_fixture: &str, expect: Expect) {
let mut actual = infer(ra_fixture);
actual.push('\n');
expect.assert_eq(&actual);
}
fn check_infer_with_mismatches(ra_fixture: &str, expect: Expect) {
let mut actual = infer_with_mismatches(ra_fixture, true);
actual.push('\n');
expect.assert_eq(&actual);
}
| 33.5 | 100 | 0.531456 |
8936c966c4087f39b71bcda1ec358505fc68c325 | 367 | //! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_1305.hrx"
#[test]
fn test() {
assert_eq!(
crate::rsass(
".foo {\
\n content: call(\'unquote\', \'foo\', ()...);\
\n}\
\n"
)
.unwrap(),
".foo {\
\n content: foo;\
\n}\
\n"
);
}
| 19.315789 | 83 | 0.392371 |
906eecc1f995c5d566d380a27f7a5f2f0d40e721 | 724 | //! Validate
#![allow(missing_docs)]
#![no_main]
use libfuzzer_sys::fuzz_target;
use oapth::{Commands, Migration, MigrationGroup};
use tokio::runtime::Runtime;
#[derive(Debug, arbitrary::Arbitrary)]
struct Data {
m_name: String,
m_sql_down: String,
m_sql_up: String,
m_version: i32,
mg_name: String,
mg_version: i32,
}
fuzz_target!(|data: Data| {
let mut rt = Runtime::new().unwrap();
rt.block_on(async {
let mut c = Commands::new(());
let mg = MigrationGroup::new(data.mg_version, data.mg_name);
let ms = [Migration::new(
[].iter().copied(),
data.m_version,
data.m_name,
data.m_sql_down,
data.m_sql_up,
)];
let _ = c.validate(&mg, ms.iter());
});
});
| 20.685714 | 64 | 0.633978 |
f59e41b07de53416f1482ea748d4582b956de7c3 | 2,920 | use cosmwasm_std::{CanonicalAddr, StdResult, Storage, Uint128};
use cosmwasm_storage::{Bucket, ReadonlyBucket};
use pylon_utils::common::OrderBy;
use pylon_utils::range::{calc_range_end_addr, calc_range_start_addr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::constants::{DEFAULT_QUERY_LIMIT, MAX_QUERY_LIMIT};
#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq, JsonSchema)]
pub struct User {
pub swapped_in: Uint128,
pub swapped_out: Uint128,
pub swapped_out_claimed: Uint128,
}
impl User {
pub fn load(storage: &dyn Storage, owner: &CanonicalAddr) -> Self {
ReadonlyBucket::<Self>::new(storage, super::PREFIX_USER)
.load(owner.as_slice())
.unwrap_or_default()
}
pub fn load_range(
storage: &dyn Storage,
start_after: Option<CanonicalAddr>,
limit: Option<u32>,
order: Option<OrderBy>,
) -> Vec<(CanonicalAddr, Self)> {
let (start, end, order_by) = match order {
Some(OrderBy::Asc) => (calc_range_start_addr(start_after), None, OrderBy::Asc),
_ => (None, calc_range_end_addr(start_after), OrderBy::Desc),
};
let limit = limit.unwrap_or(DEFAULT_QUERY_LIMIT).min(MAX_QUERY_LIMIT) as usize;
ReadonlyBucket::<Self>::new(storage, super::PREFIX_USER)
.range(start.as_deref(), end.as_deref(), order_by.into())
.take(limit)
.map(|item| -> (CanonicalAddr, Self) {
let (k, v) = item.unwrap();
(CanonicalAddr::from(k.as_slice()), v)
})
.collect()
}
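// Pagination sketch (added; `storage` is any value implementing Storage): fetch the
// next page of at most 30 users (additionally capped by MAX_QUERY_LIMIT) in ascending
// order, passing the last address from the previous page as the `start_after` cursor.
//
// let page = User::load_range(storage, Some(last_addr), Some(30), Some(OrderBy::Asc));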
pub fn save(storage: &mut dyn Storage, owner: &CanonicalAddr, user: &Self) -> StdResult<()> {
Bucket::<Self>::new(storage, super::PREFIX_USER).save(owner.as_slice(), user)
}
pub fn remove(storage: &mut dyn Storage, owner: &CanonicalAddr) {
Bucket::<Self>::new(storage, super::PREFIX_USER).remove(owner.as_slice())
}
pub fn register_whitelist(storage: &mut dyn Storage, owner: &CanonicalAddr) -> StdResult<()> {
Self::save_whitelist(storage, owner, true)
}
pub fn unregister_whitelist(storage: &mut dyn Storage, owner: &CanonicalAddr) -> StdResult<()> {
Self::save_whitelist(storage, owner, false)
}
fn save_whitelist(
storage: &mut dyn Storage,
owner: &CanonicalAddr,
whitelisted: bool,
) -> StdResult<()> {
Bucket::<bool>::multilevel(
storage,
&[super::PREFIX_USER_INDEX, super::PREFIX_WHITELIST],
)
.save(owner.as_slice(), &whitelisted)
}
pub fn is_whitelisted(storage: &dyn Storage, owner: &CanonicalAddr) -> bool {
ReadonlyBucket::<bool>::multilevel(
storage,
&[super::PREFIX_USER_INDEX, super::PREFIX_WHITELIST],
)
.load(owner.as_slice())
.unwrap_or_default()
}
}
| 35.180723 | 100 | 0.622945 |
08146c0b065feb90fec88f37983bfb03d3da973b | 1,283 | extern crate tokio_timer;
extern crate futures;
extern crate futures_cpupool;
use tokio_timer::Timer;
use futures::Future;
use std::time::Duration;
use futures_cpupool::CpuPool;
const BIG_PRIME: u64 = 15485867;
// checks whether a number is prime, slowly
fn is_prime(num: u64) -> bool {
for i in 2..num {
if num % i == 0 { return false }
}
true
}
fn main1() {
// set up a thread pool
let pool = CpuPool::new_num_cpus();
// spawn our computation, getting back a *future*
let prime = pool.spawn_fn(|| {
// For reasons we'll see later, we need to return a Result here
let res: Result<bool, ()> = Ok(is_prime(BIG_PRIME));
res
});
println!("Created the future");
}
fn main() {
let pool = CpuPool::new(4);
let timer = Timer::default();
// a future that fails with Err(()) once the 700 ms timeout elapses
let timeout = timer.sleep(Duration::from_millis(700))
.then(|_| Err(()));
// a future that resolves to whether BIG_PRIME is prime, computed on the pool
let prime = pool.spawn_fn(|| {
Ok(is_prime(BIG_PRIME))
});
let winner = timeout.select(prime).map(|(win, _)| win);
match winner.wait() {
Ok(true) => println!("Prime"),
Ok(false) => println!("Not prime"),
Err(_) => println!("Timed out"),
}
}
| 23.327273 | 71 | 0.600935 |
79ddedd4187560fbfe8e4c2e44da0e170545f552 | 638 | #![feature(trait_upcasting)]
#![allow(incomplete_features)]
trait Foo: Bar<i32> + Bar<u32> {}
trait Bar<T> {
fn bar(&self) -> Option<T> {
None
}
}
fn test_specific(x: &dyn Foo) {
let _ = x as &dyn Bar<i32>; // OK
let _ = x as &dyn Bar<u32>; // OK
}
fn test_unknown_version(x: &dyn Foo) {
let _ = x as &dyn Bar<_>; // Ambiguous
//~^ ERROR non-primitive cast
//~^^ ERROR the trait bound `&dyn Foo: Bar<_>` is not satisfied
}
fn test_infer_version(x: &dyn Foo) {
let a = x as &dyn Bar<_>; // OK
let _: Option<u32> = a.bar();
}
fn main() {}
| 22.785714 | 93 | 0.523511 |
ac91c40834603786f19ff345c3fc172ccbc3ddff | 5,331 | #![deny(warnings)]
use ucx_sys::ucs_status_ptr_t;
use ucx_sys::ucs_status_t;
use ucx_sys::UCS_PTR_IS_ERR;
use ucx_sys::UCS_PTR_RAW_STATUS;
#[macro_use]
extern crate log;
#[cfg(test)]
macro_rules! spawn_thread {
($future:expr) => {
std::thread::spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_time()
.build()
.unwrap();
let local = tokio::task::LocalSet::new();
local.block_on(&rt, $future);
println!("after block!");
})
};
}
pub mod ucp;
#[repr(i8)]
#[derive(thiserror::Error, Debug, PartialEq, Eq)]
pub enum Error {
#[error("Operation in progress")]
Inprogress,
#[error("No pending message")]
NoMessage,
#[error("No resources are available to initiate the operation")]
NoResource,
#[error("Input/output error")]
IoError,
#[error("Out of memory")]
NoMemory,
#[error("Invalid parameter")]
InvalidParam,
#[error("Destination is unreachable")]
Unreachable,
#[error("Address not valid")]
InvalidAddr,
#[error("Function not implemented")]
NotImplemented,
#[error("Message truncated")]
MessageTruncated,
#[error("No progress")]
NoProgress,
#[error("Provided buffer is too small")]
BufferTooSmall,
#[error("No such element")]
NoElem,
#[error("Failed to connect some of the requested endpoints")]
SomeConnectsFailed,
#[error("No such device")]
NoDevice,
#[error("Device is busy")]
Busy,
#[error("Request canceled")]
Canceled,
#[error("Shared memory error")]
ShmemSegment,
#[error("Element already exists")]
AlreadyExists,
#[error("Index out of range")]
OutOfRange,
#[error("Operation timed out")]
Timeout,
#[error("User-defined limit was reached")]
ExceedsLimit,
#[error("Unsupported operation")]
Unsupported,
#[error("Operation rejected by remote peer")]
Rejected,
#[error("Endpoint is not connected")]
NotConnected,
#[error("Connection reset by remote peer")]
ConnectionReset,
#[error("First link failure")]
FirstLinkFailure,
#[error("Last link failure")]
LastLinkFailure,
#[error("First endpoint failure")]
FirstEndpointFailure,
#[error("Last endpoint failure")]
LastEndpointFailure,
#[error("Endpoint timeout")]
EndpointTimeout,
#[error("Unknown error")]
Unknown,
}
impl Error {
// status != UCS_OK
fn from_error(status: ucs_status_t) -> Self {
debug_assert_ne!(status, ucs_status_t::UCS_OK);
match status {
ucs_status_t::UCS_INPROGRESS => Self::Inprogress,
ucs_status_t::UCS_ERR_NO_MESSAGE => Self::NoMessage,
ucs_status_t::UCS_ERR_NO_RESOURCE => Self::NoResource,
ucs_status_t::UCS_ERR_IO_ERROR => Self::IoError,
ucs_status_t::UCS_ERR_NO_MEMORY => Self::NoMemory,
ucs_status_t::UCS_ERR_INVALID_PARAM => Self::InvalidParam,
ucs_status_t::UCS_ERR_UNREACHABLE => Self::Unreachable,
ucs_status_t::UCS_ERR_INVALID_ADDR => Self::InvalidAddr,
ucs_status_t::UCS_ERR_NOT_IMPLEMENTED => Self::NotImplemented,
ucs_status_t::UCS_ERR_MESSAGE_TRUNCATED => Self::MessageTruncated,
ucs_status_t::UCS_ERR_NO_PROGRESS => Self::NoProgress,
ucs_status_t::UCS_ERR_BUFFER_TOO_SMALL => Self::BufferTooSmall,
ucs_status_t::UCS_ERR_NO_ELEM => Self::NoElem,
ucs_status_t::UCS_ERR_SOME_CONNECTS_FAILED => Self::SomeConnectsFailed,
ucs_status_t::UCS_ERR_NO_DEVICE => Self::NoDevice,
ucs_status_t::UCS_ERR_BUSY => Self::Busy,
ucs_status_t::UCS_ERR_CANCELED => Self::Canceled,
ucs_status_t::UCS_ERR_SHMEM_SEGMENT => Self::ShmemSegment,
ucs_status_t::UCS_ERR_ALREADY_EXISTS => Self::AlreadyExists,
ucs_status_t::UCS_ERR_OUT_OF_RANGE => Self::OutOfRange,
ucs_status_t::UCS_ERR_TIMED_OUT => Self::Timeout,
ucs_status_t::UCS_ERR_EXCEEDS_LIMIT => Self::ExceedsLimit,
ucs_status_t::UCS_ERR_UNSUPPORTED => Self::Unsupported,
ucs_status_t::UCS_ERR_REJECTED => Self::Rejected,
ucs_status_t::UCS_ERR_NOT_CONNECTED => Self::NotConnected,
ucs_status_t::UCS_ERR_CONNECTION_RESET => Self::ConnectionReset,
ucs_status_t::UCS_ERR_FIRST_LINK_FAILURE => Self::FirstLinkFailure,
ucs_status_t::UCS_ERR_LAST_LINK_FAILURE => Self::LastLinkFailure,
ucs_status_t::UCS_ERR_FIRST_ENDPOINT_FAILURE => Self::FirstEndpointFailure,
ucs_status_t::UCS_ERR_ENDPOINT_TIMEOUT => Self::EndpointTimeout,
ucs_status_t::UCS_ERR_LAST_ENDPOINT_FAILURE => Self::LastEndpointFailure,
_ => Self::Unknown,
}
}
#[inline]
fn from_status(status: ucs_status_t) -> Result<(), Self> {
if status == ucs_status_t::UCS_OK {
Ok(())
} else {
Err(Self::from_error(status))
}
}
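// Example (added): Error::from_status(ucs_status_t::UCS_OK) returns Ok(()); every
// other status goes through from_error above, with unrecognized codes collapsing to
// Error::Unknown.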
#[inline]
#[allow(dead_code)]
fn from_ptr(ptr: ucs_status_ptr_t) -> Result<(), Self> {
if UCS_PTR_IS_ERR(ptr) {
Err(Self::from_error(UCS_PTR_RAW_STATUS(ptr)))
} else {
Ok(())
}
}
}
| 33.111801 | 87 | 0.631401 |
16cf459b8ff87dcb86d61c49a29d191af64ccc9f | 328 | pub(crate) mod apu;
pub mod cart;
pub mod console;
pub mod controller;
pub(crate) mod cpu;
pub(crate) mod memory;
pub mod ports;
pub(crate) mod ppu;
pub use cart::{Cart, CartReadingError};
pub use console::Console;
pub use controller::ButtonState;
pub use ports::{AudioDevice, PixelBuffer, VideoDevice, NES_HEIGHT, NES_WIDTH};
| 23.428571 | 78 | 0.759146 |
8775281aeeaee64e5021dc5977484a514a1beda6 | 6,972 | // WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "android-location-GpsSatellite"))]
__jni_bindgen! {
/// public final class [GpsSatellite](https://developer.android.com/reference/android/location/GpsSatellite.html)
///
/// Required feature: android-location-GpsSatellite
#[deprecated] public final class GpsSatellite ("android/location/GpsSatellite") extends crate::java::lang::Object {
// // Not emitting: Non-public method
// /// [GpsSatellite](https://developer.android.com/reference/android/location/GpsSatellite.html#GpsSatellite(int))
// fn new<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::location::GpsSatellite>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// // class.path == "android/location/GpsSatellite", java.flags == (empty), .name == "<init>", .descriptor == "(I)V"
// unsafe {
// let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0)];
// let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "<init>\0", "(I)V\0");
// __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
// }
// }
/// [getPrn](https://developer.android.com/reference/android/location/GpsSatellite.html#getPrn())
#[deprecated] pub fn getPrn<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "getPrn", .descriptor == "()I"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "getPrn\0", "()I\0");
__jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getSnr](https://developer.android.com/reference/android/location/GpsSatellite.html#getSnr())
#[deprecated] pub fn getSnr<'env>(&'env self) -> __jni_bindgen::std::result::Result<f32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "getSnr", .descriptor == "()F"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "getSnr\0", "()F\0");
__jni_env.call_float_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getElevation](https://developer.android.com/reference/android/location/GpsSatellite.html#getElevation())
#[deprecated] pub fn getElevation<'env>(&'env self) -> __jni_bindgen::std::result::Result<f32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "getElevation", .descriptor == "()F"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "getElevation\0", "()F\0");
__jni_env.call_float_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [getAzimuth](https://developer.android.com/reference/android/location/GpsSatellite.html#getAzimuth())
#[deprecated] pub fn getAzimuth<'env>(&'env self) -> __jni_bindgen::std::result::Result<f32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "getAzimuth", .descriptor == "()F"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "getAzimuth\0", "()F\0");
__jni_env.call_float_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [hasEphemeris](https://developer.android.com/reference/android/location/GpsSatellite.html#hasEphemeris())
#[deprecated] pub fn hasEphemeris<'env>(&'env self) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "hasEphemeris", .descriptor == "()Z"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "hasEphemeris\0", "()Z\0");
__jni_env.call_boolean_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [hasAlmanac](https://developer.android.com/reference/android/location/GpsSatellite.html#hasAlmanac())
#[deprecated] pub fn hasAlmanac<'env>(&'env self) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "hasAlmanac", .descriptor == "()Z"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "hasAlmanac\0", "()Z\0");
__jni_env.call_boolean_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
/// [usedInFix](https://developer.android.com/reference/android/location/GpsSatellite.html#usedInFix())
#[deprecated] pub fn usedInFix<'env>(&'env self) -> __jni_bindgen::std::result::Result<bool, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
// class.path == "android/location/GpsSatellite", java.flags == PUBLIC, .name == "usedInFix", .descriptor == "()Z"
unsafe {
let __jni_args = [];
let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
let (__jni_class, __jni_method) = __jni_env.require_class_method("android/location/GpsSatellite\0", "usedInFix\0", "()Z\0");
__jni_env.call_boolean_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
}
}
}
}
| 69.72 | 237 | 0.626076 |
7520b2618a06f698c543b9bd4355b446080cc72f | 37,755 | use pest::iterators::Pairs;
use pest::Parser;
use std::collections::HashMap;
use std::fs;
use std::str::FromStr;
use strum_macros::EnumString;
#[derive(Parser)]
#[grammar = "asm.pest"]
pub struct AssemblyParser;
pub fn parse(file_name: String) -> Vec<Instruction> {
let unparsed_file = fs::read_to_string(file_name).unwrap();
let assembly = AssemblyParser::parse(Rule::assembly, &unparsed_file)
.expect("unsuccessful parse")
.next()
.unwrap();
let raw_instructions = assembly.into_inner();
let labels = parse_labels(raw_instructions.clone());
let mut instructions = Vec::new();
for instruction in raw_instructions {
let rule = instruction.as_rule();
// ignore comments and end of input
if !matches!(rule, Rule::comment | Rule::EOI) {
let mut inner_instruction_pairs = instruction.into_inner();
let inner_instruction = inner_instruction_pairs.peek().unwrap();
let mut rule = inner_instruction.as_rule();
if matches!(rule, Rule::label) {
inner_instruction_pairs.next();
}
let mut pairs = inner_instruction_pairs.peek().unwrap().into_inner();
let inner_instruction = pairs.peek().unwrap();
rule = inner_instruction.as_rule();
let command = InstructionCommand::from_str(inner_instruction.as_str()).unwrap();
pairs.next();
match rule {
Rule::intermediate_reg_command => {
let register =
InstructionRegister::from_str(pairs.peek().unwrap().as_str()).unwrap();
pairs.next();
let mut intermediate = Vec::new();
for char in pairs.as_str().chars() {
if char == '0' {
intermediate.push(0);
} else {
intermediate.push(1);
}
}
let instruction = Instruction::IntermediateRegister(
command,
binary_to_int(&intermediate),
register,
);
instructions.push(instruction);
}
Rule::intermediate_16_bit_command => {
let register_pair: InstructionRegisterPair;
let unparsed_register = pairs.peek().unwrap().as_str();
// TODO make this prettier
if unparsed_register == "SP" {
register_pair = InstructionRegisterPair::SP;
} else if unparsed_register == "PSW" {
register_pair = InstructionRegisterPair::FA;
} else {
match InstructionRegister::from_str(unparsed_register).unwrap() {
InstructionRegister::B => register_pair = InstructionRegisterPair::BC,
InstructionRegister::D => register_pair = InstructionRegisterPair::DE,
InstructionRegister::H => register_pair = InstructionRegisterPair::HL,
_ => panic!("invalid register"),
}
}
pairs.next();
let mut raw_intermediate = Vec::new();
for char in pairs.as_str().chars() {
if char == '0' {
raw_intermediate.push(0);
} else {
raw_intermediate.push(1);
}
}
let high_bits = (binary_to_int(&raw_intermediate[0..8]) as i16) << 8;
let low_bits = (binary_to_int(&raw_intermediate[8..16]) as i16) & 255; // mask to the low byte to avoid sign extension
let instruction = Instruction::Intermediate16Bit(
command,
register_pair,
high_bits + low_bits,
);
instructions.push(instruction);
}
Rule::double_reg_command => {
let register0 =
InstructionRegister::from_str(pairs.peek().unwrap().as_str()).unwrap();
pairs.next();
let register1 =
InstructionRegister::from_str(pairs.peek().unwrap().as_str()).unwrap();
pairs.next();
let instruction = Instruction::DoubleRegister(command, (register0, register1));
instructions.push(instruction);
}
Rule::single_reg_command => {
let register =
InstructionRegister::from_str(pairs.peek().unwrap().as_str()).unwrap();
pairs.next();
let instruction = Instruction::SingleRegister(command, register);
instructions.push(instruction);
}
Rule::pair_reg_command => {
let register_pair: InstructionRegisterPair;
let unparsed_register = pairs.peek().unwrap().as_str();
// TODO make this prettier
if unparsed_register == "SP" {
register_pair = InstructionRegisterPair::SP;
} else if unparsed_register == "PSW" {
register_pair = InstructionRegisterPair::FA;
} else {
match InstructionRegister::from_str(unparsed_register).unwrap() {
InstructionRegister::B => register_pair = InstructionRegisterPair::BC,
InstructionRegister::D => register_pair = InstructionRegisterPair::DE,
InstructionRegister::H => register_pair = InstructionRegisterPair::HL,
_ => panic!("invalid register"),
}
}
pairs.next();
let instruction = Instruction::PairRegister(command, register_pair);
instructions.push(instruction);
}
Rule::intermediate_16_bit_command_no_reg => {
let mut raw_intermediate = Vec::new();
for char in pairs.as_str().chars() {
if char == '0' {
raw_intermediate.push(0);
} else {
raw_intermediate.push(1);
}
}
let high_bits = (binary_to_int(&raw_intermediate[0..8]) as i16) << 8;
let low_bits = (binary_to_int(&raw_intermediate[8..16]) as i16) & 255;
let instruction =
Instruction::Intermediate16BitNoReg(command, high_bits + low_bits);
instructions.push(instruction);
}
Rule::intermediate_command => {
let mut intermediate = Vec::new();
for char in pairs.as_str().chars() {
if char == '0' {
intermediate.push(0);
} else {
intermediate.push(1);
}
}
let instruction =
Instruction::Intermediate(command, binary_to_int(&intermediate));
instructions.push(instruction);
}
Rule::no_reg_command => {
let instruction = Instruction::NoRegister(command);
instructions.push(instruction);
}
Rule::label_command => {
let label = pairs.peek().unwrap().as_span().as_str();
let instruction = Instruction::Label(command, *labels.get(label).unwrap());
instructions.push(instruction);
}
_ => panic!("invalid rule: {:?}", rule),
}
}
}
instructions
}
fn parse_labels(raw_instructions: Pairs<Rule>) -> HashMap<String, u16> {
let mut labels = HashMap::new();
let mut label_address = 0;
let mut label = "".to_owned();
for instruction in raw_instructions {
let rule = instruction.as_rule();
// ignore comments and end of input
if !matches!(rule, Rule::comment | Rule::EOI) {
let mut inner_instruction_pairs = instruction.into_inner();
let inner_instruction = inner_instruction_pairs.peek().unwrap();
let mut rule = inner_instruction.as_rule();
if matches!(rule, Rule::label) {
label = inner_instruction.as_str().to_string();
for value in labels.keys() {
if label == *value {
panic!("can't have duplicate labels: {:?}", label);
}
}
if InstructionCommand::from_str(&label).is_ok()
|| InstructionRegister::from_str(&label).is_ok()
{
panic!("label can't occupy reserved names: {:?}", label);
}
inner_instruction_pairs.next();
}
let mut pairs = inner_instruction_pairs.peek().unwrap().into_inner();
let inner_instruction = pairs.peek().unwrap();
rule = inner_instruction.as_rule();
pairs.next();
if !label.is_empty() {
labels.insert(label.trim_end_matches(':').to_owned(), label_address);
label = "".to_owned();
}
match rule {
Rule::intermediate_reg_command => {
label_address += 2;
}
Rule::intermediate_16_bit_command => {
label_address += 3;
}
Rule::double_reg_command => {
label_address += 1;
}
Rule::single_reg_command => {
label_address += 1;
}
Rule::pair_reg_command => {
label_address += 1;
}
Rule::intermediate_16_bit_command_no_reg => {
label_address += 3;
}
Rule::intermediate_command => {
label_address += 2;
}
Rule::no_reg_command => {
label_address += 1;
}
Rule::label_command => {
label_address += 3;
}
_ => panic!("invalid rule: {:?}", rule),
}
}
}
labels
}
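// Worked example (added; exact line syntax comes from asm.pest). For a program whose
// instructions are, in order:
// MVI A,<8 binary digits> -> 2 bytes, addresses 0..=1
// LOOP: DCR A -> 1 byte, address 2
// JNZ LOOP -> 3 bytes, addresses 3..=5
// HLT -> 1 byte, address 6
// this pass returns {"LOOP": 2}: a label takes the address of the instruction it is
// attached to, and the counter advances by each instruction's encoded size.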
#[derive(Debug, EnumString, Clone, PartialEq)]
pub enum InstructionCommand {
#[strum(serialize = "MVI")]
Mvi,
#[strum(serialize = "MOV")]
Mov,
#[strum(serialize = "ADD")]
Add,
#[strum(serialize = "ADC")]
Adc,
#[strum(serialize = "ADI")]
Adi,
#[strum(serialize = "ACI")]
Aci,
#[strum(serialize = "SUI")]
Sui,
#[strum(serialize = "SUB")]
Sub,
#[strum(serialize = "INR")]
Inr,
#[strum(serialize = "DCR")]
Dcr,
#[strum(serialize = "ANA")]
Ana,
#[strum(serialize = "STC")]
Stc,
#[strum(serialize = "CMC")]
Cmc,
#[strum(serialize = "CMA")]
Cma,
#[strum(serialize = "RLC")]
Rlc,
#[strum(serialize = "RRC")]
Rrc,
#[strum(serialize = "RAL")]
Ral,
#[strum(serialize = "RAR")]
Rar,
#[strum(serialize = "ORA")]
Ora,
#[strum(serialize = "DAA")]
Daa,
#[strum(serialize = "STAX")]
Stax,
#[strum(serialize = "LDAX")]
Ldax,
#[strum(serialize = "CMP")]
Cmp,
#[strum(serialize = "XRA")]
Xra,
#[strum(serialize = "SBB")]
Sbb,
#[strum(serialize = "XCHG")]
Xchg,
#[strum(serialize = "SPHL")]
Sphl,
#[strum(serialize = "XTHL")]
Xthl,
#[strum(serialize = "DCX")]
Dcx,
#[strum(serialize = "INX")]
Inx,
#[strum(serialize = "DAD")]
Dad,
#[strum(serialize = "PUSH")]
Push,
#[strum(serialize = "POP")]
Pop,
#[strum(serialize = "ORI")]
Ori,
#[strum(serialize = "XRI")]
Xri,
#[strum(serialize = "ANI")]
Ani,
#[strum(serialize = "CPI")]
Cpi,
#[strum(serialize = "SBI")]
Sbi,
#[strum(serialize = "LXI")]
Lxi,
#[strum(serialize = "STA")]
Sta,
#[strum(serialize = "LDA")]
Lda,
#[strum(serialize = "SHLD")]
Shld,
#[strum(serialize = "LHLD")]
Lhld,
#[strum(serialize = "PCHL")]
Pchl,
#[strum(serialize = "JMP")]
Jmp,
#[strum(serialize = "JC")]
Jc,
#[strum(serialize = "JZ")]
Jz,
#[strum(serialize = "JNC")]
Jnc,
#[strum(serialize = "JNZ")]
Jnz,
#[strum(serialize = "JM")]
Jm,
#[strum(serialize = "JP")]
Jp,
#[strum(serialize = "JPE")]
Jpe,
#[strum(serialize = "JPO")]
Jpo,
#[strum(serialize = "HLT")]
Hlt,
}
pub trait InstructionArgument {
fn encode(&self) -> Vec<u8>;
fn decode(raw_bits: &[u8]) -> Self;
}
#[derive(Debug, Copy, Clone, EnumString, PartialEq)]
pub enum InstructionRegister {
A,
B,
C,
D,
E,
H,
L,
M,
}
impl InstructionArgument for InstructionRegister {
fn encode(&self) -> Vec<u8> {
match self {
InstructionRegister::A => vec![1, 1, 1],
InstructionRegister::B => vec![0, 0, 0],
InstructionRegister::C => vec![0, 0, 1],
InstructionRegister::D => vec![0, 1, 0],
InstructionRegister::E => vec![0, 1, 1],
InstructionRegister::H => vec![1, 0, 0],
InstructionRegister::L => vec![1, 0, 1],
InstructionRegister::M => vec![1, 1, 0],
}
}
fn decode(raw_bits: &[u8]) -> InstructionRegister {
match *raw_bits {
[1, 1, 1] => InstructionRegister::A,
[0, 0, 0] => InstructionRegister::B,
[0, 0, 1] => InstructionRegister::C,
[0, 1, 0] => InstructionRegister::D,
[0, 1, 1] => InstructionRegister::E,
[1, 0, 0] => InstructionRegister::H,
[1, 0, 1] => InstructionRegister::L,
[1, 1, 0] => InstructionRegister::M,
_ => panic!("Invalid register"),
}
}
}
impl InstructionRegister {
pub fn to_index(self) -> u8 {
match self {
InstructionRegister::A => 0,
InstructionRegister::B => 1,
InstructionRegister::C => 2,
InstructionRegister::D => 3,
InstructionRegister::E => 4,
InstructionRegister::H => 5,
InstructionRegister::L => 6,
InstructionRegister::M => 7,
}
}
pub fn from_index(index: i32) -> InstructionRegister {
match index {
0 => InstructionRegister::A,
1 => InstructionRegister::B,
2 => InstructionRegister::C,
3 => InstructionRegister::D,
4 => InstructionRegister::E,
5 => InstructionRegister::H,
6 => InstructionRegister::L,
7 => InstructionRegister::M,
_ => panic!("Invalid argument provided!"),
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum InstructionRegisterPair {
BC,
DE,
HL,
SP,
FA,
}
impl InstructionArgument for InstructionRegisterPair {
fn encode(&self) -> Vec<u8> {
match self {
InstructionRegisterPair::BC => vec![0, 0],
InstructionRegisterPair::DE => vec![0, 1],
InstructionRegisterPair::HL => vec![1, 0],
InstructionRegisterPair::SP => vec![1, 1],
InstructionRegisterPair::FA => vec![1, 1],
}
}
fn decode(raw_bits: &[u8]) -> InstructionRegisterPair {
match *raw_bits {
[0, 0] => InstructionRegisterPair::BC,
[0, 1] => InstructionRegisterPair::DE,
[1, 0] => InstructionRegisterPair::HL,
[1, 1] => InstructionRegisterPair::SP,
_ => panic!("Invalid registerpair"),
}
}
}
impl InstructionRegisterPair {
pub fn get_registers(&self) -> (InstructionRegister, InstructionRegister) {
match self {
InstructionRegisterPair::BC => (InstructionRegister::B, InstructionRegister::C),
InstructionRegisterPair::DE => (InstructionRegister::D, InstructionRegister::E),
InstructionRegisterPair::HL => (InstructionRegister::H, InstructionRegister::L),
_ => panic!("invalid register pair"),
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum Instruction {
NoRegister(InstructionCommand),
SingleRegister(InstructionCommand, InstructionRegister),
DoubleRegister(
InstructionCommand,
(InstructionRegister, InstructionRegister),
),
Intermediate(InstructionCommand, i8),
Intermediate16Bit(InstructionCommand, InstructionRegisterPair, i16),
Intermediate16BitNoReg(InstructionCommand, i16),
IntermediateRegister(InstructionCommand, i8, InstructionRegister),
PairRegister(InstructionCommand, InstructionRegisterPair),
Label(InstructionCommand, u16),
}
impl Instruction {
pub fn get_size(&self) -> u8 {
match self {
Instruction::NoRegister(_) => 1,
Instruction::SingleRegister(_, _) => 1,
Instruction::DoubleRegister(_, _) => 1,
Instruction::Intermediate(_, _) => 2,
Instruction::Intermediate16Bit(_, _, _) => 3,
Instruction::Intermediate16BitNoReg(_, _) => 3,
Instruction::IntermediateRegister(_, _, _) => 2,
Instruction::PairRegister(_, _) => 1,
Instruction::Label(_, _) => 3,
}
}
pub fn encode(&self) -> Vec<u8> {
match self {
Instruction::NoRegister(command) => match command {
InstructionCommand::Stc => {
vec![0, 0, 1, 1, 0, 1, 1, 1]
}
InstructionCommand::Cmc => {
vec![0, 0, 1, 1, 1, 1, 1, 1]
}
InstructionCommand::Cma => {
vec![0, 0, 1, 0, 1, 1, 1, 1]
}
InstructionCommand::Rlc => {
vec![0, 0, 0, 0, 0, 1, 1, 1]
}
InstructionCommand::Rrc => {
vec![0, 0, 0, 0, 1, 1, 1, 1]
}
InstructionCommand::Ral => {
vec![0, 0, 0, 1, 0, 1, 1, 1]
}
InstructionCommand::Rar => {
vec![0, 0, 0, 1, 1, 1, 1, 1]
}
InstructionCommand::Daa => {
vec![0, 0, 1, 0, 0, 1, 1, 1]
}
InstructionCommand::Xchg => {
vec![1, 1, 1, 0, 1, 0, 1, 1]
}
InstructionCommand::Sphl => {
vec![1, 1, 1, 1, 1, 0, 0, 1]
}
InstructionCommand::Xthl => {
vec![1, 1, 1, 0, 0, 0, 1, 1]
}
InstructionCommand::Pchl => {
vec![1, 1, 1, 0, 1, 0, 0, 1]
}
InstructionCommand::Hlt => {
vec![0, 1, 1, 1, 0, 1, 1, 0]
}
_ => panic!("invalid instruction"),
},
Instruction::SingleRegister(command, register) => match command {
InstructionCommand::Add => {
let mut base_result = vec![1, 0, 0, 0, 0];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Adc => {
let mut base_result = vec![1, 0, 0, 0, 1];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Sub => {
let mut base_result = vec![1, 0, 0, 1, 0];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Inr => {
let mut base_result = vec![0, 0];
base_result.append(&mut register.encode());
base_result.append(&mut vec![1, 0, 0]);
base_result
}
InstructionCommand::Dcr => {
let mut base_result = vec![0, 0];
base_result.append(&mut register.encode());
base_result.append(&mut vec![1, 0, 1]);
base_result
}
InstructionCommand::Ana => {
let mut base_result = vec![1, 0, 1, 0, 0];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Ora => {
let mut base_result = vec![1, 0, 1, 1, 0];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Cmp => {
let mut base_result = vec![1, 0, 1, 1, 1];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Xra => {
let mut base_result = vec![1, 0, 1, 0, 1];
base_result.append(&mut register.encode());
base_result
}
InstructionCommand::Sbb => {
let mut base_result = vec![1, 0, 0, 1, 1];
base_result.append(&mut register.encode());
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::DoubleRegister(command, registers) => match command {
InstructionCommand::Mov => {
let mut base_result = vec![0, 1];
base_result.append(&mut registers.0.encode());
base_result.append(&mut registers.1.encode());
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::Intermediate(command, intermediate) => match command {
InstructionCommand::Adi => {
let mut base_result = vec![1, 1, 0, 0, 0, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Aci => {
let mut base_result = vec![1, 1, 0, 0, 1, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Sui => {
let mut base_result = vec![1, 1, 0, 1, 0, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Ori => {
let mut base_result = vec![1, 1, 1, 1, 0, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Xri => {
let mut base_result = vec![1, 1, 1, 0, 1, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Ani => {
let mut base_result = vec![1, 1, 1, 0, 0, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Cpi => {
let mut base_result = vec![1, 1, 1, 1, 1, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
InstructionCommand::Sbi => {
let mut base_result = vec![1, 1, 0, 1, 1, 1, 1, 0];
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::Intermediate16Bit(command, register_pair, intermediate) => match command {
InstructionCommand::Lxi => {
let mut base_result = vec![0, 0];
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![0, 0, 0, 1]);
base_result.append(&mut int_to_binary(*intermediate, 16));
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::Intermediate16BitNoReg(command, intermediate) => match command {
InstructionCommand::Sta => {
let mut base_result = vec![0, 0, 1, 1, 0, 0, 1, 0];
base_result.append(&mut int_to_binary(*intermediate, 16));
base_result
}
InstructionCommand::Lda => {
let mut base_result = vec![0, 0, 1, 1, 1, 0, 1, 0];
base_result.append(&mut int_to_binary(*intermediate, 16));
base_result
}
InstructionCommand::Shld => {
let mut base_result = vec![0, 0, 1, 0, 0, 0, 1, 0];
base_result.append(&mut int_to_binary(*intermediate, 16));
base_result
}
InstructionCommand::Lhld => {
let mut base_result = vec![0, 0, 1, 0, 1, 0, 1, 0];
base_result.append(&mut int_to_binary(*intermediate, 16));
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::IntermediateRegister(command, intermediate, register) => match command {
InstructionCommand::Mvi => {
let mut base_result = vec![0, 0];
base_result.append(&mut register.encode());
base_result.append(&mut vec![1, 1, 0]);
base_result.append(&mut int_to_binary(*intermediate as i16, 8));
base_result
}
_ => panic!("invalid instruction"),
},
Instruction::Label(command, address) => {
let mut base_result = vec![];
match command {
InstructionCommand::Jmp => {
base_result.append(&mut vec![1, 1, 0, 0, 0, 0, 1, 1]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jc => {
base_result.append(&mut vec![1, 1, 0, 1, 1, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jnc => {
base_result.append(&mut vec![1, 1, 0, 1, 0, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jz => {
base_result.append(&mut vec![1, 1, 0, 0, 1, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jnz => {
base_result.append(&mut vec![1, 1, 0, 0, 0, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jm => {
base_result.append(&mut vec![1, 1, 1, 1, 1, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jp => {
base_result.append(&mut vec![1, 1, 1, 1, 0, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jpe => {
base_result.append(&mut vec![1, 1, 1, 0, 1, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
InstructionCommand::Jpo => {
base_result.append(&mut vec![1, 1, 1, 0, 0, 0, 1, 0]);
base_result.append(&mut int_to_binary(*address as i16, 16));
base_result
}
_ => panic!("invalid instruction"),
}
}
Instruction::PairRegister(command, register_pair) => {
let mut base_result = vec![0, 0];
match command {
InstructionCommand::Stax => {
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![0, 0, 1, 0]);
base_result
}
InstructionCommand::Ldax => {
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![1, 0, 1, 0]);
base_result
}
InstructionCommand::Dcx => {
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![1, 0, 1, 1]);
base_result
}
InstructionCommand::Inx => {
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![0, 0, 1, 1]);
base_result
}
InstructionCommand::Dad => {
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![1, 0, 0, 1]);
base_result
}
InstructionCommand::Push => {
base_result = vec![1, 1];
if matches!(register_pair, InstructionRegisterPair::SP) {
panic!("can not use SP in this instruction");
}
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![0, 1, 0, 1]);
base_result
}
InstructionCommand::Pop => {
base_result = vec![1, 1];
if matches!(register_pair, InstructionRegisterPair::SP) {
panic!("can not use SP in this instruction");
}
base_result.append(&mut register_pair.encode());
base_result.append(&mut vec![0, 0, 0, 1]);
base_result
}
_ => panic!("invalid instruction"),
}
}
}
}
}
pub fn int_to_binary(value: i16, mut size: i8) -> Vec<u8> {
let mut result = Vec::new();
size -= 1;
while size >= 0 {
let bit = (value >> size) & 1;
result.push(bit as u8);
size -= 1;
}
result
}
pub fn binary_to_int(intermediate: &[u8]) -> i8 {
let mut result = 0;
for (i, num) in intermediate.iter().enumerate() {
result |= num;
if i != 7 {
result <<= 1;
}
}
result as i8
}
#[cfg(test)]
mod tests {
use crate::assembler::parser::binary_to_int;
use crate::assembler::parser::int_to_binary;
use super::parse;
use super::{InstructionArgument, InstructionRegister, InstructionRegisterPair};
#[test]
fn test_register_encoding() {
assert_eq!(*InstructionRegister::A.encode(), vec![1, 1, 1]);
assert_eq!(*InstructionRegister::B.encode(), vec![0, 0, 0]);
assert_eq!(*InstructionRegister::C.encode(), vec![0, 0, 1]);
assert_eq!(*InstructionRegister::D.encode(), vec![0, 1, 0]);
assert_eq!(*InstructionRegister::E.encode(), vec![0, 1, 1]);
assert_eq!(*InstructionRegister::H.encode(), vec![1, 0, 0]);
assert_eq!(*InstructionRegister::L.encode(), vec![1, 0, 1]);
assert_eq!(*InstructionRegister::M.encode(), vec![1, 1, 0]);
}
#[test]
fn test_register_decoding() {
assert!(matches!(
InstructionRegister::decode(&[1, 1, 1]),
InstructionRegister::A
));
assert!(matches!(
InstructionRegister::decode(&[0, 0, 0]),
InstructionRegister::B
));
assert!(matches!(
InstructionRegister::decode(&[0, 0, 1]),
InstructionRegister::C
));
assert!(matches!(
InstructionRegister::decode(&[0, 1, 0]),
InstructionRegister::D
));
assert!(matches!(
InstructionRegister::decode(&[0, 1, 1]),
InstructionRegister::E
));
assert!(matches!(
InstructionRegister::decode(&[1, 0, 0]),
InstructionRegister::H
));
assert!(matches!(
InstructionRegister::decode(&[1, 0, 1]),
InstructionRegister::L
));
assert!(matches!(
InstructionRegister::decode(&[1, 1, 0]),
InstructionRegister::M
));
}
#[test]
fn test_register_pair_encoding() {
assert_eq!(InstructionRegisterPair::BC.encode(), &[0, 0]);
assert_eq!(InstructionRegisterPair::DE.encode(), &[0, 1]);
assert_eq!(InstructionRegisterPair::HL.encode(), &[1, 0]);
assert_eq!(InstructionRegisterPair::SP.encode(), &[1, 1]);
}
#[test]
fn test_register_pair_decoding() {
assert!(matches!(
InstructionRegisterPair::decode(&[0, 0]),
InstructionRegisterPair::BC
));
assert!(matches!(
InstructionRegisterPair::decode(&[0, 1]),
InstructionRegisterPair::DE
));
assert!(matches!(
InstructionRegisterPair::decode(&[1, 0]),
InstructionRegisterPair::HL
));
assert!(matches!(
InstructionRegisterPair::decode(&[1, 1]),
InstructionRegisterPair::SP
));
}
#[test]
#[should_panic]
fn test_invalid_register_pair_decoding() {
InstructionRegisterPair::decode(&[1, 1, 1]);
}
#[test]
#[should_panic]
fn test_register_decoding_panic() {
InstructionRegister::decode(&[1, 1, 1, 1]);
}
#[test]
fn test_to_index() {
assert_eq!(InstructionRegister::A.to_index(), 0);
assert_eq!(InstructionRegister::B.to_index(), 1);
assert_eq!(InstructionRegister::C.to_index(), 2);
assert_eq!(InstructionRegister::D.to_index(), 3);
assert_eq!(InstructionRegister::E.to_index(), 4);
assert_eq!(InstructionRegister::H.to_index(), 5);
assert_eq!(InstructionRegister::L.to_index(), 6);
assert_eq!(InstructionRegister::M.to_index(), 7);
}
#[test]
fn test_from_index() {
assert!(matches!(
InstructionRegister::from_index(0),
InstructionRegister::A
));
assert!(matches!(
InstructionRegister::from_index(1),
InstructionRegister::B
));
assert!(matches!(
InstructionRegister::from_index(2),
InstructionRegister::C
));
assert!(matches!(
InstructionRegister::from_index(3),
InstructionRegister::D
));
assert!(matches!(
InstructionRegister::from_index(4),
InstructionRegister::E
));
assert!(matches!(
InstructionRegister::from_index(5),
InstructionRegister::H
));
assert!(matches!(
InstructionRegister::from_index(6),
InstructionRegister::L
));
assert!(matches!(
InstructionRegister::from_index(7),
InstructionRegister::M
));
}
#[test]
fn test_binary_to_int() {
assert_eq!(binary_to_int(&[0, 0, 0, 0, 1, 1, 1, 1]), 15);
assert_eq!(binary_to_int(&[1, 0, 0, 0, 0, 0, 0, 0]), -128);
}
#[test]
fn test_int_to_binary() {
assert_eq!(int_to_binary(15, 8), vec![0, 0, 0, 0, 1, 1, 1, 1]);
assert_eq!(int_to_binary(-128, 8), vec![1, 0, 0, 0, 0, 0, 0, 0]);
assert_eq!(
int_to_binary(4000, 16),
vec![0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0]
);
}
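// Added sanity check: the DoubleRegister encoding of MOV A,B should match the real
// 8080 opcode 0x78 (0b01_111_000 = 01 ++ dest A ++ src B) and occupy one byte.
#[test]
fn test_encode_mov_a_b() {
use super::{Instruction, InstructionCommand};
let instruction = Instruction::DoubleRegister(
InstructionCommand::Mov,
(InstructionRegister::A, InstructionRegister::B),
);
assert_eq!(instruction.encode(), vec![0, 1, 1, 1, 1, 0, 0, 0]);
assert_eq!(instruction.get_size(), 1);
}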
#[test]
#[should_panic]
fn test_duplicate_labels() {
parse("data/test/duplicate_labels.asm".to_string());
}
}
| 35.08829 | 99 | 0.474083 |
1c5183d788f4e60d65723a706fefdd3322b418c1 | 415 | use super::*;
use assert2::assert;
type Range<const START: u32, const END: u32> = RiU32<START, END>;
#[test]
fn range_is_expected_len() {
// Given
const MIN_CPU_HZ: u32 = 350_000_000;
const MAX_CPU_HZ: u32 = 1_400_000_000;
type Sut = Range<MIN_CPU_HZ, MAX_CPU_HZ>;
let expected = Some(1_050_000_001);
// When
let result = Sut::len();
// Then
assert!(result == expected);
}
| 18.863636 | 65 | 0.638554 |
48f29d7cc5ac84875761693e53d408ea6fac56ef | 175,354 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
#[derive(Clone)]
pub struct Client {
endpoint: String,
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
scopes: Vec<String>,
pipeline: azure_core::pipeline::Pipeline,
}
#[derive(Clone)]
pub struct ClientBuilder {
credential: std::sync::Arc<dyn azure_core::TokenCredential>,
endpoint: Option<String>,
scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
pub(crate) fn endpoint(&self) -> &str {
self.endpoint.as_str()
}
pub(crate) fn token_credential(&self) -> &dyn azure_core::TokenCredential {
self.credential.as_ref()
}
pub(crate) fn scopes(&self) -> Vec<&str> {
self.scopes.iter().map(String::as_str).collect()
}
pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> Result<azure_core::Response, azure_core::Error> {
let mut context = azure_core::Context::default();
let mut request = request.into();
self.pipeline.send(&mut context, &mut request).await
}
pub fn new(endpoint: impl Into<String>, credential: std::sync::Arc<dyn azure_core::TokenCredential>, scopes: Vec<String>) -> Self {
let endpoint = endpoint.into();
let pipeline = azure_core::pipeline::Pipeline::new(
option_env!("CARGO_PKG_NAME"),
option_env!("CARGO_PKG_VERSION"),
azure_core::ClientOptions::default(),
Vec::new(),
Vec::new(),
);
Self {
endpoint,
credential,
scopes,
pipeline,
}
}
pub fn application(&self) -> application::Client {
application::Client(self.clone())
}
pub fn application_package(&self) -> application_package::Client {
application_package::Client(self.clone())
}
pub fn batch_account(&self) -> batch_account::Client {
batch_account::Client(self.clone())
}
pub fn certificate(&self) -> certificate::Client {
certificate::Client(self.clone())
}
pub fn location(&self) -> location::Client {
location::Client(self.clone())
}
pub fn operations(&self) -> operations::Client {
operations::Client(self.clone())
}
pub fn pool(&self) -> pool::Client {
pool::Client(self.clone())
}
}
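// A minimal usage sketch (hypothetical resource names; assumes a credential type such as
// `azure_identity::DefaultAzureCredential` and an async runtime):
//
//     let credential = std::sync::Arc::new(azure_identity::DefaultAzureCredential::default());
//     let client = ClientBuilder::new(credential).build();
//     let account = client
//         .batch_account()
//         .get("my-resource-group", "my-batch-account", "my-subscription-id")
//         .into_future()
//         .await?;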
#[non_exhaustive]
#[derive(Debug, thiserror::Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
BatchAccount_Get(#[from] batch_account::get::Error),
#[error(transparent)]
BatchAccount_Create(#[from] batch_account::create::Error),
#[error(transparent)]
BatchAccount_Update(#[from] batch_account::update::Error),
#[error(transparent)]
BatchAccount_Delete(#[from] batch_account::delete::Error),
#[error(transparent)]
BatchAccount_List(#[from] batch_account::list::Error),
#[error(transparent)]
BatchAccount_ListByResourceGroup(#[from] batch_account::list_by_resource_group::Error),
#[error(transparent)]
BatchAccount_SynchronizeAutoStorageKeys(#[from] batch_account::synchronize_auto_storage_keys::Error),
#[error(transparent)]
BatchAccount_RegenerateKey(#[from] batch_account::regenerate_key::Error),
#[error(transparent)]
BatchAccount_GetKeys(#[from] batch_account::get_keys::Error),
#[error(transparent)]
ApplicationPackage_Activate(#[from] application_package::activate::Error),
#[error(transparent)]
Application_Get(#[from] application::get::Error),
#[error(transparent)]
Application_Create(#[from] application::create::Error),
#[error(transparent)]
Application_Update(#[from] application::update::Error),
#[error(transparent)]
Application_Delete(#[from] application::delete::Error),
#[error(transparent)]
ApplicationPackage_Get(#[from] application_package::get::Error),
#[error(transparent)]
ApplicationPackage_Create(#[from] application_package::create::Error),
#[error(transparent)]
ApplicationPackage_Delete(#[from] application_package::delete::Error),
#[error(transparent)]
Application_List(#[from] application::list::Error),
#[error(transparent)]
Location_GetQuotas(#[from] location::get_quotas::Error),
#[error(transparent)]
Operations_List(#[from] operations::list::Error),
#[error(transparent)]
Location_CheckNameAvailability(#[from] location::check_name_availability::Error),
#[error(transparent)]
Certificate_ListByBatchAccount(#[from] certificate::list_by_batch_account::Error),
#[error(transparent)]
Certificate_Get(#[from] certificate::get::Error),
#[error(transparent)]
Certificate_Create(#[from] certificate::create::Error),
#[error(transparent)]
Certificate_Update(#[from] certificate::update::Error),
#[error(transparent)]
Certificate_Delete(#[from] certificate::delete::Error),
#[error(transparent)]
Certificate_CancelDeletion(#[from] certificate::cancel_deletion::Error),
#[error(transparent)]
Pool_ListByBatchAccount(#[from] pool::list_by_batch_account::Error),
#[error(transparent)]
Pool_Get(#[from] pool::get::Error),
#[error(transparent)]
Pool_Create(#[from] pool::create::Error),
#[error(transparent)]
Pool_Update(#[from] pool::update::Error),
#[error(transparent)]
Pool_Delete(#[from] pool::delete::Error),
#[error(transparent)]
Pool_DisableAutoScale(#[from] pool::disable_auto_scale::Error),
#[error(transparent)]
Pool_StopResize(#[from] pool::stop_resize::Error),
}
pub mod batch_account {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
}
}
pub fn create(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
parameters: impl Into<models::BatchAccountCreateParameters>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
}
}
pub fn update(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
parameters: impl Into<models::BatchAccountUpdateParameters>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
}
}
pub fn delete(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
}
}
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
pub fn list_by_resource_group(
&self,
resource_group_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
subscription_id: subscription_id.into(),
}
}
pub fn synchronize_auto_storage_keys(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> synchronize_auto_storage_keys::Builder {
synchronize_auto_storage_keys::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
}
}
pub fn regenerate_key(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
parameters: impl Into<models::BatchAccountRegenerateKeyParameters>,
subscription_id: impl Into<String>,
) -> regenerate_key::Builder {
regenerate_key::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
}
}
pub fn get_keys(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get_keys::Builder {
get_keys::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccount, Error>> {
Box::pin(async move {
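                    // Build the resource URL, acquire a bearer token for the configured scopes,
                    // send the GET request, and deserialize the body according to the response status.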
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200(models::BatchAccount),
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: models::BatchAccountCreateParameters,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: models::BatchAccountUpdateParameters,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccount, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccount =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
NoContent204,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccountListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Batch/batchAccounts",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccountListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccountListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccountListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod synchronize_auto_storage_keys {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/syncAutoStorageKeys",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod regenerate_key {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) parameters: models::BatchAccountRegenerateKeyParameters,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccountKeys, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/regenerateKeys",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccountKeys =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_keys {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchAccountKeys, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/listKeys",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchAccountKeys =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod application_package {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn activate(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
version: impl Into<String>,
parameters: impl Into<models::ActivateApplicationPackageParameters>,
subscription_id: impl Into<String>,
) -> activate::Builder {
activate::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
version: version.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
}
}
pub fn get(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
version: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
version: version.into(),
subscription_id: subscription_id.into(),
}
}
pub fn create(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
version: impl Into<String>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
version: version.into(),
subscription_id: subscription_id.into(),
}
}
pub fn delete(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
version: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
version: version.into(),
subscription_id: subscription_id.into(),
}
}
}
pub mod activate {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) version: String,
pub(crate) parameters: models::ActivateApplicationPackageParameters,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}/versions/{}/activate",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.account_name,
                        &self.application_id,
                        &self.version
                    );
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) version: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ApplicationPackage, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}/versions/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id,
&self.version
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ApplicationPackage =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) version: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ApplicationPackage, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}/versions/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id,
&self.version
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ApplicationPackage =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) version: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}/versions/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id,
&self.version
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod application {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn get(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
subscription_id: subscription_id.into(),
}
}
pub fn create(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
subscription_id: subscription_id.into(),
parameters: None,
}
}
pub fn update(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
parameters: impl Into<models::ApplicationUpdateParameters>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
}
}
pub fn delete(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
application_id: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
application_id: application_id.into(),
subscription_id: subscription_id.into(),
}
}
pub fn list(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list::Builder {
list::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
maxresults: None,
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Application, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Application =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) subscription_id: String,
pub(crate) parameters: Option<models::ApplicationCreateParameters>,
}
impl Builder {
pub fn parameters(mut self, parameters: impl Into<models::ApplicationCreateParameters>) -> Self {
self.parameters = Some(parameters.into());
self
}
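            // `parameters` is optional; when it is not set, the PUT request is sent with an empty body.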
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Application, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = if let Some(parameters) = &self.parameters {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(parameters).map_err(Error::Serialize)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Application =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) parameters: models::ApplicationUpdateParameters,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
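    /// Operation builder for deleting an application: issues
    /// `DELETE .../batchAccounts/{accountName}/applications/{applicationId}` with an
    /// empty body and succeeds on `204 No Content`.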
pub mod delete {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) application_id: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<(), Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.application_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::NO_CONTENT => Ok(()),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
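    /// Operation builder for listing the applications in a Batch account: issues
    /// `GET .../batchAccounts/{accountName}/applications` (with an optional
    /// `maxresults` query parameter) and deserializes the `200 OK` body into
    /// `models::ListApplicationsResult`.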
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
pub(crate) maxresults: Option<i32>,
}
impl Builder {
pub fn maxresults(mut self, maxresults: i32) -> Self {
self.maxresults = Some(maxresults);
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ListApplicationsResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/applications",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = &self.maxresults {
url.query_pairs_mut().append_pair("maxresults", &maxresults.to_string());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ListApplicationsResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod location {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
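        /// Gets the Batch service quotas for the subscription at the given location:
        /// `GET {endpoint}/subscriptions/{subscriptionId}/providers/Microsoft.Batch/locations/{locationName}/quotas`,
        /// deserialized into `models::BatchLocationQuota`.
        ///
        /// A minimal usage sketch (hypothetical: `location_client` and the argument
        /// values are illustrative names, not defined in this file):
        ///
        /// ```ignore
        /// let quotas = location_client
        ///     .get_quotas("westus", "00000000-0000-0000-0000-000000000000")
        ///     .into_future()
        ///     .await?;
        /// println!("{:?}", quotas);
        /// ```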
pub fn get_quotas(&self, location_name: impl Into<String>, subscription_id: impl Into<String>) -> get_quotas::Builder {
get_quotas::Builder {
client: self.0.clone(),
location_name: location_name.into(),
subscription_id: subscription_id.into(),
}
}
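        /// Checks whether a Batch account name is available in the given location:
        /// `POST .../locations/{locationName}/checkNameAvailability` with a
        /// `CheckNameAvailabilityParameters` JSON body, deserialized into
        /// `models::CheckNameAvailabilityResult`.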
pub fn check_name_availability(
&self,
location_name: impl Into<String>,
subscription_id: impl Into<String>,
parameters: impl Into<models::CheckNameAvailabilityParameters>,
) -> check_name_availability::Builder {
check_name_availability::Builder {
client: self.0.clone(),
location_name: location_name.into(),
subscription_id: subscription_id.into(),
parameters: parameters.into(),
}
}
}
pub mod get_quotas {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) location_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::BatchLocationQuota, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Batch/locations/{}/quotas",
self.client.endpoint(),
&self.subscription_id,
&self.location_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::BatchLocationQuota =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod check_name_availability {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) location_name: String,
pub(crate) subscription_id: String,
pub(crate) parameters: models::CheckNameAvailabilityParameters,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckNameAvailabilityResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.Batch/locations/{}/checkNameAvailability",
self.client.endpoint(),
&self.subscription_id,
&self.location_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CheckNameAvailabilityResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod operations {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
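        /// Lists the available Microsoft.Batch REST operations:
        /// `GET {endpoint}/providers/Microsoft.Batch/operations`, deserialized into
        /// `models::OperationListResult`.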
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.Batch/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod certificate {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
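        /// Lists the certificates in the specified Batch account:
        /// `GET .../batchAccounts/{accountName}/certificates`, with optional
        /// `maxresults`, `$select`, and `$filter` query parameters.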
pub fn list_by_batch_account(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list_by_batch_account::Builder {
list_by_batch_account::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
maxresults: None,
select: None,
filter: None,
}
}
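        /// Gets a single certificate:
        /// `GET .../batchAccounts/{accountName}/certificates/{certificateName}`.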
pub fn get(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
certificate_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
certificate_name: certificate_name.into(),
subscription_id: subscription_id.into(),
}
}
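        /// Creates a certificate in the account:
        /// `PUT .../certificates/{certificateName}` with a
        /// `CertificateCreateOrUpdateParameters` JSON body; `If-Match` and
        /// `If-None-Match` preconditions are optional.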
pub fn create(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
certificate_name: impl Into<String>,
parameters: impl Into<models::CertificateCreateOrUpdateParameters>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
certificate_name: certificate_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
if_match: None,
if_none_match: None,
}
}
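        /// Updates an existing certificate:
        /// `PATCH .../certificates/{certificateName}` with a
        /// `CertificateCreateOrUpdateParameters` JSON body; `If-Match` is optional.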
pub fn update(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
certificate_name: impl Into<String>,
parameters: impl Into<models::CertificateCreateOrUpdateParameters>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
certificate_name: certificate_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
if_match: None,
}
}
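        /// Deletes a certificate:
        /// `DELETE .../certificates/{certificateName}`; the service may answer with
        /// `200`, `202`, or `204`, surfaced through the `delete::Response` enum.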
pub fn delete(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
certificate_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
certificate_name: certificate_name.into(),
subscription_id: subscription_id.into(),
}
}
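        /// Cancels a failed certificate deletion:
        /// `POST .../certificates/{certificateName}/cancelDelete`, deserialized into
        /// `models::Certificate`.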
pub fn cancel_deletion(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
certificate_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> cancel_deletion::Builder {
cancel_deletion::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
certificate_name: certificate_name.into(),
subscription_id: subscription_id.into(),
}
}
}
pub mod list_by_batch_account {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
pub(crate) maxresults: Option<i32>,
pub(crate) select: Option<String>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn maxresults(mut self, maxresults: i32) -> Self {
self.maxresults = Some(maxresults);
self
}
pub fn select(mut self, select: impl Into<String>) -> Self {
self.select = Some(select.into());
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ListCertificatesResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = &self.maxresults {
url.query_pairs_mut().append_pair("maxresults", &maxresults.to_string());
}
if let Some(select) = &self.select {
url.query_pairs_mut().append_pair("$select", select);
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ListCertificatesResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) certificate_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Certificate, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.certificate_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Certificate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) certificate_name: String,
pub(crate) parameters: models::CertificateCreateOrUpdateParameters,
pub(crate) subscription_id: String,
pub(crate) if_match: Option<String>,
pub(crate) if_none_match: Option<String>,
}
impl Builder {
pub fn if_match(mut self, if_match: impl Into<String>) -> Self {
self.if_match = Some(if_match.into());
self
}
pub fn if_none_match(mut self, if_none_match: impl Into<String>) -> Self {
self.if_none_match = Some(if_none_match.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Certificate, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.certificate_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
if let Some(if_match) = &self.if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = &self.if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Certificate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) certificate_name: String,
pub(crate) parameters: models::CertificateCreateOrUpdateParameters,
pub(crate) subscription_id: String,
pub(crate) if_match: Option<String>,
}
impl Builder {
pub fn if_match(mut self, if_match: impl Into<String>) -> Self {
self.if_match = Some(if_match.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Certificate, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.certificate_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
if let Some(if_match) = &self.if_match {
req_builder = req_builder.header("If-Match", if_match);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Certificate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) certificate_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.certificate_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod cancel_deletion {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) certificate_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Certificate, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/certificates/{}/cancelDelete",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.certificate_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Certificate =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod pool {
use super::{models, API_VERSION};
pub struct Client(pub(crate) super::Client);
impl Client {
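        /// Lists the pools in the specified Batch account:
        /// `GET .../batchAccounts/{accountName}/pools`, with optional `maxresults`,
        /// `$select`, and `$filter` query parameters.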
pub fn list_by_batch_account(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> list_by_batch_account::Builder {
list_by_batch_account::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
subscription_id: subscription_id.into(),
maxresults: None,
select: None,
filter: None,
}
}
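        /// Gets a single pool:
        /// `GET .../batchAccounts/{accountName}/pools/{poolName}`.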
pub fn get(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
subscription_id: subscription_id.into(),
}
}
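        /// Creates a pool in the account:
        /// `PUT .../pools/{poolName}` with a `models::Pool` JSON body; `If-Match` and
        /// `If-None-Match` preconditions are optional.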
pub fn create(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
parameters: impl Into<models::Pool>,
subscription_id: impl Into<String>,
) -> create::Builder {
create::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
if_match: None,
if_none_match: None,
}
}
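        /// Updates an existing pool:
        /// `PATCH .../pools/{poolName}` with a `models::Pool` JSON body; `If-Match`
        /// is optional.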
pub fn update(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
parameters: impl Into<models::Pool>,
subscription_id: impl Into<String>,
) -> update::Builder {
update::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
parameters: parameters.into(),
subscription_id: subscription_id.into(),
if_match: None,
}
}
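        /// Deletes a pool:
        /// `DELETE .../pools/{poolName}`; the service may answer with `200`, `202`,
        /// or `204`, surfaced through the `delete::Response` enum.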
pub fn delete(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
subscription_id: subscription_id.into(),
}
}
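        /// Disables automatic scaling for a pool:
        /// `POST .../pools/{poolName}/disableAutoScale`, deserialized into
        /// `models::Pool`.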
pub fn disable_auto_scale(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> disable_auto_scale::Builder {
disable_auto_scale::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
subscription_id: subscription_id.into(),
}
}
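        /// Stops an in-progress resize operation on the specified pool (the request
        /// details live in the `stop_resize` module below).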
pub fn stop_resize(
&self,
resource_group_name: impl Into<String>,
account_name: impl Into<String>,
pool_name: impl Into<String>,
subscription_id: impl Into<String>,
) -> stop_resize::Builder {
stop_resize::Builder {
client: self.0.clone(),
resource_group_name: resource_group_name.into(),
account_name: account_name.into(),
pool_name: pool_name.into(),
subscription_id: subscription_id.into(),
}
}
}
pub mod list_by_batch_account {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) subscription_id: String,
pub(crate) maxresults: Option<i32>,
pub(crate) select: Option<String>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn maxresults(mut self, maxresults: i32) -> Self {
self.maxresults = Some(maxresults);
self
}
pub fn select(mut self, select: impl Into<String>) -> Self {
self.select = Some(select.into());
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ListPoolsResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(maxresults) = &self.maxresults {
url.query_pairs_mut().append_pair("maxresults", &maxresults.to_string());
}
if let Some(select) = &self.select {
url.query_pairs_mut().append_pair("$select", select);
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ListPoolsResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Pool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Pool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) parameters: models::Pool,
pub(crate) subscription_id: String,
pub(crate) if_match: Option<String>,
pub(crate) if_none_match: Option<String>,
}
impl Builder {
pub fn if_match(mut self, if_match: impl Into<String>) -> Self {
self.if_match = Some(if_match.into());
self
}
pub fn if_none_match(mut self, if_none_match: impl Into<String>) -> Self {
self.if_none_match = Some(if_none_match.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Pool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
if let Some(if_match) = &self.if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = &self.if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Pool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod update {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) parameters: models::Pool,
pub(crate) subscription_id: String,
pub(crate) if_match: Option<String>,
}
impl Builder {
pub fn if_match(mut self, if_match: impl Into<String>) -> Self {
self.if_match = Some(if_match.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Pool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
if let Some(if_match) = &self.if_match {
req_builder = req_builder.header("If-Match", if_match);
}
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Pool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
Accepted202,
}
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod disable_auto_scale {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Pool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}/disableAutoScale",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Pool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod stop_resize {
use super::{models, API_VERSION};
        #[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrl(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequest(http::Error),
#[error("Failed to serialize request body: {0}")]
Serialize(serde_json::Error),
#[error("Failed to get access token: {0}")]
GetToken(azure_core::Error),
#[error("Failed to execute request: {0}")]
SendRequest(azure_core::Error),
#[error("Failed to get response bytes: {0}")]
ResponseBytes(azure_core::StreamError),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
Deserialize(serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) account_name: String,
pub(crate) pool_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Pool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Batch/batchAccounts/{}/pools/{}/stopResize",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.account_name,
&self.pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Pool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
| 50.490642 | 325 | 0.513458 |
6a019895feb7a0804926848ce532edd51b110fd4 | 3,812 | pub use hookmap_core::{Button, ButtonAction, ButtonEvent};
use crate::macros::{ButtonArg, ExpandButtonArg};
use hookmap_core::ButtonOperation;
#[derive(Debug, Clone)]
pub struct ConstantAny<const N: usize>([Button; N]);
/// Shift key that does not distinguish between right and left.
pub const SHIFT: ConstantAny<2> = ConstantAny([Button::LShift, Button::RShift]);
/// Control key that does not distinguish between right and left.
pub const CTRL: ConstantAny<2> = ConstantAny([Button::LCtrl, Button::RCtrl]);
/// Alt key that does not distinguish between right and left.
pub const ALT: ConstantAny<2> = ConstantAny([Button::LAlt, Button::RAlt]);
/// Meta key that does not distinguish between right and left.
pub const META: ConstantAny<2> = ConstantAny([Button::LMeta, Button::RMeta]);
impl<const N: usize> ExpandButtonArg for ConstantAny<N> {
fn expand(self) -> Box<dyn Iterator<Item = ButtonArg>> {
Box::new(IntoIterator::into_iter(self.0).map(ButtonArg::direct))
}
}
/// Emulates button input.
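///
/// A minimal sketch of how these operations compose (illustrative only; assumes the
/// `Button::A` key variant re-exported from `hookmap_core`):
///
/// ```ignore
/// // Press and release without running registered hooks:
/// Button::A.press();
/// Button::A.release();
/// // `click` is simply the two calls above in sequence.
/// Button::A.click();
/// ```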
pub trait ButtonInput {
/// Emulates button press operation.
fn press(&self);
/// Emulates button release operation.
fn release(&self);
/// Presses button and releases it immediately.
fn click(&self) {
self.press();
self.release();
}
/// Emulates button press operation.
/// This differs from [`ButtonInput::press`] in that it can call hook handlers.
fn press_recursive(&self);
/// Emulates button release operation.
/// This differs from [`ButtonInput::release`] in that it can call hook handlers.
fn release_recursive(&self);
/// Calls [`ButtonInput::press_recursive`] and [`ButtonInput::release_recursive`].
fn click_recursive(&self) {
self.press_recursive();
self.release_recursive();
}
}
impl<T: ButtonInput> ButtonInput for &T {
fn press(&self) {
(**self).press()
}
fn release(&self) {
(**self).release()
}
fn press_recursive(&self) {
(**self).press_recursive()
}
fn release_recursive(&self) {
(**self).release_recursive()
}
}
impl ButtonInput for Button {
fn press(&self) {
self.generate_press_event(false);
}
fn release(&self) {
self.generate_release_event(false);
}
fn press_recursive(&self) {
self.generate_press_event(true);
}
fn release_recursive(&self) {
self.generate_release_event(true);
}
}
impl<const N: usize> ButtonInput for ConstantAny<N> {
fn press(&self) {
if let Some(button) = self.0.get(0) {
button.press();
}
}
fn release(&self) {
if let Some(button) = self.0.get(0) {
button.release();
}
}
fn press_recursive(&self) {
if let Some(button) = self.0.get(0) {
button.press_recursive();
}
}
fn release_recursive(&self) {
if let Some(button) = self.0.get(0) {
button.release_recursive();
}
}
}
/// Get the status of a button.
pub trait ButtonState {
/// Returns `true` if the button is pressed.
fn is_pressed(&self) -> bool;
/// Returns `true` if the button is released.
fn is_released(&self) -> bool {
!self.is_pressed()
}
}
impl<T: ButtonState> ButtonState for &T {
fn is_pressed(&self) -> bool {
(**self).is_pressed()
}
fn is_released(&self) -> bool {
(**self).is_released()
}
}
impl ButtonState for Button {
fn is_pressed(&self) -> bool {
self.read_is_pressed()
}
}
impl<const N: usize> ButtonState for ConstantAny<N> {
fn is_pressed(&self) -> bool {
self.0.iter().any(Button::is_pressed)
}
fn is_released(&self) -> bool {
self.0.iter().any(Button::is_released)
}
}
| 24.753247 | 86 | 0.619098 |
e5bbeed16f5548e675d17da260d20eeebb3dac42 | 6,967 | macro_rules! read_csr {
($csr_number:expr, $asm_fn: ident) => {
/// Reads the CSR
#[inline]
unsafe fn _read() -> usize {
match () {
#[cfg(all(riscv, feature = "inline-asm"))]
() => {
let r: usize;
llvm_asm!("csrrs $0, $1, x0" : "=r"(r) : "i"($csr_number) :: "volatile");
r
}
#[cfg(all(riscv, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn() -> usize;
}
$asm_fn()
}
#[cfg(not(riscv))]
() => unimplemented!(),
}
}
};
}
macro_rules! read_csr_rv32 {
($csr_number:expr, $asm_fn: ident) => {
/// Reads the CSR
#[inline]
unsafe fn _read() -> usize {
match () {
#[cfg(all(riscv32, feature = "inline-asm"))]
() => {
let r: usize;
llvm_asm!("csrrs $0, $1, x0" : "=r"(r) : "i"($csr_number) :: "volatile");
r
}
#[cfg(all(riscv32, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn() -> usize;
}
$asm_fn()
}
#[cfg(not(riscv32))]
() => unimplemented!(),
}
}
};
}
macro_rules! read_csr_as {
($register:ident, $csr_number:expr, $asm_fn: ident) => {
read_csr!($csr_number, $asm_fn);
/// Reads the CSR
#[inline]
pub fn read() -> $register {
$register {
bits: unsafe { _read() },
}
}
};
}
macro_rules! read_csr_as_usize {
($csr_number:expr, $asm_fn: ident) => {
read_csr!($csr_number, $asm_fn);
/// Reads the CSR
#[inline]
pub fn read() -> usize {
unsafe { _read() }
}
};
}
macro_rules! read_csr_as_usize_rv32 {
($csr_number:expr, $asm_fn: ident) => {
read_csr_rv32!($csr_number, $asm_fn);
/// Reads the CSR
#[inline]
pub fn read() -> usize {
unsafe { _read() }
}
};
}
macro_rules! write_csr {
($csr_number:expr, $asm_fn: ident) => {
/// Writes the CSR
#[inline]
#[allow(unused_variables)]
unsafe fn _write(bits: usize) {
match () {
#[cfg(all(riscv, feature = "inline-asm"))]
() => llvm_asm!("csrrw x0, $1, $0" :: "r"(bits), "i"($csr_number) :: "volatile"),
#[cfg(all(riscv, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn(bits: usize);
}
$asm_fn(bits);
}
#[cfg(not(riscv))]
() => unimplemented!(),
}
}
};
}
macro_rules! write_csr_rv32 {
($csr_number:expr, $asm_fn: ident) => {
/// Writes the CSR
#[inline]
#[allow(unused_variables)]
unsafe fn _write(bits: usize) {
match () {
#[cfg(all(riscv32, feature = "inline-asm"))]
() => llvm_asm!("csrrw x0, $1, $0" :: "r"(bits), "i"($csr_number) :: "volatile"),
#[cfg(all(riscv32, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn(bits: usize);
}
$asm_fn(bits);
}
#[cfg(not(riscv32))]
() => unimplemented!(),
}
}
};
}
macro_rules! write_csr_as_usize {
($csr_number:expr, $asm_fn: ident) => {
write_csr!($csr_number, $asm_fn);
/// Writes the CSR
#[inline]
pub fn write(bits: usize) {
unsafe { _write(bits) }
}
};
}
macro_rules! write_csr_as_usize_rv32 {
($csr_number:expr, $asm_fn: ident) => {
write_csr_rv32!($csr_number, $asm_fn);
/// Writes the CSR
#[inline]
pub fn write(bits: usize) {
unsafe { _write(bits) }
}
};
}
macro_rules! set {
($csr_number:expr, $asm_fn: ident) => {
/// Set the CSR
#[inline]
#[allow(unused_variables)]
unsafe fn _set(bits: usize) {
match () {
#[cfg(all(riscv, feature = "inline-asm"))]
() => llvm_asm!("csrrs x0, $1, $0" :: "r"(bits), "i"($csr_number) :: "volatile"),
#[cfg(all(riscv, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn(bits: usize);
}
$asm_fn(bits);
}
#[cfg(not(riscv))]
() => unimplemented!(),
}
}
};
}
macro_rules! clear {
($csr_number:expr, $asm_fn: ident) => {
/// Clear the CSR
#[inline]
#[allow(unused_variables)]
unsafe fn _clear(bits: usize) {
match () {
#[cfg(all(riscv, feature = "inline-asm"))]
() => llvm_asm!("csrrc x0, $1, $0" :: "r"(bits), "i"($csr_number) :: "volatile"),
#[cfg(all(riscv, not(feature = "inline-asm")))]
() => {
extern "C" {
fn $asm_fn(bits: usize);
}
$asm_fn(bits);
}
#[cfg(not(riscv))]
() => unimplemented!(),
}
}
};
}
macro_rules! set_csr {
($(#[$attr:meta])*, $set_field:ident, $e:expr) => {
$(#[$attr])*
#[inline]
pub unsafe fn $set_field() {
_set($e);
}
}
}
macro_rules! clear_csr {
($(#[$attr:meta])*, $clear_field:ident, $e:expr) => {
$(#[$attr])*
#[inline]
pub unsafe fn $clear_field() {
_clear($e);
}
}
}
macro_rules! set_clear_csr {
($(#[$attr:meta])*, $set_field:ident, $clear_field:ident, $e:expr) => {
set_csr!($(#[$attr])*, $set_field, $e);
clear_csr!($(#[$attr])*, $clear_field, $e);
}
}
macro_rules! read_composite_csr {
($hi:expr, $lo:expr) => {
/// Reads the CSR as a 64-bit value
#[inline]
pub fn read64() -> u64 {
match () {
#[cfg(riscv32)]
() => loop {
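                    // The 64-bit counter is split across two CSRs on RV32: read the
                    // high half, then the low half, then re-read the high half. If it
                    // changed, the low half rolled over between reads, so retry.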
let hi = $hi;
let lo = $lo;
if hi == $hi {
return ((hi as u64) << 32) | lo as u64;
}
},
#[cfg(not(riscv32))]
() => $lo as u64,
}
}
};
}
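// A sketch of how a CSR module typically composes these macros (symbol names here
// are hypothetical and shown only to illustrate the intended usage):
//
//     read_csr_as!(Mstatus, 0x300, __read_mstatus); // `Mstatus` wraps the raw bits
//     set!(0x300, __set_mstatus);
//     clear!(0x300, __clear_mstatus);
//     set_clear_csr!(
//         /// Machine Interrupt Enable
//         , set_mie, clear_mie, 1 << 3);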
| 25.520147 | 97 | 0.382661 |
dee9a51d438349cf30c211c4369062cd5ead2a15 | 645 | use system::ia_32e::instructions::page_table::flush_all;
use system::interrupt;
use crate::process::scheduler::switch;
#[derive(Clone, Copy, Debug)]
#[repr(u8)]
pub enum IpiKind {
WakeUp = 0x40,
Tlb = 0x41,
Switch = 0x42,
Pit = 0x43,
}
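// The conversion below simply mirrors the explicit discriminants above
// (0x40..=0x43), which are presumably the interrupt vectors these IPIs are
// dispatched on.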
impl From<IpiKind> for usize {
fn from(kind: IpiKind) -> Self {
match kind {
IpiKind::WakeUp => 0x40,
IpiKind::Tlb => 0x41,
IpiKind::Switch => 0x42,
IpiKind::Pit => 0x43,
}
}
}
interrupt!(ipi_wakeup,{
});
interrupt!(ipi_switch,{
switch();
});
interrupt!(ipi_pit,{
});
interrupt!(ipi_tlb,{
flush_all();
}); | 16.538462 | 56 | 0.570543 |
fed8bc95b6b86eeb0b9e80d657fc6a6cad282883 | 921 | #![feature(unboxed_closures)]
// Tests that we can't assign to or mutably borrow upvars from `Fn`
// closures (issue #17780)
fn set(x: &mut usize) { *x = 5; }
fn to_fn<A,F:Fn<A>>(f: F) -> F { f }
fn to_fn_mut<A,F:FnMut<A>>(f: F) -> F { f }
fn main() {
// By-ref captures
{
let mut x = 0;
let _f = to_fn(|| x = 42); //~ ERROR cannot assign
let mut y = 0;
let _g = to_fn(|| set(&mut y)); //~ ERROR cannot borrow
let mut z = 0;
let _h = to_fn_mut(|| { set(&mut z); to_fn(|| z = 42); }); //~ ERROR cannot assign
}
// By-value captures
{
let mut x = 0;
let _f = to_fn(move || x = 42); //~ ERROR cannot assign
let mut y = 0;
let _g = to_fn(move || set(&mut y)); //~ ERROR cannot borrow
let mut z = 0;
let _h = to_fn_mut(move || { set(&mut z); to_fn(move || z = 42); }); //~ ERROR cannot assign
}
}
| 25.583333 | 100 | 0.5038 |
03ca85faafff6b6af56c96eb14d1154c375d89c9 | 56,102 | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A pass that qualifies constness of temporaries in constants,
//! static initializers and functions and also drives promotion.
//!
//! The Qualif flags below can be used to also provide better
//! diagnostics as to why a constant rvalue wasn't promoted.
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc_data_structures::fx::FxHashSet;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::middle::const_val::ConstVal;
use rustc::traits::{self, TraitEngine};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::cast::CastTy;
use rustc::ty::maps::Providers;
use rustc::mir::*;
use rustc::mir::traversal::ReversePostorder;
use rustc::mir::visit::{PlaceContext, Visitor};
use rustc::middle::lang_items;
use rustc_target::spec::abi::Abi;
use syntax::attr;
use syntax::ast::LitKind;
use syntax::feature_gate::{UnstableFeatures, feature_err, emit_feature_err, GateIssue};
use syntax_pos::{Span, DUMMY_SP};
use std::fmt;
use rustc_data_structures::sync::Lrc;
use std::usize;
use transform::{MirPass, MirSource};
use super::promote_consts::{self, Candidate, TempState};
bitflags! {
// Borrows of temporaries can be promoted only if
// they have none of these qualifications, with
// the exception of `STATIC_REF` (in statics only).
struct Qualif: u8 {
// Constant containing interior mutability (UnsafeCell).
const MUTABLE_INTERIOR = 1 << 0;
// Constant containing an ADT that implements Drop.
const NEEDS_DROP = 1 << 1;
// Function argument.
const FN_ARGUMENT = 1 << 2;
// Static place or move from a static.
const STATIC = 1 << 3;
// Reference to a static.
const STATIC_REF = 1 << 4;
// Not constant at all - non-`const fn` calls, asm!,
// pointer comparisons, ptr-to-int casts, etc.
const NOT_CONST = 1 << 5;
// Refers to temporaries which cannot be promoted as
// promote_consts decided they weren't simple enough.
const NOT_PROMOTABLE = 1 << 6;
// Const items can only have MUTABLE_INTERIOR
// and NOT_PROMOTABLE without producing an error.
const CONST_ERROR = !Qualif::MUTABLE_INTERIOR.bits &
!Qualif::NOT_PROMOTABLE.bits;
}
}
impl<'a, 'tcx> Qualif {
/// Remove flags which are impossible for the given type.
fn restrict(&mut self, ty: Ty<'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>) {
if ty.is_freeze(tcx, param_env, DUMMY_SP) {
*self = *self - Qualif::MUTABLE_INTERIOR;
}
if !ty.needs_drop(tcx, param_env) {
*self = *self - Qualif::NEEDS_DROP;
}
}
}
/// What kind of item we are in.
#[derive(Copy, Clone, PartialEq, Eq)]
enum Mode {
Const,
Static,
StaticMut,
ConstFn,
Fn
}
impl fmt::Display for Mode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Mode::Const => write!(f, "constant"),
Mode::Static | Mode::StaticMut => write!(f, "static"),
Mode::ConstFn => write!(f, "constant function"),
Mode::Fn => write!(f, "function")
}
}
}
struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
mode: Mode,
span: Span,
def_id: DefId,
mir: &'a Mir<'tcx>,
rpo: ReversePostorder<'a, 'tcx>,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
local_qualif: IndexVec<Local, Option<Qualif>>,
qualif: Qualif,
const_fn_arg_vars: BitVector,
temp_promotion_state: IndexVec<Local, TempState>,
promotion_candidates: Vec<Candidate>
}
impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> {
fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
mir: &'a Mir<'tcx>,
mode: Mode)
-> Qualifier<'a, 'tcx, 'tcx> {
let mut rpo = traversal::reverse_postorder(mir);
let temps = promote_consts::collect_temps(mir, &mut rpo);
rpo.reset();
let param_env = tcx.param_env(def_id);
let mut local_qualif = IndexVec::from_elem(None, &mir.local_decls);
for arg in mir.args_iter() {
let mut qualif = Qualif::NEEDS_DROP;
qualif.restrict(mir.local_decls[arg].ty, tcx, param_env);
local_qualif[arg] = Some(qualif);
}
Qualifier {
mode,
span: mir.span,
def_id,
mir,
rpo,
tcx,
param_env,
local_qualif,
qualif: Qualif::empty(),
const_fn_arg_vars: BitVector::new(mir.local_decls.len()),
temp_promotion_state: temps,
promotion_candidates: vec![]
}
}
// FIXME(eddyb) we could split the errors into meaningful
// categories, but enabling full miri would make that
// slightly pointless (even with feature-gating).
fn not_const(&mut self) {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(
self.tcx.sess,
self.span,
E0019,
"{} contains unimplemented expression type",
self.mode
);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("A function call isn't allowed in the const's initialization expression \
because the expression's value must be known at compile-time.");
err.note("Remember: you can't use a function call inside a const's initialization \
expression! However, you can use it anywhere else.");
}
err.emit();
}
}
/// Error about extra statements in a constant.
fn statement_like(&mut self) {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = feature_err(
&self.tcx.sess.parse_sess,
"const_let",
self.span,
GateIssue::Language,
&format!("statements in {}s are unstable", self.mode),
);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("Blocks in constants may only contain items (such as constant, function \
definition, etc...) and a tail expression.");
err.help("To avoid it, you have to replace the non-item object.");
}
err.emit();
}
}
/// Add the given qualification to self.qualif.
fn add(&mut self, qualif: Qualif) {
self.qualif = self.qualif | qualif;
}
/// Add the given type's qualification to self.qualif.
fn add_type(&mut self, ty: Ty<'tcx>) {
self.add(Qualif::MUTABLE_INTERIOR | Qualif::NEEDS_DROP);
self.qualif.restrict(ty, self.tcx, self.param_env);
}
/// Within the provided closure, self.qualif will start
/// out empty, and its value after the closure returns will
/// be combined with the value before the call to nest.
fn nest<F: FnOnce(&mut Self)>(&mut self, f: F) {
let original = self.qualif;
self.qualif = Qualif::empty();
f(self);
self.add(original);
}
/// Check if a Local with the current qualifications is promotable.
fn can_promote(&self, qualif: Qualif) -> bool {
// References to statics are allowed, but only in other statics.
if self.mode == Mode::Static || self.mode == Mode::StaticMut {
(qualif - Qualif::STATIC_REF).is_empty()
} else {
qualif.is_empty()
}
}
/// Check if a Place with the current qualifications could
/// be consumed, by either an operand or a Deref projection.
fn try_consume(&mut self) -> bool {
if self.qualif.intersects(Qualif::STATIC) && self.mode != Mode::Fn {
let msg = if self.mode == Mode::Static ||
self.mode == Mode::StaticMut {
"cannot refer to other statics by value, use the \
address-of operator or a constant instead"
} else {
"cannot refer to statics by value, use a constant instead"
};
struct_span_err!(self.tcx.sess, self.span, E0394, "{}", msg)
.span_label(self.span, "referring to another static by value")
.note("use the address-of operator or a constant instead")
.emit();
// Replace STATIC with NOT_CONST to avoid further errors.
self.qualif = self.qualif - Qualif::STATIC;
self.add(Qualif::NOT_CONST);
false
} else {
true
}
}
/// Assign the current qualification to the given destination.
fn assign(&mut self, dest: &Place<'tcx>, location: Location) {
trace!("assign: {:?}", dest);
let qualif = self.qualif;
let span = self.span;
let store = |slot: &mut Option<Qualif>| {
if slot.is_some() {
span_bug!(span, "multiple assignments to {:?}", dest);
}
*slot = Some(qualif);
};
// Only handle promotable temps in non-const functions.
if self.mode == Mode::Fn {
if let Place::Local(index) = *dest {
if self.mir.local_kind(index) == LocalKind::Temp
&& self.temp_promotion_state[index].is_promotable() {
debug!("store to promotable temp {:?} ({:?})", index, qualif);
store(&mut self.local_qualif[index]);
}
}
return;
}
match *dest {
Place::Local(index) if (self.mir.local_kind(index) == LocalKind::Var ||
self.mir.local_kind(index) == LocalKind::Arg) &&
self.tcx.sess.features_untracked().const_let => {
debug!("store to var {:?}", index);
self.local_qualif[index] = Some(self.qualif);
}
Place::Local(index) if self.mir.local_kind(index) == LocalKind::Temp ||
self.mir.local_kind(index) == LocalKind::ReturnPointer => {
debug!("store to {:?} (temp or return pointer)", index);
store(&mut self.local_qualif[index])
}
Place::Projection(box Projection {
base: Place::Local(index),
elem: ProjectionElem::Deref
}) if self.mir.local_kind(index) == LocalKind::Temp
&& self.mir.local_decls[index].ty.is_box()
&& self.local_qualif[index].map_or(false, |qualif| {
qualif.intersects(Qualif::NOT_CONST)
}) => {
// Part of `box expr`, we should've errored
// already for the Box allocation Rvalue.
}
// This must be an explicit assignment.
_ => {
// Catch more errors in the destination.
self.visit_place(dest, PlaceContext::Store, location);
self.statement_like();
}
}
}
/// Qualify a whole const, static initializer or const fn.
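    /// Returns the computed qualifications together with the set of temporaries
    /// that were collected as promotion candidates.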
fn qualify_const(&mut self) -> (Qualif, Lrc<IdxSetBuf<Local>>) {
debug!("qualifying {} {:?}", self.mode, self.def_id);
let mir = self.mir;
let mut seen_blocks = BitVector::new(mir.basic_blocks().len());
let mut bb = START_BLOCK;
loop {
seen_blocks.insert(bb.index());
self.visit_basic_block_data(bb, &mir[bb]);
let target = match mir[bb].terminator().kind {
TerminatorKind::Goto { target } |
TerminatorKind::Drop { target, .. } |
TerminatorKind::Assert { target, .. } |
TerminatorKind::Call { destination: Some((_, target)), .. } => {
Some(target)
}
// Non-terminating calls cannot produce any value.
TerminatorKind::Call { destination: None, .. } => {
break;
}
TerminatorKind::SwitchInt {..} |
TerminatorKind::DropAndReplace { .. } |
TerminatorKind::Resume |
TerminatorKind::Abort |
TerminatorKind::GeneratorDrop |
TerminatorKind::Yield { .. } |
TerminatorKind::Unreachable |
TerminatorKind::FalseEdges { .. } |
TerminatorKind::FalseUnwind { .. } => None,
TerminatorKind::Return => {
if !self.tcx.sess.features_untracked().const_let {
// Check for unused values. This usually means
// there are extra statements in the AST.
for temp in mir.temps_iter() {
if self.local_qualif[temp].is_none() {
continue;
}
let state = self.temp_promotion_state[temp];
if let TempState::Defined { location, uses: 0 } = state {
let data = &mir[location.block];
let stmt_idx = location.statement_index;
// Get the span for the initialization.
let source_info = if stmt_idx < data.statements.len() {
data.statements[stmt_idx].source_info
} else {
data.terminator().source_info
};
self.span = source_info.span;
// Treat this as a statement in the AST.
self.statement_like();
}
}
// Make sure there are no extra unassigned variables.
self.qualif = Qualif::NOT_CONST;
for index in mir.vars_iter() {
if !self.const_fn_arg_vars.contains(index.index()) {
debug!("unassigned variable {:?}", index);
self.assign(&Place::Local(index), Location {
block: bb,
statement_index: usize::MAX,
});
}
}
}
break;
}
};
match target {
// No loops allowed.
Some(target) if !seen_blocks.contains(target.index()) => {
bb = target;
}
_ => {
self.not_const();
break;
}
}
}
self.qualif = self.local_qualif[RETURN_PLACE].unwrap_or(Qualif::NOT_CONST);
// Account for errors in consts by using the
// conservative type qualification instead.
if self.qualif.intersects(Qualif::CONST_ERROR) {
self.qualif = Qualif::empty();
let return_ty = mir.return_ty();
self.add_type(return_ty);
}
// Collect all the temps we need to promote.
let mut promoted_temps = IdxSetBuf::new_empty(self.temp_promotion_state.len());
for candidate in &self.promotion_candidates {
match *candidate {
Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => {
match self.mir[bb].statements[stmt_idx].kind {
StatementKind::Assign(_, Rvalue::Ref(_, _, Place::Local(index))) => {
promoted_temps.add(&index);
}
_ => {}
}
}
Candidate::Argument { .. } => {}
}
}
(self.qualif, Lrc::new(promoted_temps))
}
}
/// Accumulates an Rvalue or Call's effects in self.qualif.
/// For functions (constant or not), it also records
/// candidates for promotion in promotion_candidates.
impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
fn visit_local(&mut self,
&local: &Local,
_: PlaceContext<'tcx>,
_: Location) {
let kind = self.mir.local_kind(local);
match kind {
LocalKind::ReturnPointer => {
self.not_const();
}
LocalKind::Var if !self.tcx.sess.features_untracked().const_let => {
if self.mode != Mode::Fn {
emit_feature_err(&self.tcx.sess.parse_sess, "const_let",
self.span, GateIssue::Language,
&format!("let bindings in {}s are unstable",self.mode));
}
self.add(Qualif::NOT_CONST);
}
LocalKind::Var |
LocalKind::Arg |
LocalKind::Temp => {
if let LocalKind::Arg = kind {
self.add(Qualif::FN_ARGUMENT);
}
if !self.temp_promotion_state[local].is_promotable() {
self.add(Qualif::NOT_PROMOTABLE);
}
if let Some(qualif) = self.local_qualif[local] {
self.add(qualif);
} else {
self.not_const();
}
}
}
}
fn visit_place(&mut self,
place: &Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
match *place {
Place::Local(ref local) => self.visit_local(local, context, location),
Place::Static(ref global) => {
self.add(Qualif::STATIC);
if self.mode != Mode::Fn {
for attr in &self.tcx.get_attrs(global.def_id)[..] {
if attr.check_name("thread_local") {
span_err!(self.tcx.sess, self.span, E0625,
"thread-local statics cannot be \
accessed at compile-time");
self.add(Qualif::NOT_CONST);
return;
}
}
}
if self.mode == Mode::Const || self.mode == Mode::ConstFn {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
"{}s cannot refer to statics, use \
a constant instead", self.mode);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"Static and const variables can refer to other const variables. But a \
const variable cannot refer to a static variable."
);
err.help(
"To fix this, the value can be extracted as a const and then used."
);
}
err.emit()
}
}
Place::Projection(ref proj) => {
self.nest(|this| {
this.super_place(place, context, location);
match proj.elem {
ProjectionElem::Deref => {
if !this.try_consume() {
return;
}
if this.qualif.intersects(Qualif::STATIC_REF) {
this.qualif = this.qualif - Qualif::STATIC_REF;
this.add(Qualif::STATIC);
}
this.add(Qualif::NOT_CONST);
let base_ty = proj.base.ty(this.mir, this.tcx).to_ty(this.tcx);
if let ty::TyRawPtr(_) = base_ty.sty {
if this.mode != Mode::Fn {
let mut err = struct_span_err!(
this.tcx.sess,
this.span,
E0396,
"raw pointers cannot be dereferenced in {}s",
this.mode
);
err.span_label(this.span,
"dereference of raw pointer in constant");
if this.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"The value behind a raw pointer can't be determined \
at compile-time (or even link-time), which means it \
can't be used in a constant expression."
);
err.help("A possible fix is to dereference your pointer \
at some point in run-time.");
}
err.emit();
}
}
}
ProjectionElem::Field(..) |
ProjectionElem::Index(_) => {
if this.mode != Mode::Fn &&
this.qualif.intersects(Qualif::STATIC) {
span_err!(this.tcx.sess, this.span, E0494,
"cannot refer to the interior of another \
static, use a constant instead");
}
let ty = place.ty(this.mir, this.tcx).to_ty(this.tcx);
this.qualif.restrict(ty, this.tcx, this.param_env);
}
ProjectionElem::ConstantIndex {..} |
ProjectionElem::Subslice {..} |
ProjectionElem::Downcast(..) => {
this.not_const()
}
}
});
}
}
}
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
match *operand {
Operand::Copy(_) |
Operand::Move(_) => {
self.nest(|this| {
this.super_operand(operand, location);
this.try_consume();
});
// Mark the consumed locals to indicate later drops are noops.
if let Operand::Move(Place::Local(local)) = *operand {
self.local_qualif[local] = self.local_qualif[local].map(|q|
q - Qualif::NEEDS_DROP
);
}
}
Operand::Constant(ref constant) => {
if let Literal::Value {
value: &ty::Const { val: ConstVal::Unevaluated(def_id, _), ty, .. }
} = constant.literal {
// Don't peek inside trait associated constants.
if self.tcx.trait_of_item(def_id).is_some() {
self.add_type(ty);
} else {
let (bits, _) = self.tcx.at(constant.span).mir_const_qualif(def_id);
let qualif = Qualif::from_bits(bits).expect("invalid mir_const_qualif");
self.add(qualif);
// Just in case the type is more specific than
// the definition, e.g. impl associated const
// with type parameters, take it into account.
self.qualif.restrict(ty, self.tcx, self.param_env);
}
}
}
}
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
// Recurse through operands and places.
if let Rvalue::Ref(region, kind, ref place) = *rvalue {
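            // A borrow of a dereferenced reference (e.g. `&*r` where `r: &T`) is a
            // reborrow; it is qualified by consuming the base reference below rather
            // than being treated as a fresh borrow.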
let mut is_reborrow = false;
if let Place::Projection(ref proj) = *place {
if let ProjectionElem::Deref = proj.elem {
let base_ty = proj.base.ty(self.mir, self.tcx).to_ty(self.tcx);
if let ty::TyRef(..) = base_ty.sty {
is_reborrow = true;
}
}
}
if is_reborrow {
self.nest(|this| {
this.super_place(place, PlaceContext::Borrow {
region,
kind
}, location);
if !this.try_consume() {
return;
}
if this.qualif.intersects(Qualif::STATIC_REF) {
this.qualif = this.qualif - Qualif::STATIC_REF;
this.add(Qualif::STATIC);
}
});
} else {
self.super_rvalue(rvalue, location);
}
} else {
self.super_rvalue(rvalue, location);
}
match *rvalue {
Rvalue::Use(_) |
Rvalue::Repeat(..) |
Rvalue::UnaryOp(UnOp::Neg, _) |
Rvalue::UnaryOp(UnOp::Not, _) |
Rvalue::NullaryOp(NullOp::SizeOf, _) |
Rvalue::CheckedBinaryOp(..) |
Rvalue::Cast(CastKind::ReifyFnPointer, ..) |
Rvalue::Cast(CastKind::UnsafeFnPointer, ..) |
Rvalue::Cast(CastKind::ClosureFnPointer, ..) |
Rvalue::Cast(CastKind::Unsize, ..) |
Rvalue::Discriminant(..) => {}
Rvalue::Len(_) => {
// Static places in consts would have errored already,
// don't treat length checks as reads from statics.
self.qualif = self.qualif - Qualif::STATIC;
}
Rvalue::Ref(_, kind, ref place) => {
// Static places in consts would have errored already,
// only keep track of references to them here.
if self.qualif.intersects(Qualif::STATIC) {
self.qualif = self.qualif - Qualif::STATIC;
self.add(Qualif::STATIC_REF);
}
let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);
// Default to forbidding the borrow and/or its promotion,
// due to the potential for direct or interior mutability,
// and only proceed by setting `forbidden_mut` to `false`.
let mut forbidden_mut = true;
if let BorrowKind::Mut { .. } = kind {
// In theory, any zero-sized value could be borrowed
// mutably without consequences. However, only &mut []
// is allowed right now, and only in functions.
if self.mode == Mode::StaticMut {
// Inside a `static mut`, &mut [...] is also allowed.
match ty.sty {
ty::TyArray(..) | ty::TySlice(_) => forbidden_mut = false,
_ => {}
}
} else if let ty::TyArray(_, len) = ty.sty {
// FIXME(eddyb) the `self.mode == Mode::Fn` condition
// seems unnecessary, given that this is merely a ZST.
if len.unwrap_usize(self.tcx) == 0 && self.mode == Mode::Fn {
forbidden_mut = false;
}
}
if forbidden_mut {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
"references in {}s may only refer \
to immutable values", self.mode);
err.span_label(self.span, format!("{}s require immutable values",
self.mode));
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("References in statics and constants may only refer to \
immutable values.\n\n\
Statics are shared everywhere, and if they refer to \
mutable data one might violate memory safety since \
holding multiple mutable references to shared data is \
not allowed.\n\n\
If you really want global mutable state, try using \
static mut or a global UnsafeCell.");
}
err.emit();
}
}
} else {
// Constants cannot be borrowed if they contain interior mutability as
// it means that our "silent insertion of statics" could change
// initializer values (very bad).
if self.qualif.intersects(Qualif::MUTABLE_INTERIOR) {
// A reference of a MUTABLE_INTERIOR place is instead
// NOT_CONST (see `if forbidden_mut` below), to avoid
// duplicate errors (from reborrowing, for example).
self.qualif = self.qualif - Qualif::MUTABLE_INTERIOR;
if self.mode != Mode::Fn {
span_err!(self.tcx.sess, self.span, E0492,
"cannot borrow a constant which may contain \
interior mutability, create a static instead");
}
} else {
// We allow immutable borrows of frozen data.
forbidden_mut = false;
}
}
if forbidden_mut {
self.add(Qualif::NOT_CONST);
} else {
// We might have a candidate for promotion.
let candidate = Candidate::Ref(location);
// We can only promote interior borrows of promotable temps.
let mut place = place;
while let Place::Projection(ref proj) = *place {
if proj.elem == ProjectionElem::Deref {
break;
}
place = &proj.base;
}
if let Place::Local(local) = *place {
if self.mir.local_kind(local) == LocalKind::Temp {
if let Some(qualif) = self.local_qualif[local] {
// `forbidden_mut` is false, so we can safely ignore
// `MUTABLE_INTERIOR` from the local's qualifications.
// This allows borrowing fields which don't have
// `MUTABLE_INTERIOR`, from a type that does, e.g.:
// `let _: &'static _ = &(Cell::new(1), 2).1;`
if self.can_promote(qualif - Qualif::MUTABLE_INTERIOR) {
self.promotion_candidates.push(candidate);
}
}
}
}
}
}
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
let operand_ty = operand.ty(self.mir, self.tcx);
let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
match (cast_in, cast_out) {
(CastTy::Ptr(_), CastTy::Int(_)) |
(CastTy::FnPtr, CastTy::Int(_)) => {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(
self.tcx.sess,
self.span,
E0018,
"raw pointers cannot be cast to integers in {}s",
self.mode
);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("\
The value of static and constant integers must be known at compile time. You can't cast a pointer \
to an integer because the address of a pointer can vary.
For example, if you write:
```
static MY_STATIC: u32 = 42;
static MY_STATIC_ADDR: usize = &MY_STATIC as *const _ as usize;
static WHAT: usize = (MY_STATIC_ADDR^17) + MY_STATIC_ADDR;
```
Then `MY_STATIC_ADDR` would contain the address of `MY_STATIC`. However, the address can change \
when the program is linked, as well as change between different executions due to ASLR, and many \
linkers would not be able to calculate the value of `WHAT`.
On the other hand, static and constant pointers can point either to a known numeric address or to \
the address of a symbol.
```
static MY_STATIC: u32 = 42;
static MY_STATIC_ADDR: &'static u32 = &MY_STATIC;
const CONST_ADDR: *const u8 = 0x5f3759df as *const u8;
```
This does not pose a problem by itself because they can't be accessed directly.");
}
err.emit();
}
}
_ => {}
}
}
Rvalue::BinaryOp(op, ref lhs, _) => {
if let ty::TyRawPtr(_) = lhs.ty(self.mir, self.tcx).sty {
assert!(op == BinOp::Eq || op == BinOp::Ne ||
op == BinOp::Le || op == BinOp::Lt ||
op == BinOp::Ge || op == BinOp::Gt ||
op == BinOp::Offset);
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
struct_span_err!(
self.tcx.sess, self.span, E0395,
"raw pointers cannot be compared in {}s",
self.mode)
.span_label(
self.span,
"comparing raw pointers in static")
.emit();
}
}
}
Rvalue::NullaryOp(NullOp::Box, _) => {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0010,
"allocations are not allowed in {}s", self.mode);
err.span_label(self.span, format!("allocation not allowed in {}s", self.mode));
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(
"The value of statics and constants must be known at compile time, \
and they live for the entire lifetime of a program. Creating a boxed \
value allocates memory on the heap at runtime, and therefore cannot \
be done at compile time."
);
}
err.emit();
}
}
Rvalue::Aggregate(ref kind, _) => {
if let AggregateKind::Adt(def, ..) = **kind {
if def.has_dtor(self.tcx) {
self.add(Qualif::NEEDS_DROP);
}
if Some(def.did) == self.tcx.lang_items().unsafe_cell_type() {
let ty = rvalue.ty(self.mir, self.tcx);
self.add_type(ty);
assert!(self.qualif.intersects(Qualif::MUTABLE_INTERIOR));
}
}
}
}
}
fn visit_terminator_kind(&mut self,
bb: BasicBlock,
kind: &TerminatorKind<'tcx>,
location: Location) {
if let TerminatorKind::Call { ref func, ref args, ref destination, .. } = *kind {
self.visit_operand(func, location);
let fn_ty = func.ty(self.mir, self.tcx);
let mut callee_def_id = None;
let (mut is_shuffle, mut is_const_fn) = (false, None);
if let ty::TyFnDef(def_id, _) = fn_ty.sty {
callee_def_id = Some(def_id);
match self.tcx.fn_sig(def_id).abi() {
Abi::RustIntrinsic |
Abi::PlatformIntrinsic => {
assert!(!self.tcx.is_const_fn(def_id));
match &self.tcx.item_name(def_id).as_str()[..] {
| "size_of"
| "min_align_of"
| "type_id"
| "bswap"
| "ctpop"
| "cttz"
| "cttz_nonzero"
| "ctlz"
| "ctlz_nonzero" => is_const_fn = Some(def_id),
name if name.starts_with("simd_shuffle") => {
is_shuffle = true;
}
_ => {}
}
}
_ => {
if self.tcx.is_const_fn(def_id) {
is_const_fn = Some(def_id);
}
}
}
}
let constant_arguments = callee_def_id.and_then(|id| {
args_required_const(self.tcx, id)
});
for (i, arg) in args.iter().enumerate() {
self.nest(|this| {
this.visit_operand(arg, location);
if this.mode != Mode::Fn {
return
}
let candidate = Candidate::Argument { bb, index: i };
if is_shuffle && i == 2 {
if this.can_promote(this.qualif) {
this.promotion_candidates.push(candidate);
} else {
span_err!(this.tcx.sess, this.span, E0526,
"shuffle indices are not constant");
}
return
}
let constant_arguments = match constant_arguments.as_ref() {
Some(s) => s,
None => return,
};
if !constant_arguments.contains(&i) {
return
}
if this.can_promote(this.qualif) {
this.promotion_candidates.push(candidate);
} else {
this.tcx.sess.span_err(this.span,
&format!("argument {} is required to be a constant",
i + 1));
}
});
}
// Const fn calls.
if let Some(def_id) = is_const_fn {
// find corresponding rustc_const_unstable feature
if let Some(&attr::Stability {
rustc_const_unstable: Some(attr::RustcConstUnstable {
feature: ref feature_name
}),
.. }) = self.tcx.lookup_stability(def_id) {
if
// feature-gate is not enabled,
!self.tcx.features()
.declared_lib_features
.iter()
.any(|&(ref sym, _)| sym == feature_name) &&
// this doesn't come from a crate with the feature-gate enabled,
self.def_id.is_local() &&
// this doesn't come from a macro that has #[allow_internal_unstable]
!self.span.allows_unstable()
{
self.qualif = Qualif::NOT_CONST;
if self.mode != Mode::Fn {
// inside a constant environment, not having the feature gate is
// an error
let mut err = self.tcx.sess.struct_span_err(self.span,
&format!("`{}` is not yet stable as a const fn",
self.tcx.item_path_str(def_id)));
help!(&mut err,
"in Nightly builds, add `#![feature({})]` \
to the crate attributes to enable",
feature_name);
err.emit();
}
}
}
} else {
self.qualif = Qualif::NOT_CONST;
if self.mode != Mode::Fn {
// FIXME(#24111) Remove this check when const fn stabilizes
let (msg, note) = if let UnstableFeatures::Disallow =
self.tcx.sess.opts.unstable_features {
(format!("calls in {}s are limited to \
tuple structs and tuple variants",
self.mode),
Some("a limited form of compile-time function \
evaluation is available on a nightly \
compiler via `const fn`"))
} else {
(format!("calls in {}s are limited \
to constant functions, \
tuple structs and tuple variants",
self.mode),
None)
};
let mut err = struct_span_err!(self.tcx.sess, self.span, E0015, "{}", msg);
if let Some(note) = note {
err.span_note(self.span, note);
}
err.emit();
}
}
if let Some((ref dest, _)) = *destination {
// Avoid propagating irrelevant callee/argument qualifications.
if self.qualif.intersects(Qualif::CONST_ERROR) {
self.qualif = Qualif::NOT_CONST;
} else {
// Be conservative about the returned value of a const fn.
let tcx = self.tcx;
let ty = dest.ty(self.mir, tcx).to_ty(tcx);
self.qualif = Qualif::empty();
self.add_type(ty);
}
self.assign(dest, location);
}
} else if let TerminatorKind::Drop { location: ref place, .. } = *kind {
self.super_terminator_kind(bb, kind, location);
// Deny *any* live drops anywhere other than functions.
if self.mode != Mode::Fn {
// HACK(eddyb) Emulate a bit of dataflow analysis,
// conservatively, that drop elaboration will do.
let needs_drop = if let Place::Local(local) = *place {
if self.local_qualif[local].map_or(true, |q| q.intersects(Qualif::NEEDS_DROP)) {
Some(self.mir.local_decls[local].source_info.span)
} else {
None
}
} else {
Some(self.span)
};
if let Some(span) = needs_drop {
// Double-check the type being dropped, to minimize false positives.
let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);
if ty.needs_drop(self.tcx, self.param_env) {
struct_span_err!(self.tcx.sess, span, E0493,
"destructors cannot be evaluated at compile-time")
.span_label(span, format!("{}s cannot evaluate destructors",
self.mode))
.emit();
}
}
}
} else {
// Qualify any operands inside other terminators.
self.super_terminator_kind(bb, kind, location);
}
}
fn visit_assign(&mut self,
_: BasicBlock,
dest: &Place<'tcx>,
rvalue: &Rvalue<'tcx>,
location: Location) {
self.visit_rvalue(rvalue, location);
// Check the allowed const fn argument forms.
if let (Mode::ConstFn, &Place::Local(index)) = (self.mode, dest) {
if self.mir.local_kind(index) == LocalKind::Var &&
self.const_fn_arg_vars.insert(index.index()) &&
!self.tcx.sess.features_untracked().const_let {
// Direct use of an argument is permitted.
match *rvalue {
Rvalue::Use(Operand::Copy(Place::Local(local))) |
Rvalue::Use(Operand::Move(Place::Local(local))) => {
if self.mir.local_kind(local) == LocalKind::Arg {
return;
}
}
_ => {}
}
// Avoid a generic error for other uses of arguments.
if self.qualif.intersects(Qualif::FN_ARGUMENT) {
let decl = &self.mir.local_decls[index];
let mut err = feature_err(
&self.tcx.sess.parse_sess,
"const_let",
decl.source_info.span,
GateIssue::Language,
"arguments of constant functions can only be immutable by-value bindings"
);
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note("Constant functions are not allowed to mutate anything. Thus, \
binding to an argument with a mutable pattern is not allowed.");
err.note("Remove any mutable bindings from the argument list to fix this \
error. In case you need to mutate the argument, try lazily \
initializing a global variable instead of using a const fn, or \
refactoring the code to a functional style to avoid mutation if \
possible.");
}
err.emit();
return;
}
}
}
self.assign(dest, location);
}
fn visit_source_info(&mut self, source_info: &SourceInfo) {
self.span = source_info.span;
}
fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>, location: Location) {
self.nest(|this| {
this.visit_source_info(&statement.source_info);
match statement.kind {
StatementKind::Assign(ref place, ref rvalue) => {
this.visit_assign(bb, place, rvalue, location);
}
StatementKind::ReadForMatch(..) |
StatementKind::SetDiscriminant { .. } |
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) |
StatementKind::InlineAsm {..} |
StatementKind::EndRegion(_) |
StatementKind::Validate(..) |
StatementKind::UserAssertTy(..) |
StatementKind::Nop => {}
}
});
}
fn visit_terminator(&mut self,
bb: BasicBlock,
terminator: &Terminator<'tcx>,
location: Location) {
self.nest(|this| this.super_terminator(bb, terminator, location));
}
}
pub fn provide(providers: &mut Providers) {
*providers = Providers {
mir_const_qualif,
..*providers
};
}
fn mir_const_qualif<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> (u8, Lrc<IdxSetBuf<Local>>) {
// NB: This `borrow()` is guaranteed to be valid (i.e., the value
// cannot yet be stolen), because `mir_validated()`, which steals
// from `mir_const(), forces this query to execute before
// performing the steal.
let mir = &tcx.mir_const(def_id).borrow();
if mir.return_ty().references_error() {
tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: Mir had errors");
return (Qualif::NOT_CONST.bits(), Lrc::new(IdxSetBuf::new_empty(0)));
}
let mut qualifier = Qualifier::new(tcx, def_id, mir, Mode::Const);
let (qualif, promoted_temps) = qualifier.qualify_const();
(qualif.bits(), promoted_temps)
}
pub struct QualifyAndPromoteConstants;
impl MirPass for QualifyAndPromoteConstants {
fn run_pass<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
src: MirSource,
mir: &mut Mir<'tcx>) {
// There's not really any point in promoting errorful MIR.
if mir.return_ty().references_error() {
tcx.sess.delay_span_bug(mir.span, "QualifyAndPromoteConstants: Mir had errors");
return;
}
if src.promoted.is_some() {
return;
}
let def_id = src.def_id;
let id = tcx.hir.as_local_node_id(def_id).unwrap();
let mut const_promoted_temps = None;
let mode = match tcx.hir.body_owner_kind(id) {
hir::BodyOwnerKind::Fn => {
if tcx.is_const_fn(def_id) {
Mode::ConstFn
} else {
Mode::Fn
}
}
hir::BodyOwnerKind::Const => {
const_promoted_temps = Some(tcx.mir_const_qualif(def_id).1);
Mode::Const
}
hir::BodyOwnerKind::Static(hir::MutImmutable) => Mode::Static,
hir::BodyOwnerKind::Static(hir::MutMutable) => Mode::StaticMut,
};
if mode == Mode::Fn || mode == Mode::ConstFn {
// This is ugly because Qualifier holds onto mir,
// which can't be mutated until its scope ends.
let (temps, candidates) = {
let mut qualifier = Qualifier::new(tcx, def_id, mir, mode);
if mode == Mode::ConstFn {
// Enforce a constant-like CFG for `const fn`.
qualifier.qualify_const();
} else {
while let Some((bb, data)) = qualifier.rpo.next() {
qualifier.visit_basic_block_data(bb, data);
}
}
(qualifier.temp_promotion_state, qualifier.promotion_candidates)
};
// Do the actual promotion, now that we know what's viable.
promote_consts::promote_candidates(mir, tcx, temps, candidates);
} else {
let promoted_temps = if mode == Mode::Const {
// Already computed by `mir_const_qualif`.
const_promoted_temps.unwrap()
} else {
Qualifier::new(tcx, def_id, mir, mode).qualify_const().1
};
// In `const` and `static` everything without `StorageDead`
// is `'static`, we don't have to create promoted MIR fragments,
// just remove `Drop` and `StorageDead` on "promoted" locals.
for block in mir.basic_blocks_mut() {
block.statements.retain(|statement| {
match statement.kind {
StatementKind::StorageDead(index) => {
!promoted_temps.contains(&index)
}
_ => true
}
});
let terminator = block.terminator_mut();
match terminator.kind {
TerminatorKind::Drop { location: Place::Local(index), target, .. } => {
if promoted_temps.contains(&index) {
terminator.kind = TerminatorKind::Goto {
target,
};
}
}
_ => {}
}
}
}
// Statics must be Sync.
if mode == Mode::Static {
// `#[thread_local]` statics don't have to be `Sync`.
for attr in &tcx.get_attrs(def_id)[..] {
if attr.check_name("thread_local") {
return;
}
}
let ty = mir.return_ty();
tcx.infer_ctxt().enter(|infcx| {
let param_env = ty::ParamEnv::empty();
let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic);
let mut fulfillment_cx = traits::FulfillmentContext::new();
fulfillment_cx.register_bound(&infcx,
param_env,
ty,
tcx.require_lang_item(lang_items::SyncTraitLangItem),
cause);
if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
infcx.report_fulfillment_errors(&err, None, false);
}
});
}
}
}
fn args_required_const(tcx: TyCtxt, def_id: DefId) -> Option<FxHashSet<usize>> {
let attrs = tcx.get_attrs(def_id);
let attr = attrs.iter().find(|a| a.check_name("rustc_args_required_const"))?;
let mut ret = FxHashSet();
for meta in attr.meta_item_list()? {
match meta.literal()?.node {
LitKind::Int(a, _) => { ret.insert(a as usize); }
_ => return None,
}
}
Some(ret)
}
| 42.501515 | 100 | 0.458932 |
67ef18950af00647132baa51a718e2011ef7dd74 | 1,735 | //! **city2ba** is a set of tools for generating synthetic bundle adjustment datasets.
//!
//! Bundle Adjustment is a nonlinear global optimization used to reduce noise in structure from
//! motion (SfM) and simultaneous localization and mapping applications (SLAM). See
//! [https://en.wikipedia.org/wiki/Bundle_adjustment](https://en.wikipedia.org/wiki/Bundle_adjustment)
//! for more details. Not many bundle adjustment datasets are freely available, so this package
//! contains tools for generating synthetic ones. A bundle adjustment dataset contains a set of
//! cameras, a set of 3D points, and a set of camera-point observations. The goal of a bundle
//! adjuster is to minimize the difference between the projection of each 3D point into each
//! camera and the location where it was actually observed. This package provides ways for
//! generating zero error (ground truth) datasets in the [generate] and [synthetic] modules, and
//! ways to add noise to existing datasets (so they are no longer zero error) in the [noise]
//! module.
//!
//! This crate also provides command line tools for generating bundle adjustment datasets:
//! ```bash
//! # Generate a problem from a 3D model
//! city2ba generate model.obj problem.bal --num-cameras 100 --num-points 200
//!
//! # Add noise to the problem
//! city2ba noise problem.bal problem_noised.bal --drift-strength 0.001 --rotation-std 0.0001
//!
//! # Generate a problem using a city block grid
//! city2ba synthetic problem.bal --blocks 4
//!
//! # Convert a problem to a format for visualization
//! city2ba ply problem.bal problem.ply
//! ```
#[macro_use]
extern crate itertools;
mod baproblem;
pub mod generate;
pub mod noise;
pub mod synthetic;
pub use baproblem::*;
| 44.487179 | 102 | 0.746398 |
09d6972bdf24379cfcca72d2d9dc6c5b5b096bcb | 396 | use super::common::receive_from_stream;
use crate::reply::Event;
use crate::Fallible;
use std::convert::TryFrom;
use std::os::unix::net::UnixStream;
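/// A blocking iterator over [`Event`]s arriving on the IPC socket.
///
/// Each call to `next` performs a blocking read on the underlying `UnixStream`, so
/// it returns only once the next event (or an error) is available.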
pub struct EventIterator(pub(crate) UnixStream);
impl Iterator for EventIterator {
type Item = Fallible<Event>;
fn next(&mut self) -> Option<Self::Item> {
Some(receive_from_stream(&mut self.0).and_then(Event::try_from))
}
}
| 24.75 | 72 | 0.707071 |
562c010a5f33157c9b7fce7514855139f67ab8ef | 2,490 | //! The [`Array`](crate::array::Array) struct.
use graphics::color;
use graphics::types::Color;
use crate::state::{ArrayAccess, SharedState};
/// A convenient wrapper around [`SharedState`] for
/// [algorithms](crate::algorithms) that handles locking and synchronization.
/// **All methods in this struct lock the [state](crate::state::State) for as
/// short as possible** so that the rendering thread can lock it when it wants.
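///
/// A sketch of how a sorting algorithm might drive it (illustrative only; the real
/// algorithms live in [`crate::algorithms`]):
///
/// ```ignore
/// fn bubble_sort(array: Array) {
///     let len = array.len();
///     for i in 0..len {
///         for j in 0..len - i - 1 {
///             if array.get(j) > array.get(j + 1) {
///                 array.swap(j, j + 1);
///             }
///             array.wait(5); // slow the loop down so the animation stays visible
///         }
///     }
/// }
/// ```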
#[derive(Debug)]
pub struct Array(SharedState);
impl Array {
/// Creates a new array from a copy of [`SharedState`].
pub fn new(state: SharedState) -> Self {
Array(state)
}
/// Puts the current thread to sleep for the specified amount of time and
/// blocks it if the animation is [paused](crate::state::State::paused).
pub fn wait(&self, ms: u64) {
use std::thread;
use std::time::Duration;
// state must be locked for as short as possible so we shouldn't keep it
// locked while sleeping (`thread::sleep` and `thread::park`)
thread::sleep(Duration::from_micros({
let state = self.0.get();
(ms as f64 * 1000.0 / state.speed) as u64
}));
let paused = {
let state = self.0.get();
state.paused
};
if paused {
thread::park();
}
}
/// Returns the length of the underlying [vector](crate::state::State::array).
pub fn len(&self) -> usize {
let state = self.0.get();
state.array.len()
}
/// Returns a value at a given index.
pub fn get(&self, index: usize) -> u32 {
let mut state = self.0.get();
let value = state.array[index];
let time = state.time;
state.array_accesses.push(ArrayAccess { time, index });
value
}
    /// Sets the value at a given index.
pub fn set(&self, index: usize, value: u32) {
let mut state = self.0.get();
state.array[index] = value;
}
/// Swaps two values at given indices.
pub fn swap(&self, a: usize, b: usize) {
let mut state = self.0.get();
state.array.swap(a, b);
}
/// Resets color of the value at a given index (sets it to the transparent
/// color).
///
/// _See_ [`State.colors`](crate::state::State::colors)
pub fn reset_color(&self, index: usize) {
self.set_color(index, color::TRANSPARENT);
}
/// Sets color of the value at a given index.
///
/// _See_ [`State.colors`](crate::state::State::colors)
pub fn set_color(&self, index: usize, color: Color) {
let mut state = self.0.get();
state.colors[index] = color;
}
}
| 27.666667 | 80 | 0.630522 |
29405693a445f7dc9af1cf456fae5676d0f46fd3 | 9,679 | use std::f32::consts::{PI, TAU};
use crate::{constants::*, geometry::Rectangle, util::*};
use macroquad::{prelude::*, audio::{Sound, play_sound_once, stop_sound}};
#[derive(Debug, Clone)]
pub struct Player {
pub center: Vec2,
pub size: Vec2,
pub speed: f32,
pub ground_height: f32,
pub velocity: Vec2,
pub position: Vec2,
pub acceleration: Vec2,
pub is_jumping: bool,
pub can_jump: bool,
pub is_moving: bool,
pedal_theta: f32,
previous_pedal_theta: f32,
wheel_theta: f32,
previous_wheel_theta: f32,
pub headlight: Vec2,
pub taillight: Vec2,
jump_sound: Sound,
land_sound: Sound,
}
impl Player {
pub fn new(size: Vec2, resolution: Vec2, jump: Sound, land: Sound) -> Self {
Self {
center: vec2((resolution.x - size.x) * 0.5, resolution.y - size.y),
size,
speed: DEFAULT_PLAYER_SPEED,
ground_height: 0.,
velocity: Vec2::ZERO,
acceleration: Vec2::ZERO,
position: Vec2::ZERO,
is_moving: true,
is_jumping: false,
can_jump: false,
pedal_theta: 0.,
previous_pedal_theta: 0.,
wheel_theta: 0.,
previous_wheel_theta: 0.,
headlight: vec2(0., 0.),
taillight: vec2(0., 0.),
jump_sound: jump,
land_sound: land,
}
}
pub fn reset(&mut self) {
self.speed = DEFAULT_PLAYER_SPEED;
self.ground_height = 0.;
self.velocity = Vec2::ZERO;
self.acceleration = Vec2::ZERO;
self.position = Vec2::ZERO;
self.is_jumping = false;
self.is_moving = true;
self.can_jump = false;
}
pub fn render(&mut self) {
let line_thickness = 8.;
let half_line_thickness = line_thickness * 0.5;
let wheel_radius = 24.;
let center = self.center - self.position
+ vec2(
self.size.x * 0.5,
self.size.y - wheel_radius - line_thickness,
);
let wheel_1 = center - vec2(40., -line_thickness);
let wheel_2 = center + vec2(40., line_thickness);
let bottom_bracket = wheel_1 + vec2((wheel_2.x - wheel_1.x) * 0.5, 0.);
let seat_post = wheel_1 + vec2((bottom_bracket.x - wheel_1.x) * 0.5, -wheel_radius * 1.5);
let seat_start = seat_post - vec2(line_thickness * 2., line_thickness * 0.75);
let seat_end = seat_post + vec2(line_thickness * 1.7, -line_thickness * 0.75 * 1.4);
let steering_tube = wheel_2 - vec2(2. * line_thickness, wheel_radius * 1.6);
let steer = steering_tube + vec2(line_thickness, -line_thickness);
let crank_length = wheel_radius * 0.5;
let crank_1 = point_on_circle(bottom_bracket, crank_length, self.pedal_theta);
let crank_2 = point_on_circle(bottom_bracket, crank_length, self.pedal_theta + PI);
let pedal_length = 8.;
let pedal_vec = vec2(pedal_length, 0.);
let pedal_1_start = crank_1 - pedal_vec;
let pedal_1_end = crank_1 + pedal_vec;
let pedal_2_start = crank_2 - pedal_vec;
let pedal_2_end = crank_2 + pedal_vec;
draw_line(
bottom_bracket.x,
bottom_bracket.y,
crank_2.x,
crank_2.y,
line_thickness * 0.5,
PALETTE[12],
);
draw_line(
pedal_2_start.x,
pedal_2_start.y,
pedal_2_end.x,
pedal_2_end.y,
line_thickness * 0.75,
PALETTE[0],
);
draw_circle_lines(
wheel_1.x,
wheel_1.y,
wheel_radius,
line_thickness,
PALETTE[0],
);
draw_circle_lines(
wheel_2.x,
wheel_2.y,
wheel_radius,
line_thickness,
PALETTE[0],
);
let spokes = 16.;
let increment = TAU / spokes;
let mut theta = self.wheel_theta;
for _i in 0..(spokes as usize) {
let point_1 =
point_on_circle(wheel_1, wheel_radius - line_thickness * 0.5, theta % TAU);
let point_2 =
point_on_circle(wheel_2, wheel_radius - line_thickness * 0.5, theta % TAU);
draw_line(wheel_1.x, wheel_1.y, point_1.x, point_1.y, 1., PALETTE[12]);
draw_line(wheel_2.x, wheel_2.y, point_2.x, point_2.y, 1., PALETTE[12]);
theta += increment;
}
draw_line(
wheel_1.x - half_line_thickness,
wheel_1.y,
bottom_bracket.x,
bottom_bracket.y,
line_thickness,
PALETTE[1],
);
draw_line(
wheel_1.x - half_line_thickness,
wheel_1.y,
seat_post.x,
seat_post.y,
line_thickness,
PALETTE[1],
);
draw_line(
seat_post.x,
seat_post.y,
bottom_bracket.x,
bottom_bracket.y,
line_thickness,
PALETTE[1],
);
draw_line(
bottom_bracket.x,
bottom_bracket.y,
steering_tube.x,
steering_tube.y,
line_thickness,
PALETTE[1],
);
draw_line(
seat_post.x,
seat_post.y,
steering_tube.x,
steering_tube.y,
line_thickness,
PALETTE[1],
);
draw_line(
steering_tube.x,
steering_tube.y,
wheel_2.x,
wheel_2.y,
line_thickness + 2.,
PALETTE[1],
);
let lamp_front = steering_tube + vec2(line_thickness, line_thickness * 0.75);
let lamp_back = vec2(center.x - 28., lamp_front.y - line_thickness * 0.3);
self.headlight = lamp_front;
        self.taillight = lamp_back;
draw_circle(
lamp_front.x,
lamp_front.y,
line_thickness * 0.5,
PALETTE[14],
);
draw_circle(lamp_back.x, lamp_back.y, line_thickness * 0.5, PALETTE[8]);
draw_line(
seat_start.x,
seat_start.y,
seat_end.x,
seat_end.y,
line_thickness + 4.,
PALETTE[4],
);
draw_circle(wheel_1.x, wheel_1.y, line_thickness, PALETTE[9]);
draw_circle(wheel_2.x, wheel_2.y, line_thickness, PALETTE[9]);
draw_circle(
bottom_bracket.x,
bottom_bracket.y,
line_thickness * 0.5,
PALETTE[1],
);
draw_circle(
steering_tube.x,
steering_tube.y,
line_thickness * 0.5,
PALETTE[1],
);
draw_line(
steering_tube.x,
steering_tube.y,
steer.x,
steer.y,
line_thickness,
PALETTE[1],
);
draw_circle(steer.x, steer.y, 0.75 * line_thickness, PALETTE[0]);
draw_line(
bottom_bracket.x,
bottom_bracket.y,
crank_1.x,
crank_1.y,
line_thickness * 0.5,
PALETTE[12],
);
draw_circle(bottom_bracket.x, bottom_bracket.y, 4., PALETTE[12]);
draw_line(
pedal_1_start.x,
pedal_1_start.y,
pedal_1_end.x,
pedal_1_end.y,
line_thickness * 0.75,
PALETTE[0],
);
}
pub fn step(&mut self, time: f32) {
if self.position.y <= self.ground_height {
self.position.y = self.ground_height;
self.acceleration += *UP * *GRAVITY;
if self.is_jumping {
self.acceleration = *UP * *GRAVITY;
self.velocity = Vec2::ZERO;
self.is_jumping = false;
stop_sound(self.jump_sound);
play_sound_once(self.land_sound);
}
self.can_jump = true;
}
if time <= 0.1 {
self.can_jump = false;
}
self.acceleration += *DOWN * *GRAVITY;
self.velocity += self.acceleration * TIMESTEP;
self.position += self.velocity * TIMESTEP;
self.speed += 0.0001;
self.previous_wheel_theta = self.wheel_theta;
let wheel_speed = if self.is_jumping { 15. } else { 30. };
self.wheel_theta = if self.is_moving {
(self.previous_wheel_theta + (TAU / wheel_speed)) % TAU
} else {
self.previous_wheel_theta
};
self.previous_pedal_theta = self.pedal_theta;
self.pedal_theta = if self.is_moving && !self.is_jumping {
(self.previous_pedal_theta + (TAU / 30.)) % TAU
} else {
self.previous_pedal_theta
};
}
pub fn tick(&mut self) {
if is_mouse_button_down(MouseButton::Left) && self.can_jump {
if !self.is_jumping {
self.jump();
play_sound_once(self.jump_sound);
}
}
if is_mouse_button_released(MouseButton::Left) && self.is_jumping && self.can_jump {
self.can_jump = false;
}
}
fn jump(&mut self) {
self.position.y = self.ground_height + 0.1;
self.is_jumping = true;
self.velocity += *UP * *JUMP_FORCE;
}
pub fn get_aabb(&self) -> Rectangle {
Rectangle::new(self.origin(), self.size)
}
fn origin(&self) -> Vec2 {
vec2(
self.center.x,
self.center.y - self.ground_height - self.position.y,
)
}
}
| 29.690184 | 98 | 0.523298 |
bfe33c6b5661a19e07f1159f5427fdc2b5c46c17 | 1,496 | /* automatically generated by rust-bindgen */
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod foo {
pub type Type = ::std::os::raw::c_uint;
pub const THIS: Type = 0;
pub const SHOULD_BE: Type = 1;
pub const A_CONSTANT: Type = 2;
}
pub use self::foo::Type as foo_alias1;
pub use self::foo_alias1 as foo_alias2;
#[repr(C)]
#[derive(Debug, Copy)]
pub struct bar {
pub this_should_work: foo::Type,
}
#[test]
fn bindgen_test_layout_bar() {
assert_eq!(
::std::mem::size_of::<bar>(),
4usize,
concat!("Size of: ", stringify!(bar))
);
assert_eq!(
::std::mem::align_of::<bar>(),
4usize,
concat!("Alignment of ", stringify!(bar))
);
assert_eq!(
unsafe { &(*(0 as *const bar)).this_should_work as *const _ as usize },
0usize,
concat!(
"Alignment of field: ",
stringify!(bar),
"::",
stringify!(this_should_work)
)
);
}
impl Clone for bar {
fn clone(&self) -> Self {
*self
}
}
impl Default for bar {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
extern "C" {
pub fn func1(
arg1: foo::Type,
arg2: *mut foo::Type,
arg3: *mut *mut foo::Type,
) -> *mut foo::Type;
}
extern "C" {
pub fn func2(
arg1: foo_alias1,
arg2: *mut foo_alias1,
arg3: *mut *mut foo_alias1,
) -> *mut foo_alias1;
}
| 22.328358 | 82 | 0.54746 |
cc3e650979a255414f94d5ddcd9780eb49ad1a21 | 14,588 | use object::{Object, ObjectSection, ObjectSymbol};
use std::collections::{HashMap, HashSet};
use std::convert::TryInto;
use std::num::TryFromIntError;
use wasmer_compiler::{
CompileError, CompiledFunctionFrameInfo, CustomSection, CustomSectionProtection,
CustomSections, FunctionAddressMap, FunctionBody, InstructionAddressMap, Relocation,
RelocationKind, RelocationTarget, SectionBody, SectionIndex, SourceLoc,
};
use wasmer_types::entity::PrimaryMap;
use wasmer_vm::libcalls::LibCall;
fn map_tryfromint_err(error: TryFromIntError) -> CompileError {
CompileError::Codegen(format!("int doesn't fit: {}", error))
}
fn map_object_err(error: object::read::Error) -> CompileError {
CompileError::Codegen(format!("error parsing object file: {}", error))
}
pub struct CompiledFunction {
pub compiled_function: wasmer_compiler::CompiledFunction,
pub custom_sections: CustomSections,
pub eh_frame_section_indices: Vec<SectionIndex>,
}
pub fn load_object_file<F>(
contents: &[u8],
root_section: &str,
root_section_reloc_target: RelocationTarget,
mut symbol_name_to_relocation_target: F,
) -> Result<CompiledFunction, CompileError>
where
F: FnMut(&str) -> Result<Option<RelocationTarget>, CompileError>,
{
// TODO: use perfect hash function?
let mut libcalls = HashMap::new();
libcalls.insert("ceilf".to_string(), LibCall::CeilF32);
libcalls.insert("ceil".to_string(), LibCall::CeilF64);
libcalls.insert("floorf".to_string(), LibCall::FloorF32);
libcalls.insert("floor".to_string(), LibCall::FloorF64);
libcalls.insert("nearbyintf".to_string(), LibCall::NearestF32);
libcalls.insert("nearbyint".to_string(), LibCall::NearestF64);
libcalls.insert("truncf".to_string(), LibCall::TruncF32);
libcalls.insert("trunc".to_string(), LibCall::TruncF64);
libcalls.insert("wasmer_vm_f32_ceil".to_string(), LibCall::CeilF32);
libcalls.insert("wasmer_vm_f64_ceil".to_string(), LibCall::CeilF64);
libcalls.insert("wasmer_vm_f32_floor".to_string(), LibCall::FloorF32);
libcalls.insert("wasmer_vm_f64_floor".to_string(), LibCall::FloorF64);
libcalls.insert("wasmer_vm_f32_nearest".to_string(), LibCall::NearestF32);
libcalls.insert("wasmer_vm_f64_nearest".to_string(), LibCall::NearestF64);
libcalls.insert("wasmer_vm_f32_trunc".to_string(), LibCall::TruncF32);
libcalls.insert("wasmer_vm_f64_trunc".to_string(), LibCall::TruncF64);
libcalls.insert("wasmer_vm_memory32_size".to_string(), LibCall::Memory32Size);
libcalls.insert(
"wasmer_vm_imported_memory32_size".to_string(),
LibCall::ImportedMemory32Size,
);
libcalls.insert("wasmer_vm_table_copy".to_string(), LibCall::TableCopy);
libcalls.insert("wasmer_vm_table_init".to_string(), LibCall::TableInit);
libcalls.insert("wasmer_vm_table_fill".to_string(), LibCall::TableFill);
libcalls.insert("wasmer_vm_table_size".to_string(), LibCall::TableSize);
libcalls.insert(
"wasmer_vm_imported_table_size".to_string(),
LibCall::ImportedTableSize,
);
libcalls.insert("wasmer_vm_table_get".to_string(), LibCall::TableGet);
libcalls.insert(
"wasmer_vm_imported_table_get".to_string(),
LibCall::ImportedTableGet,
);
libcalls.insert("wasmer_vm_table_set".to_string(), LibCall::TableSet);
libcalls.insert(
"wasmer_vm_imported_table_set".to_string(),
LibCall::ImportedTableSet,
);
libcalls.insert("wasmer_vm_table_grow".to_string(), LibCall::TableGrow);
libcalls.insert(
"wasmer_vm_imported_table_grow".to_string(),
LibCall::ImportedTableGrow,
);
libcalls.insert("wasmer_vm_func_ref".to_string(), LibCall::FuncRef);
libcalls.insert("wasmer_vm_elem_drop".to_string(), LibCall::ElemDrop);
libcalls.insert("wasmer_vm_memory32_copy".to_string(), LibCall::Memory32Copy);
libcalls.insert(
"wasmer_vm_imported_memory32_copy".to_string(),
LibCall::ImportedMemory32Copy,
);
libcalls.insert("wasmer_vm_memory32_fill".to_string(), LibCall::Memory32Fill);
libcalls.insert(
"wasmer_vm_imported_memory32_fill".to_string(),
LibCall::ImportedMemory32Fill,
);
libcalls.insert("wasmer_vm_memory32_init".to_string(), LibCall::Memory32Init);
libcalls.insert("wasmer_vm_data_drop".to_string(), LibCall::DataDrop);
libcalls.insert("wasmer_vm_raise_trap".to_string(), LibCall::RaiseTrap);
let elf = object::File::parse(contents).map_err(map_object_err)?;
let mut visited: HashSet<object::read::SectionIndex> = HashSet::new();
let mut worklist: Vec<object::read::SectionIndex> = Vec::new();
let mut section_targets: HashMap<object::read::SectionIndex, RelocationTarget> = HashMap::new();
let root_section_index = elf
.section_by_name(root_section)
.ok_or_else(|| CompileError::Codegen(format!("no section named {}", root_section)))?
.index();
let mut section_to_custom_section = HashMap::new();
section_targets.insert(root_section_index, root_section_reloc_target);
let mut next_custom_section: u32 = 0;
let mut elf_section_to_target = |elf_section_index: object::read::SectionIndex| {
*section_targets.entry(elf_section_index).or_insert_with(|| {
let next = SectionIndex::from_u32(next_custom_section);
section_to_custom_section.insert(elf_section_index, next);
let target = RelocationTarget::CustomSection(next);
next_custom_section += 1;
target
})
};
// From elf section index to list of Relocations. Although we use a Vec,
// the order of relocations is not important.
let mut relocations: HashMap<object::read::SectionIndex, Vec<Relocation>> = HashMap::new();
    // Each iteration of this loop pulls a section and the relocations that
    // apply to it. We begin with the ".root_section"
// section, and then parse all relocation sections that apply to that
// section. Those relocations may refer to additional sections which we
// then add to the worklist until we've visited the closure of
// everything needed to run the code in ".root_section".
//
// `worklist` is the list of sections we have yet to visit. It never
// contains any duplicates or sections we've already visited. `visited`
// contains all the sections we've ever added to the worklist in a set
// so that we can quickly check whether a section is new before adding
// it to worklist. `section_to_custom_section` is filled in with all
// the sections we want to include.
worklist.push(root_section_index);
visited.insert(root_section_index);
// Also add any .eh_frame sections.
let mut eh_frame_section_indices = vec![];
for section in elf.sections() {
if section.kind() == object::SectionKind::Elf(object::elf::SHT_X86_64_UNWIND) {
let index = section.index();
worklist.push(index);
visited.insert(index);
eh_frame_section_indices.push(index);
// This allocates a custom section index for the ELF section.
elf_section_to_target(index);
}
}
while let Some(section_index) = worklist.pop() {
for (offset, reloc) in elf
.section_by_index(section_index)
.map_err(map_object_err)?
.relocations()
{
let kind = match (elf.architecture(), reloc.kind(), reloc.size()) {
(_, object::RelocationKind::Absolute, 64) => RelocationKind::Abs8,
(
object::Architecture::X86_64,
object::RelocationKind::Elf(object::elf::R_X86_64_PC64),
0,
) => RelocationKind::X86PCRel8,
(object::Architecture::Aarch64, object::RelocationKind::PltRelative, 26) => {
RelocationKind::Arm64Call
}
_ => {
return Err(CompileError::Codegen(format!(
"unknown relocation {:?}",
reloc
)));
}
};
let addend = reloc.addend();
let target = match reloc.target() {
object::read::RelocationTarget::Symbol(index) => {
let symbol = elf.symbol_by_index(index).map_err(map_object_err)?;
let symbol_name = symbol.name().map_err(map_object_err)?;
if symbol.kind() == object::SymbolKind::Section {
match symbol.section() {
object::SymbolSection::Section(section_index) => {
if section_index == root_section_index {
root_section_reloc_target
} else {
if visited.insert(section_index) {
worklist.push(section_index);
}
elf_section_to_target(section_index)
}
}
_ => {
return Err(CompileError::Codegen(format!(
"relocation targets unknown section {:?}",
reloc
)));
}
}
// Maybe a libcall then?
} else if let Some(libcall) = libcalls.get(symbol_name) {
RelocationTarget::LibCall(*libcall)
} else if let Some(reloc_target) =
symbol_name_to_relocation_target(symbol_name)?
{
reloc_target
} else {
return Err(CompileError::Codegen(format!(
"relocation targets unknown symbol {:?}",
reloc
)));
}
}
object::read::RelocationTarget::Section(index) => {
if index == root_section_index {
root_section_reloc_target
} else {
if visited.insert(index) {
worklist.push(index);
}
elf_section_to_target(index)
}
}
object::read::RelocationTarget::Absolute => {
// Wasm-produced object files should never have absolute
// addresses in them because none of the parts of the Wasm
// VM, nor the generated code are loaded at fixed addresses.
return Err(CompileError::Codegen(format!(
"relocation targets absolute address {:?}",
reloc
)));
}
// `object::read::RelocationTarget` is a
// non-exhaustive enum (`#[non_exhaustive]`), so it
// could have additional variants added in the
// future. Therefore, when matching against variants
// of non-exhaustive enums, an extra wildcard arm must
// be added to account for any future variants.
t => {
return Err(CompileError::Codegen(format!(
"relocation target is unknown `{:?}`",
t
)));
}
};
relocations
.entry(section_index)
.or_default()
.push(Relocation {
kind,
reloc_target: target,
offset: offset.try_into().map_err(map_tryfromint_err)?,
addend,
});
}
}
let eh_frame_section_indices = eh_frame_section_indices
.iter()
.map(|index| {
section_to_custom_section.get(index).map_or_else(
|| {
Err(CompileError::Codegen(format!(
".eh_frame section with index={:?} was never loaded",
index
)))
},
|idx| Ok(*idx),
)
})
.collect::<Result<Vec<SectionIndex>, _>>()?;
let mut custom_sections = section_to_custom_section
.iter()
.map(|(elf_section_index, custom_section_index)| {
(
custom_section_index,
CustomSection {
protection: CustomSectionProtection::Read,
bytes: SectionBody::new_with_vec(
elf.section_by_index(*elf_section_index)
.unwrap()
.data()
.unwrap()
.to_vec(),
),
relocations: relocations
.remove_entry(elf_section_index)
.map_or(vec![], |(_, v)| v),
},
)
})
.collect::<Vec<_>>();
custom_sections.sort_unstable_by_key(|a| a.0);
let custom_sections = custom_sections
.into_iter()
.map(|(_, v)| v)
.collect::<PrimaryMap<SectionIndex, _>>();
let function_body = FunctionBody {
body: elf
.section_by_index(root_section_index)
.unwrap()
.data()
.unwrap()
.to_vec(),
unwind_info: None,
};
let address_map = FunctionAddressMap {
instructions: vec![InstructionAddressMap {
srcloc: SourceLoc::default(),
code_offset: 0,
code_len: function_body.body.len(),
}],
start_srcloc: SourceLoc::default(),
end_srcloc: SourceLoc::default(),
body_offset: 0,
body_len: function_body.body.len(),
};
Ok(CompiledFunction {
compiled_function: wasmer_compiler::CompiledFunction {
body: function_body,
relocations: relocations
.remove_entry(&root_section_index)
.map_or(vec![], |(_, v)| v),
frame_info: CompiledFunctionFrameInfo {
address_map,
traps: vec![],
},
},
custom_sections,
eh_frame_section_indices,
})
}
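// Added illustration (not part of the original file): the worklist/visited
// pattern described in the comments of `load_object_file`, reduced to a tiny
// self-contained form. `neighbours` stands in for "sections referenced by a
// section's relocations".
#[cfg(test)]
mod worklist_pattern_example {
    use std::collections::HashSet;

    fn reachable<F: Fn(usize) -> Vec<usize>>(start: usize, neighbours: F) -> HashSet<usize> {
        let mut visited = HashSet::new();
        let mut worklist = vec![start];
        visited.insert(start);
        while let Some(node) = worklist.pop() {
            for next in neighbours(node) {
                // Only enqueue nodes we have not seen before, so each node is
                // processed exactly once.
                if visited.insert(next) {
                    worklist.push(next);
                }
            }
        }
        visited
    }

    #[test]
    fn visits_the_closure_exactly_once() {
        let neighbours = |n: usize| match n {
            0 => vec![1, 2],
            1 => vec![2],
            _ => vec![],
        };
        let expected: HashSet<usize> = [0usize, 1, 2].iter().copied().collect();
        assert_eq!(reachable(0, neighbours), expected);
    }
}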
| 42.16185 | 100 | 0.572251 |
e613b3e0dd59e9058440050b5dc02e4fca1afcde | 7,380 | use super::state_machine::machine::NFA;
/// A matcher is some strategy for matching a given regex to a given input string. Different
/// matchers perform well in different situations, and which matcher to use can be decided based on
/// the regex.
pub trait Matcher {
    /// Given an input line, returns the byte index boundaries `(start, end)` of
    /// the first matching substring, or `(0, 0)` if there is no match.
fn match_substring(&self, input: &str) -> (usize, usize);
}
/// Pure state-machine based matching, with no literal optimizations.
pub struct NFAMatcher<'a> {
pub nfa: NFA<'a>,
// Whether or not the regex started with a caret (^) and/or ends with a
// dollar ($).
pub from_start: bool,
pub until_end: bool,
}
impl<'a> Matcher for NFAMatcher<'a> {
fn match_substring(&self, input: &str) -> (usize, usize) {
match nfa_match_substring(&self.nfa, input, self.from_start) {
None => (0, 0),
Some(result) => {
if self.until_end && result.1 != input.len() {
return (0, 0);
}
result
}
}
}
}
/// Pure literal string finder, meaning the regex contained no meta-characters.
pub struct LiteralMatcher<'a> {
pub to_find: &'a str,
pub from_start: bool,
pub until_end: bool,
}
impl<'a> Matcher for LiteralMatcher<'a> {
fn match_substring(&self, input: &str) -> (usize, usize) {
if input.len() < self.to_find.len() {
return (0, 0);
}
if self.from_start {
if self.until_end {
if input == self.to_find {
return (0, self.to_find.len());
}
return (0, 0);
}
if &input[..self.to_find.len()] == self.to_find {
return (0, self.to_find.len());
}
return (0, 0);
}
if self.until_end {
if &input[(input.len() - self.to_find.len())..] == self.to_find {
return (input.len() - self.to_find.len(), input.len());
}
return (0, 0);
}
match input.find(self.to_find) {
Some(byte_index) => (byte_index, byte_index + self.to_find.len()),
None => (0, 0),
}
}
}
/// Matching by first searching for a literal head, then using the state machine to match from
/// there.
pub struct LiteralHeadMatcher<'a> {
pub literal_head: &'a str,
pub nfa: NFA<'a>,
pub from_start: bool,
pub until_end: bool,
}
impl<'a> Matcher for LiteralHeadMatcher<'a> {
fn match_substring(&self, input: &str) -> (usize, usize) {
let mut input_for_nfa_start: usize = 0;
if self.from_start {
if &input[..self.literal_head.len()] != self.literal_head {
return (0, 0);
}
} else {
match input.find(self.literal_head) {
None => return (0, 0),
Some(byte_index) => input_for_nfa_start = byte_index,
}
}
let input_for_nfa = &input[input_for_nfa_start..];
let relative_result: (usize, usize) =
match nfa_match_substring(&self.nfa, input_for_nfa, self.from_start) {
Some(res) => res,
None => return (0, 0),
};
let result = (
input_for_nfa_start + relative_result.0,
input_for_nfa_start + relative_result.1,
);
if self.until_end && result.1 != input.len() {
return (0, 0);
}
// No check of `from_start` here since if that is true then `result.0` is guaranteed to be
// 0 by the time we get here.
result
}
}
/// Matching by first searching for a literal tail from the end of the input, then limiting the
/// state machine search to the input up to the end of that tail match.
pub struct LiteralTailMatcher<'a> {
pub literal_tail: &'a str,
pub nfa: NFA<'a>,
pub from_start: bool,
pub until_end: bool,
}
impl<'a> Matcher for LiteralTailMatcher<'a> {
fn match_substring(&self, input: &str) -> (usize, usize) {
let input_for_nfa_end: usize = match input.rfind(self.literal_tail) {
None => return (0, 0),
Some(byte_index) => byte_index + self.literal_tail.len(),
};
let input_for_nfa = &input[0..input_for_nfa_end];
let result: (usize, usize) =
match nfa_match_substring(&self.nfa, input_for_nfa, self.from_start) {
Some(res) => res,
None => return (0, 0),
};
if self.until_end && result.1 != input.len() {
return (0, 0);
}
// No check of `from_start` here since if that is true then `result.0` is guaranteed to be
// 0 by the time we get here.
result
}
}
/// Combination of the literal head and literal tail matchers. For regexes sandwiched between two
/// literal string search patterns. Uses those to limit the range for the state machine search in
/// both directions.
pub struct LiteralSandwitchMatcher<'a> {
pub literal_tail: &'a str,
pub literal_head: &'a str,
pub nfa: NFA<'a>,
pub from_start: bool,
pub until_end: bool,
}
impl<'a> Matcher for LiteralSandwitchMatcher<'a> {
fn match_substring(&self, input: &str) -> (usize, usize) {
let mut input_for_nfa_start: usize = 0;
if self.from_start {
if &input[..self.literal_head.len()] != self.literal_head {
return (0, 0);
}
} else {
match input.find(self.literal_head) {
None => return (0, 0),
Some(byte_index) => input_for_nfa_start = byte_index,
}
}
let input_for_nfa_end: usize = match input.rfind(self.literal_tail) {
None => return (0, 0),
Some(byte_index) => byte_index + self.literal_tail.len(),
};
if input_for_nfa_end < input_for_nfa_start {
            // Both literal tail and literal head were found, but there is no
            // occurrence of the literal tail after the match on the head, so
            // the input cannot possibly match.
return (0, 0);
}
let input_for_nfa = &input[input_for_nfa_start..input_for_nfa_end];
let relative_result: (usize, usize) =
match nfa_match_substring(&self.nfa, input_for_nfa, self.from_start) {
Some(res) => res,
None => return (0, 0),
};
let result = (
input_for_nfa_start + relative_result.0,
input_for_nfa_start + relative_result.1,
);
if self.until_end && result.1 != input.len() {
return (0, 0);
}
// No check of `from_start` here since if that is true then `result.0` is guaranteed to be
// 0 by the time we get here.
result
}
}
#[inline]
fn nfa_match_substring(nfa: &NFA, input: &str, from_start: bool) -> Option<(usize, usize)> {
if from_start {
if let Some(match_end_byte_index) = nfa.simulate(input, true) {
return Some((0, match_end_byte_index));
}
return None;
}
for (byte_index, _) in input.char_indices() {
if let Some(relative_match_end) = nfa.simulate(&input[byte_index..], true) {
return Some((byte_index, byte_index + relative_match_end));
}
}
None
}
| 32.368421 | 99 | 0.56477 |
f7250091d26e5641e1e95dc2c803b262331520bc | 426 | use crate::value::Value;
use bytes::Bytes;
use enum_dispatch::enum_dispatch;
/// `AgateIterator` defines the interface of all iterators,
/// including `TableIterator`, `MergeIterator` and `ConcatIterator`.
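///
/// A usage sketch (added for illustration; `it` stands for any type that
/// implements this trait):
///
/// ```ignore
/// it.rewind();
/// while it.valid() {
///     let key = it.key();
///     let value = it.value();
///     it.next();
/// }
/// ```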
#[enum_dispatch]
pub trait AgateIterator {
fn next(&mut self);
fn rewind(&mut self);
fn seek(&mut self, key: &Bytes);
fn key(&self) -> &[u8];
fn value(&self) -> Value;
fn valid(&self) -> bool;
}
| 26.625 | 68 | 0.664319 |
67f520b2a616520a6da5210d5b55a2de9c9a82b8 | 6,535 | use crate::id::OfType;
use crate::Id;
use std::any::Any;
/// Application state when system should run
#[derive(Eq, PartialEq, Debug, Hash, Clone, Copy)]
pub enum Rule {
    /// System runs at any state
Always,
/// System runs at specific state
StateOn(Id<State>),
/// System does not run at specific state
StateOff(Id<State>),
}
struct Entry {
state_id: Id<State>,
name: String,
boxed: Box<dyn IntoState>,
}
/// Initial Meta state
///
/// Not in stack by default. Used for default value of internal state pointer
struct Meta;
/// Application States stack service
pub struct State {
stack: Vec<Entry>,
state_ptr: *const Id<State>,
}
// Secured by state_ptr controls
unsafe impl Send for State {}
unsafe impl Sync for State {}
impl State {
    /// Sets the pointer to the data holding information about the application state
pub(crate) fn set_pointer(&mut self, state_ptr: *const Id<State>) {
self.state_ptr = state_ptr;
}
fn write_pointer(&self, value: Id<State>) {
if !self.state_ptr.is_null() {
unsafe {
*(self.state_ptr as *mut Id<State>) = value;
}
}
}
/// Returns a rule, so the system will run when the state is ON
pub fn on<T: IntoState>() -> Rule {
let state_id: Id<State> = Id::of::<T>();
Rule::StateOn(state_id)
}
/// Returns a rule, so the system will run when the state is OFF
pub fn off<T: IntoState>() -> Rule {
Rule::StateOff(Id::of::<T>())
}
/// Pushes the application state to the stack
pub fn push<T>(&mut self, state: T)
where
T: IntoState,
{
let state_id: Id<State> = Id::of::<T>();
let name = String::from(std::any::type_name::<T>());
self.stack.push(Entry {
state_id,
name,
boxed: Box::new(state),
});
self.write_pointer(state_id);
}
/// Pops the application state from the stack, and returns it
pub fn pop<T: IntoState>(&mut self) -> Option<T> {
self.pop_any()
.map(|boxed| {
if boxed.is::<T>() {
unsafe {
let raw: *mut dyn IntoState = Box::into_raw(boxed);
Some(*Box::from_raw(raw as *mut T))
}
} else {
None
}
})
.unwrap_or(None)
}
    /// Pops the application state from the stack, but does not downcast it
pub fn pop_any(&mut self) -> Option<Box<dyn IntoState>> {
let last = self.stack.pop();
let state_id = self
.stack
.last()
.map(|entry| entry.state_id)
.unwrap_or_else(Self::meta);
self.write_pointer(state_id);
last.map(|entry| entry.boxed)
}
    /// Returns a reference to the current state
pub fn get<T: IntoState>(&self) -> Option<&T> {
self.stack
.last()
.map(|entry| entry.boxed.downcast_ref())
.unwrap_or(None)
}
    /// Returns a mutable reference to the current state
pub fn get_mut<T: IntoState>(&mut self) -> Option<&mut T> {
self.stack
.last_mut()
.map(|entry| entry.boxed.downcast_mut())
.unwrap_or(None)
}
/// Returns [`Id<State>`] of current state
pub fn id(&self) -> Option<Id<State>> {
self.stack.last().map(|entry| entry.state_id)
}
/// Returns dump of current stack
pub fn dump(&self) -> Vec<&str> {
self.stack
.iter()
.map(|entry| entry.name.as_str())
.collect::<Vec<_>>()
}
/// Clears states stack
pub fn clear(&mut self) {
self.stack.clear();
self.write_pointer(Self::meta());
}
    /// Returns the id of the meta state (the one used when the stack is empty)
pub fn meta() -> Id<State> {
Id::of::<Meta>()
}
}
impl Default for State {
fn default() -> Self {
Self {
stack: vec![],
state_ptr: std::ptr::null(),
}
}
}
impl OfType for Id<State> {
fn of<T: std::any::Any>() -> Self {
Id::from(std::any::TypeId::of::<T>())
}
}
/// Application state abstraction
pub trait IntoState: Any + Send + Sync + 'static {}
impl<T: 'static + Send + Sync> IntoState for T {}
impl dyn IntoState {
/// Casts down the reference
#[inline]
pub fn downcast_ref<T: Any>(&self) -> Option<&T> {
if self.is::<T>() {
unsafe { Some(&*(self as *const dyn IntoState as *const T)) }
} else {
None
}
}
/// Casts down the mutual reference
#[inline]
pub fn downcast_mut<T: Any>(&mut self) -> Option<&mut T> {
if self.is::<T>() {
unsafe { Some(&mut *(self as *mut dyn IntoState as *mut T)) }
} else {
None
}
}
/// Checks if the reference is of specific type
#[inline]
fn is<T: Any>(&self) -> bool {
std::any::TypeId::of::<T>() == self.type_id()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[derive(Debug, Eq, PartialEq)]
struct SimpleState {}
#[derive(Debug, Eq, PartialEq)]
struct StateWithData(u32);
#[derive(Debug, Eq, PartialEq)]
struct StateWithAllocation(String);
#[test]
fn state_stack_and_downcasting() {
let mut state = State::default();
state.push(SimpleState {});
state.push(StateWithData(123));
state.push(StateWithAllocation(String::from("Allocated string")));
let last: StateWithAllocation = state.pop().unwrap();
assert_eq!(last, StateWithAllocation(String::from("Allocated string")));
let last: StateWithData = state.pop().unwrap();
assert_eq!(last, StateWithData(123));
let last: SimpleState = state.pop().unwrap();
assert_eq!(last, SimpleState {});
}
#[test]
fn state_clear() {
let current_state: Id<State> = State::meta();
let mut state = State::default();
state.set_pointer(¤t_state);
let state_id = unsafe { *(state.state_ptr as *const Id<State>) };
assert_eq!(state_id, Id::of::<Meta>());
state.push(SimpleState {});
state.push(StateWithData(123));
state.push(StateWithAllocation(String::from("Allocated string")));
state.clear();
let state_id = unsafe { *(state.state_ptr as *const Id<State>) };
assert_eq!(state_id, Id::of::<Meta>());
}
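    // Added sketch: the `Rule` constructors are not exercised by the original
    // tests; this just checks that `State::on` / `State::off` produce the
    // expected variants for a given state type.
    #[test]
    fn rule_constructors() {
        assert_eq!(State::on::<SimpleState>(), Rule::StateOn(Id::of::<SimpleState>()));
        assert_eq!(State::off::<SimpleState>(), Rule::StateOff(Id::of::<SimpleState>()));
    }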
}
| 26.893004 | 80 | 0.548585 |
4a6ad75942485d331a69234f2a8bc32fc4cfb6c4 | 2,735 | use crate::common::*;
#[derive(Debug, Eq, PartialEq)]
pub struct User {
pub id: u64,
pub discord_id: UserId,
pub prompt_message: Option<PromptMessage>,
pub welcomed: bool,
pub bio: Option<String>,
}
impl User {
pub fn update(&self, response: &Response) -> Update {
let prompt = if let Some(prompt_message) = self.prompt_message {
prompt_message.prompt
} else {
return Update {
next_prompt: Prompt::Welcome,
action: None,
};
};
let action = match response {
Response::Message(content) => Self::action_for_message(prompt, content),
Response::Reaction(emoji) => Self::action_for_reaction(prompt, *emoji),
Response::UnrecognizedReaction(..) | Response::Custom(..) => None,
};
let action = if let Some(action) = action {
action
} else {
return Update {
action: None,
next_prompt: prompt,
};
};
Update {
next_prompt: self.next_prompt(&action),
action: Some(action),
}
}
fn action_for_message(prompt: Prompt, content: &str) -> Option<Action> {
use Prompt::*;
let content = content.trim();
match prompt {
Welcome =>
if content.to_lowercase() == "ok" {
return Some(Action::Welcome);
},
Bio =>
return Some(Action::SetBio {
text: content.to_owned(),
}),
Candidate { id } => match content.to_lowercase().as_str() {
"yes" | "y" => return Some(Action::AcceptCandidate { id }),
"no" | "n" => return Some(Action::DeclineCandidate { id }),
_ => {},
},
Match { id } =>
if content.to_lowercase() == "ok" {
return Some(Action::DismissMatch { id });
},
Quiescent => {},
}
None
}
fn action_for_reaction(prompt: Prompt, emoji: Emoji) -> Option<Action> {
use {Emoji::*, Prompt::*};
match prompt {
Welcome =>
if emoji == ThumbsUp {
Some(Action::Welcome)
} else {
None
},
Candidate { id } => match emoji {
ThumbsUp => Some(Action::AcceptCandidate { id }),
ThumbsDown => Some(Action::DeclineCandidate { id }),
},
Match { id } =>
if emoji == ThumbsUp {
Some(Action::DismissMatch { id })
} else {
None
},
Quiescent | Bio => None,
}
}
fn next_prompt(&self, action: &Action) -> Prompt {
if !(self.welcomed || *action == Action::Welcome) {
return Prompt::Welcome;
}
if self.bio.is_none() {
if let Action::SetBio { .. } = action {
} else {
return Prompt::Bio;
}
}
Prompt::Quiescent
}
}
| 24.419643 | 78 | 0.531993 |
c1177b5358c3e24fc6a42593ea000862cffacee3 | 5,320 | extern crate find_longest_substring;
#[cfg(test)]
mod tests {
use find_longest_substring::*;
use std::cmp::Ordering;
use std::error::Error;
use std::fs;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Write;
use std::io::{BufRead, BufReader, Read};
use std::path::{Display, Path, PathBuf};
fn get_tests_data_dir() -> PathBuf {
let root_dir: &Path = Path::new(".");
let file_path: PathBuf = root_dir.join("tests").join("data");
file_path
}
#[test]
//cargo test --test test2 read_data_from_a_file_1 -- --nocapture
    /// Read a small piece of data into a string
fn read_data_from_a_file_1() {
let file_path: PathBuf = get_tests_data_dir().join("input1.txt");
let path: &Path = file_path.as_path();
let display: Display = file_path.display();
let mut file = match File::open(path) {
Err(why) => panic!("couldn't open {}: {}", display, why.description()),
Ok(file) => file,
};
let mut contents = String::new();
let _ = match file.read_to_string(&mut contents) {
Err(why) => panic!("couldn't read {}: {}", display, why.description()),
_ => (),
};
assert_eq!("CCGCCGGGCGCG", longest_substring(contents.as_str()));
}
#[test]
//cargo test --test test2 read_rosalind_test_data -- --nocapture
    /// Compare computed results against the expected Rosalind data
///
fn read_rosalind_test_data() {
let file_data_for_test: File;
{
let file_path: PathBuf = get_tests_data_dir().join("rosalind.txt");
let path: &Path = file_path.as_path();
let display: Display = file_path.display();
file_data_for_test = match File::open(path) {
Err(why) => panic!("couldn't open {}: {}", display, why.description()),
Ok(file) => file,
};
}
let file_expected_data: File;
{
let file_path: PathBuf = get_tests_data_dir().join("rosalind_expected.txt");
let path: &Path = file_path.as_path();
let display: Display = file_path.display();
file_expected_data = match File::open(path) {
Err(why) => panic!("couldn't open {}: {}", display, why.description()),
Ok(file) => file,
};
}
let tested_data_buffer_reader = BufReader::new(file_data_for_test).lines();
let mut expected_data_buffer_reader = BufReader::new(file_expected_data);
for line in tested_data_buffer_reader {
if let Ok(string_line) = line {
if string_line.starts_with(">") {
continue;
} else {
let new_line = longest_substring(string_line.as_str());
let mut expected_line: String = String::new();
if let Err(e) = expected_data_buffer_reader.read_line(&mut expected_line) {
eprintln!("reading from cursor won't fail. {}", e);
}
expected_line = expected_line.trim().to_string();
let result = expected_line.cmp(&new_line.to_string());
// println!("{} | {}", &expected_line, &new_line.to_string() );
assert_eq!(Ordering::Equal, result);
}
}
}
}
#[test]
#[ignore]
//cargo test --test test2 ignore_read_rosalind_test_data -- --nocapture
/// Write data for testing Rosalind
///
fn ignore_read_rosalind_test_data() {
let file_path: PathBuf = get_tests_data_dir().join("rosalind.txt");
let path: &Path = file_path.as_path();
let display: Display = file_path.display();
let file = match File::open(path) {
Err(why) => panic!("couldn't open {}: {}", display, why.description()),
Ok(file) => file,
};
let output_file_path: PathBuf = get_tests_data_dir().join("rosalind_expected.txt");
let output_path: &Path = output_file_path.as_path();
if output_path.exists() {
if let Err(e) = fs::remove_file(output_path) {
eprintln!("Can't remove a file: {}", e);
}
}
let mut output_file = OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open(output_path)
.unwrap();
for line in BufReader::new(file).lines() {
if let Ok(string_line) = line {
if string_line.starts_with(">") {
// println!("{}",string_line);
continue;
} else {
//println!("{}",longest_substring(string_line.as_str()));
// output_buffer_writer.write_all(string_line.as_bytes()).expect("Unable to write data");
// output_buffer_writer.write_all(string_line.as_bytes()).expect("Unable to write data");
let new_line = longest_substring(string_line.as_str());
if let Err(e) = writeln!(output_file, "{}", new_line.as_ref()) {
eprintln!("Couldn't write to file: {}", e);
}
}
}
}
}
}
| 36.944444 | 109 | 0.53703 |
717b2b3750f8c6ee1fa57271fe81578f717f4cba | 5,716 | //! Possible errors in the library.
use std::fmt;
#[cfg(feature = "wbindgen")]
use wasm_bindgen::JsValue;
use strum_macros::IntoStaticStr;
use block_modes::{BlockModeError, InvalidKeyIvLength};
use hmac::crypto_mac::InvalidKeyLength;
use hmac::crypto_mac::MacError;
/// This crate's error type.
#[derive(Debug, IntoStaticStr)]
pub enum Error {
/// The provided data has an invalid length. Error code: -1
InvalidLength,
/// The key length is invalid. Error code: -2
InvalidKeyLength,
/// The length of the FFI output buffer is invalid. Error code: -3
InvalidOutputLength,
/// The signature of the data blob does not match 0x0d0c. Error code: -11
InvalidSignature,
/// The MAC is invalid. Error code: -12
InvalidMac,
/// The operation cannot be done with this type. Error code: -13
InvalidDataType,
/// The data type is unknown. Error code: -21
UnknownType,
/// The data subtype is unknown. Error code: -22
UnknownSubtype,
/// The data type version is unknown. Error code: -23
UnknownVersion,
/// The data is invalid. Error code: -24
InvalidData,
/// A null pointer has been passed to the FFI interface. Error code: -31
NullPointer,
/// A cryptographic error occurred. Error code: -32
CryptoError,
/// An error with the Random Number Generator occurred. Error code: -33
RandomError,
/// A generic IO error has occurred. Error code: -34
IoError(std::io::Error),
    /// There are not enough shares to regenerate a secret. Error code: -41
NotEnoughShares,
    /// The versions of the provided data are inconsistent. Error code: -42
InconsistentVersion,
}
impl Error {
/// Returns the error code associated with the error.
/// This is useful for passing the exception type across a language boundary.
pub fn error_code(&self) -> i64 {
match *self {
Error::InvalidLength => -1,
Error::InvalidKeyLength => -2,
Error::InvalidOutputLength => -3,
Error::InvalidSignature => -11,
Error::InvalidMac => -12,
Error::InvalidDataType => -13,
Error::UnknownType => -21,
Error::UnknownSubtype => -22,
Error::UnknownVersion => -23,
Error::InvalidData => -24,
Error::NullPointer => -31,
Error::CryptoError => -32,
Error::RandomError => -33,
Error::IoError(_) => -34,
Error::NotEnoughShares => -41,
Error::InconsistentVersion => -42,
}
}
/// Returns a description of the error
pub fn description(&self) -> String {
match *self {
Error::InvalidLength => "The provided data has an invalid length.".to_string(),
Error::InvalidKeyLength => "The key length is invalid.".to_string(),
Error::InvalidOutputLength => {
"The length of the FFI output buffer is invalid.".to_string()
}
Error::InvalidSignature => {
"The signature of the data blob does not match 0x0d0c.".to_string()
}
Error::InvalidMac => "The MAC is invalid.".to_string(),
Error::InvalidDataType => "The operation cannot be done with this type.".to_string(),
Error::UnknownType => "The data type is unknown.".to_string(),
Error::UnknownSubtype => "The data subtype is unknown.".to_string(),
Error::InvalidData => "The data is invalid.".to_string(),
Error::UnknownVersion => "The data type version is unknown.".to_string(),
Error::NullPointer => {
"A null pointer has been passed to the FFI interface.".to_string()
}
Error::CryptoError => "A cryptographic error occurred.".to_string(),
Error::RandomError => "An error with the Random Number Generator occurred.".to_string(),
Error::IoError(ref error) => error.to_string(),
            Error::NotEnoughShares => {
                "There weren't enough shares to regenerate the secret.".to_string()
            }
Error::InconsistentVersion => {
"The version is not the same for all the data.".to_string()
}
}
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
Error::IoError(ref error) => error.fmt(f),
_ => write!(f, "Error {}: {}", self.error_code(), self.description()),
}
}
}
impl From<InvalidKeyLength> for Error {
fn from(_error: InvalidKeyLength) -> Error {
Error::InvalidKeyLength
}
}
impl From<MacError> for Error {
fn from(_error: MacError) -> Error {
Error::InvalidMac
}
}
impl From<InvalidKeyIvLength> for Error {
fn from(_error: InvalidKeyIvLength) -> Error {
Error::InvalidKeyLength
}
}
impl From<BlockModeError> for Error {
fn from(_error: BlockModeError) -> Error {
Error::CryptoError
}
}
impl From<aead::Error> for Error {
fn from(_error: aead::Error) -> Error {
Error::InvalidMac
}
}
impl From<rand::Error> for Error {
fn from(_error: rand::Error) -> Error {
Error::RandomError
}
}
impl From<std::io::Error> for Error {
fn from(_error: std::io::Error) -> Error {
Error::RandomError
}
}
impl From<argon2::Error> for Error {
fn from(_error: argon2::Error) -> Self {
Error::CryptoError
}
}
#[cfg(feature = "wbindgen")]
impl From<Error> for JsValue {
fn from(error: Error) -> JsValue {
let js_error = js_sys::Error::new(&error.description());
js_error.set_name(error.into());
js_error.into()
}
}
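// Added sketch: spot-checks that `error_code` and `description` agree with the
// codes documented on the enum variants above.
#[cfg(test)]
mod tests {
    use super::Error;

    #[test]
    fn error_codes_match_variant_docs() {
        assert_eq!(Error::InvalidLength.error_code(), -1);
        assert_eq!(Error::InvalidMac.error_code(), -12);
        assert_eq!(Error::NotEnoughShares.error_code(), -41);
        assert!(!Error::CryptoError.description().is_empty());
    }
}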
| 32.850575 | 100 | 0.602344 |
5dd0a0ef2c049b61e27493280c94aa41896cf490 | 421 | // tests1.rs
// Tests are important to ensure that your code does what you think it should do.
// Tests can be run on this file with the following command:
// rustlings run tests1
// This test has a problem with it -- make the test compile! Make the test
// pass! Make the test fail! Execute `rustlings hint tests1` for hints :)
#[cfg(test)]
mod tests {
#[test]
fn you_can_assert() {
assert!(true, true);
}
}
| 26.3125 | 81 | 0.691211 |
89e69e88b8ee69556f57aba3467dbb8d6eeb2999 | 11,977 | use std::convert::TryFrom;
use std::ops::Range;
use crate::coord::ranged1d::{
AsRangedCoord, DefaultFormatting, DiscreteRanged, KeyPointHint, NoDefaultFormatting, Ranged,
ReversibleRanged, ValueFormatter,
};
macro_rules! impl_discrete_trait {
($name:ident) => {
impl DiscreteRanged for $name {
fn size(&self) -> usize {
if &self.1 < &self.0 {
return 0;
}
let values = self.1 - self.0;
(values + 1) as usize
}
fn index_of(&self, value: &Self::ValueType) -> Option<usize> {
if value < &self.0 {
return None;
}
let ret = value - self.0;
Some(ret as usize)
}
fn from_index(&self, index: usize) -> Option<Self::ValueType> {
if let Ok(index) = Self::ValueType::try_from(index) {
return Some(self.0 + index);
}
None
}
}
};
}
macro_rules! impl_ranged_type_trait {
($value:ty, $coord:ident) => {
impl AsRangedCoord for Range<$value> {
type CoordDescType = $coord;
type Value = $value;
}
};
}
macro_rules! impl_reverse_mapping_trait {
($type:ty, $name: ident) => {
impl ReversibleRanged for $name {
fn unmap(&self, p: i32, (min, max): (i32, i32)) -> Option<$type> {
if p < min.min(max) || p > max.max(min) || min == max {
return None;
}
let logical_offset = f64::from(p - min) / f64::from(max - min);
return Some(((self.1 - self.0) as f64 * logical_offset + self.0 as f64) as $type);
}
}
};
}
macro_rules! make_numeric_coord {
($type:ty, $name:ident, $key_points:ident, $doc: expr, $fmt: ident) => {
#[doc = $doc]
#[derive(Clone)]
pub struct $name($type, $type);
impl From<Range<$type>> for $name {
fn from(range: Range<$type>) -> Self {
return $name(range.start, range.end);
}
}
impl Ranged for $name {
type FormatOption = $fmt;
type ValueType = $type;
#[allow(clippy::float_cmp)]
fn map(&self, v: &$type, limit: (i32, i32)) -> i32 {
                // Corner case: If we have a range that has only one value,
// then we just assign everything to the only point
if self.1 == self.0 {
return (limit.1 - limit.0) / 2;
}
let logic_length = (*v as f64 - self.0 as f64) / (self.1 as f64 - self.0 as f64);
let actual_length = limit.1 - limit.0;
if actual_length == 0 {
return limit.1;
}
return limit.0 + (actual_length as f64 * logic_length + 1e-3).floor() as i32;
}
fn key_points<Hint: KeyPointHint>(&self, hint: Hint) -> Vec<$type> {
$key_points((self.0, self.1), hint.max_num_points())
}
fn range(&self) -> Range<$type> {
return self.0..self.1;
}
}
};
($type:ty, $name:ident, $key_points:ident, $doc: expr) => {
make_numeric_coord!($type, $name, $key_points, $doc, DefaultFormatting);
};
}
macro_rules! gen_key_points_comp {
(float, $name:ident, $type:ty) => {
fn $name(range: ($type, $type), max_points: usize) -> Vec<$type> {
if max_points == 0 {
return vec![];
}
let range = (range.0 as f64, range.1 as f64);
assert!(!(range.0.is_nan() || range.1.is_nan()));
if range.0 == range.1 {
return vec![range.0 as $type];
}
let mut scale = (10f64).powf((range.1 - range.0).log(10.0).floor());
let mut digits = -(range.1 - range.0).log(10.0).floor() as i32 + 1;
fn rem_euclid(a: f64, b: f64) -> f64 {
if b > 0.0 {
a - (a / b).floor() * b
} else {
a - (a / b).ceil() * b
}
}
            // At this point we need to make sure the loop invariant holds:
            // the scale must yield no more key points than requested
if 1 + ((range.1 - range.0) / scale).floor() as usize > max_points {
scale *= 10.0;
}
'outer: loop {
let old_scale = scale;
for nxt in [2.0, 5.0, 10.0].iter() {
let new_left = range.0 + scale / nxt - rem_euclid(range.0, scale / nxt);
let new_right = range.1 - rem_euclid(range.1, scale / nxt);
let npoints = 1 + ((new_right - new_left) / old_scale * nxt) as usize;
if npoints > max_points {
break 'outer;
}
scale = old_scale / nxt;
}
scale = old_scale / 10.0;
if scale < 1.0 {
digits += 1;
}
}
let mut ret = vec![];
let mut left = range.0 + scale - rem_euclid(range.0, scale);
let right = range.1 - rem_euclid(range.1, scale);
while left <= right {
let size = (10f64).powf(digits as f64 + 1.0);
let new_left = (left * size).abs() + 1e-3;
if left < 0.0 {
left = -new_left.round() / size;
} else {
left = new_left.round() / size;
}
ret.push(left as $type);
left += scale;
}
return ret;
}
};
(integer, $name:ident, $type:ty) => {
fn $name(range: ($type, $type), max_points: usize) -> Vec<$type> {
let mut scale: $type = 1;
let range = (range.0.min(range.1), range.0.max(range.1));
'outer: while (range.1 - range.0 + scale - 1) as usize / (scale as usize) > max_points {
let next_scale = scale * 10;
for new_scale in [scale * 2, scale * 5, scale * 10].iter() {
scale = *new_scale;
if (range.1 - range.0 + *new_scale - 1) as usize / (*new_scale as usize)
< max_points
{
break 'outer;
}
}
scale = next_scale;
}
let (mut left, right) = (
range.0 + (scale - range.0 % scale) % scale,
range.1 - range.1 % scale,
);
let mut ret = vec![];
while left <= right {
ret.push(left as $type);
left += scale;
}
return ret;
}
};
}
gen_key_points_comp!(float, compute_f32_key_points, f32);
gen_key_points_comp!(float, compute_f64_key_points, f64);
gen_key_points_comp!(integer, compute_i32_key_points, i32);
gen_key_points_comp!(integer, compute_u32_key_points, u32);
gen_key_points_comp!(integer, compute_i64_key_points, i64);
gen_key_points_comp!(integer, compute_u64_key_points, u64);
gen_key_points_comp!(integer, compute_i128_key_points, i128);
gen_key_points_comp!(integer, compute_u128_key_points, u128);
gen_key_points_comp!(integer, compute_isize_key_points, isize);
gen_key_points_comp!(integer, compute_usize_key_points, usize);
make_numeric_coord!(
f32,
RangedCoordf32,
compute_f32_key_points,
"The ranged coordinate for type f32",
NoDefaultFormatting
);
impl_reverse_mapping_trait!(f32, RangedCoordf32);
impl ValueFormatter<f32> for RangedCoordf32 {
fn format(value: &f32) -> String {
crate::data::float::FloatPrettyPrinter {
allow_scientific: false,
min_decimal: 1,
max_decimal: 5,
}
.print(*value as f64)
}
}
make_numeric_coord!(
f64,
RangedCoordf64,
compute_f64_key_points,
"The ranged coordinate for type f64",
NoDefaultFormatting
);
impl_reverse_mapping_trait!(f64, RangedCoordf64);
impl ValueFormatter<f64> for RangedCoordf64 {
fn format(value: &f64) -> String {
crate::data::float::FloatPrettyPrinter {
allow_scientific: false,
min_decimal: 1,
max_decimal: 5,
}
.print(*value)
}
}
make_numeric_coord!(
u32,
RangedCoordu32,
compute_u32_key_points,
"The ranged coordinate for type u32"
);
make_numeric_coord!(
i32,
RangedCoordi32,
compute_i32_key_points,
"The ranged coordinate for type i32"
);
make_numeric_coord!(
u64,
RangedCoordu64,
compute_u64_key_points,
"The ranged coordinate for type u64"
);
make_numeric_coord!(
i64,
RangedCoordi64,
compute_i64_key_points,
"The ranged coordinate for type i64"
);
make_numeric_coord!(
u128,
RangedCoordu128,
compute_u128_key_points,
"The ranged coordinate for type u128"
);
make_numeric_coord!(
i128,
RangedCoordi128,
compute_i128_key_points,
"The ranged coordinate for type i128"
);
make_numeric_coord!(
usize,
RangedCoordusize,
compute_usize_key_points,
"The ranged coordinate for type usize"
);
make_numeric_coord!(
isize,
RangedCoordisize,
compute_isize_key_points,
"The ranged coordinate for type isize"
);
impl_discrete_trait!(RangedCoordu32);
impl_discrete_trait!(RangedCoordi32);
impl_discrete_trait!(RangedCoordu64);
impl_discrete_trait!(RangedCoordi64);
impl_discrete_trait!(RangedCoordu128);
impl_discrete_trait!(RangedCoordi128);
impl_discrete_trait!(RangedCoordusize);
impl_discrete_trait!(RangedCoordisize);
impl_ranged_type_trait!(f32, RangedCoordf32);
impl_ranged_type_trait!(f64, RangedCoordf64);
impl_ranged_type_trait!(i32, RangedCoordi32);
impl_ranged_type_trait!(u32, RangedCoordu32);
impl_ranged_type_trait!(i64, RangedCoordi64);
impl_ranged_type_trait!(u64, RangedCoordu64);
impl_ranged_type_trait!(i128, RangedCoordi128);
impl_ranged_type_trait!(u128, RangedCoordu128);
impl_ranged_type_trait!(isize, RangedCoordisize);
impl_ranged_type_trait!(usize, RangedCoordusize);
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_key_points() {
let kp = compute_i32_key_points((0, 999), 28);
assert!(kp.len() > 0);
assert!(kp.len() <= 28);
let kp = compute_f64_key_points((-1.2, 1.2), 1);
assert!(kp.len() == 1);
let kp = compute_f64_key_points((-1.2, 1.2), 0);
assert!(kp.len() == 0);
}
#[test]
fn test_linear_coord_map() {
let coord: RangedCoordu32 = (0..20).into();
assert_eq!(coord.key_points(11).len(), 11);
assert_eq!(coord.key_points(11)[0], 0);
assert_eq!(coord.key_points(11)[10], 20);
assert_eq!(coord.map(&5, (0, 100)), 25);
let coord: RangedCoordf32 = (0f32..20f32).into();
assert_eq!(coord.map(&5.0, (0, 100)), 25);
}
#[test]
fn test_linear_coord_system() {
let _coord =
crate::coord::ranged2d::cartesian::Cartesian2d::<RangedCoordu32, RangedCoordu32>::new(
0..10,
0..10,
(0..1024, 0..768),
);
}
#[test]
fn test_coord_unmap() {
let coord: RangedCoordu32 = (0..20).into();
let pos = coord.map(&5, (1000, 2000));
let value = coord.unmap(pos, (1000, 2000));
assert_eq!(value, Some(5));
}
#[test]
fn test_zero_sized_coord_not_hang() {
let coord: RangedCoordf32 = (0.0..0.0).into();
let _points = coord.key_points(10);
}
#[test]
fn test_small_coord() {
let coord: RangedCoordf64 = (0.0..1e-25).into();
let points = coord.key_points(10);
assert!(points.len() > 0);
}
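    // Added sketch: integer key points should stay inside the queried range and
    // respect the requested maximum number of points.
    #[test]
    fn test_integer_key_points_bounds() {
        let kp = compute_u64_key_points((0, 1000), 6);
        assert!(!kp.is_empty());
        assert!(kp.len() <= 6);
        assert!(kp.iter().all(|&v| v <= 1000));
    }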
}
| 31.353403 | 100 | 0.536946 |
5def07e00633eccc93fc4be4fad4d8f492272870 | 12,634 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{format_err, Result};
use itertools::Itertools;
use libra_crypto::{hash::SPARSE_MERKLE_PLACEHOLDER_HASH, HashValue};
use libra_types::{
access_path::AccessPath,
account_address::AccountAddress,
account_state::AccountState,
account_state_blob::{AccountStateBlob, AccountStateWithProof},
contract_event::ContractEvent,
epoch_change::EpochChangeProof,
epoch_state::EpochState,
event::EventKey,
ledger_info::LedgerInfoWithSignatures,
move_resource::MoveStorage,
proof::{definition::LeafCount, AccumulatorConsistencyProof, SparseMerkleProof},
transaction::{TransactionListWithProof, TransactionToCommit, TransactionWithProof, Version},
};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet},
convert::TryFrom,
sync::Arc,
};
use thiserror::Error;
#[cfg(any(feature = "testing", feature = "fuzzing"))]
pub mod mock;
pub mod state_view;
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct StartupInfo {
/// The latest ledger info.
pub latest_ledger_info: LedgerInfoWithSignatures,
/// If the above ledger info doesn't carry a validator set, the latest validator set. Otherwise
/// `None`.
pub latest_epoch_state: Option<EpochState>,
pub committed_tree_state: TreeState,
pub synced_tree_state: Option<TreeState>,
}
impl StartupInfo {
pub fn new(
latest_ledger_info: LedgerInfoWithSignatures,
latest_epoch_state: Option<EpochState>,
committed_tree_state: TreeState,
synced_tree_state: Option<TreeState>,
) -> Self {
Self {
latest_ledger_info,
latest_epoch_state,
committed_tree_state,
synced_tree_state,
}
}
pub fn get_epoch_state(&self) -> &EpochState {
self.latest_ledger_info
.ledger_info()
.next_epoch_state()
.unwrap_or_else(|| {
self.latest_epoch_state
.as_ref()
.expect("EpochState must exist")
})
}
}
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct TreeState {
pub num_transactions: LeafCount,
pub ledger_frozen_subtree_hashes: Vec<HashValue>,
pub account_state_root_hash: HashValue,
}
impl TreeState {
pub fn new(
num_transactions: LeafCount,
ledger_frozen_subtree_hashes: Vec<HashValue>,
account_state_root_hash: HashValue,
) -> Self {
Self {
num_transactions,
ledger_frozen_subtree_hashes,
account_state_root_hash,
}
}
pub fn is_empty(&self) -> bool {
self.num_transactions == 0
&& self.account_state_root_hash == *SPARSE_MERKLE_PLACEHOLDER_HASH
}
}
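// Added sketch: a freshly created tree with no transactions and a placeholder
// state root is considered empty. (`LeafCount` is an integer alias, so the
// literal `0` below is enough to construct it.)
#[cfg(test)]
mod tree_state_tests {
    use super::*;

    #[test]
    fn empty_tree_state() {
        let state = TreeState::new(0, vec![], *SPARSE_MERKLE_PLACEHOLDER_HASH);
        assert!(state.is_empty());
    }
}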
#[derive(Debug, Deserialize, Error, PartialEq, Serialize)]
pub enum Error {
#[error("Service error: {:?}", error)]
ServiceError { error: String },
#[error("Serialization error: {0}")]
SerializationError(String),
}
impl From<anyhow::Error> for Error {
fn from(error: anyhow::Error) -> Self {
Self::ServiceError {
error: format!("{}", error),
}
}
}
impl From<lcs::Error> for Error {
fn from(error: lcs::Error) -> Self {
Self::SerializationError(format!("{}", error))
}
}
impl From<libra_secure_net::Error> for Error {
fn from(error: libra_secure_net::Error) -> Self {
Self::ServiceError {
error: format!("{}", error),
}
}
}
/// Trait that is implemented by a DB that supports certain public (to client) read APIs
/// expected of a Libra DB
pub trait DbReader: Send + Sync {
/// See [`LibraDB::get_epoch_change_ledger_infos`].
///
/// [`LibraDB::get_epoch_change_ledger_infos`]:
/// ../libradb/struct.LibraDB.html#method.get_epoch_change_ledger_infos
fn get_epoch_change_ledger_infos(
&self,
start_epoch: u64,
end_epoch: u64,
) -> Result<EpochChangeProof>;
/// See [`LibraDB::get_transactions`].
///
/// [`LibraDB::get_transactions`]: ../libradb/struct.LibraDB.html#method.get_transactions
fn get_transactions(
&self,
start_version: Version,
batch_size: u64,
ledger_version: Version,
fetch_events: bool,
) -> Result<TransactionListWithProof>;
/// Returns events by given event key
fn get_events(
&self,
event_key: &EventKey,
start: u64,
ascending: bool,
limit: u64,
) -> Result<Vec<(u64, ContractEvent)>>;
/// See [`LibraDB::get_latest_account_state`].
///
/// [`LibraDB::get_latest_account_state`]:
/// ../libradb/struct.LibraDB.html#method.get_latest_account_state
fn get_latest_account_state(&self, address: AccountAddress)
-> Result<Option<AccountStateBlob>>;
/// Returns the latest ledger info.
fn get_latest_ledger_info(&self) -> Result<LedgerInfoWithSignatures>;
/// Returns the latest ledger info.
fn get_latest_version(&self) -> Result<Version> {
Ok(self.get_latest_ledger_info()?.ledger_info().version())
}
/// Returns the latest version and committed block timestamp
fn get_latest_commit_metadata(&self) -> Result<(Version, u64)> {
let ledger_info_with_sig = self.get_latest_ledger_info()?;
let ledger_info = ledger_info_with_sig.ledger_info();
Ok((ledger_info.version(), ledger_info.timestamp_usecs()))
}
/// Gets information needed from storage during the main node startup.
/// See [`LibraDB::get_startup_info`].
///
/// [`LibraDB::get_startup_info`]:
/// ../libradb/struct.LibraDB.html#method.get_startup_info
fn get_startup_info(&self) -> Result<Option<StartupInfo>>;
fn get_txn_by_account(
&self,
address: AccountAddress,
seq_num: u64,
ledger_version: Version,
fetch_events: bool,
) -> Result<Option<TransactionWithProof>>;
/// Returns proof of new state for a given ledger info with signatures relative to version known
/// to client
fn get_state_proof_with_ledger_info(
&self,
known_version: u64,
ledger_info: LedgerInfoWithSignatures,
) -> Result<(EpochChangeProof, AccumulatorConsistencyProof)>;
/// Returns proof of new state relative to version known to client
fn get_state_proof(
&self,
known_version: u64,
) -> Result<(
LedgerInfoWithSignatures,
EpochChangeProof,
AccumulatorConsistencyProof,
)>;
/// Returns the account state corresponding to the given version and account address with proof
/// based on `ledger_version`
fn get_account_state_with_proof(
&self,
address: AccountAddress,
version: Version,
ledger_version: Version,
) -> Result<AccountStateWithProof>;
// Gets an account state by account address, out of the ledger state indicated by the state
// Merkle tree root with a sparse merkle proof proving state tree root.
// See [`LibraDB::get_account_state_with_proof_by_version`].
//
// [`LibraDB::get_account_state_with_proof_by_version`]:
// ../libradb/struct.LibraDB.html#method.get_account_state_with_proof_by_version
//
// This is used by libra core (executor) internally.
fn get_account_state_with_proof_by_version(
&self,
address: AccountAddress,
version: Version,
) -> Result<(Option<AccountStateBlob>, SparseMerkleProof)>;
/// See [`LibraDB::get_latest_state_root`].
///
/// [`LibraDB::get_latest_state_root`]:
/// ../libradb/struct.LibraDB.html#method.get_latest_state_root
fn get_latest_state_root(&self) -> Result<(Version, HashValue)>;
    /// Gets the latest TreeState, regardless of whether the DB has been bootstrapped.
/// Used by the Db-bootstrapper.
fn get_latest_tree_state(&self) -> Result<TreeState>;
/// Get the ledger info of the epoch that `known_version` belongs to.
fn get_ledger_info(&self, known_version: u64) -> Result<LedgerInfoWithSignatures>;
}
impl MoveStorage for &dyn DbReader {
fn batch_fetch_resources(&self, access_paths: Vec<AccessPath>) -> Result<Vec<Vec<u8>>> {
self.batch_fetch_resources_by_version(access_paths, self.get_latest_version()?)
}
fn batch_fetch_resources_by_version(
&self,
access_paths: Vec<AccessPath>,
version: Version,
) -> Result<Vec<Vec<u8>>> {
let addresses: Vec<AccountAddress> = access_paths
.iter()
.collect::<HashSet<_>>()
.into_iter()
.map(|path| path.address)
.collect();
let results = addresses
.iter()
.map(|addr| self.get_account_state_with_proof(*addr, version, version))
.collect::<Result<Vec<_>>>()?;
// Account address --> AccountState
let account_states = addresses
.into_iter()
.zip_eq(results)
.map(|(addr, result)| {
let account_state = AccountState::try_from(&result.blob.ok_or_else(|| {
format_err!("missing blob in account state/account does not exist")
})?)?;
Ok((addr, account_state))
})
.collect::<Result<HashMap<_, AccountState>>>()?;
access_paths
.into_iter()
.map(|path| {
Ok(account_states
.get(&path.address)
.ok_or_else(|| format_err!("missing account state for queried access path"))?
.get(&path.path)
.ok_or_else(|| format_err!("no value found in account state"))?
.clone())
})
.collect()
}
}
/// Trait that is implemented by a DB that supports certain public (to client) write APIs
/// expected of a Libra DB. This adds write APIs to DbReader.
pub trait DbWriter: Send + Sync {
/// Persist transactions. Called by the executor module when either syncing nodes or committing
/// blocks during normal operation.
/// See [`LibraDB::save_transactions`].
///
/// [`LibraDB::save_transactions`]: ../libradb/struct.LibraDB.html#method.save_transactions
fn save_transactions(
&self,
txns_to_commit: &[TransactionToCommit],
first_version: Version,
ledger_info_with_sigs: Option<&LedgerInfoWithSignatures>,
) -> Result<()>;
}
#[derive(Clone)]
pub struct DbReaderWriter {
pub reader: Arc<dyn DbReader>,
pub writer: Arc<dyn DbWriter>,
}
impl DbReaderWriter {
pub fn new<D: 'static + DbReader + DbWriter>(db: D) -> Self {
let reader = Arc::new(db);
let writer = Arc::clone(&reader);
Self { reader, writer }
}
pub fn wrap<D: 'static + DbReader + DbWriter>(db: D) -> (Arc<D>, Self) {
let arc_db = Arc::new(db);
let reader = Arc::clone(&arc_db);
let writer = Arc::clone(&arc_db);
(arc_db, Self { reader, writer })
}
}
impl<D> From<D> for DbReaderWriter
where
D: 'static + DbReader + DbWriter,
{
fn from(db: D) -> Self {
Self::new(db)
}
}
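// Hedged sketch (illustrative helper, not part of the original API): any
// concrete storage type implementing both traits can be split into shared
// reader/writer handles through `DbReaderWriter`; both handles point at the
// same underlying instance.
#[allow(dead_code)]
fn into_handles<D: 'static + DbReader + DbWriter>(
    db: D,
) -> (Arc<dyn DbReader>, Arc<dyn DbWriter>) {
    let rw = DbReaderWriter::from(db);
    (rw.reader, rw.writer)
}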
/// Network types for storage service
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum StorageRequest {
GetAccountStateWithProofByVersionRequest(Box<GetAccountStateWithProofByVersionRequest>),
GetStartupInfoRequest,
SaveTransactionsRequest(Box<SaveTransactionsRequest>),
}
#[derive(Debug, PartialEq, Eq, Clone, Deserialize, Serialize)]
pub struct GetAccountStateWithProofByVersionRequest {
    /// The account address to query with.
pub address: AccountAddress,
/// The version the query is based on.
pub version: Version,
}
impl GetAccountStateWithProofByVersionRequest {
/// Constructor.
pub fn new(address: AccountAddress, version: Version) -> Self {
Self { address, version }
}
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct SaveTransactionsRequest {
pub txns_to_commit: Vec<TransactionToCommit>,
pub first_version: Version,
pub ledger_info_with_signatures: Option<LedgerInfoWithSignatures>,
}
impl SaveTransactionsRequest {
/// Constructor.
pub fn new(
txns_to_commit: Vec<TransactionToCommit>,
first_version: Version,
ledger_info_with_signatures: Option<LedgerInfoWithSignatures>,
) -> Self {
SaveTransactionsRequest {
txns_to_commit,
first_version,
ledger_info_with_signatures,
}
}
}
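// Hedged sketch (not in the original file): constructing an empty
// `SaveTransactionsRequest`. Assumes `Version` is the numeric version alias
// used elsewhere in this crate.
#[cfg(test)]
mod save_transactions_request_sketch {
    use super::SaveTransactionsRequest;
    #[test]
    fn new_populates_fields() {
        let req = SaveTransactionsRequest::new(vec![], 0, None);
        assert!(req.txns_to_commit.is_empty());
        assert_eq!(req.first_version, 0);
        assert!(req.ledger_info_with_signatures.is_none());
    }
}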
| 32.06599 | 100 | 0.645322 |
62481e0f9cfff1bfbcfa320daca323e0cde11ce1 | 166 | pub mod define;
pub mod errors;
pub mod flv2hls;
pub mod m3u8;
pub mod flv_data_receiver;
pub mod rtmp_event_processor;
mod test_flv2hls;
pub mod ts;
pub mod server;
| 16.6 | 29 | 0.789157 |
fc5acd935113e4035737b8503731b79c696084e6 | 6,950 | use super::def::{IntervalSet, ParseIntervalSetError};
use fp::Float;
use interval::Interval;
use std::cmp::Ordering;
use std::fmt;
use std::fmt::{Display, Formatter, Write};
use std::str::FromStr;
impl<BOUND: Float> IntervalSet<BOUND> {
/// Constructs an interval set of one interval from given bounds.
///
/// The result may be empty if both bounds are NaN.
#[inline]
pub fn new(lo: BOUND, hi: BOUND) -> Self {
Self::from_interval(Interval::new(lo, hi))
}
/// Constructs an interval set of one interval from given interval.
///
/// The result may be empty if the interval is NaN.
#[inline]
pub fn from_interval(i: Interval<BOUND>) -> Self {
if i.is_nan() {
Self::empty()
} else {
Self { intervals: vec![i] }
}
}
/// Constructs an interval set from given intervals.
///
/// The intervals will be sorted and the intersecting intervals will be merged.
pub fn from_intervals(mut intervals: Vec<Interval<BOUND>>) -> Self {
intervals.retain(|i| !i.is_nan());
if intervals.is_empty() {
return Self::empty()
}
intervals.sort_by(|i, j| {
if i.lo < j.lo {
Ordering::Less
} else if i.lo > j.lo {
Ordering::Greater
} else {
Ordering::Equal
}
});
let mut iter = intervals.drain(..);
let first = iter.next().unwrap();
let (mut lo, mut hi) = first.into();
let mut optimized_intervals = Vec::<Interval<BOUND>>::new();
for i in iter {
let (i_lo, i_hi) = i.into();
if hi < i_lo {
optimized_intervals.push(Interval::new(lo, hi));
lo = i_lo;
hi = i_hi;
} else if hi < i_hi {
hi = i_hi;
}
}
optimized_intervals.push(Interval::new(lo, hi));
Self { intervals: optimized_intervals }
}
/// Constructs an interval set of one singleton interval.
///
/// The result may be empty if the value is NaN.
#[inline]
pub fn singleton(val: BOUND) -> Self {
Self::from_interval(Interval::singleton(val))
}
/// Constructs an interval set of one interval containing only zero.
#[inline]
pub fn zero(precision: usize) -> Self {
Self::from_interval(Interval::zero(precision))
}
/// Constructs an interval set of one interval containing only one.
#[inline]
pub fn one(precision: usize) -> Self {
Self::from_interval(Interval::one(precision))
}
/// Constructs an empty interval set.
#[inline]
pub fn empty() -> Self {
Self { intervals: vec![] }
}
/// Constructs an interval set of one interval containing all numbers.
#[inline]
pub fn whole(precision: usize) -> Self {
Self::from_interval(Interval::whole(precision))
}
/// Constructs an interval set from a float with given precision.
#[inline]
pub fn from_with_prec(val: f64, precision: usize) -> Self {
Self::new(BOUND::from_lo(val, precision), BOUND::from_hi(val, precision))
}
/// Constructs an interval set by parsing a string.
///
/// Accepts `INTERVAL_SET` according to the rule below.
///
/// INTERVAL_SET = INTERVAL | '{' ( INTERVAL ( ';' INTERVAL )* )? '}'
#[inline]
pub fn from_str_with_prec(s: &str, precision: usize) -> Result<Self, ParseIntervalSetError> {
if let Ok(i) = Interval::from_str_with_prec(s, precision) {
Ok(IntervalSet::from_interval(i))
} else {
if !s.starts_with('{') { return Err(ParseIntervalSetError::MissingOpeningBraces) }
            let s = s.trim_start_matches('{').trim_start();
            if !s.ends_with('}') { return Err(ParseIntervalSetError::MissingClosingBraces) }
            let s = s.trim_end_matches('}').trim_end();
if s.is_empty() { return Ok(Self::empty()) }
let mut results: Vec<_> = s.split(';')
.map(|v| v.trim())
.map(|v| Interval::from_str_with_prec(v, precision))
.collect();
if results.iter().all(|i| i.is_ok()) {
Ok(Self::from_intervals(results.drain(..).map(|i| i.unwrap()).collect()))
} else {
Err(ParseIntervalSetError::IntervalsParseError)
}
}
}
/// Whether `self` contains only one interval that is singleton.
#[inline]
pub fn is_singleton(&self) -> bool {
self.intervals.len() == 1 && self.intervals[0].is_singleton()
}
/// Whether `self` contains only one interval that contains only zero.
#[inline]
pub fn is_zero(&self) -> bool {
self.intervals.len() == 1 && self.intervals[0].is_zero()
}
/// Whether `self` is empty.
#[inline]
pub fn is_empty(&self) -> bool {
self.intervals.is_empty()
}
/// Whether `self` contains an interval that contains zero.
#[inline]
pub fn has_zero(&self) -> bool {
self.intervals.iter().any(|i| i.has_zero())
}
/// Performs a binary operation by performing it on all pairs of intervals of `self` and `rhs`.
#[inline]
pub fn binary_op<OP>(self, rhs: Self, op: OP) -> Self
where OP: Fn(Interval<BOUND>, Interval<BOUND>) -> Vec<Interval<BOUND>>
{
let mut intervals = Vec::<Interval<BOUND>>::new();
for i in &self.intervals {
for j in &rhs.intervals {
intervals.append(&mut op(i.clone(), j.clone()));
}
}
Self::from_intervals(intervals)
}
}
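// Hedged sketch (illustrative helper, not part of the original API): after
// `from_intervals`, the stored intervals are sorted by lower bound and
// pairwise disjoint, so the canonical form can be verified with a linear scan.
#[allow(dead_code)]
fn is_canonical<BOUND: Float>(set: &IntervalSet<BOUND>) -> bool {
    set.intervals.windows(2).all(|pair| pair[0].hi < pair[1].lo)
}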
impl<BOUND: Float> From<f64> for IntervalSet<BOUND> {
#[inline]
fn from(val: f64) -> Self {
Self::from_with_prec(val, 53)
}
}
impl<BOUND: Float> FromStr for IntervalSet<BOUND> {
type Err = ParseIntervalSetError;
#[inline]
fn from_str(s: &str) -> Result<Self, ParseIntervalSetError> {
Self::from_str_with_prec(s, 53)
}
}
impl<BOUND: Float> Display for IntervalSet<BOUND> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
if self.intervals.is_empty() {
f.write_str("{}")
} else if self.intervals.len() == 1 {
Display::fmt(&self.intervals[0], f)
} else {
if let Err(e) = f.write_char('{') { return Err(e) }
let mut iter = self.intervals.iter();
if let Err(e) = Display::fmt(&iter.next().unwrap(), f) { return Err(e) }
for i in iter {
if let Err(e) = f.write_str("; ") { return Err(e) }
if let Err(e) = Display::fmt(&i, f) { return Err(e) }
}
f.write_char('}')
}
}
}
impl<BOUND: Float> Into<Vec<(BOUND, BOUND)>> for IntervalSet<BOUND> {
fn into(mut self) -> Vec<(BOUND, BOUND)> {
self.intervals.drain(..).map(|i| (i.lo, i.hi)).collect()
}
}
| 32.938389 | 99 | 0.558993 |
39485532bf4544306c0fff9d7bc973fcc5e33c9b | 32,267 | use crate::{execution::evm::opcode::*, models::*};
pub(crate) const COLD_SLOAD_COST: u16 = 2100;
pub(crate) const COLD_ACCOUNT_ACCESS_COST: u16 = 2600;
pub(crate) const WARM_STORAGE_READ_COST: u16 = 100;
/// Additional cold account access cost.
///
/// The warm access cost is unconditionally applied for every account access instruction.
/// If the access turns out to be cold, this cost must be applied additionally.
pub(crate) const ADDITIONAL_COLD_ACCOUNT_ACCESS_COST: u16 =
COLD_ACCOUNT_ACCESS_COST - WARM_STORAGE_READ_COST;
/// EVM instruction properties
#[derive(Clone, Copy, Debug)]
pub struct Properties {
/// The number of stack items the instruction accesses during execution.
pub stack_height_required: u8,
/// The stack height change caused by the instruction execution. Can be negative.
pub stack_height_change: i8,
}
impl Properties {
const fn new(stack_height_required: u8, stack_height_change: i8) -> Self {
Self {
stack_height_required,
stack_height_change,
}
}
}
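// Hedged sketch (not part of the original code): how an interpreter loop might
// consume `Properties` during stack checking -- the instruction is executable
// only if the stack holds at least `stack_height_required` items, after which
// the height changes by `stack_height_change`.
#[allow(dead_code)]
fn checked_stack_height(props: &Properties, stack_height: i32) -> Option<i32> {
    if stack_height < i32::from(props.stack_height_required) {
        return None; // stack underflow
    }
    Some(stack_height + i32::from(props.stack_height_change))
}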
pub const fn properties<const OPCODE: OpCode>() -> Properties {
match OPCODE {
OpCode::STOP => Properties::new(0, 0),
OpCode::ADD => Properties::new(2, -1),
OpCode::MUL => Properties::new(2, -1),
OpCode::SUB => Properties::new(2, -1),
OpCode::DIV => Properties::new(2, -1),
OpCode::SDIV => Properties::new(2, -1),
OpCode::MOD => Properties::new(2, -1),
OpCode::SMOD => Properties::new(2, -1),
OpCode::ADDMOD => Properties::new(3, -2),
OpCode::MULMOD => Properties::new(3, -2),
OpCode::EXP => Properties::new(2, -1),
OpCode::SIGNEXTEND => Properties::new(2, -1),
OpCode::LT => Properties::new(2, -1),
OpCode::GT => Properties::new(2, -1),
OpCode::SLT => Properties::new(2, -1),
OpCode::SGT => Properties::new(2, -1),
OpCode::EQ => Properties::new(2, -1),
OpCode::ISZERO => Properties::new(1, 0),
OpCode::AND => Properties::new(2, -1),
OpCode::OR => Properties::new(2, -1),
OpCode::XOR => Properties::new(2, -1),
OpCode::NOT => Properties::new(1, 0),
OpCode::BYTE => Properties::new(2, -1),
OpCode::SHL => Properties::new(2, -1),
OpCode::SHR => Properties::new(2, -1),
OpCode::SAR => Properties::new(2, -1),
OpCode::KECCAK256 => Properties::new(2, -1),
OpCode::ADDRESS => Properties::new(0, 1),
OpCode::BALANCE => Properties::new(1, 0),
OpCode::ORIGIN => Properties::new(0, 1),
OpCode::CALLER => Properties::new(0, 1),
OpCode::CALLVALUE => Properties::new(0, 1),
OpCode::CALLDATALOAD => Properties::new(1, 0),
OpCode::CALLDATASIZE => Properties::new(0, 1),
OpCode::CALLDATACOPY => Properties::new(3, -3),
OpCode::CODESIZE => Properties::new(0, 1),
OpCode::CODECOPY => Properties::new(3, -3),
OpCode::GASPRICE => Properties::new(0, 1),
OpCode::EXTCODESIZE => Properties::new(1, 0),
OpCode::EXTCODECOPY => Properties::new(4, -4),
OpCode::RETURNDATASIZE => Properties::new(0, 1),
OpCode::RETURNDATACOPY => Properties::new(3, -3),
OpCode::EXTCODEHASH => Properties::new(1, 0),
OpCode::BLOCKHASH => Properties::new(1, 0),
OpCode::COINBASE => Properties::new(0, 1),
OpCode::TIMESTAMP => Properties::new(0, 1),
OpCode::NUMBER => Properties::new(0, 1),
OpCode::DIFFICULTY => Properties::new(0, 1),
OpCode::GASLIMIT => Properties::new(0, 1),
OpCode::CHAINID => Properties::new(0, 1),
OpCode::SELFBALANCE => Properties::new(0, 1),
OpCode::BASEFEE => Properties::new(0, 1),
OpCode::POP => Properties::new(1, -1),
OpCode::MLOAD => Properties::new(1, 0),
OpCode::MSTORE => Properties::new(2, -2),
OpCode::MSTORE8 => Properties::new(2, -2),
OpCode::SLOAD => Properties::new(1, 0),
OpCode::SSTORE => Properties::new(2, -2),
OpCode::JUMP => Properties::new(1, -1),
OpCode::JUMPI => Properties::new(2, -2),
OpCode::PC => Properties::new(0, 1),
OpCode::MSIZE => Properties::new(0, 1),
OpCode::GAS => Properties::new(0, 1),
OpCode::JUMPDEST => Properties::new(0, 0),
OpCode::PUSH1 => Properties::new(0, 1),
OpCode::PUSH2 => Properties::new(0, 1),
OpCode::PUSH3 => Properties::new(0, 1),
OpCode::PUSH4 => Properties::new(0, 1),
OpCode::PUSH5 => Properties::new(0, 1),
OpCode::PUSH6 => Properties::new(0, 1),
OpCode::PUSH7 => Properties::new(0, 1),
OpCode::PUSH8 => Properties::new(0, 1),
OpCode::PUSH9 => Properties::new(0, 1),
OpCode::PUSH10 => Properties::new(0, 1),
OpCode::PUSH11 => Properties::new(0, 1),
OpCode::PUSH12 => Properties::new(0, 1),
OpCode::PUSH13 => Properties::new(0, 1),
OpCode::PUSH14 => Properties::new(0, 1),
OpCode::PUSH15 => Properties::new(0, 1),
OpCode::PUSH16 => Properties::new(0, 1),
OpCode::PUSH17 => Properties::new(0, 1),
OpCode::PUSH18 => Properties::new(0, 1),
OpCode::PUSH19 => Properties::new(0, 1),
OpCode::PUSH20 => Properties::new(0, 1),
OpCode::PUSH21 => Properties::new(0, 1),
OpCode::PUSH22 => Properties::new(0, 1),
OpCode::PUSH23 => Properties::new(0, 1),
OpCode::PUSH24 => Properties::new(0, 1),
OpCode::PUSH25 => Properties::new(0, 1),
OpCode::PUSH26 => Properties::new(0, 1),
OpCode::PUSH27 => Properties::new(0, 1),
OpCode::PUSH28 => Properties::new(0, 1),
OpCode::PUSH29 => Properties::new(0, 1),
OpCode::PUSH30 => Properties::new(0, 1),
OpCode::PUSH31 => Properties::new(0, 1),
OpCode::PUSH32 => Properties::new(0, 1),
OpCode::DUP1 => Properties::new(1, 1),
OpCode::DUP2 => Properties::new(2, 1),
OpCode::DUP3 => Properties::new(3, 1),
OpCode::DUP4 => Properties::new(4, 1),
OpCode::DUP5 => Properties::new(5, 1),
OpCode::DUP6 => Properties::new(6, 1),
OpCode::DUP7 => Properties::new(7, 1),
OpCode::DUP8 => Properties::new(8, 1),
OpCode::DUP9 => Properties::new(9, 1),
OpCode::DUP10 => Properties::new(10, 1),
OpCode::DUP11 => Properties::new(11, 1),
OpCode::DUP12 => Properties::new(12, 1),
OpCode::DUP13 => Properties::new(13, 1),
OpCode::DUP14 => Properties::new(14, 1),
OpCode::DUP15 => Properties::new(15, 1),
OpCode::DUP16 => Properties::new(16, 1),
OpCode::SWAP1 => Properties::new(2, 0),
OpCode::SWAP2 => Properties::new(3, 0),
OpCode::SWAP3 => Properties::new(4, 0),
OpCode::SWAP4 => Properties::new(5, 0),
OpCode::SWAP5 => Properties::new(6, 0),
OpCode::SWAP6 => Properties::new(7, 0),
OpCode::SWAP7 => Properties::new(8, 0),
OpCode::SWAP8 => Properties::new(9, 0),
OpCode::SWAP9 => Properties::new(10, 0),
OpCode::SWAP10 => Properties::new(11, 0),
OpCode::SWAP11 => Properties::new(12, 0),
OpCode::SWAP12 => Properties::new(13, 0),
OpCode::SWAP13 => Properties::new(14, 0),
OpCode::SWAP14 => Properties::new(15, 0),
OpCode::SWAP15 => Properties::new(16, 0),
OpCode::SWAP16 => Properties::new(17, 0),
OpCode::LOG0 => Properties::new(2, -2),
OpCode::LOG1 => Properties::new(3, -3),
OpCode::LOG2 => Properties::new(4, -4),
OpCode::LOG3 => Properties::new(5, -5),
OpCode::LOG4 => Properties::new(6, -6),
OpCode::CREATE => Properties::new(3, -2),
OpCode::CALL => Properties::new(7, -6),
OpCode::CALLCODE => Properties::new(7, -6),
OpCode::RETURN => Properties::new(2, -2),
OpCode::DELEGATECALL => Properties::new(6, -5),
OpCode::CREATE2 => Properties::new(4, -3),
OpCode::STATICCALL => Properties::new(6, -5),
OpCode::REVERT => Properties::new(2, -2),
OpCode::INVALID => Properties::new(0, 0),
OpCode::SELFDESTRUCT => Properties::new(1, -1),
_ => unreachable!(),
}
}
pub type GasCostTable = [[i16; 256]; Revision::len()];
const fn gas_costs() -> GasCostTable {
let mut table = [[-1; 256]; Revision::len()];
table[Revision::Frontier as usize][OpCode::STOP.to_usize()] = 0;
table[Revision::Frontier as usize][OpCode::ADD.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::MUL.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::SUB.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DIV.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::SDIV.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::MOD.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::SMOD.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::ADDMOD.to_usize()] = 8;
table[Revision::Frontier as usize][OpCode::MULMOD.to_usize()] = 8;
table[Revision::Frontier as usize][OpCode::EXP.to_usize()] = 10;
table[Revision::Frontier as usize][OpCode::SIGNEXTEND.to_usize()] = 5;
table[Revision::Frontier as usize][OpCode::LT.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::GT.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SLT.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SGT.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::EQ.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::ISZERO.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::AND.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::OR.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::XOR.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::NOT.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::BYTE.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::KECCAK256.to_usize()] = 30;
table[Revision::Frontier as usize][OpCode::ADDRESS.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::BALANCE.to_usize()] = 20;
table[Revision::Frontier as usize][OpCode::ORIGIN.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::CALLER.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::CALLVALUE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::CALLDATALOAD.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::CALLDATASIZE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::CALLDATACOPY.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::CODESIZE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::CODECOPY.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::GASPRICE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::EXTCODESIZE.to_usize()] = 20;
table[Revision::Frontier as usize][OpCode::EXTCODECOPY.to_usize()] = 20;
table[Revision::Frontier as usize][OpCode::BLOCKHASH.to_usize()] = 20;
table[Revision::Frontier as usize][OpCode::COINBASE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::TIMESTAMP.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::NUMBER.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::DIFFICULTY.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::GASLIMIT.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::POP.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::MLOAD.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::MSTORE.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::MSTORE8.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SLOAD.to_usize()] = 50;
table[Revision::Frontier as usize][OpCode::SSTORE.to_usize()] = 0;
table[Revision::Frontier as usize][OpCode::JUMP.to_usize()] = 8;
table[Revision::Frontier as usize][OpCode::JUMPI.to_usize()] = 10;
table[Revision::Frontier as usize][OpCode::PC.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::MSIZE.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::GAS.to_usize()] = 2;
table[Revision::Frontier as usize][OpCode::JUMPDEST.to_usize()] = 1;
table[Revision::Frontier as usize][OpCode::PUSH1.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH2.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH3.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH4.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH5.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH6.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH7.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH8.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH9.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH10.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH11.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH12.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH13.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH14.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH15.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH16.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH17.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH18.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH19.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH20.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH21.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH22.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH23.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH24.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH25.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH26.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH27.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH28.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH29.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH30.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH31.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::PUSH32.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP1.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP2.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP3.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP4.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP5.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP6.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP7.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP8.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP9.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP10.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP11.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP12.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP13.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP14.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP15.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::DUP16.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP1.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP2.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP3.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP4.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP5.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP6.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP7.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP8.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP9.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP10.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP11.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP12.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP13.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP14.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP15.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::SWAP16.to_usize()] = 3;
table[Revision::Frontier as usize][OpCode::LOG0.to_usize()] = 375;
table[Revision::Frontier as usize][OpCode::LOG1.to_usize()] = 2 * 375;
table[Revision::Frontier as usize][OpCode::LOG2.to_usize()] = 3 * 375;
table[Revision::Frontier as usize][OpCode::LOG3.to_usize()] = 4 * 375;
table[Revision::Frontier as usize][OpCode::LOG4.to_usize()] = 5 * 375;
table[Revision::Frontier as usize][OpCode::CREATE.to_usize()] = 32000;
table[Revision::Frontier as usize][OpCode::CALL.to_usize()] = 40;
table[Revision::Frontier as usize][OpCode::CALLCODE.to_usize()] = 40;
table[Revision::Frontier as usize][OpCode::RETURN.to_usize()] = 0;
table[Revision::Frontier as usize][OpCode::INVALID.to_usize()] = 0;
table[Revision::Frontier as usize][OpCode::SELFDESTRUCT.to_usize()] = 0;
table[Revision::Homestead as usize] = table[Revision::Frontier as usize];
table[Revision::Homestead as usize][OpCode::DELEGATECALL.to_usize()] = 40;
table[Revision::Tangerine as usize] = table[Revision::Homestead as usize];
table[Revision::Tangerine as usize][OpCode::BALANCE.to_usize()] = 400;
table[Revision::Tangerine as usize][OpCode::EXTCODESIZE.to_usize()] = 700;
table[Revision::Tangerine as usize][OpCode::EXTCODECOPY.to_usize()] = 700;
table[Revision::Tangerine as usize][OpCode::SLOAD.to_usize()] = 200;
table[Revision::Tangerine as usize][OpCode::CALL.to_usize()] = 700;
table[Revision::Tangerine as usize][OpCode::CALLCODE.to_usize()] = 700;
table[Revision::Tangerine as usize][OpCode::DELEGATECALL.to_usize()] = 700;
table[Revision::Tangerine as usize][OpCode::SELFDESTRUCT.to_usize()] = 5000;
table[Revision::Spurious as usize] = table[Revision::Tangerine as usize];
table[Revision::Byzantium as usize] = table[Revision::Spurious as usize];
table[Revision::Byzantium as usize][OpCode::RETURNDATASIZE.to_usize()] = 2;
table[Revision::Byzantium as usize][OpCode::RETURNDATACOPY.to_usize()] = 3;
table[Revision::Byzantium as usize][OpCode::STATICCALL.to_usize()] = 700;
table[Revision::Byzantium as usize][OpCode::REVERT.to_usize()] = 0;
table[Revision::Constantinople as usize] = table[Revision::Byzantium as usize];
table[Revision::Constantinople as usize][OpCode::SHL.to_usize()] = 3;
table[Revision::Constantinople as usize][OpCode::SHR.to_usize()] = 3;
table[Revision::Constantinople as usize][OpCode::SAR.to_usize()] = 3;
table[Revision::Constantinople as usize][OpCode::EXTCODEHASH.to_usize()] = 400;
table[Revision::Constantinople as usize][OpCode::CREATE2.to_usize()] = 32000;
table[Revision::Petersburg as usize] = table[Revision::Constantinople as usize];
table[Revision::Istanbul as usize] = table[Revision::Petersburg as usize];
table[Revision::Istanbul as usize][OpCode::BALANCE.to_usize()] = 700;
table[Revision::Istanbul as usize][OpCode::CHAINID.to_usize()] = 2;
table[Revision::Istanbul as usize][OpCode::EXTCODEHASH.to_usize()] = 700;
table[Revision::Istanbul as usize][OpCode::SELFBALANCE.to_usize()] = 5;
table[Revision::Istanbul as usize][OpCode::SLOAD.to_usize()] = 800;
table[Revision::Berlin as usize] = table[Revision::Istanbul as usize];
table[Revision::Berlin as usize][OpCode::EXTCODESIZE.to_usize()] =
WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::EXTCODECOPY.to_usize()] =
WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::EXTCODEHASH.to_usize()] =
WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::BALANCE.to_usize()] = WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::CALL.to_usize()] = WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::CALLCODE.to_usize()] = WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::DELEGATECALL.to_usize()] =
WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::STATICCALL.to_usize()] = WARM_STORAGE_READ_COST as i16;
table[Revision::Berlin as usize][OpCode::SLOAD.to_usize()] = WARM_STORAGE_READ_COST as i16;
table[Revision::London as usize] = table[Revision::Berlin as usize];
table[Revision::London as usize][OpCode::BASEFEE.to_usize()] = 2;
table[Revision::Shanghai as usize] = table[Revision::London as usize];
table
}
pub const GAS_COSTS: GasCostTable = gas_costs();
pub const fn has_const_gas_cost<const OPCODE: OpCode>() -> bool {
const LATEST: Revision = Revision::latest();
let g = GAS_COSTS[Revision::Frontier as usize][OPCODE.to_usize()];
let revtable = Revision::iter();
let mut iter = 0;
loop {
let rev = revtable[iter];
if GAS_COSTS[rev as usize][OPCODE.to_usize()] != g {
return false;
}
if matches!(rev, LATEST) {
break;
} else {
iter += 1;
}
}
true
}
pub const fn opcode_gas_cost<const REVISION: Revision, const OPCODE: OpCode>() -> i16 {
GAS_COSTS[REVISION as usize][OPCODE.to_usize()]
}
const fn property_table() -> [Option<Properties>; 256] {
let mut table = [None; 256];
table[OpCode::STOP.to_usize()] = Some(Properties::new(0, 0));
table[OpCode::ADD.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::MUL.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SUB.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::DIV.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SDIV.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::MOD.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SMOD.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::ADDMOD.to_usize()] = Some(Properties::new(3, -2));
table[OpCode::MULMOD.to_usize()] = Some(Properties::new(3, -2));
table[OpCode::EXP.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SIGNEXTEND.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::LT.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::GT.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SLT.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SGT.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::EQ.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::ISZERO.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::AND.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::OR.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::XOR.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::NOT.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::BYTE.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SHL.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SHR.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::SAR.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::KECCAK256.to_usize()] = Some(Properties::new(2, -1));
table[OpCode::ADDRESS.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::BALANCE.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::ORIGIN.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CALLER.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CALLVALUE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CALLDATALOAD.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::CALLDATASIZE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CALLDATACOPY.to_usize()] = Some(Properties::new(3, -3));
table[OpCode::CODESIZE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CODECOPY.to_usize()] = Some(Properties::new(3, -3));
table[OpCode::GASPRICE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::EXTCODESIZE.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::EXTCODECOPY.to_usize()] = Some(Properties::new(4, -4));
table[OpCode::RETURNDATASIZE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::RETURNDATACOPY.to_usize()] = Some(Properties::new(3, -3));
table[OpCode::EXTCODEHASH.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::BLOCKHASH.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::COINBASE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::TIMESTAMP.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::NUMBER.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::DIFFICULTY.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::GASLIMIT.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::CHAINID.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::SELFBALANCE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::BASEFEE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::POP.to_usize()] = Some(Properties::new(1, -1));
table[OpCode::MLOAD.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::MSTORE.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::MSTORE8.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::SLOAD.to_usize()] = Some(Properties::new(1, 0));
table[OpCode::SSTORE.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::JUMP.to_usize()] = Some(Properties::new(1, -1));
table[OpCode::JUMPI.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::PC.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::MSIZE.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::GAS.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::JUMPDEST.to_usize()] = Some(Properties::new(0, 0));
table[OpCode::PUSH1.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH2.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH3.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH4.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH5.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH6.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH7.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH8.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH9.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH10.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH11.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH12.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH13.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH14.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH15.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH16.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH17.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH18.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH19.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH20.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH21.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH22.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH23.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH24.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH25.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH26.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH27.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH28.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH29.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH30.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH31.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::PUSH32.to_usize()] = Some(Properties::new(0, 1));
table[OpCode::DUP1.to_usize()] = Some(Properties::new(1, 1));
table[OpCode::DUP2.to_usize()] = Some(Properties::new(2, 1));
table[OpCode::DUP3.to_usize()] = Some(Properties::new(3, 1));
table[OpCode::DUP4.to_usize()] = Some(Properties::new(4, 1));
table[OpCode::DUP5.to_usize()] = Some(Properties::new(5, 1));
table[OpCode::DUP6.to_usize()] = Some(Properties::new(6, 1));
table[OpCode::DUP7.to_usize()] = Some(Properties::new(7, 1));
table[OpCode::DUP8.to_usize()] = Some(Properties::new(8, 1));
table[OpCode::DUP9.to_usize()] = Some(Properties::new(9, 1));
table[OpCode::DUP10.to_usize()] = Some(Properties::new(10, 1));
table[OpCode::DUP11.to_usize()] = Some(Properties::new(11, 1));
table[OpCode::DUP12.to_usize()] = Some(Properties::new(12, 1));
table[OpCode::DUP13.to_usize()] = Some(Properties::new(13, 1));
table[OpCode::DUP14.to_usize()] = Some(Properties::new(14, 1));
table[OpCode::DUP15.to_usize()] = Some(Properties::new(15, 1));
table[OpCode::DUP16.to_usize()] = Some(Properties::new(16, 1));
table[OpCode::SWAP1.to_usize()] = Some(Properties::new(2, 0));
table[OpCode::SWAP2.to_usize()] = Some(Properties::new(3, 0));
table[OpCode::SWAP3.to_usize()] = Some(Properties::new(4, 0));
table[OpCode::SWAP4.to_usize()] = Some(Properties::new(5, 0));
table[OpCode::SWAP5.to_usize()] = Some(Properties::new(6, 0));
table[OpCode::SWAP6.to_usize()] = Some(Properties::new(7, 0));
table[OpCode::SWAP7.to_usize()] = Some(Properties::new(8, 0));
table[OpCode::SWAP8.to_usize()] = Some(Properties::new(9, 0));
table[OpCode::SWAP9.to_usize()] = Some(Properties::new(10, 0));
table[OpCode::SWAP10.to_usize()] = Some(Properties::new(11, 0));
table[OpCode::SWAP11.to_usize()] = Some(Properties::new(12, 0));
table[OpCode::SWAP12.to_usize()] = Some(Properties::new(13, 0));
table[OpCode::SWAP13.to_usize()] = Some(Properties::new(14, 0));
table[OpCode::SWAP14.to_usize()] = Some(Properties::new(15, 0));
table[OpCode::SWAP15.to_usize()] = Some(Properties::new(16, 0));
table[OpCode::SWAP16.to_usize()] = Some(Properties::new(17, 0));
table[OpCode::LOG0.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::LOG1.to_usize()] = Some(Properties::new(3, -3));
table[OpCode::LOG2.to_usize()] = Some(Properties::new(4, -4));
table[OpCode::LOG3.to_usize()] = Some(Properties::new(5, -5));
table[OpCode::LOG4.to_usize()] = Some(Properties::new(6, -6));
table[OpCode::CREATE.to_usize()] = Some(Properties::new(3, -2));
table[OpCode::CALL.to_usize()] = Some(Properties::new(7, -6));
table[OpCode::CALLCODE.to_usize()] = Some(Properties::new(7, -6));
table[OpCode::RETURN.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::DELEGATECALL.to_usize()] = Some(Properties::new(6, -5));
table[OpCode::CREATE2.to_usize()] = Some(Properties::new(4, -3));
table[OpCode::STATICCALL.to_usize()] = Some(Properties::new(6, -5));
table[OpCode::REVERT.to_usize()] = Some(Properties::new(2, -2));
table[OpCode::INVALID.to_usize()] = Some(Properties::new(0, 0));
table[OpCode::SELFDESTRUCT.to_usize()] = Some(Properties::new(1, -1));
table
}
pub const PROPERTIES: [Option<Properties>; 256] = property_table();
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn const_gas_cost() {
assert!(has_const_gas_cost::<{ OpCode::STOP }>());
assert!(has_const_gas_cost::<{ OpCode::ADD }>());
assert!(has_const_gas_cost::<{ OpCode::PUSH1 }>());
assert!(!has_const_gas_cost::<{ OpCode::SHL }>());
assert!(!has_const_gas_cost::<{ OpCode::BALANCE }>());
assert!(!has_const_gas_cost::<{ OpCode::SLOAD }>());
}
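    // Hedged sketch: spot-check the const-generic gas cost lookup against a few
    // Frontier values assigned in `gas_costs()` above.
    #[test]
    fn frontier_gas_cost_lookup() {
        assert_eq!(
            opcode_gas_cost::<{ Revision::Frontier }, { OpCode::ADD }>(),
            3
        );
        assert_eq!(
            opcode_gas_cost::<{ Revision::Frontier }, { OpCode::MUL }>(),
            5
        );
        assert_eq!(
            opcode_gas_cost::<{ Revision::Frontier }, { OpCode::SLOAD }>(),
            50
        );
    }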
}
| 54.139262 | 100 | 0.642793 |