hexsha
stringlengths
40
40
size
int64
2
1.05M
content
stringlengths
2
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
03d0e65f0702305824efd162de0f9ae230e5c87a
370
#[macro_use] extern crate diesel; #[macro_use] extern crate serde_derive; #[macro_use] extern crate log; #[macro_use] extern crate failure; #[macro_use] extern crate serde_json; extern crate futures; extern crate actix; extern crate dotenv; extern crate env_logger; extern crate crypto; extern crate clap; pub mod database; pub mod web; pub mod wrapper; pub mod schema;
17.619048
26
0.783784
db57b88c1d8615bf58405b5f5647090e3c241414
266
use base::EntityKind; use ecs::EntityBuilder; use quill_common::entities::WanderingTrader; pub fn build_default(builder: &mut EntityBuilder) { super::build_default(builder); builder .add(WanderingTrader) .add(EntityKind::WanderingTrader); }
24.181818
51
0.725564
1108281244f43697c7c8633995546225f9ec2ed1
4,782
//! Tool to convert Wycheproof test vectors to raw hex format #![doc( html_logo_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg", html_favicon_url = "https://raw.githubusercontent.com/RustCrypto/media/6ee8e381/logo.svg", html_root_url = "https://docs.rs/wycheproof2blb/0.1.0" )] use std::io::Write; mod aead; mod aes_siv; mod ecdsa; mod ed25519; mod hkdf; mod mac; mod wycheproof; /// Test information pub struct TestInfo { /// Raw data for the tests. pub data: Vec<Vec<u8>>, /// Test case description. pub desc: String, } /// Generator function which takes input parameters: /// - contents of Wycheproof test data file /// - algorithm name /// - key size (in bits) to include /// and returns the raw contents, together with a list of test identifiers (one per entry). type BlbGenerator = fn(&[u8], &str, u32) -> Vec<TestInfo>; struct Algorithm { pub file: &'static str, pub generator: BlbGenerator, } fn main() { let args: Vec<String> = std::env::args().collect(); let wycheproof_dir = args .get(1) .expect("Provide directory with wycheproof vectors"); let algorithm = args.get(2).expect("Provide algorithm family"); let key_size = args .get(3) .expect("Provide key size in bits, or 0 for all sizes") .parse::<u32>() .expect("Key size needs to be a number of bits"); let out_path = args.get(4).expect("Provide path for output blobby file"); let descriptions_path = args.get(5).expect("Provide path for descriptions file"); let algo = match algorithm.as_str() { "AES-GCM" => Algorithm { file: "aes_gcm_test.json", generator: aead::aes_gcm_generator, }, "AES-GCM-SIV" => Algorithm { file: "aes_gcm_siv_test.json", generator: aead::aes_gcm_generator, }, "CHACHA20-POLY1305" => Algorithm { file: "chacha20_poly1305_test.json", generator: aead::chacha20_poly1305, }, "XCHACHA20-POLY1305" => Algorithm { file: "xchacha20_poly1305_test.json", generator: aead::xchacha20_poly1305, }, "AES-SIV-CMAC" => Algorithm { file: "aes_siv_cmac_test.json", generator: aes_siv::generator, 
}, "AES-CMAC" => Algorithm { file: "aes_cmac_test.json", generator: mac::generator, }, "HKDF-SHA-1" => Algorithm { file: "hkdf_sha1_test.json", generator: hkdf::generator, }, "HKDF-SHA-256" => Algorithm { file: "hkdf_sha256_test.json", generator: hkdf::generator, }, "HKDF-SHA-384" => Algorithm { file: "hkdf_sha384_test.json", generator: hkdf::generator, }, "HKDF-SHA-512" => Algorithm { file: "hkdf_sha512_test.json", generator: hkdf::generator, }, "HMACSHA1" => Algorithm { file: "hmac_sha1_test.json", generator: mac::generator, }, "HMACSHA224" => Algorithm { file: "hmac_sha224_test.json", generator: mac::generator, }, "HMACSHA256" => Algorithm { file: "hmac_sha256_test.json", generator: mac::generator, }, "HMACSHA384" => Algorithm { file: "hmac_sha384_test.json", generator: mac::generator, }, "HMACSHA512" => Algorithm { file: "hmac_sha512_test.json", generator: mac::generator, }, "EDDSA" => Algorithm { file: "eddsa_test.json", generator: ed25519::generator, }, "secp256r1" => Algorithm { file: "ecdsa_secp256r1_sha256_test.json", generator: ecdsa::generator, }, // There's also "ecdsa_secp256r1_sha256_p1363_test.json" with a different signature encoding. "secp256k1" => Algorithm { file: "ecdsa_secp256k1_sha256_test.json", generator: ecdsa::generator, }, _ => panic!("Unrecognized algorithm '{}'", algorithm), }; let data = wycheproof::data(wycheproof_dir, algo.file); let infos = (algo.generator)(&data, algorithm, key_size); println!("Emitting {} test cases", infos.len()); let mut txt_file = std::fs::File::create(descriptions_path).unwrap(); for info in &infos { writeln!(&mut txt_file, "{}", info.desc).unwrap(); } let mut out_file = std::fs::File::create(out_path).unwrap(); let blobs: Vec<Vec<u8>> = infos.into_iter().flat_map(|info| info.data).collect(); let (blb_data, _) = blobby::encode_blobs(&blobs); out_file.write_all(&blb_data).unwrap(); }
33.440559
101
0.584274
9b40eaa550770e3ec66ceb55e567321554abaca4
1,159
/* The following exercises were borrowed from Will Crichton's CS 242 Rust lab. */ use std::collections::HashSet; fn main() { println!("Hi! Try running \"cargo test\" to run tests."); } fn add_n(v: Vec<i32>, n: i32) -> Vec<i32> { let mut vret: Vec<i32> = Vec::new(); for i in v.iter() { vret.push(i + n); } vret } fn add_n_inplace(v: &mut Vec<i32>, n: i32) { let mut i = 0; while i < v.len() { v[i] = v[i] + n; i = i + 1; } } fn dedup(v: &mut Vec<i32>) { let mut numset = HashSet::new(); let mut i: usize = 0; while i < v.len() { if numset.contains(&v[i]) { v.remove(i); } else { numset.insert(v[i]); i = i + 1; } } } #[cfg(test)] mod test { use super::*; #[test] fn test_add_n() { assert_eq!(add_n(vec![1], 2), vec![3]); } #[test] fn test_add_n_inplace() { let mut v = vec![1]; add_n_inplace(&mut v, 2); assert_eq!(v, vec![3]); } #[test] fn test_dedup() { let mut v = vec![3, 1, 0, 1, 4, 4]; dedup(&mut v); assert_eq!(v, vec![3, 1, 0, 4]); } }
18.693548
81
0.47541
5be960b5129ecb4e0010a634196f9eb6236ad4d4
6,971
// Copyright 2021 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // use common_base::base::tokio; use common_datablocks::assert_blocks_sorted_eq; use common_datablocks::DataBlock; use common_datavalues::prelude::*; use common_exception::Result; use common_meta_app::schema::TableInfo; use common_meta_app::schema::TableMeta; use common_planners::*; use databend_query::catalogs::CATALOG_DEFAULT; use databend_query::storages::memory::MemoryTable; use databend_query::storages::ToReadDataSourcePlan; use futures::TryStreamExt; #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_memorytable() -> Result<()> { let ctx = crate::tests::create_query_context().await?; let schema = DataSchemaRefExt::create(vec![ DataField::new("a", u32::to_data_type()), DataField::new("b", u64::to_data_type()), ]); let table = MemoryTable::try_create(crate::tests::create_storage_context()?, TableInfo { desc: "'default'.'a'".into(), name: "a".into(), ident: Default::default(), meta: TableMeta { schema: schema.clone(), engine: "Memory".to_string(), options: TableOptions::default(), ..Default::default() }, })?; // append data. 
{ let block = DataBlock::create(schema.clone(), vec![ Series::from_data(vec![1u32, 2]), Series::from_data(vec![11u64, 22]), ]); let block2 = DataBlock::create(schema.clone(), vec![ Series::from_data(vec![4u32, 3]), Series::from_data(vec![33u64, 33]), ]); let blocks = vec![Ok(block), Ok(block2)]; let input_stream = futures::stream::iter::<Vec<Result<DataBlock>>>(blocks.clone()); let r = table .append_data(ctx.clone(), Box::pin(input_stream)) .await .unwrap(); // with overwrite false table .commit_insertion(ctx.clone(), CATALOG_DEFAULT, r.try_collect().await?, false) .await?; } // read tests { let push_downs_vec: Vec<Option<Extras>> = vec![Some(vec![0usize]), Some(vec![1usize]), None] .into_iter() .map(|x| { x.map(|x| Extras { projection: Some(x), filters: vec![], limit: None, order_by: vec![], }) }) .collect(); let expected_datablocks_vec = vec![ vec![ "+---+", // "| a |", // "+---+", // "| 1 |", // "| 2 |", // "| 3 |", // "| 4 |", // "+---+", // ], vec![ "+----+", // "| b |", // "+----+", // "| 11 |", // "| 22 |", // "| 33 |", // "| 33 |", // "+----+", // ], vec![ "+---+----+", "| a | b |", "+---+----+", "| 1 | 11 |", "| 2 | 22 |", "| 3 | 33 |", "| 4 | 33 |", "+---+----+", ], ]; let expected_statistics_vec = vec![ Statistics::new_estimated(4usize, 16usize, 0, 0), Statistics::new_estimated(4usize, 32usize, 0, 0), Statistics::new_exact(4usize, 48usize, 2, 2), ]; for i in 0..push_downs_vec.len() { let push_downs = push_downs_vec[i].as_ref().cloned(); let expected_datablocks = expected_datablocks_vec[i].clone(); let expected_statistics = expected_statistics_vec[i].clone(); let source_plan = table.read_plan(ctx.clone(), push_downs).await?; ctx.try_set_partitions(source_plan.parts.clone())?; assert_eq!(table.engine(), "Memory"); assert!(table.benefit_column_prune()); let stream = table.read(ctx.clone(), &source_plan).await?; let result = stream.try_collect::<Vec<_>>().await?; assert_blocks_sorted_eq(expected_datablocks, &result); // statistics 
assert_eq!(expected_statistics, source_plan.statistics); } } // overwrite { let block = DataBlock::create(schema.clone(), vec![ Series::from_data(vec![5u64, 6]), Series::from_data(vec![55u64, 66]), ]); let block2 = DataBlock::create(schema.clone(), vec![ Series::from_data(vec![7u64, 8]), Series::from_data(vec![77u64, 88]), ]); let blocks = vec![Ok(block), Ok(block2)]; let input_stream = futures::stream::iter::<Vec<Result<DataBlock>>>(blocks.clone()); let r = table .append_data(ctx.clone(), Box::pin(input_stream)) .await .unwrap(); // with overwrite = true table .commit_insertion(ctx.clone(), CATALOG_DEFAULT, r.try_collect().await?, true) .await?; } // read overwrite { let source_plan = table.read_plan(ctx.clone(), None).await?; ctx.try_set_partitions(source_plan.parts.clone())?; assert_eq!(table.engine(), "Memory"); let stream = table.read(ctx.clone(), &source_plan).await?; let result = stream.try_collect::<Vec<_>>().await?; assert_blocks_sorted_eq( vec![ "+---+----+", "| a | b |", "+---+----+", "| 5 | 55 |", "| 6 | 66 |", "| 7 | 77 |", "| 8 | 88 |", "+---+----+", ], &result, ); } // truncate. { let truncate_plan = TruncateTablePlan { catalog: "default".to_string(), database: "default".to_string(), table: "a".to_string(), purge: false, }; table.truncate(ctx.clone(), truncate_plan).await?; let source_plan = table.read_plan(ctx.clone(), None).await?; let stream = table.read(ctx, &source_plan).await?; let result = stream.try_collect::<Vec<_>>().await?; assert_blocks_sorted_eq(vec!["++", "++"], &result); } Ok(()) }
34.004878
92
0.496629
b9b826398e1b13bd5398df01066e245fb063e494
1,981
#[doc = r" Value read from the register"] pub struct R { bits: u32, } impl super::FSM_ERA { #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get() } } } #[doc = r" Value of the field"] pub struct RESERVED26R { bits: u8, } impl RESERVED26R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct ERA_BANKR { bits: u8, } impl ERA_BANKR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct ERA_ADDRR { bits: u32, } impl ERA_ADDRR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 26:31 - Internal. Only to be used through TI provided API."] #[inline] pub fn reserved26(&self) -> RESERVED26R { let bits = { const MASK: u8 = 63; const OFFSET: u8 = 26; ((self.bits >> OFFSET) & MASK as u32) as u8 }; RESERVED26R { bits } } #[doc = "Bits 23:25 - Internal. Only to be used through TI provided API."] #[inline] pub fn era_bank(&self) -> ERA_BANKR { let bits = { const MASK: u8 = 7; const OFFSET: u8 = 23; ((self.bits >> OFFSET) & MASK as u32) as u8 }; ERA_BANKR { bits } } #[doc = "Bits 0:22 - Internal. Only to be used through TI provided API."] #[inline] pub fn era_addr(&self) -> ERA_ADDRR { let bits = { const MASK: u32 = 8388607; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u32 }; ERA_ADDRR { bits } } }
24.158537
78
0.519435
1682672cd8fda85539198e2fba10855740aecc65
2,937
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use crate::tests::fakes::base::Service; use anyhow::{format_err, Error}; use fidl::endpoints::{ServerEnd, ServiceMarker}; use fidl_fuchsia_ui_input::MediaButtonsEvent; use fidl_fuchsia_ui_policy::MediaButtonsListenerProxy; use fuchsia_async as fasync; use fuchsia_zircon as zx; use futures::TryStreamExt; use parking_lot::RwLock; use std::sync::Arc; pub struct InputDeviceRegistryService { listeners: Arc<RwLock<Vec<MediaButtonsListenerProxy>>>, last_sent_event: Arc<RwLock<Option<MediaButtonsEvent>>>, fail: bool, } impl InputDeviceRegistryService { pub fn new() -> Self { Self { listeners: Arc::new(RwLock::new(Vec::new())), last_sent_event: Arc::new(RwLock::new(None)), fail: false, } } pub fn set_fail(&mut self, fail: bool) { self.fail = fail; } pub fn send_media_button_event(&self, event: MediaButtonsEvent) { *self.last_sent_event.write() = Some(event.clone()); for listener in self.listeners.read().iter() { listener.on_media_buttons_event(event.clone()).ok(); } } } impl Service for InputDeviceRegistryService { fn can_handle_service(&self, service_name: &str) -> bool { return service_name == fidl_fuchsia_ui_policy::DeviceListenerRegistryMarker::NAME; } fn process_stream(&mut self, service_name: &str, channel: zx::Channel) -> Result<(), Error> { if !self.can_handle_service(service_name) { return Err(format_err!("can't handle service")); } let mut manager_stream = ServerEnd::<fidl_fuchsia_ui_policy::DeviceListenerRegistryMarker>::new(channel) .into_stream()?; let listeners_handle = self.listeners.clone(); let last_event = self.last_sent_event.clone(); if self.fail { return Err(format_err!("exiting early")); } fasync::Task::spawn(async move { while let Some(req) = manager_stream.try_next().await.unwrap() { #[allow(unreachable_patterns)] match req { 
fidl_fuchsia_ui_policy::DeviceListenerRegistryRequest::RegisterMediaButtonsListener { listener, control_handle: _, } => { if let Ok(proxy) = listener.into_proxy() { if let Some(event) = &*last_event.read() { proxy.on_media_buttons_event(event.clone()).ok(); } listeners_handle.write().push(proxy); } } _ => {} } } }).detach(); Ok(()) } }
33.758621
105
0.585291
fba4a2402247407c5a79abd85e4dd7224edcff15
2,808
#[macro_use] extern crate serde_derive; extern crate serde; extern crate serde_xml_rs; extern crate xml; use serde::Deserialize; use serde_xml_rs::{from_str, to_string}; use serde_xml_rs::{EventReader, ParserConfig}; #[derive(Debug, Serialize, Deserialize, PartialEq)] struct Item { name: String, source: String, } #[derive(Debug, Serialize, Deserialize, PartialEq)] enum Node { Boolean(bool), Identifier { value: String, index: u32 }, EOF, } #[derive(Debug, Serialize, Deserialize, PartialEq)] struct Nodes { #[serde(rename = "$value")] items: Vec<Node>, } #[test] fn basic_struct() { let src = r#"<Item><name>Banana</name><source>Store</source></Item>"#; let should_be = Item { name: "Banana".to_string(), source: "Store".to_string(), }; let item: Item = from_str(src).unwrap(); assert_eq!(item, should_be); let reserialized_item = to_string(&item).unwrap(); assert_eq!(src, reserialized_item); } #[test] #[ignore] fn round_trip_list_of_enums() { // Construct some inputs let nodes = Nodes { items: vec![ Node::Boolean(true), Node::Identifier { value: "foo".to_string(), index: 5, }, Node::EOF, ], }; let should_be = r#" <Nodes> <Boolean> true </Boolean> <Identifier> <value>foo</value> <index>5</index> </Identifier> <EOF /> </Nodes>"#; let serialized_nodes = to_string(&nodes).unwrap(); assert_eq!(serialized_nodes, should_be); // Then turn it back into a `Nodes` struct and make sure it's the same // as the original let deserialized_nodes: Nodes = from_str(serialized_nodes.as_str()).unwrap(); assert_eq!(deserialized_nodes, nodes); } #[test] fn whitespace_preserving_config() { // Test a configuration which does not clip whitespace from tags let src = r#" <Item> <name> space banana </name> <source> fantasy costco </source> </Item>"#; let item_should_be = Item { name: " space banana ".to_string(), source: " fantasy costco ".to_string(), }; let config = ParserConfig::new().trim_whitespace(false).whitespace_to_characters(false); let mut deserializer = 
serde_xml_rs::Deserializer::new(EventReader::new_with_config(src.as_bytes(), config)); let item = Item::deserialize(&mut deserializer).unwrap(); assert_eq!(item, item_should_be); // Space outside values is not preserved. let serialized_should_be = "<Item><name> space banana </name><source> fantasy costco </source></Item>"; let reserialized_item = to_string(&item).unwrap(); assert_eq!(reserialized_item, serialized_should_be); }
26.490566
113
0.622507
ab0a31ee50f3b83ee14fa77c943e910b6ecd86fd
1,547
mod errors; mod handlers; mod helpers; use std::error::Error; use std::sync::Arc; use asset_services_celery::run_helpers::{init_celery_from_env, init_logging_from_env}; use axum::handler::post; use axum::{AddExtensionLayer, Router}; use tower_http::cors::CorsLayer; use crate::helpers::bind_addr_from_env; #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { init_logging_from_env(); let bind_addr = bind_addr_from_env()?; let celery = Arc::new(init_celery_from_env().await?); celery.display_pretty().await; // CORS: let cors_layer = CorsLayer::permissive(); let axum_app = Router::new() .route("/ping", post(handlers::misc::ping)) .route("/verify/start", post(handlers::verification::start_verify)) .route("/verify/check", post(handlers::verification::check_verify)) .route("/kyc/start", post(handlers::kyc::start_kyc)) .route("/kyc/checks/create", post(handlers::kyc::create_check)) .route("/kyc/checks/retrieve", post(handlers::kyc::retrieve_check)) .route( "/messages/create", post(handlers::messaging::create_message), ) .layer(AddExtensionLayer::new(celery.clone())) // The CORS layer must come after the wrapped resources, for correct response headers. .layer(cors_layer); let axum_server = axum::Server::bind(&bind_addr).serve(axum_app.into_make_service()); log::info!("listening on http://{}", bind_addr); axum_server.await?; celery.close().await?; Ok(()) }
31.571429
94
0.662573
fe8ed969acfc64193ae60df0e76d0d13bbf049ea
22,525
use alloc::fmt; #[cfg(feature = "coff")] use crate::read::coff; #[cfg(feature = "elf")] use crate::read::elf; #[cfg(feature = "macho")] use crate::read::macho; #[cfg(feature = "pe")] use crate::read::pe; #[cfg(feature = "wasm")] use crate::read::wasm; use crate::read::{ self, Architecture, BinaryFormat, CompressedData, Error, FileFlags, Object, ObjectSection, ObjectSegment, Relocation, Result, SectionFlags, SectionIndex, SectionKind, Symbol, SymbolIndex, SymbolMap, }; /// Evaluate an expression on the contents of a file format enum. /// /// This is a hack to avoid virtual calls. macro_rules! with_inner { ($inner:expr, $enum:ident, | $var:ident | $body:expr) => { match $inner { #[cfg(feature = "coff")] $enum::Coff(ref $var) => $body, #[cfg(feature = "elf")] $enum::Elf32(ref $var) => $body, #[cfg(feature = "elf")] $enum::Elf64(ref $var) => $body, #[cfg(feature = "macho")] $enum::MachO32(ref $var) => $body, #[cfg(feature = "macho")] $enum::MachO64(ref $var) => $body, #[cfg(feature = "pe")] $enum::Pe32(ref $var) => $body, #[cfg(feature = "pe")] $enum::Pe64(ref $var) => $body, #[cfg(feature = "wasm")] $enum::Wasm(ref $var) => $body, } }; } macro_rules! with_inner_mut { ($inner:expr, $enum:ident, | $var:ident | $body:expr) => { match $inner { #[cfg(feature = "coff")] $enum::Coff(ref mut $var) => $body, #[cfg(feature = "elf")] $enum::Elf32(ref mut $var) => $body, #[cfg(feature = "elf")] $enum::Elf64(ref mut $var) => $body, #[cfg(feature = "macho")] $enum::MachO32(ref mut $var) => $body, #[cfg(feature = "macho")] $enum::MachO64(ref mut $var) => $body, #[cfg(feature = "pe")] $enum::Pe32(ref mut $var) => $body, #[cfg(feature = "pe")] $enum::Pe64(ref mut $var) => $body, #[cfg(feature = "wasm")] $enum::Wasm(ref mut $var) => $body, } }; } /// Like `with_inner!`, but wraps the result in another enum. macro_rules! 
map_inner { ($inner:expr, $from:ident, $to:ident, | $var:ident | $body:expr) => { match $inner { #[cfg(feature = "coff")] $from::Coff(ref $var) => $to::Coff($body), #[cfg(feature = "elf")] $from::Elf32(ref $var) => $to::Elf32($body), #[cfg(feature = "elf")] $from::Elf64(ref $var) => $to::Elf64($body), #[cfg(feature = "macho")] $from::MachO32(ref $var) => $to::MachO32($body), #[cfg(feature = "macho")] $from::MachO64(ref $var) => $to::MachO64($body), #[cfg(feature = "pe")] $from::Pe32(ref $var) => $to::Pe32($body), #[cfg(feature = "pe")] $from::Pe64(ref $var) => $to::Pe64($body), #[cfg(feature = "wasm")] $from::Wasm(ref $var) => $to::Wasm($body), } }; } /// Like `map_inner!`, but the result is a Result or Option. macro_rules! map_inner_option { ($inner:expr, $from:ident, $to:ident, | $var:ident | $body:expr) => { match $inner { #[cfg(feature = "coff")] $from::Coff(ref $var) => $body.map($to::Coff), #[cfg(feature = "elf")] $from::Elf32(ref $var) => $body.map($to::Elf32), #[cfg(feature = "elf")] $from::Elf64(ref $var) => $body.map($to::Elf64), #[cfg(feature = "macho")] $from::MachO32(ref $var) => $body.map($to::MachO32), #[cfg(feature = "macho")] $from::MachO64(ref $var) => $body.map($to::MachO64), #[cfg(feature = "pe")] $from::Pe32(ref $var) => $body.map($to::Pe32), #[cfg(feature = "pe")] $from::Pe64(ref $var) => $body.map($to::Pe64), #[cfg(feature = "wasm")] $from::Wasm(ref $var) => $body.map($to::Wasm), } }; } /// Call `next` for a file format iterator. macro_rules! 
next_inner { ($inner:expr, $from:ident, $to:ident) => { match $inner { #[cfg(feature = "coff")] $from::Coff(ref mut iter) => iter.next().map($to::Coff), #[cfg(feature = "elf")] $from::Elf32(ref mut iter) => iter.next().map($to::Elf32), #[cfg(feature = "elf")] $from::Elf64(ref mut iter) => iter.next().map($to::Elf64), #[cfg(feature = "macho")] $from::MachO32(ref mut iter) => iter.next().map($to::MachO32), #[cfg(feature = "macho")] $from::MachO64(ref mut iter) => iter.next().map($to::MachO64), #[cfg(feature = "pe")] $from::Pe32(ref mut iter) => iter.next().map($to::Pe32), #[cfg(feature = "pe")] $from::Pe64(ref mut iter) => iter.next().map($to::Pe64), #[cfg(feature = "wasm")] $from::Wasm(ref mut iter) => iter.next().map($to::Wasm), } }; } /// An object file. /// /// Most functionality is provided by the `Object` trait implementation. #[derive(Debug)] pub struct File<'data> { inner: FileInternal<'data>, } #[allow(clippy::large_enum_variant)] #[derive(Debug)] enum FileInternal<'data> { #[cfg(feature = "coff")] Coff(coff::CoffFile<'data>), #[cfg(feature = "elf")] Elf32(elf::ElfFile32<'data>), #[cfg(feature = "elf")] Elf64(elf::ElfFile64<'data>), #[cfg(feature = "macho")] MachO32(macho::MachOFile32<'data>), #[cfg(feature = "macho")] MachO64(macho::MachOFile64<'data>), #[cfg(feature = "pe")] Pe32(pe::PeFile32<'data>), #[cfg(feature = "pe")] Pe64(pe::PeFile64<'data>), #[cfg(feature = "wasm")] Wasm(wasm::WasmFile<'data>), } impl<'data> File<'data> { /// Parse the raw file data. 
pub fn parse(data: &'data [u8]) -> Result<Self> { if data.len() < 16 { return Err(Error("File too short")); } let inner = match [data[0], data[1], data[2], data[3], data[4]] { // 32-bit ELF #[cfg(feature = "elf")] [0x7f, b'E', b'L', b'F', 1] => FileInternal::Elf32(elf::ElfFile32::parse(data)?), // 64-bit ELF #[cfg(feature = "elf")] [0x7f, b'E', b'L', b'F', 2] => FileInternal::Elf64(elf::ElfFile64::parse(data)?), // 32-bit Mach-O #[cfg(feature = "macho")] [0xfe, 0xed, 0xfa, 0xce, _] | [0xce, 0xfa, 0xed, 0xfe, _] => FileInternal::MachO32(macho::MachOFile32::parse(data)?), // 64-bit Mach-O #[cfg(feature = "macho")] | [0xfe, 0xed, 0xfa, 0xcf, _] | [0xcf, 0xfa, 0xed, 0xfe, _] => FileInternal::MachO64(macho::MachOFile64::parse(data)?), // WASM #[cfg(feature = "wasm")] [0x00, b'a', b's', b'm', _] => FileInternal::Wasm(wasm::WasmFile::parse(data)?), // MS-DOS, assume stub for Windows PE32 or PE32+ #[cfg(feature = "pe")] [b'M', b'Z', _, _, _] => { // `optional_header_magic` doesn't care if it's `PeFile32` and `PeFile64`. match pe::PeFile64::optional_header_magic(data) { Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR32_MAGIC) => { FileInternal::Pe32(pe::PeFile32::parse(data)?) } Ok(crate::pe::IMAGE_NT_OPTIONAL_HDR64_MAGIC) => { FileInternal::Pe64(pe::PeFile64::parse(data)?) } _ => return Err(Error("Unknown MS-DOS file")), } } // TODO: more COFF machines #[cfg(feature = "coff")] // COFF x86 [0x4c, 0x01, _, _, _] // COFF x86-64 | [0x64, 0x86, _, _, _] => FileInternal::Coff(coff::CoffFile::parse(data)?), _ => return Err(Error("Unknown file magic")), }; Ok(File { inner }) } /// Return the file format. 
pub fn format(&self) -> BinaryFormat { match self.inner { #[cfg(feature = "coff")] FileInternal::Coff(_) => BinaryFormat::Coff, #[cfg(feature = "elf")] FileInternal::Elf32(_) | FileInternal::Elf64(_) => BinaryFormat::Elf, #[cfg(feature = "macho")] FileInternal::MachO32(_) | FileInternal::MachO64(_) => BinaryFormat::MachO, #[cfg(feature = "pe")] FileInternal::Pe32(_) | FileInternal::Pe64(_) => BinaryFormat::Pe, #[cfg(feature = "wasm")] FileInternal::Wasm(_) => BinaryFormat::Wasm, } } } impl<'data> read::private::Sealed for File<'data> {} impl<'data, 'file> Object<'data, 'file> for File<'data> where 'data: 'file, { type Segment = Segment<'data, 'file>; type SegmentIterator = SegmentIterator<'data, 'file>; type Section = Section<'data, 'file>; type SectionIterator = SectionIterator<'data, 'file>; type SymbolIterator = SymbolIterator<'data, 'file>; fn architecture(&self) -> Architecture { with_inner!(self.inner, FileInternal, |x| x.architecture()) } fn is_little_endian(&self) -> bool { with_inner!(self.inner, FileInternal, |x| x.is_little_endian()) } fn is_64(&self) -> bool { with_inner!(self.inner, FileInternal, |x| x.is_64()) } fn segments(&'file self) -> SegmentIterator<'data, 'file> { SegmentIterator { inner: map_inner!(self.inner, FileInternal, SegmentIteratorInternal, |x| x .segments()), } } fn section_by_name(&'file self, section_name: &str) -> Option<Section<'data, 'file>> { map_inner_option!(self.inner, FileInternal, SectionInternal, |x| x .section_by_name(section_name)) .map(|inner| Section { inner }) } fn section_by_index(&'file self, index: SectionIndex) -> Result<Section<'data, 'file>> { map_inner_option!(self.inner, FileInternal, SectionInternal, |x| x .section_by_index(index)) .map(|inner| Section { inner }) } fn sections(&'file self) -> SectionIterator<'data, 'file> { SectionIterator { inner: map_inner!(self.inner, FileInternal, SectionIteratorInternal, |x| x .sections()), } } fn symbol_by_index(&self, index: SymbolIndex) -> Result<Symbol<'data>> { 
with_inner!(self.inner, FileInternal, |x| x.symbol_by_index(index)) } fn symbols(&'file self) -> SymbolIterator<'data, 'file> { SymbolIterator { inner: map_inner!(self.inner, FileInternal, SymbolIteratorInternal, |x| x .symbols()), } } fn dynamic_symbols(&'file self) -> SymbolIterator<'data, 'file> { SymbolIterator { inner: map_inner!(self.inner, FileInternal, SymbolIteratorInternal, |x| x .dynamic_symbols()), } } fn symbol_map(&self) -> SymbolMap<'data> { with_inner!(self.inner, FileInternal, |x| x.symbol_map()) } fn has_debug_symbols(&self) -> bool { with_inner!(self.inner, FileInternal, |x| x.has_debug_symbols()) } #[inline] fn mach_uuid(&self) -> Result<Option<[u8; 16]>> { with_inner!(self.inner, FileInternal, |x| x.mach_uuid()) } #[inline] fn build_id(&self) -> Result<Option<&'data [u8]>> { with_inner!(self.inner, FileInternal, |x| x.build_id()) } #[inline] fn gnu_debuglink(&self) -> Result<Option<(&'data [u8], u32)>> { with_inner!(self.inner, FileInternal, |x| x.gnu_debuglink()) } fn entry(&self) -> u64 { with_inner!(self.inner, FileInternal, |x| x.entry()) } fn flags(&self) -> FileFlags { with_inner!(self.inner, FileInternal, |x| x.flags()) } } /// An iterator over the segments of a `File`. 
#[derive(Debug)] pub struct SegmentIterator<'data, 'file> where 'data: 'file, { inner: SegmentIteratorInternal<'data, 'file>, } #[derive(Debug)] enum SegmentIteratorInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffSegmentIterator<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfSegmentIterator32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfSegmentIterator64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachOSegmentIterator32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachOSegmentIterator64<'data, 'file>), #[cfg(feature = "pe")] Pe32(pe::PeSegmentIterator32<'data, 'file>), #[cfg(feature = "pe")] Pe64(pe::PeSegmentIterator64<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmSegmentIterator<'data, 'file>), } impl<'data, 'file> Iterator for SegmentIterator<'data, 'file> { type Item = Segment<'data, 'file>; fn next(&mut self) -> Option<Self::Item> { next_inner!(self.inner, SegmentIteratorInternal, SegmentInternal) .map(|inner| Segment { inner }) } } /// A segment of a `File`. 
pub struct Segment<'data, 'file> where 'data: 'file, { inner: SegmentInternal<'data, 'file>, } #[derive(Debug)] enum SegmentInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffSegment<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfSegment32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfSegment64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachOSegment32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachOSegment64<'data, 'file>), #[cfg(feature = "pe")] Pe32(pe::PeSegment32<'data, 'file>), #[cfg(feature = "pe")] Pe64(pe::PeSegment64<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmSegment<'data, 'file>), } impl<'data, 'file> fmt::Debug for Segment<'data, 'file> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // It's painful to do much better than this let mut s = f.debug_struct("Segment"); match self.name() { Ok(Some(ref name)) => { s.field("name", name); } Ok(None) => {} Err(_) => { s.field("name", &"<invalid>"); } } s.field("address", &self.address()) .field("size", &self.size()) .finish() } } impl<'data, 'file> read::private::Sealed for Segment<'data, 'file> {} impl<'data, 'file> ObjectSegment<'data> for Segment<'data, 'file> { fn address(&self) -> u64 { with_inner!(self.inner, SegmentInternal, |x| x.address()) } fn size(&self) -> u64 { with_inner!(self.inner, SegmentInternal, |x| x.size()) } fn align(&self) -> u64 { with_inner!(self.inner, SegmentInternal, |x| x.align()) } fn file_range(&self) -> (u64, u64) { with_inner!(self.inner, SegmentInternal, |x| x.file_range()) } fn data(&self) -> Result<&'data [u8]> { with_inner!(self.inner, SegmentInternal, |x| x.data()) } fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> { with_inner!(self.inner, SegmentInternal, |x| x.data_range(address, size)) } fn name(&self) -> Result<Option<&str>> { with_inner!(self.inner, SegmentInternal, |x| x.name()) } } /// An iterator of the sections of a `File`. 
#[derive(Debug)] pub struct SectionIterator<'data, 'file> where 'data: 'file, { inner: SectionIteratorInternal<'data, 'file>, } // we wrap our enums in a struct so that they are kept private. #[derive(Debug)] enum SectionIteratorInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffSectionIterator<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfSectionIterator32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfSectionIterator64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachOSectionIterator32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachOSectionIterator64<'data, 'file>), #[cfg(feature = "pe")] Pe32(pe::PeSectionIterator32<'data, 'file>), #[cfg(feature = "pe")] Pe64(pe::PeSectionIterator64<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmSectionIterator<'data, 'file>), } impl<'data, 'file> Iterator for SectionIterator<'data, 'file> { type Item = Section<'data, 'file>; fn next(&mut self) -> Option<Self::Item> { next_inner!(self.inner, SectionIteratorInternal, SectionInternal) .map(|inner| Section { inner }) } } /// A Section of a File pub struct Section<'data, 'file> where 'data: 'file, { inner: SectionInternal<'data, 'file>, } enum SectionInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffSection<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfSection32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfSection64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachOSection32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachOSection64<'data, 'file>), #[cfg(feature = "pe")] Pe32(pe::PeSection32<'data, 'file>), #[cfg(feature = "pe")] Pe64(pe::PeSection64<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmSection<'data, 'file>), } impl<'data, 'file> fmt::Debug for Section<'data, 'file> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // It's painful to do much better than this let mut s = 
f.debug_struct("Section"); match self.segment_name() { Ok(Some(ref name)) => { s.field("segment", name); } Ok(None) => {} Err(_) => { s.field("segment", &"<invalid>"); } } s.field("name", &self.name().unwrap_or("<invalid>")) .field("address", &self.address()) .field("size", &self.size()) .field("kind", &self.kind()) .finish() } } impl<'data, 'file> read::private::Sealed for Section<'data, 'file> {} impl<'data, 'file> ObjectSection<'data> for Section<'data, 'file> { type RelocationIterator = RelocationIterator<'data, 'file>; fn index(&self) -> SectionIndex { with_inner!(self.inner, SectionInternal, |x| x.index()) } fn address(&self) -> u64 { with_inner!(self.inner, SectionInternal, |x| x.address()) } fn size(&self) -> u64 { with_inner!(self.inner, SectionInternal, |x| x.size()) } fn align(&self) -> u64 { with_inner!(self.inner, SectionInternal, |x| x.align()) } fn file_range(&self) -> Option<(u64, u64)> { with_inner!(self.inner, SectionInternal, |x| x.file_range()) } fn data(&self) -> Result<&'data [u8]> { with_inner!(self.inner, SectionInternal, |x| x.data()) } fn data_range(&self, address: u64, size: u64) -> Result<Option<&'data [u8]>> { with_inner!(self.inner, SectionInternal, |x| x.data_range(address, size)) } fn compressed_data(&self) -> Result<CompressedData<'data>> { with_inner!(self.inner, SectionInternal, |x| x.compressed_data()) } fn name(&self) -> Result<&str> { with_inner!(self.inner, SectionInternal, |x| x.name()) } fn segment_name(&self) -> Result<Option<&str>> { with_inner!(self.inner, SectionInternal, |x| x.segment_name()) } fn kind(&self) -> SectionKind { with_inner!(self.inner, SectionInternal, |x| x.kind()) } fn relocations(&self) -> RelocationIterator<'data, 'file> { RelocationIterator { inner: map_inner!( self.inner, SectionInternal, RelocationIteratorInternal, |x| x.relocations() ), } } fn flags(&self) -> SectionFlags { with_inner!(self.inner, SectionInternal, |x| x.flags()) } } /// An iterator over symbol table entries. 
#[derive(Debug)] pub struct SymbolIterator<'data, 'file> where 'data: 'file, { inner: SymbolIteratorInternal<'data, 'file>, } #[derive(Debug)] enum SymbolIteratorInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffSymbolIterator<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfSymbolIterator32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfSymbolIterator64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachOSymbolIterator32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachOSymbolIterator64<'data, 'file>), #[cfg(feature = "pe")] Pe32(coff::CoffSymbolIterator<'data, 'file>), #[cfg(feature = "pe")] Pe64(coff::CoffSymbolIterator<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmSymbolIterator<'data, 'file>), } impl<'data, 'file> Iterator for SymbolIterator<'data, 'file> { type Item = (SymbolIndex, Symbol<'data>); fn next(&mut self) -> Option<Self::Item> { with_inner_mut!(self.inner, SymbolIteratorInternal, |x| x.next()) } } /// An iterator over relocation entries #[derive(Debug)] pub struct RelocationIterator<'data, 'file> where 'data: 'file, { inner: RelocationIteratorInternal<'data, 'file>, } #[derive(Debug)] enum RelocationIteratorInternal<'data, 'file> where 'data: 'file, { #[cfg(feature = "coff")] Coff(coff::CoffRelocationIterator<'data, 'file>), #[cfg(feature = "elf")] Elf32(elf::ElfRelocationIterator32<'data, 'file>), #[cfg(feature = "elf")] Elf64(elf::ElfRelocationIterator64<'data, 'file>), #[cfg(feature = "macho")] MachO32(macho::MachORelocationIterator32<'data, 'file>), #[cfg(feature = "macho")] MachO64(macho::MachORelocationIterator64<'data, 'file>), #[cfg(feature = "pe")] Pe32(pe::PeRelocationIterator<'data, 'file>), #[cfg(feature = "pe")] Pe64(pe::PeRelocationIterator<'data, 'file>), #[cfg(feature = "wasm")] Wasm(wasm::WasmRelocationIterator<'data, 'file>), } impl<'data, 'file> Iterator for RelocationIterator<'data, 'file> { type Item = (u64, Relocation); fn next(&mut self) -> 
Option<Self::Item> { with_inner_mut!(self.inner, RelocationIteratorInternal, |x| x.next()) } }
32.363506
101
0.551387
9c27feb979b2fb54eec0b02cb49f5219b6d7ebe6
4,071
use position::*; use token::*; use std::str::Chars; #[derive(Clone)] pub struct Tokenizer<'a> { chars: Chars<'a>, pos: Position } impl<'a> Tokenizer<'a> { pub fn tokenize(chars: Chars<'a>) -> Tokenizer { Tokenizer { chars: chars, pos: Position::new("string... ") } } fn next_str(&mut self) -> TokenType { let mut str_lit = String::new(); loop { if let Some(c) = self.next_char() { if c == '"' { return TokenType::StrLit(str_lit); } str_lit.push(c); } else { return TokenType::Error; } } } fn next_num(&mut self, c: char) -> TokenType { let len = self.chars.clone().take_while(|c| c.is_numeric()).count(); let mut num = String::with_capacity(len + 1); num.push(c); for _ in 0..len { num.push(self.next_char().unwrap()); } TokenType::NumLit(num) } fn next_ident_string(&mut self) -> &str { let len = self.chars.clone().take_while(|c| c.is_alphanumeric()).count(); let r = &self.chars.as_str()[..len]; for _ in 0..len { self.next_char(); } r } fn next_token(&mut self) -> Option<Token> { loop { let token_pos = self.pos.clone(); if let Some(next_char) = self.next_char() { if next_char.is_whitespace() { continue; } return Some(match next_char { '=' => match self.chars.clone().next() { Some('=') => { self.next_char(); TokenType::Eq }, Some('>') => { self.next_char(); TokenType::FatArrow }, _ => TokenType::Assign }, '!' 
=> match self.chars.clone().next() { Some('=') => { self.next_char(); TokenType::Neq }, _ => TokenType::Not }, '+' => TokenType::Plus, '-' => TokenType::Minus, '*' => TokenType::Star, '/' => TokenType::Slash, ',' => TokenType::Comma, ':' => TokenType::Colon, '(' => TokenType::LeftPar, ')' => TokenType::RightPar, '{' => TokenType::LeftBrace, '}' => TokenType::RightBrace, '[' => TokenType::LeftBracket, ']' => TokenType::RightBracket, '"' => self.next_str(), c if c.is_numeric() => self.next_num(c), c if c.is_alphabetic() => match (c, self.next_ident_string()) { ('i', "f") => TokenType::If, ('e', "lse") => TokenType::Else, ('l', "et") => TokenType::Let, ('w', "hile") => TokenType::While, (c, s) => { let mut name = String::with_capacity(s.len() + 1); name.push(c); name.push_str(s); TokenType::Ident(name) } }, _ => TokenType::Error }.with_pos(token_pos)); } else { return None; } } } fn next_char(&mut self) -> Option<char> { let c = self.chars.next(); match c { Some('\n') => self.pos.next_line(), _ => self.pos.next_col() } c } } impl<'a> Iterator for Tokenizer<'a> { type Item = Token; fn next(&mut self) -> Option<Self::Item> { self.next_token() } }
30.155556
83
0.38197
1ef486c093d67788fc6c296700f5721446a2e75d
24,562
#![deny(warnings, rust_2018_idioms)] #![type_length_limit = "16289823"] #![recursion_limit = "256"] use linkerd2_app_integration::*; macro_rules! generate_tests { (server: $make_server:path, client: $make_client:path) => { use linkerd2_proxy_api as pb; #[tokio::test] async fn outbound_asks_controller_api() { let _trace = trace_init(); let srv = $make_server().route("/", "hello").route("/bye", "bye").run().await; let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); ctrl.destination_tx("disco.test.svc.cluster.local").send_addr(srv.addr); let proxy = proxy::new().controller(ctrl.run().await).outbound(srv).run().await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/").await, "hello"); assert_eq!(client.get("/bye").await, "bye"); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn outbound_reconnects_if_controller_stream_ends() { let _trace = trace_init(); let srv = $make_server().route("/recon", "nect").run().await; let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); drop(ctrl.destination_tx("disco.test.svc.cluster.local")); ctrl.destination_tx("disco.test.svc.cluster.local").send_addr(srv.addr); let proxy = proxy::new().controller(ctrl.run().await).outbound(srv).run().await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/recon").await, "nect"); } #[tokio::test] async fn outbound_fails_fast_when_destination_has_no_endpoints() { outbound_fails_fast(controller::destination_exists_with_no_endpoints()).await } #[tokio::test] async fn outbound_fails_fast_when_destination_does_not_exist() { outbound_fails_fast(controller::destination_does_not_exist()).await } async fn outbound_fails_fast(up: pb::destination::Update) { use std::sync::{Arc, atomic::{AtomicBool, Ordering}}; let _trace = trace_init(); let did_not_fall_back = Arc::new(AtomicBool::new(true)); let 
did_not_fall_back2 = did_not_fall_back.clone(); let srv = $make_server().route_fn("/", move |_| { did_not_fall_back2.store(false, Ordering::Release); panic!() }).run().await; let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); ctrl.destination_tx("disco.test.svc.cluster.local").send(up); let proxy = proxy::new() .controller(ctrl.run().await) .outbound(srv) .run().await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); let rsp = client.request(client.request_builder("/")).await.unwrap(); assert!( did_not_fall_back.load(Ordering::Acquire), "original destination should not have been used!", ); // We should have gotten an HTTP response, not an error. assert_eq!(rsp.status(), http::StatusCode::SERVICE_UNAVAILABLE); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn outbound_falls_back_to_orig_dst_when_outside_search_path() { let _trace = trace_init(); let srv = $make_server().route("/", "hello from my great website").run().await; let ctrl = controller::new(); ctrl.no_more_destinations(); let proxy = proxy::new() .controller(ctrl.run().await) .outbound(srv) .run().await; let client = $make_client(proxy.outbound, "my-great-websute.net"); assert_eq!(client.get("/").await, "hello from my great website"); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn outbound_falls_back_to_orig_dst_after_invalid_argument() { let _trace = trace_init(); let srv = $make_server().route("/", "hello").run().await; const NAME: &'static str = "unresolvable.svc.cluster.local"; let ctrl = controller::new(); ctrl.profile_tx_default(NAME); ctrl.destination_fail( NAME, grpc::Status::new(grpc::Code::InvalidArgument, "unresolvable"), ); ctrl.no_more_destinations(); let proxy = proxy::new() .controller(ctrl.run().await) .outbound(srv) .run().await; let client = $make_client(proxy.outbound, NAME); assert_eq!(client.get("/").await, "hello"); // Ensure panics are propagated. 
proxy.join_servers().await; } #[tokio::test] async fn outbound_destinations_reset_on_reconnect_followed_by_empty() { outbound_destinations_reset_on_reconnect( controller::destination_exists_with_no_endpoints() ).await } #[tokio::test] async fn outbound_destinations_reset_on_reconnect_followed_by_dne() { outbound_destinations_reset_on_reconnect( controller::destination_does_not_exist() ).await } async fn outbound_destinations_reset_on_reconnect(up: pb::destination::Update) { let env = TestEnv::new(); let srv = $make_server().route("/", "hello").run().await; let ctrl = controller::new(); ctrl.profile_tx_default("initially-exists.ns.svc.cluster.local"); let dst_tx0 = ctrl.destination_tx("initially-exists.ns.svc.cluster.local"); dst_tx0.send_addr(srv.addr); let dst_tx1 = ctrl.destination_tx("initially-exists.ns.svc.cluster.local"); let proxy = proxy::new() .controller(ctrl.run().await) .outbound(srv) .run_with_test_env(env).await; let initially_exists = $make_client(proxy.outbound, "initially-exists.ns.svc.cluster.local"); assert_eq!(initially_exists.get("/").await, "hello"); drop(dst_tx0); // trigger reconnect dst_tx1.send(up); // Wait for the reconnect to happen. TODO: Replace this flaky logic. tokio::time::delay_for(Duration::from_millis(1000)).await; let rsp = initially_exists.request(initially_exists.request_builder("/")).await.unwrap(); assert_eq!(rsp.status(), http::StatusCode::SERVICE_UNAVAILABLE); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn outbound_times_out() { let env = TestEnv::new(); let srv = $make_server().route("/hi", "hello").run().await; let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); // when the proxy requests the destination, don't respond. 
let _dst_tx = ctrl .destination_tx("disco.test.svc.cluster.local"); let proxy = proxy::new() .controller(ctrl.run().await) .outbound(srv) .run_with_test_env(env) .await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); let req = client.request_builder("/"); let rsp = client.request(req.method("GET")).await.unwrap(); // the request should time out assert_eq!(rsp.status(), http::StatusCode::SERVICE_UNAVAILABLE); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn outbound_asks_controller_without_orig_dst() { let _trace = trace_init(); let _ = TestEnv::new(); let srv = $make_server() .route("/", "hello") .route("/bye", "bye") .run() .await; let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); ctrl.destination_tx("disco.test.svc.cluster.local").send_addr(srv.addr); let proxy = proxy::new() .controller(ctrl.run().await) // don't set srv as outbound(), so that SO_ORIGINAL_DST isn't // used as a backup .run().await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/").await, "hello"); assert_eq!(client.get("/bye").await, "bye"); // Ensure panics are propagated. srv.join().await; } #[tokio::test] async fn outbound_error_reconnects_after_backoff() { let env = TestEnv::new(); let srv = $make_server() .route("/", "hello") .run().await; // Used to delay `listen` in the server, to force connection refused errors. 
let (tx, rx) = oneshot::channel::<()>(); let ctrl = controller::new(); ctrl.profile_tx_default("disco.test.svc.cluster.local"); let dst_tx = ctrl.destination_tx("disco.test.svc.cluster.local"); dst_tx.send_addr(srv.addr); // but don't drop, to not trigger stream closing reconnects let proxy = proxy::new() .controller(ctrl.delay_listen(async move { let _ = rx.await; }).await) // don't set srv as outbound(), so that SO_ORIGINAL_DST isn't // used as a backup .run_with_test_env(env).await; // Allow the control client to notice a connection error tokio::time::delay_for(Duration::from_millis(500)).await; // Allow our controller to start accepting connections, // and then wait a little bit so the client tries again. drop(tx); tokio::time::delay_for(Duration::from_millis(500)).await; let client = $make_client(proxy.outbound, "disco.test.svc.cluster.local"); assert_eq!(client.get("/").await, "hello"); // Ensure panics are propagated. srv.join().await; } mod override_header { use std::sync::{Arc, atomic::{AtomicUsize, Ordering}}; use bytes::buf::Buf; use super::super::*; const OVERRIDE_HEADER: &'static str = "l5d-dst-override"; const FOO: &'static str = "foo.test.svc.cluster.local"; const BAR: &'static str = "bar.test.svc.cluster.local"; struct Fixture { foo_reqs: Arc<AtomicUsize>, bar_reqs: Arc<AtomicUsize>, foo: Option<server::Listening>, bar: server::Listening, ctrl: Option<controller::Controller>, _foo_dst: (controller::ProfileSender, controller::DstSender), _bar_dst: (controller::ProfileSender, controller::DstSender), } impl Fixture { async fn new() -> Fixture { let _trace = trace_init(); let foo_reqs = Arc::new(AtomicUsize::new(0)); let foo_reqs2 = foo_reqs.clone(); let foo = $make_server() .route_fn("/", move |req| { assert!( !req.headers().contains_key(OVERRIDE_HEADER), "dst override header should be stripped before forwarding request", ); foo_reqs2.clone().fetch_add(1, Ordering::Release); Response::builder().status(200) .body(Bytes::from_static(&b"hello from 
foo"[..])) .unwrap() }) .run().await; let bar_reqs = Arc::new(AtomicUsize::new(0)); let bar_reqs2 = bar_reqs.clone(); let bar = $make_server() .route_fn("/", move |req| { assert!( !req.headers().contains_key(OVERRIDE_HEADER), "dst override header should be stripped before forwarding request", ); bar_reqs2.clone().fetch_add(1, Ordering::Release); Response::builder().status(200) .body(Bytes::from_static(&b"hello from bar"[..])) .unwrap() }) .run().await; let ctrl = controller::new(); let foo_profile = ctrl.profile_tx(FOO); foo_profile.send(controller::profile(vec![ controller::route().request_path("/") .label("hello", "foo"), ], None, vec![])); let bar_profile = ctrl.profile_tx(BAR); bar_profile.send(controller::profile(vec![ controller::route().request_path("/") .label("hello", "bar"), ], None, vec![])); let foo_eps = ctrl.destination_tx(FOO); foo_eps.send_addr(foo.addr); let bar_eps = ctrl.destination_tx(BAR); bar_eps.send_addr(bar.addr); Fixture { foo_reqs, bar_reqs, foo: Some(foo), bar, ctrl: Some(ctrl), _foo_dst: (foo_profile, foo_eps), _bar_dst: (bar_profile, bar_eps), } } fn foo(&mut self) -> server::Listening { self.foo.take().unwrap() } fn foo_reqs(&self) -> usize { self.foo_reqs.load(Ordering::Acquire) } fn bar_reqs(&self) -> usize { self.bar_reqs.load(Ordering::Acquire) } async fn proxy(&mut self) -> proxy::Proxy { let ctrl = self.ctrl.take().unwrap(); proxy::new().controller(ctrl.run().await) } } async fn override_req(client: &client::Client) -> http::Response<hyper::Body> { client.request( client.request_builder("/") .header(OVERRIDE_HEADER, BAR) .method("GET") ).await .expect("override request") } #[tokio::test] async fn outbound_honors_override_header() { let mut fixture = Fixture::new().await; let proxy = fixture.proxy().await.run().await; let client = $make_client(proxy.outbound, FOO); // Request 1 --- without override header. 
assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 1); assert_eq!(fixture.bar_reqs(), 0); // Request 2 --- with override header let res = override_req(&client).await; assert_eq!(res.status(), http::StatusCode::OK); let stream = res.into_parts().1; let mut body = hyper::body::aggregate(stream).await.expect("response 2 body"); let body = std::str::from_utf8(body.to_bytes().as_ref()).expect("body is utf-8").to_owned(); assert_eq!(body, "hello from bar"); assert_eq!(fixture.foo_reqs(), 1); assert_eq!(fixture.bar_reqs(), 1); // Request 3 --- without override header again. assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 2); assert_eq!(fixture.bar_reqs(), 1); // Ensure panics are propagated. tokio::join!{ fixture.foo().join(), fixture.bar.join() }; } #[tokio::test] async fn outbound_overrides_profile() { let mut fixture = Fixture::new().await; let proxy = fixture.proxy().await.run().await; println!("make client: {}", FOO); let client = $make_client(proxy.outbound, FOO); let metrics = client::http1(proxy.metrics, "localhost"); // Request 1 --- without override header. client.get("/").await; assert_eventually_contains!(metrics.get("/metrics").await, "rt_hello=\"foo\""); // Request 2 --- with override header let res = override_req(&client).await; assert_eq!(res.status(), http::StatusCode::OK); assert_eventually_contains!(metrics.get("/metrics").await, "rt_hello=\"bar\""); // Ensure panics are propagated. tokio::join!{ fixture.foo().join(), fixture.bar.join() }; } #[tokio::test] async fn outbound_honors_override_header_with_orig_dst() { let mut fixture = Fixture::new().await; let proxy = fixture.proxy().await .outbound(fixture.foo()) .run().await; let client = $make_client(proxy.outbound, "foo.test.svc.cluster.local"); // Request 1 --- without override header. 
assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 1); assert_eq!(fixture.bar_reqs(), 0); // Request 2 --- with override header let res = override_req(&client).await; assert_eq!(res.status(), http::StatusCode::OK); let stream = res.into_parts().1; let mut body = hyper::body::aggregate(stream).await.expect("response 2 body"); let body = std::str::from_utf8(body.to_bytes().as_ref()).expect("body is utf-8").to_owned(); assert_eq!(body, "hello from bar"); assert_eq!(fixture.foo_reqs(), 1); assert_eq!(fixture.bar_reqs(), 1); // Request 3 --- without override header again. assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 2); assert_eq!(fixture.bar_reqs(), 1); // Ensure panics are propagated. tokio::join! { proxy.join_servers(), fixture.bar.join() }; } #[tokio::test] async fn inbound_overrides_profile() { let mut fixture = Fixture::new().await; let proxy = fixture.proxy().await .inbound(fixture.foo()) .run().await; let client = $make_client(proxy.inbound, FOO); let metrics = client::http1(proxy.metrics, "localhost"); // Request 1 --- without override header. client.get("/").await; assert_eventually_contains!(metrics.get("/metrics").await, "rt_hello=\"foo\""); // // Request 2 --- with override header // let res = override_req(&client); // assert_eq!(res.status(), http::StatusCode::OK); // assert_eventually_contains!(metrics.get("/metrics"), "rt_hello=\"bar\""); // Ensure panics are propagated. proxy.join_servers().await; } #[tokio::test] async fn inbound_still_routes_to_orig_dst() { let mut fixture = Fixture::new().await; let proxy = fixture.proxy().await .inbound(fixture.foo()) .run().await; let client = $make_client(proxy.inbound, "foo.test.svc.cluster.local"); // Request 1 --- without override header. 
assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 1); assert_eq!(fixture.bar_reqs(), 0); // Request 2 --- with override header let res = override_req(&client).await; assert_eq!(res.status(), http::StatusCode::OK); let stream = res.into_parts().1; let mut body = hyper::body::aggregate(stream).await.expect("response 2 body"); let body = std::str::from_utf8(body.to_bytes().as_ref()).expect("body is utf-8").to_owned(); assert_eq!(body, "hello from foo"); assert_eq!(fixture.foo_reqs(), 2); assert_eq!(fixture.bar_reqs(), 0); // Request 3 --- without override header again. assert_eq!(client.get("/").await, "hello from foo"); assert_eq!(fixture.foo_reqs(), 3); assert_eq!(fixture.bar_reqs(), 0); // Ensure panics are propagated. tokio::join! { proxy.join_servers(), fixture.bar.join() }; } } } } mod http2 { use linkerd2_app_integration::*; generate_tests! { server: server::new, client: client::new } #[tokio::test] async fn outbound_balancer_waits_for_ready_endpoint() { // See https://github.com/linkerd/linkerd2/issues/2550 let _t = trace_init(); let srv1 = server::http2() .route("/", "hello") .route("/bye", "bye") .run() .await; let srv2 = server::http2() .route("/", "hello") .route("/bye", "bye") .run() .await; let host = "disco.test.svc.cluster.local"; let ctrl = controller::new(); ctrl.profile_tx_default(host); let dst = ctrl.destination_tx(host); // Start by "knowing" the first server... dst.send_addr(srv1.addr); let proxy = proxy::new().controller(ctrl.run().await).run().await; let client = client::http2(proxy.outbound, host); let metrics = client::http1(proxy.metrics, "localhost"); assert_eq!(client.get("/").await, "hello"); // Simulate the first server falling over without discovery // knowing about it... srv1.join().await; tokio::task::yield_now().await; // Wait until the proxy has seen the `srv1` disconnect... 
assert_eventually_contains!( metrics.get("/metrics").await, "tcp_close_total{direction=\"outbound\",peer=\"dst\",tls=\"no_identity\",no_tls_reason=\"not_provided_by_service_discovery\",errno=\"\"} 1" ); // Start a new request to the destination, now that the server is dead. // This request should be waiting at the balancer for a ready endpoint. // // The only one it knows about is dead, so it won't have progressed. let fut = client.request(client.request_builder("/bye")); // When we tell the balancer about a new endpoint, it should have added // it and then dispatched the request... dst.send_addr(srv2.addr); let res = fut.await.expect("/bye response"); assert_eq!(res.status(), http::StatusCode::OK); } } mod http1 { use linkerd2_app_integration::*; generate_tests! { server: server::http1, client: client::http1 } mod absolute_uris { use linkerd2_app_integration::*; generate_tests! { server: server::http1, client: client::http1_absolute_uris } } }
39.111465
151
0.520275
d650ef56435ac0d8831a9f7406685deb020611fe
1,836
#[doc = "Register `HOST_SLCHOST_GPIO_STATUS1` reader"] pub struct R(crate::R<HOST_SLCHOST_GPIO_STATUS1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<HOST_SLCHOST_GPIO_STATUS1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<HOST_SLCHOST_GPIO_STATUS1_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<HOST_SLCHOST_GPIO_STATUS1_SPEC>) -> Self { R(reader) } } #[doc = "Field `HOST_GPIO_SDIO_INT1` reader - "] pub struct HOST_GPIO_SDIO_INT1_R(crate::FieldReader<u8, u8>); impl HOST_GPIO_SDIO_INT1_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { HOST_GPIO_SDIO_INT1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for HOST_GPIO_SDIO_INT1_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl R { #[doc = "Bits 0:7"] #[inline(always)] pub fn host_gpio_sdio_int1(&self) -> HOST_GPIO_SDIO_INT1_R { HOST_GPIO_SDIO_INT1_R::new((self.bits & 0xff) as u8) } } #[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [host_slchost_gpio_status1](index.html) module"] pub struct HOST_SLCHOST_GPIO_STATUS1_SPEC; impl crate::RegisterSpec for HOST_SLCHOST_GPIO_STATUS1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [host_slchost_gpio_status1::R](R) reader structure"] impl crate::Readable for HOST_SLCHOST_GPIO_STATUS1_SPEC { type Reader = R; } #[doc = "`reset()` method sets HOST_SLCHOST_GPIO_STATUS1 to value 0"] impl crate::Resettable for HOST_SLCHOST_GPIO_STATUS1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
34
228
0.680283
db3f85604ba423534ceacbe8ba23d89ed5dcfdd6
22,751
//! This module specifies the input to rust-analyzer. In some sense, this is //! **the** most important module, because all other fancy stuff is strictly //! derived from this input. //! //! Note that neither this module, nor any other part of the analyzer's core do //! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how //! actual IO is done and lowered to input. use std::{fmt, iter::FromIterator, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc}; use cfg::CfgOptions; use rustc_hash::{FxHashMap, FxHashSet}; use syntax::SmolStr; use tt::Subtree; use vfs::{file_set::FileSet, FileId, VfsPath}; /// Files are grouped into source roots. A source root is a directory on the /// file systems which is watched for changes. Typically it corresponds to a /// Rust crate. Source roots *might* be nested: in this case, a file belongs to /// the nearest enclosing source root. Paths to files are always relative to a /// source root, and the analyzer does not know the root path of the source root at /// all. So, a file from one source root can't refer to a file in another source /// root by path. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct SourceRootId(pub u32); #[derive(Clone, Debug, PartialEq, Eq)] pub struct SourceRoot { /// Sysroot or crates.io library. 
/// /// Libraries are considered mostly immutable, this assumption is used to /// optimize salsa's query structure pub is_library: bool, pub(crate) file_set: FileSet, } impl SourceRoot { pub fn new_local(file_set: FileSet) -> SourceRoot { SourceRoot { is_library: false, file_set } } pub fn new_library(file_set: FileSet) -> SourceRoot { SourceRoot { is_library: true, file_set } } pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> { self.file_set.path_for_file(file) } pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> { self.file_set.file_for_path(path) } pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ { self.file_set.iter() } } /// `CrateGraph` is a bit of information which turns a set of text files into a /// number of Rust crates. /// /// Each crate is defined by the `FileId` of its root module, the set of enabled /// `cfg` flags and the set of dependencies. /// /// Note that, due to cfg's, there might be several crates for a single `FileId`! /// /// For the purposes of analysis, a crate does not have a name. Instead, names /// are specified on dependency edges. That is, a crate might be known under /// different names in different dependent crates. /// /// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust /// language proper, not a concept of the build system. In practice, we get /// `CrateGraph` by lowering `cargo metadata` output. /// /// `CrateGraph` is `!Serialize` by design, see /// <https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/architecture.md#serialization> #[derive(Debug, Clone, Default /* Serialize, Deserialize */)] pub struct CrateGraph { arena: FxHashMap<CrateId, CrateData>, } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct CrateId(pub u32); #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateName(SmolStr); impl CrateName { /// Creates a crate name, checking for dashes in the string provided. 
/// Dashes are not allowed in the crate names, /// hence the input string is returned as `Err` for those cases. pub fn new(name: &str) -> Result<CrateName, &str> { if name.contains('-') { Err(name) } else { Ok(Self(SmolStr::new(name))) } } /// Creates a crate name, unconditionally replacing the dashes with underscores. pub fn normalize_dashes(name: &str) -> CrateName { Self(SmolStr::new(name.replace('-', "_"))) } pub fn as_smol_str(&self) -> &SmolStr { &self.0 } } impl fmt::Display for CrateName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } impl ops::Deref for CrateName { type Target = str; fn deref(&self) -> &str { &*self.0 } } /// Origin of the crates. It is used in emitting monikers. #[derive(Debug, Clone)] pub enum CrateOrigin { /// Crates that are from crates.io official registry, CratesIo { repo: Option<String> }, /// Crates that are provided by the language, like std, core, proc-macro, ... Lang, /// Crates that we don't know their origin. // Ideally this enum should cover all cases, and then we remove this variant. Unknown, } impl Default for CrateOrigin { fn default() -> Self { Self::Unknown } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateDisplayName { // The name we use to display various paths (with `_`). crate_name: CrateName, // The name as specified in Cargo.toml (with `-`). 
canonical_name: String, } impl CrateDisplayName { pub fn canonical_name(&self) -> &str { &self.canonical_name } pub fn crate_name(&self) -> &CrateName { &self.crate_name } } impl From<CrateName> for CrateDisplayName { fn from(crate_name: CrateName) -> CrateDisplayName { let canonical_name = crate_name.to_string(); CrateDisplayName { crate_name, canonical_name } } } impl fmt::Display for CrateDisplayName { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.crate_name.fmt(f) } } impl ops::Deref for CrateDisplayName { type Target = str; fn deref(&self) -> &str { &*self.crate_name } } impl CrateDisplayName { pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName { let crate_name = CrateName::normalize_dashes(&canonical_name); CrateDisplayName { crate_name, canonical_name } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct ProcMacroId(pub u32); #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { CustomDerive, FuncLike, Attr, } pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe { fn expand( &self, subtree: &Subtree, attrs: Option<&Subtree>, env: &Env, ) -> Result<Subtree, ProcMacroExpansionError>; } pub enum ProcMacroExpansionError { Panic(String), /// Things like "proc macro server was killed by OOM". System(String), } #[derive(Debug, Clone)] pub struct ProcMacro { pub name: SmolStr, pub kind: ProcMacroKind, pub expander: Arc<dyn ProcMacroExpander>, } #[derive(Debug, Clone)] pub struct CrateData { pub root_file_id: FileId, pub edition: Edition, pub version: Option<String>, /// A name used in the package's project declaration: for Cargo projects, /// its `[package].name` can be different for other project types or even /// absent (a dummy crate for the code snippet, for example). /// /// For purposes of analysis, crates are anonymous (only names in /// `Dependency` matters), this name should only be used for UI. 
pub display_name: Option<CrateDisplayName>, pub cfg_options: CfgOptions, pub potential_cfg_options: CfgOptions, pub env: Env, pub dependencies: Vec<Dependency>, pub proc_macro: Vec<ProcMacro>, pub origin: CrateOrigin, pub is_proc_macro: bool, } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum Edition { Edition2015, Edition2018, Edition2021, } impl Edition { pub const CURRENT: Edition = Edition::Edition2018; } #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct Env { entries: FxHashMap<String, String>, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Dependency { pub crate_id: CrateId, pub name: CrateName, prelude: bool, } impl Dependency { pub fn new(name: CrateName, crate_id: CrateId) -> Self { Self { name, crate_id, prelude: true } } pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self { Self { name, crate_id, prelude } } /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude } } impl CrateGraph { pub fn add_crate_root( &mut self, file_id: FileId, edition: Edition, display_name: Option<CrateDisplayName>, version: Option<String>, cfg_options: CfgOptions, potential_cfg_options: CfgOptions, env: Env, proc_macro: Vec<ProcMacro>, is_proc_macro: bool, origin: CrateOrigin, ) -> CrateId { let data = CrateData { root_file_id: file_id, edition, version, display_name, cfg_options, potential_cfg_options, env, proc_macro, dependencies: Vec::new(), origin, is_proc_macro, }; let crate_id = CrateId(self.arena.len() as u32); let prev = self.arena.insert(crate_id, data); assert!(prev.is_none()); crate_id } pub fn add_dep( &mut self, from: CrateId, dep: Dependency, ) -> Result<(), CyclicDependenciesError> { let _p = profile::span("add_dep"); // Check if adding a dep from `from` to `to` creates a cycle. To figure // that out, look for a path in the *opposite* direction, from `to` to // `from`. 
if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) { let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect(); let err = CyclicDependenciesError { path }; assert!(err.from().0 == from && err.to().0 == dep.crate_id); return Err(err); } self.arena.get_mut(&from).unwrap().add_dep(dep); Ok(()) } pub fn is_empty(&self) -> bool { self.arena.is_empty() } pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ { self.arena.keys().copied() } /// Returns an iterator over all transitive dependencies of the given crate, /// including the crate itself. pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ { let mut worklist = vec![of]; let mut deps = FxHashSet::default(); while let Some(krate) = worklist.pop() { if !deps.insert(krate) { continue; } worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id)); } deps.into_iter() } /// Returns all transitive reverse dependencies of the given crate, /// including the crate itself. pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ { let mut worklist = vec![of]; let mut rev_deps = FxHashSet::default(); rev_deps.insert(of); let mut inverted_graph = FxHashMap::<_, Vec<_>>::default(); self.arena.iter().for_each(|(&krate, data)| { data.dependencies .iter() .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate)) }); while let Some(krate) = worklist.pop() { if let Some(krate_rev_deps) = inverted_graph.get(&krate) { krate_rev_deps .iter() .copied() .filter(|&rev_dep| rev_deps.insert(rev_dep)) .for_each(|rev_dep| worklist.push(rev_dep)); } } rev_deps.into_iter() } /// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate /// come before the crate itself). 
pub fn crates_in_topological_order(&self) -> Vec<CrateId> { let mut res = Vec::new(); let mut visited = FxHashSet::default(); for krate in self.arena.keys().copied() { go(self, &mut visited, &mut res, krate); } return res; fn go( graph: &CrateGraph, visited: &mut FxHashSet<CrateId>, res: &mut Vec<CrateId>, source: CrateId, ) { if !visited.insert(source) { return; } for dep in graph[source].dependencies.iter() { go(graph, visited, res, dep.crate_id) } res.push(source) } } // FIXME: this only finds one crate with the given root; we could have multiple pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> { let (&crate_id, _) = self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?; Some(crate_id) } /// Extends this crate graph by adding a complete disjoint second crate /// graph. /// /// The ids of the crates in the `other` graph are shifted by the return /// amount. pub fn extend(&mut self, other: CrateGraph) -> u32 { let start = self.arena.len() as u32; self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { let new_id = id.shift(start); for dep in &mut data.dependencies { dep.crate_id = dep.crate_id.shift(start); } (new_id, data) })); start } fn find_path( &self, visited: &mut FxHashSet<CrateId>, from: CrateId, to: CrateId, ) -> Option<Vec<CrateId>> { if !visited.insert(from) { return None; } if from == to { return Some(vec![to]); } for dep in &self[from].dependencies { let crate_id = dep.crate_id; if let Some(mut path) = self.find_path(visited, crate_id, to) { path.push(from); return Some(path); } } None } // Work around for https://github.com/rust-analyzer/rust-analyzer/issues/6038. // As hacky as it gets. 
pub fn patch_cfg_if(&mut self) -> bool { let cfg_if = self.hacky_find_crate("cfg_if"); let std = self.hacky_find_crate("std"); match (cfg_if, std) { (Some(cfg_if), Some(std)) => { self.arena.get_mut(&cfg_if).unwrap().dependencies.clear(); self.arena .get_mut(&std) .unwrap() .dependencies .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if)); true } _ => false, } } fn hacky_find_crate(&self, display_name: &str) -> Option<CrateId> { self.iter().find(|it| self[*it].display_name.as_deref() == Some(display_name)) } } impl ops::Index<CrateId> for CrateGraph { type Output = CrateData; fn index(&self, crate_id: CrateId) -> &CrateData { &self.arena[&crate_id] } } impl CrateId { fn shift(self, amount: u32) -> CrateId { CrateId(self.0 + amount) } } impl CrateData { fn add_dep(&mut self, dep: Dependency) { self.dependencies.push(dep) } } impl FromStr for Edition { type Err = ParseEditionError; fn from_str(s: &str) -> Result<Self, Self::Err> { let res = match s { "2015" => Edition::Edition2015, "2018" => Edition::Edition2018, "2021" => Edition::Edition2021, _ => return Err(ParseEditionError { invalid_input: s.to_string() }), }; Ok(res) } } impl fmt::Display for Edition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { Edition::Edition2015 => "2015", Edition::Edition2018 => "2018", Edition::Edition2021 => "2021", }) } } impl FromIterator<(String, String)> for Env { fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self { Env { entries: FromIterator::from_iter(iter) } } } impl Env { pub fn set(&mut self, env: &str, value: String) { self.entries.insert(env.to_owned(), value); } pub fn get(&self, env: &str) -> Option<String> { self.entries.get(env).cloned() } pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> { self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str())) } } #[derive(Debug)] pub struct ParseEditionError { invalid_input: String, } impl fmt::Display for ParseEditionError { fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { write!(f, "invalid edition: {:?}", self.invalid_input) } } impl std::error::Error for ParseEditionError {} #[derive(Debug)] pub struct CyclicDependenciesError { path: Vec<(CrateId, Option<CrateDisplayName>)>, } impl CyclicDependenciesError { fn from(&self) -> &(CrateId, Option<CrateDisplayName>) { self.path.first().unwrap() } fn to(&self) -> &(CrateId, Option<CrateDisplayName>) { self.path.last().unwrap() } } impl fmt::Display for CyclicDependenciesError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name { Some(it) => format!("{}({:?})", it, id), None => format!("{:?}", id), }; let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> "); write!( f, "cyclic deps: {} -> {}, alternative path: {}", render(self.from()), render(self.to()), path ) } } #[cfg(test)] mod tests { use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; #[test] fn detect_cyclic_dependency_indirect() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( FileId(1u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate3 = graph.add_crate_root( FileId(3u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) .is_ok()); assert!(graph .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3)) .is_ok()); assert!(graph .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1)) .is_err()); } #[test] fn 
detect_cyclic_dependency_direct() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( FileId(1u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) .is_ok()); assert!(graph .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) .is_err()); } #[test] fn it_works() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( FileId(1u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate3 = graph.add_crate_root( FileId(3u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) .is_ok()); assert!(graph .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3)) .is_ok()); } #[test] fn dashes_are_normalized() { let mut graph = CrateGraph::default(); let crate1 = graph.add_crate_root( FileId(1u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); let crate2 = graph.add_crate_root( FileId(2u32), Edition2018, None, None, CfgOptions::default(), CfgOptions::default(), Env::default(), Default::default(), false, Default::default(), ); assert!(graph .add_dep( crate1, 
Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2) ) .is_ok()); assert_eq!( graph[crate1].dependencies, vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)] ); } }
29.896189
103
0.56881
3a5ce7345ef8ffd8195a0465c69cea5ae99e82ea
97
// Table-schema abstraction; entity definitions live in the `entity` submodule.
mod entity;

/// This structure defines a schema for a table
// NOTE(review): currently a unit-struct placeholder with no fields; the
// actual schema data presumably lives in (or will come from) `entity`.
#[derive(Debug)]
pub struct Schema;
16.166667
47
0.731959
18aeede2a3f93ceb3ec5cf6ba037efebe3a95d90
6,316
mod client_socket_connection;
mod connection_info;
mod messages;
mod room_actor;
mod server_state;
mod service_actor;

use crate::room_actor::GetConnectionInfo;
use actix_web::error::ErrorInternalServerError;
use actix_web::web::{self, get};
use actix_web::{web::Data, App, Error, HttpRequest, HttpResponse, HttpServer, Result};
use actix_web_actors::ws::WsResponseBuilder;
pub use client_socket_connection::ClientSocketConnection;
use connection_info::ConnectionInfo;
pub use messages::{AssignClientId, MessageFromClient, MessageFromServer};
pub use room_actor::RoomActor;
use server_state::ServerState;
pub use service_actor::{ServiceActor, ServiceActorContext};
use stateroom::StateroomServiceFactory;
use std::time::{Duration, Instant};

/// Address the server binds to unless overridden with [`Server::with_ip`].
const DEFAULT_IP: &str = "0.0.0.0";

/// Settings used by the server.
pub struct Server {
    /// The duration of time between server-initiated WebSocket heartbeats.
    ///
    /// Defaults to 30 seconds.
    pub heartbeat_interval: Duration,

    /// The minimum amount of time between client heartbeats before a connection is dropped.
    ///
    /// Defaults to 5 minutes.
    pub heartbeat_timeout: Duration,

    /// The port to run the server on. Defaults to 8080.
    pub port: u32,

    /// The IP to listen on. Defaults to 0.0.0.0.
    pub ip: String,

    /// A local filesystem path to serve static files from, or None (default).
    pub static_path: Option<String>,

    /// A local filesystem path to serve from /client, or None (default).
    pub client_path: Option<String>,
}

impl Default for Server {
    fn default() -> Self {
        Server {
            heartbeat_interval: Duration::from_secs(30),
            heartbeat_timeout: Duration::from_secs(300),
            port: 8080,
            ip: DEFAULT_IP.to_string(),
            static_path: None,
            client_path: None,
        }
    }
}

impl Server {
    /// Creates a server with default settings (see field docs on [`Server`]).
    #[must_use]
    pub fn new() -> Self {
        Server::default()
    }

    /// Sets the directory served at the site root (requires the `serve-static` feature).
    #[cfg(feature = "serve-static")]
    #[must_use]
    pub fn with_static_path(mut self, static_path: Option<String>) -> Self {
        self.static_path = static_path;
        self
    }

    /// Sets the directory served under `/client` (requires the `serve-static` feature).
    #[cfg(feature = "serve-static")]
    #[must_use]
    pub fn with_client_path(mut self, client_path: Option<String>) -> Self {
        self.client_path = client_path;
        self
    }

    /// Sets the interval between server-initiated WebSocket heartbeats, in seconds.
    #[must_use]
    pub fn with_heartbeat_interval(mut self, duration_seconds: u64) -> Self {
        self.heartbeat_interval = Duration::from_secs(duration_seconds);
        self
    }

    /// Sets how long a client may stay silent before its connection is dropped, in seconds.
    #[must_use]
    pub fn with_heartbeat_timeout(mut self, duration_seconds: u64) -> Self {
        self.heartbeat_timeout = Duration::from_secs(duration_seconds);
        self
    }

    /// Sets the TCP port the server listens on.
    #[must_use]
    pub fn with_port(mut self, port: u32) -> Self {
        self.port = port;
        self
    }

    /// Sets the IP address the server binds to.
    #[must_use]
    pub fn with_ip(mut self, ip: String) -> Self {
        self.ip = ip;
        self
    }

    /// Start a server given a [StateroomService].
    ///
    /// This function blocks until the server is terminated. While it is running, the following
    /// endpoints are available:
    /// - `/status` (GET): return the room's connection info as JSON (useful as a status check)
    /// - `/ws` (GET): initiate a WebSocket connection to the stateroom service.
    pub fn serve(
        self,
        service_factory: impl StateroomServiceFactory<ServiceActorContext>,
    ) -> std::io::Result<()> {
        let host = format!("{}:{}", self.ip, self.port);

        actix_web::rt::System::new().block_on(async move {
            // NOTE(review): `unwrap` aborts startup when the service factory
            // fails to initialize; nothing useful can run without it.
            let server_state = Data::new(ServerState::new(service_factory, self).unwrap());

            let server = HttpServer::new(move || {
                #[allow(unused_mut)] // mut only needed with crate feature `serve-static`.
                let mut app = App::new()
                    .app_data(server_state.clone())
                    .route("/status", get().to(status))
                    .route("/ws", get().to(websocket));

                // Optionally mount static-file handlers; `/client` is
                // registered first so it is not shadowed by the `/` mount.
                #[cfg(feature = "serve-static")]
                {
                    if let Some(client_path) = &server_state.settings.client_path {
                        app = app.service(actix_files::Files::new("/client", client_path));
                    }

                    if let Some(static_path) = &server_state.settings.static_path {
                        app = app.service(
                            actix_files::Files::new("/", static_path).index_file("index.html"),
                        );
                    }
                }

                app
            })
            .bind(&host)?;

            tracing::info!(%host, "Server is listening");

            server.run().await
        })
    }
}

/// `GET /ws`: assigns the caller a fresh client id, upgrades the request to a
/// WebSocket backed by a [`ClientSocketConnection`] actor, and registers the
/// new connection with the room actor.
async fn websocket(req: HttpRequest, stream: web::Payload) -> actix_web::Result<HttpResponse> {
    let server_state: &Data<ServerState> = req.app_data().expect("Could not load ServerState.")
;
    let room_addr = server_state.room_addr.clone();

    // Ask the room actor for a client id before starting the socket actor.
    let client_id = room_addr
        .send(AssignClientId)
        .await
        .map_err(|_| ErrorInternalServerError("Error getting room."))?;

    match WsResponseBuilder::new(
        ClientSocketConnection {
            room: room_addr.clone().recipient(),
            client_id,
            last_seen: Instant::now(),
            heartbeat_interval: server_state.settings.heartbeat_interval,
            heartbeat_timeout: server_state.settings.heartbeat_timeout,
            interval_handle: None,
        },
        &req,
        stream,
    )
    .start_with_addr()
    {
        Ok((addr, resp)) => {
            tracing::info!(?client_id, "New connection",);

            // Register the socket with the room so it receives room messages.
            room_addr.do_send(MessageFromClient::Connect(client_id, addr.recipient()));

            Ok(resp)
        }
        Err(e) => Err(e),
    }
}

/// `GET /status`: returns the room's current [`ConnectionInfo`] as JSON.
async fn status(req: HttpRequest) -> Result<web::Json<ConnectionInfo>, Error> {
    let server_state: &Data<ServerState> = req.app_data().expect("Could not load ServerState.");
    let room_addr = server_state.room_addr.clone();

    let connection_info = room_addr
        .send(GetConnectionInfo)
        .await
        .map_err(|_| ErrorInternalServerError("Error getting connection info."))?;

    Ok(web::Json(connection_info))
}
32.22449
97
0.607505
f554e16b4f475452b81396897b5ca6b48a44ec1e
266
// rustdoc test: verifies that an intra-doc link may target a custom in-page
// anchor (the `# Anchor!` heading rendered on `Something`'s page) rather than
// an item. The `// @has` lines below are htmldocck assertions — do not edit.

/// I want...
///
/// # Anchor!
pub struct Something;

// @has intra_links_anchors/struct.SomeOtherType.html
// @has - '//a/@href' '../intra_links_anchors/struct.Something.html#Anchor!'

/// I want...
///
/// To link to [Something#Anchor!]
pub struct SomeOtherType;
20.461538
76
0.661654
293605d2c92bd42774308307c0273e62cac95a48
2,388
// this file is auto-generated by hap-codegen use serde::ser::{Serialize, SerializeStruct, Serializer}; use crate::{ accessory::{AccessoryInformation, HapAccessory}, service::{HapService, accessory_information::AccessoryInformationService, door::DoorService}, HapType, Result, }; /// Door Accessory. #[derive(Debug, Default)] pub struct DoorAccessory { /// ID of the Door Accessory. id: u64, /// Accessory Information Service. pub accessory_information: AccessoryInformationService, /// Door Service. pub door: DoorService, } impl DoorAccessory { /// Creates a new Door Accessory. pub fn new(id: u64, information: AccessoryInformation) -> Result<Self> { let accessory_information = information.to_service(1, id)?; let door_id = accessory_information.get_characteristics().len() as u64; let mut door = DoorService::new(1 + door_id + 1, id); door.set_primary(true); Ok(Self { id, accessory_information, door, }) } } impl HapAccessory for DoorAccessory { fn get_id(&self) -> u64 { self.id } fn set_id(&mut self, id: u64) { self.id = id; } fn get_service(&self, hap_type: HapType) -> Option<&dyn HapService> { for service in self.get_services() { if service.get_type() == hap_type { return Some(service); } } None } fn get_mut_service(&mut self, hap_type: HapType) -> Option<&mut dyn HapService> { for service in self.get_mut_services() { if service.get_type() == hap_type { return Some(service); } } None } fn get_services(&self) -> Vec<&dyn HapService> { vec![ &self.accessory_information, &self.door, ] } fn get_mut_services(&mut self) -> Vec<&mut dyn HapService> { vec![ &mut self.accessory_information, &mut self.door, ] } } impl Serialize for DoorAccessory { fn serialize<S: Serializer>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> { let mut state = serializer.serialize_struct("HapAccessory", 2)?; state.serialize_field("aid", &self.get_id())?; state.serialize_field("services", &self.get_services())?; state.end() } }
26.533333
95
0.595059
2895129e683fc047267af48c7cf2e75ad065d84b
289
mod exchange;
mod grant;
mod refresh;

pub use self::exchange::{Exchange, ExchangeResponse};
pub use self::grant::{Grant, GrantUrl};
pub use self::refresh::{Refresh, RefreshResponse};

/// Client credentials shared by the grant, exchange, and refresh flows.
///
/// NOTE(review): the `grant`/`exchange`/`refresh` module names suggest an
/// OAuth-style token flow — confirm against the submodules.
#[derive(Debug, Clone)]
pub struct Credential {
    /// Public client identifier.
    pub client_id: String,
    /// Confidential client secret.
    pub client_secret: String,
}
20.642857
53
0.737024
21de9bb7eaee281cec7d16ac08a816b3c758d331
5,043
use cosmwasm_std::{Addr, BlockInfo, Decimal, StdResult, Storage, Uint128};
use cosmwasm_storage::{Bucket, ReadonlyBucket};
use pylon_token::common::OrderBy;
use pylon_utils::range::{calc_range_end, calc_range_start};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::cmp::{max, min};
use std::convert::TryInto;

use crate::constant::{DEFAULT_QUERY_LIMIT, MAX_QUERY_LIMIT};

/// Static parameters of a single airdrop campaign.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Config {
    /// Campaign start time, in seconds (compared against `block.time.seconds()`).
    pub start: u64,
    /// Campaign duration, in the same unit as `start`.
    pub period: u64,
    /// Address of the token contract paid out as the reward.
    pub reward_token: Addr,
    /// Reward emission rate.
    pub reward_rate: Decimal,
}

/// Running accrual state of an airdrop campaign.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct State {
    /// Time at which rewards were last accrued into the index.
    pub last_update_time: u64,
    /// Accumulated reward-per-token index as of `last_update_time`.
    pub reward_per_token_stored: Decimal,
}

/// One airdrop campaign: immutable config plus mutable state.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Airdrop {
    pub config: Config,
    pub state: State,
}

impl Default for Airdrop {
    // Manual impl because `Addr` does not provide one; uses an empty address.
    fn default() -> Self {
        Airdrop {
            config: Config {
                start: 0,
                period: 0,
                reward_token: Addr::unchecked(""),
                reward_rate: Default::default(),
            },
            state: State {
                last_update_time: 0,
                reward_per_token_stored: Default::default(),
            },
        }
    }
}

impl Airdrop {
    /// End time of the campaign (`start + period`).
    pub fn finish(&self) -> u64 {
        self.config.start + self.config.period
    }

    /// Clamps the current block time into the campaign window
    /// `[start, finish]` — the time up to which rewards can accrue.
    pub fn applicable_time(&self, block: &BlockInfo) -> u64 {
        min(self.finish(), max(self.config.start, block.time.seconds()))
    }

    /// Loads the airdrop stored under `id`, or `None` if absent.
    ///
    /// NOTE(review): `.unwrap()` converts a storage/deserialization error
    /// into a panic instead of propagating a `StdError`.
    pub fn load(storage: &dyn Storage, id: &u64) -> Option<Airdrop> {
        ReadonlyBucket::new(storage, super::PREFIX_AIRDROP)
            .may_load(&id.to_be_bytes())
            .unwrap()
    }

    /// Paginated scan over all airdrops, keyed by big-endian id.
    ///
    /// `limit` defaults to `DEFAULT_QUERY_LIMIT` and is capped at
    /// `MAX_QUERY_LIMIT`; `start_after` is exclusive. Order defaults to
    /// descending when `order_by` is `None`.
    pub fn load_range(
        storage: &dyn Storage,
        start_after: Option<u64>,
        limit: Option<u32>,
        order_by: Option<OrderBy>,
    ) -> StdResult<Vec<(u64, Airdrop)>> {
        let limit = limit.unwrap_or(DEFAULT_QUERY_LIMIT).min(MAX_QUERY_LIMIT) as usize;
        // Ascending: bound the range from below; descending: from above.
        let (start, end, order_by) = match order_by {
            Some(OrderBy::Asc) => (calc_range_start(start_after), None, OrderBy::Asc),
            _ => (None, calc_range_end(start_after), OrderBy::Desc),
        };

        ReadonlyBucket::new(storage, super::PREFIX_AIRDROP)
            .range(start.as_deref(), end.as_deref(), order_by.into())
            .take(limit)
            .map(
                // Keys are 8-byte big-endian ids; decode back into u64.
                |item: StdResult<(Vec<u8>, Airdrop)>| -> StdResult<(u64, Airdrop)> {
                    let (k, v) = item.unwrap();
                    Ok((u64::from_be_bytes(k.try_into().unwrap()), v))
                },
            )
            .collect()
    }

    /// Persists `airdrop` under its big-endian `id` key.
    pub fn save(storage: &mut dyn Storage, id: &u64, airdrop: &Airdrop) -> StdResult<()> {
        Bucket::new(storage, super::PREFIX_AIRDROP).save(&id.to_be_bytes(), airdrop)
    }
}

/// Per-user, per-airdrop reward accounting.
#[derive(Default, Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct Reward {
    /// Reward amount accrued but not yet claimed.
    pub reward: Uint128,
    /// Value of the global reward-per-token index already paid out to this user.
    pub reward_per_token_paid: Decimal,
}

impl Reward {
    /// Loads the reward entry for (`address`, airdrop `id`); defaults to a
    /// zeroed `Reward` when no entry exists yet.
    pub fn load(storage: &dyn Storage, address: &Addr, id: &u64) -> StdResult<Reward> {
        Ok(
            ReadonlyBucket::multilevel(
                storage,
                &[super::PREFIX_AIRDROP_REWARD, address.as_bytes()],
            )
            .may_load(&id.to_be_bytes())?
            .unwrap_or_default(),
        )
    }

    /// Paginated scan over one user's reward entries, keyed by airdrop id.
    /// Same pagination semantics as [`Airdrop::load_range`].
    pub fn load_range(
        storage: &dyn Storage,
        address: &Addr,
        start_after: Option<u64>,
        limit: Option<u32>,
        order_by: Option<OrderBy>,
    ) -> StdResult<Vec<(u64, Reward)>> {
        let limit = limit.unwrap_or(DEFAULT_QUERY_LIMIT).min(MAX_QUERY_LIMIT) as usize;
        let (start, end, order_by) = match order_by {
            Some(OrderBy::Asc) => (calc_range_start(start_after), None, OrderBy::Asc),
            _ => (None, calc_range_end(start_after), OrderBy::Desc),
        };

        ReadonlyBucket::multilevel(storage, &[super::PREFIX_AIRDROP_REWARD, address.as_bytes()])
            .range(start.as_deref(), end.as_deref(), order_by.into())
            .take(limit)
            .map(
                |item: StdResult<(Vec<u8>, Reward)>| -> StdResult<(u64, Reward)> {
                    let (k, v) = item.unwrap();
                    Ok((u64::from_be_bytes(k.try_into().unwrap()), v))
                },
            )
            .collect()
    }

    /// Persists the reward entry for (`address`, `airdrop_id`).
    pub fn save(
        storage: &mut dyn Storage,
        address: &Addr,
        airdrop_id: &u64,
        reward: &Reward,
    ) -> StdResult<()> {
        let mut bucket: Bucket<Reward> =
            Bucket::multilevel(storage, &[super::PREFIX_AIRDROP_REWARD, address.as_bytes()]);
        bucket.save(&airdrop_id.to_be_bytes(), reward)
    }

    /// Deletes the reward entry for (`address`, `airdrop_id`).
    pub fn remove(storage: &mut dyn Storage, address: &Addr, airdrop_id: &u64) {
        let mut bucket: Bucket<Reward> =
            Bucket::multilevel(storage, &[super::PREFIX_AIRDROP_REWARD, address.as_bytes()]);
        bucket.remove(&airdrop_id.to_be_bytes())
    }
}
32.960784
96
0.581797
c180bb073ca7994668d62a6d1ae0cb961ea070bd
2,099
use crate::{codec::Encode, util::PartialBuffer};
use std::{fmt, io::Result};

use brotli2::{
    raw::{CoStatus, Compress, CompressOp},
    CompressParams,
};

/// Streaming brotli compressor implementing the crate's [`Encode`] interface.
pub struct BrotliEncoder {
    compress: Compress,
}

impl BrotliEncoder {
    /// Creates an encoder configured with the given compression parameters.
    pub(crate) fn new(params: &CompressParams) -> Self {
        let mut compress = Compress::new();
        compress.set_params(params);
        Self { compress }
    }

    /// Runs one compression step with the given `op`, advancing `input` and
    /// `output` by however many bytes brotli consumed and produced.
    fn encode(
        &mut self,
        input: &mut PartialBuffer<&[u8]>,
        output: &mut PartialBuffer<&mut [u8]>,
        op: CompressOp,
    ) -> Result<CoStatus> {
        let mut in_buf = input.unwritten();
        let mut out_buf = output.unwritten_mut();

        let original_input_len = in_buf.len();
        let original_output_len = out_buf.len();

        let status = self.compress.compress(op, &mut in_buf, &mut out_buf)?;

        // `Compress::compress` advances the slices it is handed; the drop in
        // remaining length is the number of bytes consumed / produced.
        let input_len = original_input_len - in_buf.len();
        let output_len = original_output_len - out_buf.len();

        input.advance(input_len);
        output.advance(output_len);

        Ok(status)
    }

    /// Drives the stream with no further input, returning `true` once `op`
    /// (flush or finish) has completed, or `false` if more output space is
    /// needed.
    ///
    /// Shared by [`Encode::flush`] and [`Encode::finish`], which previously
    /// duplicated this logic verbatim.
    fn drive(&mut self, output: &mut PartialBuffer<&mut [u8]>, op: CompressOp) -> Result<bool> {
        match self.encode(&mut PartialBuffer::new(&[][..]), output, op)? {
            CoStatus::Unfinished => Ok(false),
            CoStatus::Finished => Ok(true),
        }
    }
}

impl Encode for BrotliEncoder {
    fn encode(
        &mut self,
        input: &mut PartialBuffer<&[u8]>,
        output: &mut PartialBuffer<&mut [u8]>,
    ) -> Result<()> {
        // Inherent `encode` takes precedence over the trait method here.
        self.encode(input, output, CompressOp::Process).map(drop)
    }

    fn flush(&mut self, output: &mut PartialBuffer<&mut [u8]>) -> Result<bool> {
        self.drive(output, CompressOp::Flush)
    }

    fn finish(&mut self, output: &mut PartialBuffer<&mut [u8]>) -> Result<bool> {
        self.drive(output, CompressOp::Finish)
    }
}

impl fmt::Debug for BrotliEncoder {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `brotli2::raw::Compress` does not implement `Debug`, so emit a
        // placeholder for that field.
        f.debug_struct("BrotliEncoder")
            .field("compress", &"<no debug>")
            .finish()
    }
}
27.986667
90
0.580276
87e74778ea37a31c98a5b06ca19f0f25fc283fc0
3,902
use xhighlight::syntax::rust::Rust; use xhighlight::render::{Renderer, HtmlRenderer}; #[macro_use] extern crate lazy_static; const RUST_CSS: [(Rust, &str); 19] = [ // Here, you define the CSS classes for tokens // E.g. a keyword is encoded as <span class="kwd">...</span> (Rust::Text, ""), (Rust::Identifier, ""), (Rust::Keyword, "kwd"), (Rust::Operator, "opr"), (Rust::FnCall, "fun"), (Rust::Punctuation, "pun"), (Rust::Lifetime, "lif"), (Rust::PrimitiveType, "typ"), (Rust::Number, "num"), (Rust::Bool, "boo"), (Rust::MacroCall, "mac"), (Rust::Annotation, "ann"), (Rust::String, "str"), (Rust::StringEscape, "esc"), (Rust::Char, "chr"), (Rust::LineComment, "com"), (Rust::BlockComment, "com"), (Rust::DocComment, "doc"), (Rust::RawLiteral, "raw"), ]; const TEST_STR: &str = r#####################################" /// This is some serious shit! /// This, too. #[allow(dead_code)] pub struct HelloWorld { x: bool, y: String } // test fn main() { let mut my_val = HelloWorld { x: /* blah */ true, y: r#######"Hello \n \"World!\""####### }; my_val.y += 3; }"#####################################; #[test] pub fn test() { let mut parser = Rust::make_parser(); let output = HtmlRenderer::new(&mut parser) .set_mapping(&RUST_CSS) .render(TEST_STR); println!("{}", output); } #[cfg(test)] pub mod self_impl { use xhighlight::parse::Highlight; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] enum MyLanguage { Text, Keyword, Str, Number, Comment, } impl Highlight for MyLanguage {} // List all keywords; this should be efficient thanks to regex optimizations const KEYWORD: &str = r"\b(for|while|do|switch|case|default|continue|break|if|else|try|catch|finally|throw|synchronized|this|public|protected|private|static|final|abstract|volatile|transient|native|new|var|super|void|class|enum|interface|extends|import|package|instanceof|assert|strictfp|true|false|null|boolean|byte|char|double|float|int|long|short)\b"; // Allow double-quoted string with escape sequences const STRING: &str = 
r#""(\\.|[^"])*""#; // Numbers: Floats (e.g. 3.14159e-5) and integers in decimal, binary or hex const NUMBER: &str = r"(\d+|\d*\.\d+)([eE][+-]?\d+)?|0x[0-9a-fA-F]+|0b[01]+"; // Allow // line comments and /* block comments */ const COMMENT: &str = r"//.*|/\*.*?\*/"; use xhighlight::parse::{RegexPat, Parser}; use xhighlight::render::{Renderer, HtmlRenderer}; use self::MyLanguage::*; const MY_MAPPING: [(MyLanguage, &str); 5] = [ (Text, ""), (Keyword, "kwd"), (Str, "str"), (Number, "num"), (Comment, "com"), ]; lazy_static! { // Use lazy_static to compile the regexes only once // Note that the order is important: // When two regexes match at a string index, it will always choose the first one static ref REGEXES: Vec<(RegexPat<MyLanguage>, MyLanguage)> = { vec![ (RegexPat::regex(KEYWORD, Keyword), Text), (RegexPat::regex(COMMENT, Comment), Text), (RegexPat::regex(STRING, Str), Text), (RegexPat::regex(NUMBER, Number), Text), ] }; } pub fn parse_java_to_html(string: &str) -> String { let mut parser = Parser::new(Text); parser.add_matcher(Text, &REGEXES); HtmlRenderer::new(&mut parser) .set_mapping(&MY_MAPPING) .render(string) } #[test] pub fn test() { println!("{}", parse_java_to_html(r#"public class HelloWorld {\ private String s = "Hello World"; }"#)); } }
32.516667
358
0.549462
79642ca43cdf24112fd5ba4f8a1d939f7e8ce50b
412
extern crate jemalloc_sys; extern crate jemallocator; // Work around https://github.com/alexcrichton/jemallocator/issues/19 #[global_allocator] static A: jemallocator::Jemalloc = jemallocator::Jemalloc; #[test] fn smoke() { unsafe { let ptr = jemalloc_sys::malloc(4); *(ptr as *mut u32) = 0xDECADE; assert_eq!(*(ptr as *mut u32), 0xDECADE); jemalloc_sys::free(ptr); } }
24.235294
69
0.662621
e8c48f016563712256eeb1ef0c1f17db66cdf97b
4,667
// Reads the url use std::ops::{Deref, Drop}; use std::env; use std::fs::{File, remove_file}; use std::path::{Path, PathBuf}; use std::io::{self, BufRead, Read, BufWriter, Write}; use std::process::{Command, ChildStderr, Stdio}; use std::string::FromUtf8Error; use reqwest::{get, Response}; use time::get_time; fn execute(path: String, lang: Option<&String>) -> Option<String> { let mut cmd = Command::new("tesseract"); if lang.is_some(){ let lang_str : String = lang.unwrap().to_owned(); cmd.arg("-l").arg(lang_str.as_str()); } let output = cmd.arg(path).arg("stdout").output().unwrap(); match String::from_utf8(output.stdout) { Ok(st) => Some(st), Err(e) => None, } } struct TempImageFile { filename: String, file: File, path: String, } impl TempImageFile { fn new(prefix: &str, suffix: &str) -> Self { let mut filename = String::new(); let timestamp: String = get_time().sec.to_string(); filename.push_str(prefix); filename.push_str(&timestamp[..]); filename.push_str(suffix); let mut abspath = String::new(); abspath.push_str(env::temp_dir() .into_os_string() .into_string() .unwrap() .deref()); abspath.push_str("/"); abspath.push_str(filename.clone().deref()); debug!("creating a filename {:?}", abspath.as_str()); TempImageFile { filename: filename.clone(), file: File::create(&abspath[..]).unwrap(), path: abspath, } } fn into_file(&mut self) -> &mut File { &mut self.file } fn path(&self) -> String { self.path.clone() } } impl Drop for TempImageFile { fn drop(&mut self) -> () { debug!("deleting file {:?}", self.path); remove_file(Path::new(&self.path[..])); } } #[derive(Debug, Copy, Clone, PartialEq)] pub enum ImageFormat { PNG, JPG, TIFF, BMP, GIF, } impl<'a> From<&'a str> for ImageFormat { fn from(r: &'a str) -> Self { match r { "png" => ImageFormat::PNG, "jpg" => ImageFormat::JPG, "tiff" => ImageFormat::TIFF, "bmp" => ImageFormat::BMP, "gif" => ImageFormat::GIF, _ => ImageFormat::JPG, } } } impl<'a> From<ImageFormat> for &'a str { fn from(r: ImageFormat) -> Self { match r { 
ImageFormat::PNG => "png", ImageFormat::JPG => "jpg", ImageFormat::TIFF => "tiff", ImageFormat::BMP => "bmp", ImageFormat::GIF => "gif", _ => "jpg", } } } pub struct ImageReader<T: Read>{ reader: T, format: ImageFormat, } impl<T: Read> ImageReader<T> { pub fn tempfile(&self) -> TempImageFile { let mut suf: String = String::new(); suf.push_str("."); suf.push_str(self.format.into()); TempImageFile::new("tess_", &suf[..]) } pub fn new(R: T, format: ImageFormat) -> Self { ImageReader { reader: R, format } } pub fn text(&mut self, lang: Option<&String>) -> Option<String> { let mut temporary = self.tempfile(); let path = temporary.path(); let mut writer = BufWriter::new(temporary.into_file()); let mut buff: Vec<u8> = Vec::new(); let amt = self.reader.read_to_end(&mut buff).unwrap(); writer.write_all(buff.as_slice()); writer.flush(); let tfile = writer.into_inner().unwrap(); execute(path, lang) } } #[derive(Debug, Clone)] pub struct ImageBuilder { url: String, } impl ImageBuilder { pub fn from_url(url: &str) -> Self { ImageBuilder { url: url.to_string() } } pub fn format(&self) -> ImageFormat { let item: &str = self.url.split(".").last().unwrap(); item.into() } pub fn reader(&self) -> ImageReader<Response> { let response = get(&self.url[..]).unwrap(); ImageReader::new(response, self.format()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_image_builder_format() { let image = ImageBuilder::from_url("https://www.pyimagesearch.com/wp-content/uploads/2017/06/example_01.png"); assert_eq!(image.format(), ImageFormat::PNG); } #[test] fn test_image_reader_text() { let image = ImageBuilder::from_url("https://i.stack.imgur.com/t3qWG.png"); let mut reader = image.reader(); let text = reader.text(None).unwrap(); assert_ne!(text.len(), 0); } }
26.367232
118
0.544461
9bfa1b150259f6c2b6146fa33e30e3beb9924838
31,582
//! Low-Level binding for [Array API](https://docs.scipy.org/doc/numpy/reference/c-api.array.html) use libc::FILE; use pyo3::ffi::{self, PyObject, PyTypeObject}; use std::ops::Deref; use std::os::raw::*; use std::ptr; use std::sync::{Once, ONCE_INIT}; use npyffi::*; pub(crate) const MOD_NAME: &str = "numpy.core.multiarray"; const CAPSULE_NAME: &str = "_ARRAY_API"; /// A global variable which stores a ['capsule'](https://docs.python.org/3/c-api/capsule.html) /// pointer to [Numpy Array API](https://docs.scipy.org/doc/numpy/reference/c-api.array.html). /// /// You can acceess raw c APIs via this variable and its Deref implementation. /// /// See [PyArrayAPI](struct.PyArrayAPI.html) for what methods you can use via this variable. /// /// # Example /// ``` /// # extern crate pyo3; extern crate numpy; fn main() { /// use numpy::{PyArray, npyffi::types::NPY_SORTKIND, PY_ARRAY_API}; /// use pyo3::Python; /// let gil = Python::acquire_gil(); /// let array = PyArray::from_slice(gil.python(), &[3, 2, 4]); /// unsafe { /// PY_ARRAY_API.PyArray_Sort(array.as_array_ptr(), 0, NPY_SORTKIND::NPY_QUICKSORT); /// } /// assert_eq!(array.as_slice(), &[2, 3, 4]) /// # } /// ``` pub static PY_ARRAY_API: PyArrayAPI = PyArrayAPI { __private_field: (), }; pub struct PyArrayAPI { __private_field: (), } impl Deref for PyArrayAPI { type Target = PyArrayAPI_Inner; fn deref(&self) -> &Self::Target { static INIT_API: Once = ONCE_INIT; static mut ARRAY_API_CACHE: PyArrayAPI_Inner = PyArrayAPI_Inner(ptr::null()); unsafe { if ARRAY_API_CACHE.0.is_null() { let api = get_numpy_api(MOD_NAME, CAPSULE_NAME); INIT_API.call_once(move || { ARRAY_API_CACHE = PyArrayAPI_Inner(api); }); } &ARRAY_API_CACHE } } } #[allow(non_camel_case_types)] pub struct PyArrayAPI_Inner(*const *const c_void); impl PyArrayAPI_Inner { impl_api![0; PyArray_GetNDArrayCVersion() -> c_uint]; impl_api![40; PyArray_SetNumericOps(dict: *mut PyObject) -> c_int]; impl_api![41; PyArray_GetNumericOps() -> *mut PyObject]; impl_api![42; 
PyArray_INCREF(mp: *mut PyArrayObject) -> c_int]; impl_api![43; PyArray_XDECREF(mp: *mut PyArrayObject) -> c_int]; impl_api![44; PyArray_SetStringFunction(op: *mut PyObject, repr: c_int)]; impl_api![45; PyArray_DescrFromType(type_: c_int) -> *mut PyArray_Descr]; impl_api![46; PyArray_TypeObjectFromType(type_: c_int) -> *mut PyObject]; impl_api![47; PyArray_Zero(arr: *mut PyArrayObject) -> *mut c_char]; impl_api![48; PyArray_One(arr: *mut PyArrayObject) -> *mut c_char]; impl_api![49; PyArray_CastToType(arr: *mut PyArrayObject, dtype: *mut PyArray_Descr, is_f_order: c_int) -> *mut PyObject]; impl_api![50; PyArray_CastTo(out: *mut PyArrayObject, mp: *mut PyArrayObject) -> c_int]; impl_api![51; PyArray_CastAnyTo(out: *mut PyArrayObject, mp: *mut PyArrayObject) -> c_int]; impl_api![52; PyArray_CanCastSafely(fromtype: c_int, totype: c_int) -> c_int]; impl_api![53; PyArray_CanCastTo(from: *mut PyArray_Descr, to: *mut PyArray_Descr) -> npy_bool]; impl_api![54; PyArray_ObjectType(op: *mut PyObject, minimum_type: c_int) -> c_int]; impl_api![55; PyArray_DescrFromObject(op: *mut PyObject, mintype: *mut PyArray_Descr) -> *mut PyArray_Descr]; impl_api![56; PyArray_ConvertToCommonType(op: *mut PyObject, retn: *mut c_int) -> *mut *mut PyArrayObject]; impl_api![57; PyArray_DescrFromScalar(sc: *mut PyObject) -> *mut PyArray_Descr]; impl_api![58; PyArray_DescrFromTypeObject(type_: *mut PyObject) -> *mut PyArray_Descr]; impl_api![59; PyArray_Size(op: *mut PyObject) -> npy_intp]; impl_api![60; PyArray_Scalar(data: *mut c_void, descr: *mut PyArray_Descr, base: *mut PyObject) -> *mut PyObject]; impl_api![61; PyArray_FromScalar(scalar: *mut PyObject, outcode: *mut PyArray_Descr) -> *mut PyObject]; impl_api![62; PyArray_ScalarAsCtype(scalar: *mut PyObject, ctypeptr: *mut c_void)]; impl_api![63; PyArray_CastScalarToCtype(scalar: *mut PyObject, ctypeptr: *mut c_void, outcode: *mut PyArray_Descr) -> c_int]; impl_api![64; PyArray_CastScalarDirect(scalar: *mut PyObject, indescr: *mut 
PyArray_Descr, ctypeptr: *mut c_void, outtype: c_int) -> c_int]; impl_api![65; PyArray_ScalarFromObject(object: *mut PyObject) -> *mut PyObject]; impl_api![66; PyArray_GetCastFunc(descr: *mut PyArray_Descr, type_num: c_int) -> PyArray_VectorUnaryFunc]; impl_api![67; PyArray_FromDims(nd: c_int, d: *mut c_int, type_: c_int) -> *mut PyObject]; impl_api![68; PyArray_FromDimsAndDataAndDescr(nd: c_int, d: *mut c_int, descr: *mut PyArray_Descr, data: *mut c_char) -> *mut PyObject]; impl_api![69; PyArray_FromAny(op: *mut PyObject, newtype: *mut PyArray_Descr, min_depth: c_int, max_depth: c_int, flags: c_int, context: *mut PyObject) -> *mut PyObject]; impl_api![70; PyArray_EnsureArray(op: *mut PyObject) -> *mut PyObject]; impl_api![71; PyArray_EnsureAnyArray(op: *mut PyObject) -> *mut PyObject]; impl_api![72; PyArray_FromFile(fp: *mut FILE, dtype: *mut PyArray_Descr, num: npy_intp, sep: *mut c_char) -> *mut PyObject]; impl_api![73; PyArray_FromString(data: *mut c_char, slen: npy_intp, dtype: *mut PyArray_Descr, num: npy_intp, sep: *mut c_char) -> *mut PyObject]; impl_api![74; PyArray_FromBuffer(buf: *mut PyObject, type_: *mut PyArray_Descr, count: npy_intp, offset: npy_intp) -> *mut PyObject]; impl_api![75; PyArray_FromIter(obj: *mut PyObject, dtype: *mut PyArray_Descr, count: npy_intp) -> *mut PyObject]; impl_api![76; PyArray_Return(mp: *mut PyArrayObject) -> *mut PyObject]; impl_api![77; PyArray_GetField(self_: *mut PyArrayObject, typed: *mut PyArray_Descr, offset: c_int) -> *mut PyObject]; impl_api![78; PyArray_SetField(self_: *mut PyArrayObject, dtype: *mut PyArray_Descr, offset: c_int, val: *mut PyObject) -> c_int]; impl_api![79; PyArray_Byteswap(self_: *mut PyArrayObject, inplace: npy_bool) -> *mut PyObject]; impl_api![80; PyArray_Resize(self_: *mut PyArrayObject, newshape: *mut PyArray_Dims, refcheck: c_int, order: NPY_ORDER) -> *mut PyObject]; impl_api![81; PyArray_MoveInto(dst: *mut PyArrayObject, src: *mut PyArrayObject) -> c_int]; impl_api![82; 
PyArray_CopyInto(dst: *mut PyArrayObject, src: *mut PyArrayObject) -> c_int]; impl_api![83; PyArray_CopyAnyInto(dst: *mut PyArrayObject, src: *mut PyArrayObject) -> c_int]; impl_api![84; PyArray_CopyObject(dest: *mut PyArrayObject, src_object: *mut PyObject) -> c_int]; impl_api![85; PyArray_NewCopy(obj: *mut PyArrayObject, order: NPY_ORDER) -> *mut PyObject]; impl_api![86; PyArray_ToList(self_: *mut PyArrayObject) -> *mut PyObject]; impl_api![87; PyArray_ToString(self_: *mut PyArrayObject, order: NPY_ORDER) -> *mut PyObject]; impl_api![88; PyArray_ToFile(self_: *mut PyArrayObject, fp: *mut FILE, sep: *mut c_char, format: *mut c_char) -> c_int]; impl_api![89; PyArray_Dump(self_: *mut PyObject, file: *mut PyObject, protocol: c_int) -> c_int]; impl_api![90; PyArray_Dumps(self_: *mut PyObject, protocol: c_int) -> *mut PyObject]; impl_api![91; PyArray_ValidType(type_: c_int) -> c_int]; impl_api![92; PyArray_UpdateFlags(ret: *mut PyArrayObject, flagmask: c_int)]; impl_api![93; PyArray_New(subtype: *mut PyTypeObject, nd: c_int, dims: *mut npy_intp, type_num: c_int, strides: *mut npy_intp, data: *mut c_void, itemsize: c_int, flags: c_int, obj: *mut PyObject) -> *mut PyObject]; impl_api![94; PyArray_NewFromDescr(subtype: *mut PyTypeObject, descr: *mut PyArray_Descr, nd: c_int, dims: *mut npy_intp, strides: *mut npy_intp, data: *mut c_void, flags: c_int, obj: *mut PyObject) -> *mut PyObject]; impl_api![95; PyArray_DescrNew(base: *mut PyArray_Descr) -> *mut PyArray_Descr]; impl_api![96; PyArray_DescrNewFromType(type_num: c_int) -> *mut PyArray_Descr]; impl_api![97; PyArray_GetPriority(obj: *mut PyObject, default_: f64) -> f64]; impl_api![98; PyArray_IterNew(obj: *mut PyObject) -> *mut PyObject]; // impl_api![99; PyArray_MultiIterNew(n: c_int, ...) 
-> *mut PyObject]; impl_api![100; PyArray_PyIntAsInt(o: *mut PyObject) -> c_int]; impl_api![101; PyArray_PyIntAsIntp(o: *mut PyObject) -> npy_intp]; impl_api![102; PyArray_Broadcast(mit: *mut PyArrayMultiIterObject) -> c_int]; impl_api![103; PyArray_FillObjectArray(arr: *mut PyArrayObject, obj: *mut PyObject)]; impl_api![104; PyArray_FillWithScalar(arr: *mut PyArrayObject, obj: *mut PyObject) -> c_int]; impl_api![105; PyArray_CheckStrides(elsize: c_int, nd: c_int, numbytes: npy_intp, offset: npy_intp, dims: *mut npy_intp, newstrides: *mut npy_intp) -> npy_bool]; impl_api![106; PyArray_DescrNewByteorder(self_: *mut PyArray_Descr, newendian: c_char) -> *mut PyArray_Descr]; impl_api![107; PyArray_IterAllButAxis(obj: *mut PyObject, inaxis: *mut c_int) -> *mut PyObject]; impl_api![108; PyArray_CheckFromAny(op: *mut PyObject, descr: *mut PyArray_Descr, min_depth: c_int, max_depth: c_int, requires: c_int, context: *mut PyObject) -> *mut PyObject]; impl_api![109; PyArray_FromArray(arr: *mut PyArrayObject, newtype: *mut PyArray_Descr, flags: c_int) -> *mut PyObject]; impl_api![110; PyArray_FromInterface(origin: *mut PyObject) -> *mut PyObject]; impl_api![111; PyArray_FromStructInterface(input: *mut PyObject) -> *mut PyObject]; impl_api![112; PyArray_FromArrayAttr(op: *mut PyObject, typecode: *mut PyArray_Descr, context: *mut PyObject) -> *mut PyObject]; impl_api![113; PyArray_ScalarKind(typenum: c_int, arr: *mut *mut PyArrayObject) -> NPY_SCALARKIND]; impl_api![114; PyArray_CanCoerceScalar(thistype: c_int, neededtype: c_int, scalar: NPY_SCALARKIND) -> c_int]; impl_api![115; PyArray_NewFlagsObject(obj: *mut PyObject) -> *mut PyObject]; impl_api![116; PyArray_CanCastScalar(from: *mut PyTypeObject, to: *mut PyTypeObject) -> npy_bool]; impl_api![117; PyArray_CompareUCS4(s1: *mut npy_ucs4, s2: *mut npy_ucs4, len: usize) -> c_int]; impl_api![118; PyArray_RemoveSmallest(multi: *mut PyArrayMultiIterObject) -> c_int]; impl_api![119; PyArray_ElementStrides(obj: *mut PyObject) -> 
c_int]; impl_api![120; PyArray_Item_INCREF(data: *mut c_char, descr: *mut PyArray_Descr)]; impl_api![121; PyArray_Item_XDECREF(data: *mut c_char, descr: *mut PyArray_Descr)]; impl_api![122; PyArray_FieldNames(fields: *mut PyObject) -> *mut PyObject]; impl_api![123; PyArray_Transpose(ap: *mut PyArrayObject, permute: *mut PyArray_Dims) -> *mut PyObject]; impl_api![124; PyArray_TakeFrom(self0: *mut PyArrayObject, indices0: *mut PyObject, axis: c_int, out: *mut PyArrayObject, clipmode: NPY_CLIPMODE) -> *mut PyObject]; impl_api![125; PyArray_PutTo(self_: *mut PyArrayObject, values0: *mut PyObject, indices0: *mut PyObject, clipmode: NPY_CLIPMODE) -> *mut PyObject]; impl_api![126; PyArray_PutMask(self_: *mut PyArrayObject, values0: *mut PyObject, mask0: *mut PyObject) -> *mut PyObject]; impl_api![127; PyArray_Repeat(aop: *mut PyArrayObject, op: *mut PyObject, axis: c_int) -> *mut PyObject]; impl_api![128; PyArray_Choose(ip: *mut PyArrayObject, op: *mut PyObject, out: *mut PyArrayObject, clipmode: NPY_CLIPMODE) -> *mut PyObject]; impl_api![129; PyArray_Sort(op: *mut PyArrayObject, axis: c_int, which: NPY_SORTKIND) -> c_int]; impl_api![130; PyArray_ArgSort(op: *mut PyArrayObject, axis: c_int, which: NPY_SORTKIND) -> *mut PyObject]; impl_api![131; PyArray_SearchSorted(op1: *mut PyArrayObject, op2: *mut PyObject, side: NPY_SEARCHSIDE, perm: *mut PyObject) -> *mut PyObject]; impl_api![132; PyArray_ArgMax(op: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![133; PyArray_ArgMin(op: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![134; PyArray_Reshape(self_: *mut PyArrayObject, shape: *mut PyObject) -> *mut PyObject]; impl_api![135; PyArray_Newshape(self_: *mut PyArrayObject, newdims: *mut PyArray_Dims, order: NPY_ORDER) -> *mut PyObject]; impl_api![136; PyArray_Squeeze(self_: *mut PyArrayObject) -> *mut PyObject]; impl_api![137; PyArray_View(self_: *mut PyArrayObject, type_: *mut PyArray_Descr, pytype: 
*mut PyTypeObject) -> *mut PyObject]; impl_api![138; PyArray_SwapAxes(ap: *mut PyArrayObject, a1: c_int, a2: c_int) -> *mut PyObject]; impl_api![139; PyArray_Max(ap: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![140; PyArray_Min(ap: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![141; PyArray_Ptp(ap: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![142; PyArray_Mean(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![143; PyArray_Trace(self_: *mut PyArrayObject, offset: c_int, axis1: c_int, axis2: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![144; PyArray_Diagonal(self_: *mut PyArrayObject, offset: c_int, axis1: c_int, axis2: c_int) -> *mut PyObject]; impl_api![145; PyArray_Clip(self_: *mut PyArrayObject, min: *mut PyObject, max: *mut PyObject, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![146; PyArray_Conjugate(self_: *mut PyArrayObject, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![147; PyArray_Nonzero(self_: *mut PyArrayObject) -> *mut PyObject]; impl_api![148; PyArray_Std(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject, variance: c_int) -> *mut PyObject]; impl_api![149; PyArray_Sum(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![150; PyArray_CumSum(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![151; PyArray_Prod(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![152; PyArray_CumProd(self_: *mut PyArrayObject, axis: c_int, rtype: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![153; PyArray_All(self_: *mut PyArrayObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![154; PyArray_Any(self_: *mut PyArrayObject, 
axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![155; PyArray_Compress(self_: *mut PyArrayObject, condition: *mut PyObject, axis: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![156; PyArray_Flatten(a: *mut PyArrayObject, order: NPY_ORDER) -> *mut PyObject]; impl_api![157; PyArray_Ravel(arr: *mut PyArrayObject, order: NPY_ORDER) -> *mut PyObject]; impl_api![158; PyArray_MultiplyList(l1: *mut npy_intp, n: c_int) -> npy_intp]; impl_api![159; PyArray_MultiplyIntList(l1: *mut c_int, n: c_int) -> c_int]; impl_api![160; PyArray_GetPtr(obj: *mut PyArrayObject, ind: *mut npy_intp) -> *mut c_void]; impl_api![161; PyArray_CompareLists(l1: *mut npy_intp, l2: *mut npy_intp, n: c_int) -> c_int]; impl_api![162; PyArray_AsCArray(op: *mut *mut PyObject, ptr: *mut c_void, dims: *mut npy_intp, nd: c_int, typedescr: *mut PyArray_Descr) -> c_int]; impl_api![163; PyArray_As1D(op: *mut *mut PyObject, ptr: *mut *mut c_char, d1: *mut c_int, typecode: c_int) -> c_int]; impl_api![164; PyArray_As2D(op: *mut *mut PyObject, ptr: *mut *mut *mut c_char, d1: *mut c_int, d2: *mut c_int, typecode: c_int) -> c_int]; impl_api![165; PyArray_Free(op: *mut PyObject, ptr: *mut c_void) -> c_int]; impl_api![166; PyArray_Converter(object: *mut PyObject, address: *mut *mut PyObject) -> c_int]; impl_api![167; PyArray_IntpFromSequence(seq: *mut PyObject, vals: *mut npy_intp, maxvals: c_int) -> c_int]; impl_api![168; PyArray_Concatenate(op: *mut PyObject, axis: c_int) -> *mut PyObject]; impl_api![169; PyArray_InnerProduct(op1: *mut PyObject, op2: *mut PyObject) -> *mut PyObject]; impl_api![170; PyArray_MatrixProduct(op1: *mut PyObject, op2: *mut PyObject) -> *mut PyObject]; impl_api![171; PyArray_CopyAndTranspose(op: *mut PyObject) -> *mut PyObject]; impl_api![172; PyArray_Correlate(op1: *mut PyObject, op2: *mut PyObject, mode: c_int) -> *mut PyObject]; impl_api![173; PyArray_TypestrConvert(itemsize: c_int, gentype: c_int) -> c_int]; impl_api![174; PyArray_DescrConverter(obj: *mut 
PyObject, at: *mut *mut PyArray_Descr) -> c_int]; impl_api![175; PyArray_DescrConverter2(obj: *mut PyObject, at: *mut *mut PyArray_Descr) -> c_int]; impl_api![176; PyArray_IntpConverter(obj: *mut PyObject, seq: *mut PyArray_Dims) -> c_int]; impl_api![177; PyArray_BufferConverter(obj: *mut PyObject, buf: *mut PyArray_Chunk) -> c_int]; impl_api![178; PyArray_AxisConverter(obj: *mut PyObject, axis: *mut c_int) -> c_int]; impl_api![179; PyArray_BoolConverter(object: *mut PyObject, val: *mut npy_bool) -> c_int]; impl_api![180; PyArray_ByteorderConverter(obj: *mut PyObject, endian: *mut c_char) -> c_int]; impl_api![181; PyArray_OrderConverter(object: *mut PyObject, val: *mut NPY_ORDER) -> c_int]; impl_api![182; PyArray_EquivTypes(type1: *mut PyArray_Descr, type2: *mut PyArray_Descr) -> c_uchar]; impl_api![183; PyArray_Zeros(nd: c_int, dims: *mut npy_intp, type_: *mut PyArray_Descr, is_f_order: c_int) -> *mut PyObject]; impl_api![184; PyArray_Empty(nd: c_int, dims: *mut npy_intp, type_: *mut PyArray_Descr, is_f_order: c_int) -> *mut PyObject]; impl_api![185; PyArray_Where(condition: *mut PyObject, x: *mut PyObject, y: *mut PyObject) -> *mut PyObject]; impl_api![186; PyArray_Arange(start: f64, stop: f64, step: f64, type_num: c_int) -> *mut PyObject]; impl_api![187; PyArray_ArangeObj(start: *mut PyObject, stop: *mut PyObject, step: *mut PyObject, dtype: *mut PyArray_Descr) -> *mut PyObject]; impl_api![188; PyArray_SortkindConverter(obj: *mut PyObject, sortkind: *mut NPY_SORTKIND) -> c_int]; impl_api![189; PyArray_LexSort(sort_keys: *mut PyObject, axis: c_int) -> *mut PyObject]; impl_api![190; PyArray_Round(a: *mut PyArrayObject, decimals: c_int, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![191; PyArray_EquivTypenums(typenum1: c_int, typenum2: c_int) -> c_uchar]; impl_api![192; PyArray_RegisterDataType(descr: *mut PyArray_Descr) -> c_int]; impl_api![193; PyArray_RegisterCastFunc(descr: *mut PyArray_Descr, totype: c_int, castfunc: PyArray_VectorUnaryFunc) -> c_int]; 
impl_api![194; PyArray_RegisterCanCast(descr: *mut PyArray_Descr, totype: c_int, scalar: NPY_SCALARKIND) -> c_int]; impl_api![195; PyArray_InitArrFuncs(f: *mut PyArray_ArrFuncs)]; impl_api![196; PyArray_IntTupleFromIntp(len: c_int, vals: *mut npy_intp) -> *mut PyObject]; impl_api![197; PyArray_TypeNumFromName(str: *mut c_char) -> c_int]; impl_api![198; PyArray_ClipmodeConverter(object: *mut PyObject, val: *mut NPY_CLIPMODE) -> c_int]; impl_api![199; PyArray_OutputConverter(object: *mut PyObject, address: *mut *mut PyArrayObject) -> c_int]; impl_api![200; PyArray_BroadcastToShape(obj: *mut PyObject, dims: *mut npy_intp, nd: c_int) -> *mut PyObject]; impl_api![201; _PyArray_SigintHandler(signum: c_int)]; impl_api![202; _PyArray_GetSigintBuf() -> *mut c_void]; impl_api![203; PyArray_DescrAlignConverter(obj: *mut PyObject, at: *mut *mut PyArray_Descr) -> c_int]; impl_api![204; PyArray_DescrAlignConverter2(obj: *mut PyObject, at: *mut *mut PyArray_Descr) -> c_int]; impl_api![205; PyArray_SearchsideConverter(obj: *mut PyObject, addr: *mut c_void) -> c_int]; impl_api![206; PyArray_CheckAxis(arr: *mut PyArrayObject, axis: *mut c_int, flags: c_int) -> *mut PyObject]; impl_api![207; PyArray_OverflowMultiplyList(l1: *mut npy_intp, n: c_int) -> npy_intp]; impl_api![208; PyArray_CompareString(s1: *mut c_char, s2: *mut c_char, len: usize) -> c_int]; // impl_api![209; PyArray_MultiIterFromObjects(mps: *mut *mut PyObject, n: c_int, nadd: c_int, ...) 
-> *mut PyObject]; impl_api![210; PyArray_GetEndianness() -> c_int]; impl_api![211; PyArray_GetNDArrayCFeatureVersion() -> c_uint]; impl_api![212; PyArray_Correlate2(op1: *mut PyObject, op2: *mut PyObject, mode: c_int) -> *mut PyObject]; impl_api![213; PyArray_NeighborhoodIterNew(x: *mut PyArrayIterObject, bounds: *mut npy_intp, mode: c_int, fill: *mut PyArrayObject) -> *mut PyObject]; impl_api![219; PyArray_SetDatetimeParseFunction(op: *mut PyObject)]; impl_api![220; PyArray_DatetimeToDatetimeStruct(val: npy_datetime, fr: NPY_DATETIMEUNIT, result: *mut npy_datetimestruct)]; impl_api![221; PyArray_TimedeltaToTimedeltaStruct(val: npy_timedelta, fr: NPY_DATETIMEUNIT, result: *mut npy_timedeltastruct)]; impl_api![222; PyArray_DatetimeStructToDatetime(fr: NPY_DATETIMEUNIT, d: *mut npy_datetimestruct) -> npy_datetime]; impl_api![223; PyArray_TimedeltaStructToTimedelta(fr: NPY_DATETIMEUNIT, d: *mut npy_timedeltastruct) -> npy_datetime]; impl_api![224; NpyIter_New(op: *mut PyArrayObject, flags: npy_uint32, order: NPY_ORDER, casting: NPY_CASTING, dtype: *mut PyArray_Descr) -> *mut NpyIter]; impl_api![225; NpyIter_MultiNew(nop: c_int, op_in: *mut *mut PyArrayObject, flags: npy_uint32, order: NPY_ORDER, casting: NPY_CASTING, op_flags: *mut npy_uint32, op_request_dtypes: *mut *mut PyArray_Descr) -> *mut NpyIter]; impl_api![226; NpyIter_AdvancedNew(nop: c_int, op_in: *mut *mut PyArrayObject, flags: npy_uint32, order: NPY_ORDER, casting: NPY_CASTING, op_flags: *mut npy_uint32, op_request_dtypes: *mut *mut PyArray_Descr, oa_ndim: c_int, op_axes: *mut *mut c_int, itershape: *mut npy_intp, buffersize: npy_intp) -> *mut NpyIter]; impl_api![227; NpyIter_Copy(iter: *mut NpyIter) -> *mut NpyIter]; impl_api![228; NpyIter_Deallocate(iter: *mut NpyIter) -> c_int]; impl_api![229; NpyIter_HasDelayedBufAlloc(iter: *mut NpyIter) -> npy_bool]; impl_api![230; NpyIter_HasExternalLoop(iter: *mut NpyIter) -> npy_bool]; impl_api![231; NpyIter_EnableExternalLoop(iter: *mut NpyIter) -> c_int]; 
impl_api![232; NpyIter_GetInnerStrideArray(iter: *mut NpyIter) -> *mut npy_intp]; impl_api![233; NpyIter_GetInnerLoopSizePtr(iter: *mut NpyIter) -> *mut npy_intp]; impl_api![234; NpyIter_Reset(iter: *mut NpyIter, errmsg: *mut *mut c_char) -> c_int]; impl_api![235; NpyIter_ResetBasePointers(iter: *mut NpyIter, baseptrs: *mut *mut c_char, errmsg: *mut *mut c_char) -> c_int]; impl_api![236; NpyIter_ResetToIterIndexRange(iter: *mut NpyIter, istart: npy_intp, iend: npy_intp, errmsg: *mut *mut c_char) -> c_int]; impl_api![237; NpyIter_GetNDim(iter: *mut NpyIter) -> c_int]; impl_api![238; NpyIter_GetNOp(iter: *mut NpyIter) -> c_int]; impl_api![239; NpyIter_GetIterNext(iter: *mut NpyIter, errmsg: *mut *mut c_char) -> NpyIter_IterNextFunc]; impl_api![240; NpyIter_GetIterSize(iter: *mut NpyIter) -> npy_intp]; impl_api![241; NpyIter_GetIterIndexRange(iter: *mut NpyIter, istart: *mut npy_intp, iend: *mut npy_intp)]; impl_api![242; NpyIter_GetIterIndex(iter: *mut NpyIter) -> npy_intp]; impl_api![243; NpyIter_GotoIterIndex(iter: *mut NpyIter, iterindex: npy_intp) -> c_int]; impl_api![244; NpyIter_HasMultiIndex(iter: *mut NpyIter) -> npy_bool]; impl_api![245; NpyIter_GetShape(iter: *mut NpyIter, outshape: *mut npy_intp) -> c_int]; impl_api![246; NpyIter_GetGetMultiIndex(iter: *mut NpyIter, errmsg: *mut *mut c_char) -> NpyIter_GetMultiIndexFunc]; impl_api![247; NpyIter_GotoMultiIndex(iter: *mut NpyIter, multi_index: *mut npy_intp) -> c_int]; impl_api![248; NpyIter_RemoveMultiIndex(iter: *mut NpyIter) -> c_int]; impl_api![249; NpyIter_HasIndex(iter: *mut NpyIter) -> npy_bool]; impl_api![250; NpyIter_IsBuffered(iter: *mut NpyIter) -> npy_bool]; impl_api![251; NpyIter_IsGrowInner(iter: *mut NpyIter) -> npy_bool]; impl_api![252; NpyIter_GetBufferSize(iter: *mut NpyIter) -> npy_intp]; impl_api![253; NpyIter_GetIndexPtr(iter: *mut NpyIter) -> *mut npy_intp]; impl_api![254; NpyIter_GotoIndex(iter: *mut NpyIter, flat_index: npy_intp) -> c_int]; impl_api![255; NpyIter_GetDataPtrArray(iter: 
*mut NpyIter) -> *mut *mut c_char]; impl_api![256; NpyIter_GetDescrArray(iter: *mut NpyIter) -> *mut *mut PyArray_Descr]; impl_api![257; NpyIter_GetOperandArray(iter: *mut NpyIter) -> *mut *mut PyArrayObject]; impl_api![258; NpyIter_GetIterView(iter: *mut NpyIter, i: npy_intp) -> *mut PyArrayObject]; impl_api![259; NpyIter_GetReadFlags(iter: *mut NpyIter, outreadflags: *mut c_char)]; impl_api![260; NpyIter_GetWriteFlags(iter: *mut NpyIter, outwriteflags: *mut c_char)]; impl_api![261; NpyIter_DebugPrint(iter: *mut NpyIter)]; impl_api![262; NpyIter_IterationNeedsAPI(iter: *mut NpyIter) -> npy_bool]; impl_api![263; NpyIter_GetInnerFixedStrideArray(iter: *mut NpyIter, out_strides: *mut npy_intp)]; impl_api![264; NpyIter_RemoveAxis(iter: *mut NpyIter, axis: c_int) -> c_int]; impl_api![265; NpyIter_GetAxisStrideArray(iter: *mut NpyIter, axis: c_int) -> *mut npy_intp]; impl_api![266; NpyIter_RequiresBuffering(iter: *mut NpyIter) -> npy_bool]; impl_api![267; NpyIter_GetInitialDataPtrArray(iter: *mut NpyIter) -> *mut *mut c_char]; impl_api![268; NpyIter_CreateCompatibleStrides(iter: *mut NpyIter, itemsize: npy_intp, outstrides: *mut npy_intp) -> c_int]; impl_api![269; PyArray_CastingConverter(obj: *mut PyObject, casting: *mut NPY_CASTING) -> c_int]; impl_api![270; PyArray_CountNonzero(self_: *mut PyArrayObject) -> npy_intp]; impl_api![271; PyArray_PromoteTypes(type1: *mut PyArray_Descr, type2: *mut PyArray_Descr) -> *mut PyArray_Descr]; impl_api![272; PyArray_MinScalarType(arr: *mut PyArrayObject) -> *mut PyArray_Descr]; impl_api![273; PyArray_ResultType(narrs: npy_intp, arr: *mut *mut PyArrayObject, ndtypes: npy_intp, dtypes: *mut *mut PyArray_Descr) -> *mut PyArray_Descr]; impl_api![274; PyArray_CanCastArrayTo(arr: *mut PyArrayObject, to: *mut PyArray_Descr, casting: NPY_CASTING) -> npy_bool]; impl_api![275; PyArray_CanCastTypeTo(from: *mut PyArray_Descr, to: *mut PyArray_Descr, casting: NPY_CASTING) -> npy_bool]; impl_api![276; PyArray_EinsteinSum(subscripts: *mut 
c_char, nop: npy_intp, op_in: *mut *mut PyArrayObject, dtype: *mut PyArray_Descr, order: NPY_ORDER, casting: NPY_CASTING, out: *mut PyArrayObject) -> *mut PyArrayObject]; impl_api![277; PyArray_NewLikeArray(prototype: *mut PyArrayObject, order: NPY_ORDER, dtype: *mut PyArray_Descr, subok: c_int) -> *mut PyObject]; impl_api![278; PyArray_GetArrayParamsFromObject(op: *mut PyObject, requested_dtype: *mut PyArray_Descr, writeable: npy_bool, out_dtype: *mut *mut PyArray_Descr, out_ndim: *mut c_int, out_dims: *mut npy_intp, out_arr: *mut *mut PyArrayObject, context: *mut PyObject) -> c_int]; impl_api![279; PyArray_ConvertClipmodeSequence(object: *mut PyObject, modes: *mut NPY_CLIPMODE, n: c_int) -> c_int]; impl_api![280; PyArray_MatrixProduct2(op1: *mut PyObject, op2: *mut PyObject, out: *mut PyArrayObject) -> *mut PyObject]; impl_api![281; NpyIter_IsFirstVisit(iter: *mut NpyIter, iop: c_int) -> npy_bool]; impl_api![282; PyArray_SetBaseObject(arr: *mut PyArrayObject, obj: *mut PyObject) -> c_int]; impl_api![283; PyArray_CreateSortedStridePerm(ndim: c_int, strides: *mut npy_intp, out_strideperm: *mut npy_stride_sort_item)]; impl_api![284; PyArray_RemoveAxesInPlace(arr: *mut PyArrayObject, flags: *mut npy_bool)]; impl_api![285; PyArray_DebugPrint(obj: *mut PyArrayObject)]; impl_api![286; PyArray_FailUnlessWriteable(obj: *mut PyArrayObject, name: *const c_char) -> c_int]; impl_api![287; PyArray_SetUpdateIfCopyBase(arr: *mut PyArrayObject, base: *mut PyArrayObject) -> c_int]; impl_api![288; PyDataMem_NEW(size: usize) -> *mut c_void]; impl_api![289; PyDataMem_FREE(ptr: *mut c_void)]; impl_api![290; PyDataMem_RENEW(ptr: *mut c_void, size: usize) -> *mut c_void]; impl_api![291; PyDataMem_SetEventHook(newhook: PyDataMem_EventHookFunc, user_data: *mut c_void, old_data: *mut *mut c_void) -> PyDataMem_EventHookFunc]; impl_api![293; PyArray_MapIterSwapAxes(mit: *mut PyArrayMapIterObject, ret: *mut *mut PyArrayObject, getmap: c_int)]; impl_api![294; PyArray_MapIterArray(a: *mut 
PyArrayObject, index: *mut PyObject) -> *mut PyObject]; impl_api![295; PyArray_MapIterNext(mit: *mut PyArrayMapIterObject)]; impl_api![296; PyArray_Partition(op: *mut PyArrayObject, ktharray: *mut PyArrayObject, axis: c_int, which: NPY_SELECTKIND) -> c_int]; impl_api![297; PyArray_ArgPartition(op: *mut PyArrayObject, ktharray: *mut PyArrayObject, axis: c_int, which: NPY_SELECTKIND) -> *mut PyObject]; impl_api![298; PyArray_SelectkindConverter(obj: *mut PyObject, selectkind: *mut NPY_SELECTKIND) -> c_int]; impl_api![299; PyDataMem_NEW_ZEROED(size: usize, elsize: usize) -> *mut c_void]; impl_api![300; PyArray_CheckAnyScalarExact(obj: *mut PyObject) -> c_int]; impl_api![301; PyArray_MapIterArrayCopyIfOverlap(a: *mut PyArrayObject, index: *mut PyObject, copy_if_overlap: c_int, extra_op: *mut PyArrayObject) -> *mut PyObject]; } /// Define PyTypeObject related to Array API macro_rules! impl_array_type { ($(($offset:expr, $tname:ident)),*) => { #[allow(non_camel_case_types)] #[repr(i32)] pub enum ArrayType { $($tname),* } impl PyArrayAPI_Inner { pub unsafe fn get_type_object(&self, ty: ArrayType) -> *mut PyTypeObject { match ty { $( ArrayType::$tname => *(self.0.offset($offset)) as *mut PyTypeObject ),* } } } } } // impl_array_type!; impl_array_type!( (1, PyBigArray_Type), (2, PyArray_Type), (3, PyArrayDescr_Type), (4, PyArrayFlags_Type), (5, PyArrayIter_Type), (6, PyArrayMultiIter_Type), (7, NPY_NUMUSERTYPES), (8, PyBoolArrType_Type), (9, _PyArrayScalar_BoolValues), (10, PyGenericArrType_Type), (11, PyNumberArrType_Type), (12, PyIntegerArrType_Type), (13, PySignedIntegerArrType_Type), (14, PyUnsignedIntegerArrType_Type), (15, PyInexactArrType_Type), (16, PyFloatingArrType_Type), (17, PyComplexFloatingArrType_Type), (18, PyFlexibleArrType_Type), (19, PyCharacterArrType_Type), (20, PyByteArrType_Type), (21, PyShortArrType_Type), (22, PyIntArrType_Type), (23, PyLongArrType_Type), (24, PyLongLongArrType_Type), (25, PyUByteArrType_Type), (26, PyUShortArrType_Type), (27, 
PyUIntArrType_Type), (28, PyULongArrType_Type), (29, PyULongLongArrType_Type), (30, PyFloatArrType_Type), (31, PyDoubleArrType_Type), (32, PyLongDoubleArrType_Type), (33, PyCFloatArrType_Type), (34, PyCDoubleArrType_Type), (35, PyCLongDoubleArrType_Type), (36, PyObjectArrType_Type), (37, PyStringArrType_Type), (38, PyUnicodeArrType_Type), (39, PyVoidArrType_Type) ); #[allow(non_snake_case)] pub unsafe fn PyArray_Check(op: *mut PyObject) -> c_int { ffi::PyObject_TypeCheck(op, PY_ARRAY_API.get_type_object(ArrayType::PyArray_Type)) } #[allow(non_snake_case)] pub unsafe fn PyArray_CheckExact(op: *mut PyObject) -> c_int { (ffi::Py_TYPE(op) == PY_ARRAY_API.get_type_object(ArrayType::PyArray_Type)) as c_int } #[test] fn call_api() { use pyo3::Python; let _gil = Python::acquire_gil(); unsafe { assert_eq!( PY_ARRAY_API.PyArray_MultiplyIntList([1, 2, 3].as_mut_ptr(), 3), 6 ); } }
78.758105
320
0.715123
0a29a2e18379953d60cae5ee462c23911a3b6cdb
7,503
use crate::{ buffer::event::{BufferPool, BufferWriter, PoolInfo}, data::{ArconEvent, ArconEventWrapper, ArconMessage, ArconType, NodeID}, stream::channel::{strategy::send, Channel}, }; use kompact::prelude::{ComponentDefinition, SerError}; use std::sync::Arc; /// A Broadcast strategy for one-to-many message sending #[allow(dead_code)] pub struct Broadcast<A> where A: ArconType, { /// A buffer pool of EventBuffer's buffer_pool: BufferPool<ArconEventWrapper<A>>, /// Vec of Channels that messages are broadcasted to channels: Vec<Arc<Channel<A>>>, /// A buffer holding outgoing events curr_buffer: BufferWriter<ArconEventWrapper<A>>, /// An Identifier that is embedded in each outgoing message sender_id: NodeID, /// Struct holding information regarding the BufferPool pool_info: PoolInfo, } impl<A> Broadcast<A> where A: ArconType, { pub fn new(channels: Vec<Channel<A>>, sender_id: NodeID, pool_info: PoolInfo) -> Broadcast<A> { assert!( channels.len() > 1, "Number of Channels must exceed 1 for a Broadcast strategy" ); assert!( channels.len() < pool_info.capacity, "Strategy must be initialised with a pool capacity larger than amount of channels" ); let mut buffer_pool: BufferPool<ArconEventWrapper<A>> = BufferPool::new( pool_info.capacity, pool_info.buffer_size, pool_info.allocator.clone(), ) .expect("failed to initialise BufferPool"); let curr_buffer = buffer_pool .try_get() .expect("failed to fetch initial buffer"); Broadcast { buffer_pool, channels: channels.into_iter().map(Arc::new).collect(), curr_buffer, sender_id, pool_info, } } #[inline] fn push_event(&mut self, event: ArconEvent<A>) -> Option<ArconMessage<A>> { self.curr_buffer.push(event.into()).map(|e| { let msg = self.message(); self.curr_buffer.push(e); msg }) } #[inline] fn message(&mut self) -> ArconMessage<A> { let reader = self.curr_buffer.reader(); let msg = ArconMessage { events: reader, sender: self.sender_id, }; // TODO: Should probably not busy wait here.. 
self.curr_buffer = self.buffer_pool.get(); msg } #[inline] pub fn add(&mut self, event: ArconEvent<A>) -> Vec<(Arc<Channel<A>>, ArconMessage<A>)> { match &event { ArconEvent::Element(_) => self .push_event(event) .map(move |msg| { self.channels .iter() .map(|c| (c.clone(), msg.clone())) .collect() }) .unwrap_or_else(Vec::new), _ => match self.push_event(event) { Some(msg) => { let msg_two = self.message(); self.channels .iter() .map(|c| (c.clone(), msg.clone())) .chain(self.channels.iter().map(|c| (c.clone(), msg_two.clone()))) .collect() } None => { let msg = self.message(); self.channels .iter() .map(|c| (c.clone(), msg.clone())) .collect() } }, } } #[inline] pub fn flush<CD>(&mut self, source: &CD) where CD: ComponentDefinition + Sized + 'static, { for (i, channel) in self.channels.iter().enumerate() { if i == self.channels.len() - 1 { // This is the last channel, thus we can use curr_buffer let reader = self.curr_buffer.reader(); let msg = ArconMessage { events: reader, sender: self.sender_id, }; if let Err(SerError::BufferError(err)) = send(channel, msg, source) { // TODO: Figure out how to get more space for `tell_serialised` panic!("Buffer Error {}", err); }; } else { // Get a new writer let mut writer = self.buffer_pool.get(); // Copy data from our current writer into the new writer... writer.copy_from_writer(&self.curr_buffer); let msg = ArconMessage { events: writer.reader(), sender: self.sender_id, }; if let Err(SerError::BufferError(err)) = send(channel, msg, source) { // TODO: Figure out how to get more space for `tell_serialised` panic!("Buffer Error {}", err); }; } } // We are finished, set a new BufferWriter to curr_buffer // TODO: Should probably not busy wait here.. 
self.curr_buffer = self.buffer_pool.get(); } #[inline] pub fn num_channels(&self) -> usize { self.channels.len() } } #[cfg(test)] mod tests { use super::{Channel, *}; use crate::{ application::Application, data::{ArconElement, Watermark}, stream::{ channel::strategy::{send, tests::*, ChannelStrategy}, node::debug::DebugNode, }, }; use kompact::prelude::*; use std::sync::Arc; #[test] fn broadcast_local_test() { let mut app = Application::default(); let pool_info = app.get_pool_info(); let system = app.data_system(); let components: u32 = 8; let total_msgs: u64 = 10; let mut channels: Vec<Channel<Input>> = Vec::new(); let mut comps: Vec<Arc<crate::prelude::Component<DebugNode<Input>>>> = Vec::new(); for _i in 0..components { let comp = system.create(DebugNode::<Input>::new); system.start(&comp); let actor_ref: ActorRefStrong<ArconMessage<Input>> = comp.actor_ref().hold().expect("failed to fetch"); channels.push(Channel::Local(actor_ref)); comps.push(comp); } let mut channel_strategy: ChannelStrategy<Input> = ChannelStrategy::Broadcast(Broadcast::new(channels, NodeID::new(1), pool_info)); // take one comp as channel source // just for testing... let comp = &comps[0]; comp.on_definition(|cd| { for _i in 0..total_msgs { let elem = ArconElement::new(Input { id: 1 }); for (channel, msg) in channel_strategy.push(ArconEvent::Element(elem)) { send(&channel, msg, cd).unwrap(); } } // force a flush through a marker for (channel, msg) in channel_strategy.push(ArconEvent::Watermark(Watermark::new(0))) { send(&channel, msg, cd).unwrap(); } }); std::thread::sleep(std::time::Duration::from_secs(1)); // Each of the 8 components should have the same amount of msgs.. for comp in comps { comp.on_definition(|cd| { assert_eq!(cd.data.len() as u64, total_msgs); }); } app.shutdown(); } }
33.64574
99
0.518193
217f7983c39064ebc852686936d6a3364347ae62
5,416
use futures::future::FutureResult; use std::time::{SystemTime, UNIX_EPOCH}; use super::*; use graph::data::subgraph::schema::*; fn check_subgraph_exists( store: Arc<dyn NetworkStore>, subgraph_id: SubgraphDeploymentId, ) -> impl Future<Item = bool, Error = Error> { future::result( store .get(SubgraphDeploymentEntity::key(subgraph_id)) .map_err(|e| e.into()) .map(|entity| entity.map_or(false, |_| true)), ) } fn create_subgraph( store: Arc<dyn NetworkStore>, subgraph_name: SubgraphName, subgraph_id: SubgraphDeploymentId, start_block: Option<EthereumBlockPointer>, ) -> FutureResult<(), Error> { let mut ops = vec![]; // Ensure the subgraph itself doesn't already exist ops.push(MetadataOperation::AbortUnless { description: "Subgraph entity should not exist".to_owned(), query: SubgraphEntity::query() .filter(EntityFilter::new_equal("name", subgraph_name.to_string())), entity_ids: vec![], }); // Create the subgraph entity (e.g. `ethereum/mainnet`) let created_at = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); let subgraph_entity_id = generate_entity_id(); ops.extend( SubgraphEntity::new(subgraph_name.clone(), None, None, created_at) .write_operations(&subgraph_entity_id) .into_iter() .map(|op| op.into()), ); // Ensure the subgraph version doesn't already exist ops.push(MetadataOperation::AbortUnless { description: "Subgraph version should not exist".to_owned(), query: SubgraphVersionEntity::query() .filter(EntityFilter::new_equal("id", subgraph_id.to_string())), entity_ids: vec![], }); // Create a subgraph version entity; we're using the same ID for // version and deployment to make clear they belong together let version_entity_id = subgraph_id.to_string(); ops.extend( SubgraphVersionEntity::new(subgraph_entity_id.clone(), subgraph_id.clone(), created_at) .write_operations(&version_entity_id) .into_iter() .map(|op| op.into()), ); // Immediately make this version the current one ops.extend(SubgraphEntity::update_pending_version_operations( 
&subgraph_entity_id, None, )); ops.extend(SubgraphEntity::update_current_version_operations( &subgraph_entity_id, Some(version_entity_id), )); // Ensure the deployment doesn't already exist ops.push(MetadataOperation::AbortUnless { description: "Subgraph deployment entity must not exist".to_owned(), query: SubgraphDeploymentEntity::query() .filter(EntityFilter::new_equal("id", subgraph_id.to_string())), entity_ids: vec![], }); // Create a fake manifest let manifest = SubgraphManifest { id: subgraph_id.clone(), location: subgraph_name.to_string(), spec_version: String::from("0.0.1"), description: None, repository: None, schema: Schema::parse(include_str!("./ethereum.graphql"), subgraph_id.clone()) .expect("valid Ethereum network subgraph schema"), data_sources: vec![], graft: None, templates: vec![], }; // Create deployment entity let chain_head_block = match store.chain_head_ptr() { Ok(block_ptr) => block_ptr, Err(e) => return future::err(e.into()), }; ops.extend( SubgraphDeploymentEntity::new(&manifest, false, false, start_block, chain_head_block) .create_operations(&manifest.id), ); // Create a deployment assignment entity ops.extend( SubgraphDeploymentAssignmentEntity::new(NodeId::new("__builtin").unwrap()) .write_operations(&subgraph_id) .into_iter() .map(|op| op.into()), ); future::result( store .create_subgraph_deployment(&manifest.schema, ops) .map_err(|e| e.into()), ) } pub fn ensure_subgraph_exists( subgraph_name: SubgraphName, subgraph_id: SubgraphDeploymentId, logger: Logger, store: Arc<dyn NetworkStore>, start_block: Option<EthereumBlockPointer>, ) -> impl Future<Item = (), Error = Error> { debug!(logger, "Ensure that the network subgraph exists"); let logger_for_created = logger.clone(); check_subgraph_exists(store.clone(), subgraph_id.clone()) .from_err() .and_then(move |subgraph_exists| { if subgraph_exists { debug!(logger, "Network subgraph deployment already exists"); Box::new(future::ok(())) as Box<dyn Future<Item = _, Error = _> + Send> } else { 
debug!(logger, "Network subgraph deployment needs to be created"); Box::new( create_subgraph( store.clone(), subgraph_name.clone(), subgraph_id.clone(), start_block, ) .inspect(move |_| { debug!(logger_for_created, "Created Ethereum network subgraph"); }), ) } }) .map_err(move |e| format_err!("Failed to ensure Ethereum network subgraph exists: {}", e)) }
34.496815
98
0.607644
ff44454481f0508d6b26748915d4bdcbe465393f
1,588
use crate::error::*; use serde::Serialize; use snafu::ResultExt; use std::collections::BTreeMap; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Variables(BTreeMap<String, serde_yaml::Value>); impl Variables { pub fn new() -> Self { Variables(BTreeMap::new()) } pub fn append(&mut self, v: &mut Variables) { self.0.append(&mut v.0); } pub fn insert<K: Into<String>, V: Serialize>(&mut self, key: K, value: V) -> Result<()> { self.0.insert( key.into(), serde_yaml::to_value(value).context(crate::SerdeYaml {})?, ); Ok(()) } pub fn to_value(s: &str) -> Result<serde_yaml::Value> { //serde_yaml::to_value(value).context(crate::SerdeYaml {}) serde_yaml::from_str::<serde_yaml::Value>(s).context(crate::SerdeYaml {}) } } #[cfg(test)] mod tests { use super::*; use spectral::prelude::*; #[test] fn test_to_value() { assert_that!(&Variables::to_value("v1").unwrap()) .is_equal_to(&serde_yaml::Value::String("v1".to_owned())); assert_that!(&Variables::to_value("true").unwrap()) .is_equal_to(&serde_yaml::Value::Bool(true)); assert_that!(&Variables::to_value("false").unwrap()) .is_equal_to(&serde_yaml::Value::Bool(false)); assert_that!(&Variables::to_value("\"true\"").unwrap()) .is_equal_to(&serde_yaml::Value::String("true".to_owned())); assert_that!(&Variables::to_value("42").unwrap()) .is_equal_to(&serde_yaml::to_value(42).unwrap()); } }
31.137255
93
0.593199
ab1f5f14c9540f784123741fe761d61ac1cb72be
1,222
extern crate barn; use ::fringe; pub use self::barn::basic::*; use self::barn::scheduler::{self, Request}; static mut SCHEDULER: Option<Scheduler> = None; pub fn start<F: FnOnce() + Send + 'static>(f: F) { let stack = fringe::OwnedStack::new(16 * 1024); let thread = Thread::new(stack, f); let mut q = Queue::new(); q.push_front(thread); unsafe { SCHEDULER = Some(Scheduler::new(q)); debug!("scheduler is none? {}", SCHEDULER.is_none()); //loop {} SCHEDULER.as_mut().unwrap().run(); } } pub fn spawn<F: FnOnce() + Send + 'static>(f: F, size: usize) { let t = Thread::new(fringe::OwnedStack::new(size), f); Thread::suspend(Request::Schedule(scheduler::Node::<Unit>::new(t))); } pub fn yield_now() { Thread::suspend(Request::Yield); } /// A type indicating whether a timed wait on a condition variable returned /// due to a time out or not. #[derive(Debug, PartialEq, Eq, Copy, Clone)] #[stable(feature = "wait_timeout", since = "1.5.0")] pub struct WaitTimeoutResult(bool); impl WaitTimeoutResult { /// Returns whether the wait was known to have timed out. #[stable(feature = "wait_timeout", since = "1.5.0")] pub fn timed_out(&self) -> bool { self.0 } }
26
75
0.641571
09872f8b13ea93edc6ae03fe2f85add08e7b2e9c
19,359
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #[cfg(feature = "simd")] use packed_simd::*; #[cfg(feature = "simd")] use std::ops::{Add, BitAnd, BitAndAssign, BitOr, BitOrAssign, Div, Mul, Neg, Not, Sub}; use super::*; /// A subtype of primitive type that represents numeric values. /// /// SIMD operations are defined in this trait if available on the target system. 
#[cfg(simd)] pub trait ArrowNumericType: ArrowPrimitiveType where Self::Simd: Add<Output = Self::Simd> + Sub<Output = Self::Simd> + Mul<Output = Self::Simd> + Div<Output = Self::Simd> + Copy, Self::SimdMask: BitAnd<Output = Self::SimdMask> + BitOr<Output = Self::SimdMask> + BitAndAssign + BitOrAssign + Not<Output = Self::SimdMask> + Copy, { /// Defines the SIMD type that should be used for this numeric type type Simd; /// Defines the SIMD Mask type that should be used for this numeric type type SimdMask; /// The number of SIMD lanes available fn lanes() -> usize; /// Initializes a SIMD register to a constant value fn init(value: Self::Native) -> Self::Simd; /// Loads a slice into a SIMD register fn load(slice: &[Self::Native]) -> Self::Simd; /// Creates a new SIMD mask for this SIMD type filling it with `value` fn mask_init(value: bool) -> Self::SimdMask; /// Creates a new SIMD mask for this SIMD type from the lower-most bits of the given `mask`. /// The number of bits used corresponds to the number of lanes of this type fn mask_from_u64(mask: u64) -> Self::SimdMask; /// Creates a bitmask from the given SIMD mask. /// Each bit corresponds to one vector lane, starting with the least-significant bit. 
fn mask_to_u64(mask: &Self::SimdMask) -> u64; /// Gets the value of a single lane in a SIMD mask fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool; /// Sets the value of a single lane of a SIMD mask fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask; /// Selects elements of `a` and `b` using `mask` fn mask_select(mask: Self::SimdMask, a: Self::Simd, b: Self::Simd) -> Self::Simd; /// Returns `true` if any of the lanes in the mask are `true` fn mask_any(mask: Self::SimdMask) -> bool; /// Performs a SIMD binary operation fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>( left: Self::Simd, right: Self::Simd, op: F, ) -> Self::Simd; /// SIMD version of equal fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// SIMD version of not equal fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// SIMD version of less than fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// SIMD version of less than or equal to fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// SIMD version of greater than fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// SIMD version of greater than or equal to fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask; /// Writes a SIMD result back to a slice fn write(simd_result: Self::Simd, slice: &mut [Self::Native]); fn unary_op<F: Fn(Self::Simd) -> Self::Simd>(a: Self::Simd, op: F) -> Self::Simd; } #[cfg(not(simd))] pub trait ArrowNumericType: ArrowPrimitiveType {} macro_rules! 
make_numeric_type { ($impl_ty:ty, $native_ty:ty, $simd_ty:ident, $simd_mask_ty:ident) => { #[cfg(simd)] impl ArrowNumericType for $impl_ty { type Simd = $simd_ty; type SimdMask = $simd_mask_ty; #[inline] fn lanes() -> usize { Self::Simd::lanes() } #[inline] fn init(value: Self::Native) -> Self::Simd { Self::Simd::splat(value) } #[inline] fn load(slice: &[Self::Native]) -> Self::Simd { unsafe { Self::Simd::from_slice_unaligned_unchecked(slice) } } #[inline] fn mask_init(value: bool) -> Self::SimdMask { Self::SimdMask::splat(value) } #[inline] fn mask_from_u64(mask: u64) -> Self::SimdMask { // this match will get removed by the compiler since the number of lanes is known at // compile-time for each concrete numeric type match Self::lanes() { 8 => { // the bit position in each lane indicates the index of that lane let vecidx = i64x8::new(1, 2, 4, 8, 16, 32, 64, 128); // broadcast the lowermost 8 bits of mask to each lane let vecmask = i64x8::splat((mask & 0xFF) as i64); // compute whether the bit corresponding to each lanes index is set let vecmask = (vecidx & vecmask).eq(vecidx); // transmute is necessary because the different match arms return different // mask types, at runtime only one of those expressions will exist per type, // with the type being equal to `SimdMask`. 
unsafe { std::mem::transmute(vecmask) } } 16 => { // same general logic as for 8 lanes, extended to 16 bits let vecidx = i32x16::new( 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, ); let vecmask = i32x16::splat((mask & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); unsafe { std::mem::transmute(vecmask) } } 32 => { // compute two separate m32x16 vector masks from from the lower-most 32 bits of `mask` // and then combine them into one m16x32 vector mask by writing and reading a temporary let tmp = &mut [0_i16; 32]; let vecidx = i32x16::new( 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, ); let vecmask = i32x16::splat((mask & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); i16x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[0..16]); let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); i16x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[16..32]); unsafe { std::mem::transmute(i16x32::from_slice_unaligned(tmp)) } } 64 => { // compute four m32x16 vector masks from from all 64 bits of `mask` // and convert them into one m8x64 vector mask by writing and reading a temporary let tmp = &mut [0_i8; 64]; let vecidx = i32x16::new( 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, 32768, ); let vecmask = i32x16::splat((mask & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); i8x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[0..16]); let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); i8x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[16..32]); let vecmask = i32x16::splat(((mask >> 32) & 0xFFFF) as i32); let vecmask = (vecidx & vecmask).eq(vecidx); i8x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[32..48]); let vecmask = i32x16::splat(((mask >> 48) & 0xFFFF) as i32); let vecmask = (vecidx & 
vecmask).eq(vecidx); i8x16::from_cast(vecmask) .write_to_slice_unaligned(&mut tmp[48..64]); unsafe { std::mem::transmute(i8x64::from_slice_unaligned(tmp)) } } _ => panic!("Invalid number of vector lanes"), } } #[inline] fn mask_to_u64(mask: &Self::SimdMask) -> u64 { mask.bitmask() as u64 } #[inline] fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool { unsafe { mask.extract_unchecked(idx) } } #[inline] fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask { unsafe { mask.replace_unchecked(idx, value) } } /// Selects elements of `a` and `b` using `mask` #[inline] fn mask_select( mask: Self::SimdMask, a: Self::Simd, b: Self::Simd, ) -> Self::Simd { mask.select(a, b) } #[inline] fn mask_any(mask: Self::SimdMask) -> bool { mask.any() } #[inline] fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>( left: Self::Simd, right: Self::Simd, op: F, ) -> Self::Simd { op(left, right) } #[inline] fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.eq(right) } #[inline] fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.ne(right) } #[inline] fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.lt(right) } #[inline] fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.le(right) } #[inline] fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.gt(right) } #[inline] fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask { left.ge(right) } #[inline] fn write(simd_result: Self::Simd, slice: &mut [Self::Native]) { unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) }; } #[inline] fn unary_op<F: Fn(Self::Simd) -> Self::Simd>( a: Self::Simd, op: F, ) -> Self::Simd { op(a) } } #[cfg(not(simd))] impl ArrowNumericType for $impl_ty {} }; } make_numeric_type!(Int8Type, i8, i8x64, m8x64); make_numeric_type!(Int16Type, i16, i16x32, m16x32); make_numeric_type!(Int32Type, i32, i32x16, m32x16); make_numeric_type!(Int64Type, i64, i64x8, m64x8); make_numeric_type!(UInt8Type, u8, 
u8x64, m8x64); make_numeric_type!(UInt16Type, u16, u16x32, m16x32); make_numeric_type!(UInt32Type, u32, u32x16, m32x16); make_numeric_type!(UInt64Type, u64, u64x8, m64x8); make_numeric_type!(Float32Type, f32, f32x16, m32x16); make_numeric_type!(Float64Type, f64, f64x8, m64x8); make_numeric_type!(TimestampSecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampMillisecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampMicrosecondType, i64, i64x8, m64x8); make_numeric_type!(TimestampNanosecondType, i64, i64x8, m64x8); make_numeric_type!(Date32Type, i32, i32x16, m32x16); make_numeric_type!(Date64Type, i64, i64x8, m64x8); make_numeric_type!(Timestamp32Type, i32, i32x16, m32x16); make_numeric_type!(Time32SecondType, i32, i32x16, m32x16); make_numeric_type!(Time32MillisecondType, i32, i32x16, m32x16); make_numeric_type!(Time64MicrosecondType, i64, i64x8, m64x8); make_numeric_type!(Time64NanosecondType, i64, i64x8, m64x8); make_numeric_type!(IntervalYearMonthType, i32, i32x16, m32x16); make_numeric_type!(IntervalDayTimeType, i64, i64x8, m64x8); make_numeric_type!(DurationSecondType, i64, i64x8, m64x8); make_numeric_type!(DurationMillisecondType, i64, i64x8, m64x8); make_numeric_type!(DurationMicrosecondType, i64, i64x8, m64x8); make_numeric_type!(DurationNanosecondType, i64, i64x8, m64x8); /// A subtype of primitive type that represents signed numeric values. /// /// SIMD operations are defined in this trait if available on the target system. 
#[cfg(simd)] pub trait ArrowSignedNumericType: ArrowNumericType where Self::SignedSimd: Neg<Output = Self::SignedSimd>, { /// Defines the SIMD type that should be used for this numeric type type SignedSimd; /// Loads a slice of signed numeric type into a SIMD register fn load_signed(slice: &[Self::Native]) -> Self::SignedSimd; /// Performs a SIMD unary operation on signed numeric type fn signed_unary_op<F: Fn(Self::SignedSimd) -> Self::SignedSimd>( a: Self::SignedSimd, op: F, ) -> Self::SignedSimd; /// Writes a signed SIMD result back to a slice fn write_signed(simd_result: Self::SignedSimd, slice: &mut [Self::Native]); } #[cfg(not(simd))] pub trait ArrowSignedNumericType: ArrowNumericType where Self::Native: std::ops::Neg<Output = Self::Native>, { } macro_rules! make_signed_numeric_type { ($impl_ty:ty, $simd_ty:ident) => { #[cfg(simd)] impl ArrowSignedNumericType for $impl_ty { type SignedSimd = $simd_ty; #[inline] fn load_signed(slice: &[Self::Native]) -> Self::SignedSimd { unsafe { Self::SignedSimd::from_slice_unaligned_unchecked(slice) } } #[inline] fn signed_unary_op<F: Fn(Self::SignedSimd) -> Self::SignedSimd>( a: Self::SignedSimd, op: F, ) -> Self::SignedSimd { op(a) } #[inline] fn write_signed(simd_result: Self::SignedSimd, slice: &mut [Self::Native]) { unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) }; } } #[cfg(not(simd))] impl ArrowSignedNumericType for $impl_ty {} }; } make_signed_numeric_type!(Int8Type, i8x64); make_signed_numeric_type!(Int16Type, i16x32); make_signed_numeric_type!(Int32Type, i32x16); make_signed_numeric_type!(Int64Type, i64x8); make_signed_numeric_type!(Float32Type, f32x16); make_signed_numeric_type!(Float64Type, f64x8); #[cfg(simd)] pub trait ArrowFloatNumericType: ArrowNumericType { fn pow(base: Self::Simd, raise: Self::Simd) -> Self::Simd; } #[cfg(not(simd))] pub trait ArrowFloatNumericType: ArrowNumericType {} macro_rules! 
make_float_numeric_type { ($impl_ty:ty, $simd_ty:ident) => { #[cfg(simd)] impl ArrowFloatNumericType for $impl_ty { #[inline] fn pow(base: Self::Simd, raise: Self::Simd) -> Self::Simd { base.powf(raise) } } #[cfg(not(simd))] impl ArrowFloatNumericType for $impl_ty {} }; } make_float_numeric_type!(Float32Type, f32x16); make_float_numeric_type!(Float64Type, f64x8); #[cfg(all(test, simd_x86))] mod tests { use crate::datatypes::{ ArrowNumericType, Float32Type, Float64Type, Int32Type, Int64Type, Int8Type, UInt16Type, }; use packed_simd::*; use FromCast; /// calculate the expected mask by iterating over all bits macro_rules! expected_mask { ($T:ty, $MASK:expr) => {{ let mask = $MASK; // simd width of all types is currently 64 bytes -> 512 bits let lanes = 64 / std::mem::size_of::<$T>(); // translate each set bit into a value of all ones (-1) of the correct type (0..lanes) .map(|i| (if (mask & (1 << i)) != 0 { -1 } else { 0 })) .collect::<Vec<$T>>() }}; } #[test] fn test_mask_f64() { let mask = 0b10101010; let actual = Float64Type::mask_from_u64(mask); let expected = expected_mask!(i64, mask); let expected = m64x8::from_cast(i64x8::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, actual); } #[test] fn test_mask_u64() { let mask = 0b01010101; let actual = Int64Type::mask_from_u64(mask); let expected = expected_mask!(i64, mask); let expected = m64x8::from_cast(i64x8::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, actual); } #[test] fn test_mask_f32() { let mask = 0b10101010_10101010; let actual = Float32Type::mask_from_u64(mask); let expected = expected_mask!(i32, mask); let expected = m32x16::from_cast(i32x16::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, actual); } #[test] fn test_mask_i32() { let mask = 0b01010101_01010101; let actual = Int32Type::mask_from_u64(mask); let expected = expected_mask!(i32, mask); let expected = m32x16::from_cast(i32x16::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, 
actual); } #[test] fn test_mask_u16() { let mask = 0b01010101_01010101_10101010_10101010; let actual = UInt16Type::mask_from_u64(mask); let expected = expected_mask!(i16, mask); dbg!(&expected); let expected = m16x32::from_cast(i16x32::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, actual); } #[test] fn test_mask_i8() { let mask = 0b01010101_01010101_10101010_10101010_01010101_01010101_10101010_10101010; let actual = Int8Type::mask_from_u64(mask); let expected = expected_mask!(i8, mask); let expected = m8x64::from_cast(i8x64::from_slice_unaligned(expected.as_slice())); assert_eq!(expected, actual); } }
36.117537
111
0.564595
90d1a8b7c8b599572b74b19554e422d014bec1e5
3,619
// Integration tests for the `managed` pool: a scripted `Manager` whose
// create/recycle results are fed in over channels, so the test controls
// exactly when object creation succeeds or fails.
#[cfg(feature = "managed")]
mod tests {
    use std::sync::Arc;
    use std::time::Duration;

    use async_trait::async_trait;
    use tokio::sync::mpsc::{channel, Receiver, Sender};
    use tokio::sync::Mutex;
    use tokio::task::yield_now;
    use tokio::time::timeout;

    use deadpool::managed::{RecycleError, RecycleResult};

    type Pool = deadpool::managed::Pool<Manager>;

    // Pool manager whose `create`/`recycle` block until the test pushes a
    // result through the corresponding channel.
    #[derive(Clone)]
    struct Manager {
        // next results for `create`, consumed one per call
        create_rx: Arc<Mutex<Receiver<Result<(), ()>>>>,
        // next results for `recycle`, consumed one per call
        recycle_rx: Arc<Mutex<Receiver<Result<(), ()>>>>,
        remote_control: RemoteControl,
    }

    // Sending half of the channels above; cloned out of the manager so the
    // test body can script outcomes after the pool takes ownership.
    #[derive(Clone)]
    struct RemoteControl {
        create_tx: Sender<Result<(), ()>>,
        recycle_tx: Sender<Result<(), ()>>,
    }

    impl RemoteControl {
        // Make the next `create` call succeed.
        pub fn create_ok(&mut self) {
            self.create_tx.try_send(Ok(())).unwrap();
        }
        // Make the next `create` call fail.
        pub fn create_err(&mut self) {
            self.create_tx.try_send(Err(())).unwrap();
        }
        /*
        pub fn recycle_ok(&mut self) {
            self.recycle_tx.try_send(Ok(())).unwrap();
        }
        pub fn recycle_err(&mut self) {
            self.recycle_tx.try_send(Err(())).unwrap();
        }
        */
    }

    impl Manager {
        pub fn new() -> Self {
            let (create_tx, create_rx) = channel(16);
            let (recycle_tx, recycle_rx) = channel(16);
            Self {
                create_rx: Arc::new(Mutex::new(create_rx)),
                recycle_rx: Arc::new(Mutex::new(recycle_rx)),
                remote_control: RemoteControl {
                    create_tx,
                    recycle_tx,
                },
            }
        }
    }

    #[async_trait]
    impl deadpool::managed::Manager for Manager {
        type Type = ();
        type Error = ();
        async fn create(&self) -> Result<(), ()> {
            // Blocks until the test supplies an outcome via `RemoteControl`.
            self.create_rx.lock().await.recv().await.unwrap()
        }
        async fn recycle(&self, _conn: &mut ()) -> RecycleResult<()> {
            match self.recycle_rx.lock().await.recv().await.unwrap() {
                Ok(()) => Ok(()),
                Err(e) => Err(RecycleError::Backend(e)),
            }
        }
    }

    // When the pool is drained, all connections fail to create and the
    // callers that are still waiting must be handed the creation slot, so a
    // later successful create lets them obtain an object instead of waiting
    // forever (regression test against a drained-pool deadlock).
    //
    // `#[tokio::main(flavor = "current_thread")]` above `#[test]` turns the
    // async fn into a sync test running on a single-threaded runtime —
    // presumably chosen so `yield_now` gives deterministic interleaving.
    #[tokio::main(flavor = "current_thread")]
    #[test]
    async fn test_pool_drained() {
        let manager = Manager::new();
        let mut rc = manager.remote_control.clone();
        let pool = Pool::builder(manager).max_size(1).build().unwrap();
        let pool_clone = pool.clone();
        // let first task grab the only connection
        let get_1 = tokio::spawn(async move {
            pool_clone.get().await.unwrap();
        });
        yield_now().await;
        assert_eq!(pool.status().size, 1);
        assert_eq!(pool.status().available, 0);
        // let second task wait for the connection
        let pool_clone = pool.clone();
        let get_2 = tokio::spawn(async move {
            pool_clone.get().await.unwrap();
        });
        yield_now().await;
        assert_eq!(pool.status().size, 1);
        // one waiter beyond capacity shows up as negative availability
        assert_eq!(pool.status().available, -1);
        // first task receives an error
        rc.create_err();
        assert!(get_1.await.is_err());
        // the second task should now be able to create an object
        rc.create_ok();
        let result = timeout(Duration::from_millis(10), get_2).await;
        assert!(result.is_ok(), "get_2 should not time out");
        assert!(result.unwrap().is_ok(), "get_2 should receive an object");
        assert_eq!(pool.status().size, 1);
        assert_eq!(pool.status().available, 1);
    }
}
32.603604
75
0.540481
69bb2e1e694a01488b7821041518d113031a2e39
7,422
//! const fn combinations iter adapter //! //! # Examples //! //! ``` //! use const_combinations::IterExt; //! //! let mut combinations = (1..5).combinations(); //! assert_eq!(combinations.next(), Some([1, 2, 3])); //! assert_eq!(combinations.next(), Some([1, 2, 4])); //! assert_eq!(combinations.next(), Some([1, 3, 4])); //! assert_eq!(combinations.next(), Some([2, 3, 4])); //! assert_eq!(combinations.next(), None); //! ``` #![no_std] #![feature(maybe_uninit_uninit_array)] extern crate alloc; mod combinations; mod permutations; pub use combinations::{Combinations, SliceCombinations}; pub use permutations::{Permutations, SlicePermutations}; /// An extension trait adding `combinations` and `permutations` to `Iterator`. pub trait IterExt: Iterator { /// Return an iterator adaptor that iterates over the k-length combinations of /// the elements from an iterator. /// /// The iterator produces a new array per iteration, and clones the iterator /// elements. If `K` is greater than the length of the input iterator the /// resulting iterator adaptor will yield no items. /// /// # Examples /// /// ``` /// use const_combinations::IterExt; /// /// let mut combinations = (1..5).combinations(); /// assert_eq!(combinations.next(), Some([1, 2, 3])); /// assert_eq!(combinations.next(), Some([1, 2, 4])); /// assert_eq!(combinations.next(), Some([1, 3, 4])); /// assert_eq!(combinations.next(), Some([2, 3, 4])); /// assert_eq!(combinations.next(), None); /// ``` /// /// Note: Combinations does not take into account the equality of the iterated values. 
/// /// ``` /// # use const_combinations::IterExt; /// let mut combinations = vec![1, 2, 2].into_iter().combinations(); /// assert_eq!(combinations.next(), Some([1, 2])); // Note: these are the same /// assert_eq!(combinations.next(), Some([1, 2])); // Note: these are the same /// assert_eq!(combinations.next(), Some([2, 2])); /// assert_eq!(combinations.next(), None); /// ``` fn combinations<const K: usize>(self) -> Combinations<Self, K> where Self: Sized, Self::Item: Clone, { Combinations::new(self) } /// Return an iterator adaptor that iterates over the k-length permutations of /// the elements from an iterator. /// /// The iterator produces a new array per iteration, and clones the iterator /// elements. If `K` is greater than the length of the input iterator the /// resulting iterator adaptor will yield no items. /// /// # Examples /// /// ``` /// # use const_combinations::IterExt; /// let mut permutations = (0..3).permutations(); /// assert_eq!(permutations.next(), Some([0, 1])); /// assert_eq!(permutations.next(), Some([1, 0])); /// assert_eq!(permutations.next(), Some([0, 2])); /// assert_eq!(permutations.next(), Some([2, 0])); /// assert_eq!(permutations.next(), Some([1, 2])); /// assert_eq!(permutations.next(), Some([2, 1])); /// assert_eq!(permutations.next(), None); /// ``` /// /// Note: Permutations does not take into account the equality of the iterated values. /// /// ``` /// # use const_combinations::IterExt; /// let mut permutations = vec![2, 2].into_iter().permutations(); /// assert_eq!(permutations.next(), Some([2, 2])); // Note: these are the same /// assert_eq!(permutations.next(), Some([2, 2])); // Note: these are the same /// assert_eq!(permutations.next(), None); /// ``` fn permutations<const K: usize>(self) -> Permutations<Self, K> where Self: Sized, Self::Item: Clone, { Permutations::new(self) } } impl<I> IterExt for I where I: Iterator {} /// An extension trait adding `combinations` and `permutations` to `Slice`. 
pub trait SliceExt<T> { /// Return an iterator that iterates over the k-length combinations of /// the elements from a slice. /// /// The iterator produces a new array per iteration, and returns references to the /// elements of the slice. If `K` is greater than the length of the input slice the /// resulting iterator will yield no items. /// /// # Examples /// /// ``` /// use const_combinations::SliceExt; /// /// let mut combinations = [1, 2, 3, 4].combinations(); /// assert_eq!(combinations.next(), Some([&1, &2, &3])); /// assert_eq!(combinations.next(), Some([&1, &2, &4])); /// assert_eq!(combinations.next(), Some([&1, &3, &4])); /// assert_eq!(combinations.next(), Some([&2, &3, &4])); /// assert_eq!(combinations.next(), None); /// ``` /// /// Note: Combinations does not take into account the equality of the slice elements. /// /// ``` /// # use const_combinations::SliceExt; /// let mut combinations = [1, 2, 2].combinations(); /// assert_eq!(combinations.next(), Some([&1, &2])); // Note: these are the same /// assert_eq!(combinations.next(), Some([&1, &2])); // Note: these are the same /// assert_eq!(combinations.next(), Some([&2, &2])); /// assert_eq!(combinations.next(), None); /// ``` fn combinations<const K: usize>(&self) -> SliceCombinations<T, K>; /// Return an iterator that iterates over the k-length permutations of /// the elements from a slice. /// /// The iterator produces a new array per iteration, and clones the iterator /// elements. If `K` is greater than the length of the input slice the /// resulting iterator adaptor will yield no items. 
/// /// # Examples /// /// ``` /// # use const_combinations::SliceExt; /// let mut permutations = [0, 1, 2].permutations(); /// assert_eq!(permutations.next(), Some([&0, &1])); /// assert_eq!(permutations.next(), Some([&1, &0])); /// assert_eq!(permutations.next(), Some([&0, &2])); /// assert_eq!(permutations.next(), Some([&2, &0])); /// assert_eq!(permutations.next(), Some([&1, &2])); /// assert_eq!(permutations.next(), Some([&2, &1])); /// assert_eq!(permutations.next(), None); /// ``` /// /// Note: Permutations does not take into account the equality of the slice elements. /// /// ``` /// # use const_combinations::SliceExt; /// let mut permutations = [2, 2].permutations(); /// assert_eq!(permutations.next(), Some([&2, &2])); // Note: these are the same /// assert_eq!(permutations.next(), Some([&2, &2])); // Note: these are the same /// assert_eq!(permutations.next(), None); /// ``` fn permutations<const K: usize>(&self) -> SlicePermutations<T, K>; } impl<T> SliceExt<T> for [T] { fn combinations<const K: usize>(&self) -> SliceCombinations<T, K> { SliceCombinations::new(self) } fn permutations<const K: usize>(&self) -> SlicePermutations<T, K> { SlicePermutations::new(self) } } fn make_array<T, F, const N: usize>(f: F) -> [T; N] where F: Fn(usize) -> T, { use core::mem::MaybeUninit; // Create the result array based on the indices let mut out: [MaybeUninit<T>; N] = MaybeUninit::uninit_array(); // NOTE: this clippy attribute can be removed once we can `collect` into `[usize; K]`. #[allow(clippy::clippy::needless_range_loop)] for i in 0..N { out[i] = MaybeUninit::new(f(i)); } unsafe { out.as_ptr().cast::<[T; N]>().read() } }
37.11
90
0.598356
899d0cf864e7dcb9b77f0a984c4da0fb8ca63526
3,203
use std::collections::HashMap; use crate::errors::Result; use crate::request::get_requests::get; use crate::request::put_requests::put; use crate::{Client, QueryMeta, QueryOptions, WriteMeta, WriteOptions}; #[serde(default)] #[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize, Debug)] pub struct SessionID { pub ID: String, } #[serde(default)] #[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize, Debug)] pub struct SessionEntry { pub CreateIndex: Option<u64>, pub ID: Option<String>, pub Name: Option<String>, pub Node: Option<String>, pub LockDelay: Option<u64>, //TODO: Change this to a Durations pub Behavior: Option<String>, pub Checks: Option<Vec<String>>, pub TTL: Option<String>, } pub trait Session { fn create( &self, session: &SessionEntry, options: Option<&WriteOptions>, ) -> Result<(SessionEntry, WriteMeta)>; fn destroy(&self, id: &str, options: Option<&WriteOptions>) -> Result<(bool, WriteMeta)>; fn info( &self, id: &str, options: Option<&QueryOptions>, ) -> Result<(Vec<SessionEntry>, QueryMeta)>; fn list(&self, options: Option<&QueryOptions>) -> Result<(Vec<SessionEntry>, QueryMeta)>; fn node( &self, node: &str, options: Option<&QueryOptions>, ) -> Result<(Vec<SessionEntry>, QueryMeta)>; fn renew( &self, id: &str, options: Option<&WriteOptions>, ) -> Result<(Vec<SessionEntry>, WriteMeta)>; } impl Session for Client { fn create( &self, session: &SessionEntry, options: Option<&WriteOptions>, ) -> Result<(SessionEntry, WriteMeta)> { put( "/v1/session/create", Some(session), &self.config, HashMap::new(), options, ) } fn destroy(&self, id: &str, options: Option<&WriteOptions>) -> Result<(bool, WriteMeta)> { let path = format!("/v1/session/destroy/{}", id); put( &path, None as Option<&()>, &self.config, HashMap::new(), options, ) } fn info( &self, id: &str, options: Option<&QueryOptions>, ) -> Result<(Vec<SessionEntry>, QueryMeta)> { let path = format!("/v1/session/info/{}", id); get(&path, &self.config, HashMap::new(), options) } fn 
list(&self, options: Option<&QueryOptions>) -> Result<(Vec<SessionEntry>, QueryMeta)> { get("/v1/session/list", &self.config, HashMap::new(), options) } fn node( &self, node: &str, options: Option<&QueryOptions>, ) -> Result<(Vec<SessionEntry>, QueryMeta)> { let path = format!("/v1/session/node/{}", node); get(&path, &self.config, HashMap::new(), options) } fn renew( &self, id: &str, options: Option<&WriteOptions>, ) -> Result<(Vec<SessionEntry>, WriteMeta)> { let path = format!("/v1/session/renew/{}", id); put( &path, None as Option<&()>, &self.config, HashMap::new(), options, ) } }
28.855856
94
0.558851
11b2e9d601653f9f02b1f5d5c2e5c330ae2e3eb3
309
// run-rustfix #[allow(unused_parens)] fn main() { let _ = +1; //~ ERROR leading `+` is not supported let _ = (1.0 + +2.0) * +3.0; //~ ERROR leading `+` is not supported //~| ERROR leading `+` is not supported let _ = [+3, 4+6]; //~ ERROR leading `+` is not supported }
34.333333
71
0.517799
f86b0bc0c33cf219a3de9f60e8afcb1dcd39f21b
1,475
// FFI bindings for Ruby's "typed data" C API (`rb_data_type_t` and the
// functions that wrap/check native objects). The C prototypes are quoted
// above each declaration.
use rubysys::types::{c_char, c_int, c_void, size_t, Value};

extern "C" {
    // void *
    // rb_check_typeddata(VALUE obj, const rb_data_type_t *data_type)
    pub fn rb_check_typeddata(object: Value, data_type: *const RbDataType) -> *mut c_void;
    // int
    // rb_typeddata_inherited_p(const rb_data_type_t *child, const rb_data_type_t *parent)
    pub fn rb_typeddata_inherited_p(child: *const RbDataType, parent: *const RbDataType) -> c_int;
    // int
    // rb_typeddata_is_kind_of(VALUE obj, const rb_data_type_t *data_type)
    pub fn rb_typeddata_is_kind_of(object: Value, data_type: *const RbDataType) -> c_int;
    // VALUE
    // rb_data_typed_object_wrap(VALUE klass, void *datap, const rb_data_type_t *type)
    pub fn rb_data_typed_object_wrap(
        klass: Value,
        data: *mut c_void,
        data_type: *const RbDataType,
    ) -> Value;
}

/// Function table embedded in `rb_data_type_t`. `#[repr(C)]`: field order and
/// layout must match the C struct exactly.
#[repr(C)]
pub struct RbDataTypeFunction {
    /// `dmark` callback pointer (may be NULL, hence `Option`).
    pub dmark: Option<extern "C" fn(*mut c_void)>,
    /// `dfree` callback pointer (may be NULL).
    pub dfree: Option<extern "C" fn(*mut c_void)>,
    /// `dsize` callback pointer returning a `size_t` (may be NULL).
    pub dsize: Option<extern "C" fn(*const c_void) -> size_t>,
    /// Reserved slots, mirroring the C definition.
    pub reserved: [*mut c_void; 2],
}

// NOTE(review): the raw pointers in `reserved` make this type !Send/!Sync by
// default; these impls assert thread-safety — confirm that matches how the
// Ruby VM actually uses the callback table.
unsafe impl Send for RbDataTypeFunction {}
unsafe impl Sync for RbDataTypeFunction {}

/// Rust mirror of Ruby's `rb_data_type_t` descriptor. `#[repr(C)]` keeps the
/// layout identical to the C side so pointers can be passed to the functions
/// declared above.
#[repr(C)]
pub struct RbDataType {
    /// `wrap_struct_name`: C string identifying the wrapped struct.
    pub wrap_struct_name: *const c_char,
    /// `function`: mark/free/size callbacks.
    pub function: RbDataTypeFunction,
    /// `parent`: optional parent type descriptor (NULL when none).
    pub parent: *const RbDataType,
    /// `data`: arbitrary user data pointer.
    pub data: *mut c_void,
    /// `flags` field, carried as a `Value` in these bindings.
    pub flags: Value,
}

// NOTE(review): same thread-safety assertion as above — grounded only in the
// original bindings, not verified here.
unsafe impl Send for RbDataType {}
unsafe impl Sync for RbDataType {}
33.522727
98
0.696271
ab1ab431ec907f6be811d646c51f85357dc70afe
127
use arrows::send; use arrows::Addr; use arrows::Msg; fn main() { let _rs = send!(Addr::listen_addr(), Msg::shutdown()); }
15.875
58
0.629921
232ec9aac6d0bea7caefe158f7497f3916595932
134
// Diesel `table!` schema for the `books` table, keyed by `id`.
// NOTE(review): keep this column list in sync with the corresponding
// database migration.
// (Plain `//` comments are stripped by the lexer before macro expansion,
// so they are safe inside the `table!` invocation.)
table! {
    books (id) {
        // primary key
        id -> Int4,
        title -> Varchar,
        author -> Varchar,
        published -> Bool,
    }
}
14.888889
26
0.41791
dd19590e017fdfd54cf10cb1511fa55b3ecd709e
148,686
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use crate::models::*; pub mod domains { use crate::models::*; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, ) -> std::result::Result<Domain, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Domain = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] 
// NOTE(review): This file appears to be machine-generated (AutoRust-style Azure SDK
// client for the Microsoft.EventGrid resource provider). Every operation follows the
// same template: build the ARM URL, attach a bearer token when a credential is
// configured, append `api-version`, send the request, and map HTTP status codes onto
// a typed response or a per-operation `Error` enum. Prefer regenerating from the
// OpenAPI spec over hand-editing.
// The fragment below first finishes the previous operation's error enum, then defines
// `domains::create_or_update` (PUT …/domains/{domainName} with `Domain` as JSON body).
ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, domain_info: &Domain, ) -> std::result::Result<Domain, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(domain_info).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: Domain =
// (continuation) Deserialize the 201 Created body into `Domain`; any other status
// becomes `Error::DefaultResponse`. Next: `domains::update` (PATCH), which maps
// 200 onto an empty `Response::Ok200` and 201 onto the updated `Domain`.
serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(create_or_update::Error::DefaultResponse { status_code }), } } pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, domain_update_parameters: &DomainUpdateParameters, ) -> std::result::Result<update::Response, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version",
// (continuation) `update` response mapping (200 → Ok200, 201 → Created201(Domain)),
// its `Response`/`Error` boilerplate, and the start of `domains::delete`
// (DELETE …/domains/{domainName} with an empty request body).
operation_config.api_version()); let req_body = azure_core::to_json(domain_update_parameters).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(update::Response::Ok200), http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: Domain = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Created201(rsp_value)) } status_code => Err(update::Error::DefaultResponse { status_code }), } } pub mod update { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, Created201(Domain), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url =
// (continuation) `delete` maps 200/202/204 onto an empty tri-state `Response`
// (202 Accepted presumably indicates an asynchronous deletion — confirm against the
// service spec). The trailing tokens open the signature of
// `domains::list_by_subscription`, continued on the next line.
url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => Err(delete::Error::DefaultResponse { status_code }), } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_subscription( operation_config:
// (continuation) `domains::list_by_subscription` — GET
// …/subscriptions/{id}/providers/Microsoft.EventGrid/domains with optional OData
// `$filter` and `$top` query parameters; 200 deserializes to `DomainsListResult`.
&crate::OperationConfig, subscription_id: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<DomainsListResult, list_by_subscription::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.EventGrid/domains", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_subscription::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_subscription::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainsListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_subscription::Error::DefaultResponse { status_code }), } } pub mod list_by_subscription { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse {
// (continuation) `list_by_subscription` error-enum boilerplate, then
// `domains::list_by_resource_group` — the same GET/list shape, scoped to a
// resource group.
status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource_group( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<DomainsListResult, list_by_resource_group::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains", operation_config.base_path(), subscription_id, resource_group_name ); let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource_group::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body)
// (continuation) Finish `list_by_resource_group` (200 → `DomainsListResult`) and its
// error enum, then `domains::list_shared_access_keys` — POST …/listKeys with an
// empty body; the explicit `Content-Length: 0` header is set because the POST
// carries no payload.
.map_err(list_by_resource_group::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource_group::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainsListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_resource_group::Error::DefaultResponse { status_code }), } } pub mod list_by_resource_group { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_shared_access_keys( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, ) -> std::result::Result<DomainSharedAccessKeys, list_shared_access_keys::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/listKeys", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(list_shared_access_keys::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential()
// (continuation) `list_shared_access_keys` response mapping (200 →
// `DomainSharedAccessKeys`), its error enum, and the start of
// `domains::regenerate_key` (POST …/regenerateKey with a JSON body naming the key
// to roll).
{ let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_shared_access_keys::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_shared_access_keys::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_shared_access_keys::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainSharedAccessKeys = serde_json::from_slice(rsp_body) .map_err(|source| list_shared_access_keys::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_shared_access_keys::Error::DefaultResponse { status_code }), } } pub mod list_shared_access_keys { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn regenerate_key( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, regenerate_key_request:
// (continuation) `regenerate_key` body: serializes `DomainRegenerateKeyRequest`,
// POSTs it, and maps 200 onto the refreshed `DomainSharedAccessKeys`.
&DomainRegenerateKeyRequest, ) -> std::result::Result<DomainSharedAccessKeys, regenerate_key::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/regenerateKey", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(regenerate_key::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(regenerate_key::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(regenerate_key_request).map_err(regenerate_key::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(regenerate_key::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(regenerate_key::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainSharedAccessKeys = serde_json::from_slice(rsp_body).map_err(|source| regenerate_key::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(regenerate_key::Error::DefaultResponse { status_code }), } } pub mod regenerate_key { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")]
// (continuation) The remaining `regenerate_key` error variants close the `domains`
// module. `pub mod domain_topics` begins with `get`
// (GET …/domains/{domainName}/topics/{domainTopicName} → `DomainTopic` on 200).
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod domain_topics { use crate::models::*; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, domain_topic_name: &str, ) -> std::result::Result<DomainTopic, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name, domain_topic_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainTopic = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source,
// (continuation) `get` error enum, then `domain_topics::create_or_update` — note the
// PUT carries an empty body (presumably domain topics have no writable payload —
// confirm against the service spec) and only 201 Created is treated as success.
rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, domain_topic_name: &str, ) -> std::result::Result<DomainTopic, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name, domain_topic_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body =
// (continuation) `create_or_update` response mapping (201 → `DomainTopic`) plus its
// error enum, then `domain_topics::delete`
// (DELETE …/topics/{domainTopicName}, 200/202/204 tri-state as for domains).
bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: DomainTopic = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(create_or_update::Error::DefaultResponse { status_code }), } } pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, domain_topic_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/topics/{}", operation_config.base_path(), subscription_id, resource_group_name, domain_name, domain_topic_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder =
// (continuation) `delete` status mapping, its `Response`/`Error` boilerplate, and the
// signature of `domain_topics::list_by_domain`
// (GET …/domains/{domainName}/topics with optional `$filter`/`$top`).
req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => Err(delete::Error::DefaultResponse { status_code }), } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_domain( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, filter: Option<&str>, top: Option<i32>, ) ->
// (continuation) `list_by_domain` body: builds the GET request and maps 200 onto
// `DomainTopicsListResult`; its error enum begins at the end of this line.
std::result::Result<DomainTopicsListResult, list_by_domain::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/topics", operation_config.base_path(), subscription_id, resource_group_name, domain_name ); let mut url = url::Url::parse(url_str).map_err(list_by_domain::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_domain::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_domain::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_domain::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: DomainTopicsListResult = serde_json::from_slice(rsp_body).map_err(|source| list_by_domain::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_domain::Error::DefaultResponse { status_code }), } } pub mod list_by_domain { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod event_subscriptions { use crate::models::*; pub async fn get( operation_config: &crate::OperationConfig, scope: &str, event_subscription_name: &str, ) -> std::result::Result<EventSubscription, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}", operation_config.base_path(), scope, event_subscription_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: EventSubscription = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } 
status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn create_or_update( operation_config: &crate::OperationConfig, scope: &str, event_subscription_name: &str, event_subscription_info: &EventSubscription, ) -> std::result::Result<EventSubscription, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}", operation_config.base_path(), scope, event_subscription_name ); let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(create_or_update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(event_subscription_info).map_err(create_or_update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(create_or_update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: EventSubscription = serde_json::from_slice(rsp_body) .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(create_or_update::Error::DefaultResponse { status_code }), } } pub mod create_or_update { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, scope: &str, event_subscription_name: &str, event_subscription_update_parameters: &EventSubscriptionUpdateParameters, ) -> std::result::Result<EventSubscription, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}", operation_config.base_path(), scope, event_subscription_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential 
.get_token(operation_config.token_credential_resource())
            .await
            .map_err(update::Error::GetTokenError)?;
        // Attach the bearer token only when a credential is configured.
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    // The caller-supplied update parameters are serialized as the JSON request body.
    let req_body = azure_core::to_json(event_subscription_update_parameters).map_err(update::Error::SerializeError)?;
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
    match rsp.status() {
        // 201 Created is the only status treated as success here; everything
        // else maps to Error::DefaultResponse.
        http::StatusCode::CREATED => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscription =
                serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(update::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `update` operation above (generated; one variant per
/// failure stage of the request pipeline).
pub mod update {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Issues `DELETE {base}/{scope}/providers/Microsoft.EventGrid/eventSubscriptions/{name}`
/// with an empty body. Returns a variant of `delete::Response` for the three
/// accepted success statuses (200 / 202 / 204).
pub async fn delete(
    operation_config: &crate::OperationConfig,
    scope: &str,
    event_subscription_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}",
        operation_config.base_path(),
        scope,
        event_subscription_name
    );
    let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(delete::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
    match rsp.status() {
        // DELETE may complete synchronously (200/204) or be accepted for
        // asynchronous processing (202); each maps to its own variant.
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        status_code => Err(delete::Error::DefaultResponse { status_code }),
    }
}
/// Response and error types for the `delete` operation above.
pub mod delete {
    use crate::{models, models::*};
    // One variant per accepted HTTP success status.
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Issues `POST .../eventSubscriptions/{name}/getFullUrl` with an empty body
/// (explicit `Content-Length: 0`) and deserializes the 200 OK response into
/// an `EventSubscriptionFullUrl`.
pub async fn get_full_url(
    operation_config: &crate::OperationConfig,
    scope: &str,
    event_subscription_name: &str,
) -> std::result::Result<EventSubscriptionFullUrl, get_full_url::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/{}/providers/Microsoft.EventGrid/eventSubscriptions/{}/getFullUrl",
        operation_config.base_path(),
        scope,
        event_subscription_name
    );
    let mut url = url::Url::parse(url_str).map_err(get_full_url::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(get_full_url::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    // POST with an empty body: advertise a zero Content-Length explicitly.
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).map_err(get_full_url::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(get_full_url::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionFullUrl =
                serde_json::from_slice(rsp_body).map_err(|source| get_full_url::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(get_full_url::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `get_full_url` operation above.
pub mod get_full_url {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Issues `GET {base}/subscriptions/{id}/providers/Microsoft.EventGrid/eventSubscriptions`,
/// forwarding the optional OData `$filter` and `$top` query parameters, and
/// deserializes the 200 OK response into an `EventSubscriptionsListResult`.
pub async fn list_global_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_subscription::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.EventGrid/eventSubscriptions",
        operation_config.base_path(),
        subscription_id
    );
    let mut url = url::Url::parse(url_str).map_err(list_global_by_subscription::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_global_by_subscription::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    // Optional query parameters are only appended when supplied by the caller.
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_global_by_subscription::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_global_by_subscription::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_global_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_global_by_subscription::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_global_by_subscription` operation above.
pub mod list_global_by_subscription {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Same as `list_global_by_subscription` but scoped to a single topic type:
/// `GET .../providers/Microsoft.EventGrid/topicTypes/{topic_type_name}/eventSubscriptions`.
pub async fn list_global_by_subscription_for_topic_type(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    topic_type_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_subscription_for_topic_type::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.EventGrid/topicTypes/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        topic_type_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_global_by_subscription_for_topic_type::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_global_by_subscription_for_topic_type::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_global_by_subscription_for_topic_type::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_global_by_subscription_for_topic_type::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_global_by_subscription_for_topic_type::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_global_by_subscription_for_topic_type::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_global_by_subscription_for_topic_type` operation above.
pub mod list_global_by_subscription_for_topic_type {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions under a resource group:
/// `GET {base}/subscriptions/{id}/resourceGroups/{rg}/providers/Microsoft.EventGrid/eventSubscriptions`.
pub async fn list_global_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        resource_group_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_global_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_global_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_global_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_global_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_global_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
status_code => Err(list_global_by_resource_group::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_global_by_resource_group` operation above.
pub mod list_global_by_resource_group {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions for a topic type under a resource group:
/// `GET .../resourceGroups/{rg}/providers/Microsoft.EventGrid/topicTypes/{topic_type}/eventSubscriptions`,
/// with optional `$filter`/`$top` query parameters.
pub async fn list_global_by_resource_group_for_topic_type(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    topic_type_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_global_by_resource_group_for_topic_type::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topicTypes/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        topic_type_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_global_by_resource_group_for_topic_type::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_global_by_resource_group_for_topic_type::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_global_by_resource_group_for_topic_type::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_global_by_resource_group_for_topic_type::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_global_by_resource_group_for_topic_type::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_global_by_resource_group_for_topic_type::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_global_by_resource_group_for_topic_type` operation above.
pub mod list_global_by_resource_group_for_topic_type {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions in a specific Azure location (region):
/// `GET .../providers/Microsoft.EventGrid/locations/{location}/eventSubscriptions`,
/// with optional `$filter`/`$top` query parameters.
pub async fn list_regional_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    location: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_subscription::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.EventGrid/locations/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        location
    );
    let mut url = url::Url::parse(url_str).map_err(list_regional_by_subscription::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_regional_by_subscription::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_regional_by_subscription::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_regional_by_subscription::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_regional_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_regional_by_subscription::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_regional_by_subscription` operation above.
pub mod list_regional_by_subscription {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions in a location, scoped to a resource group:
/// `GET .../resourceGroups/{rg}/providers/Microsoft.EventGrid/locations/{location}/eventSubscriptions`.
pub async fn list_regional_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    location: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/locations/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        location
    );
    let mut url = url::Url::parse(url_str).map_err(list_regional_by_resource_group::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_regional_by_resource_group::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body =
bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_regional_by_resource_group::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_regional_by_resource_group::Error::ExecuteRequestError)?;
    match rsp.status() {
        // 200 OK is the only success status; the body is the JSON list result.
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_regional_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_regional_by_resource_group::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_regional_by_resource_group` operation above.
pub mod list_regional_by_resource_group {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions in a location for a given topic type:
/// `GET .../providers/Microsoft.EventGrid/locations/{location}/topicTypes/{topic_type}/eventSubscriptions`,
/// with optional `$filter`/`$top` query parameters.
pub async fn list_regional_by_subscription_for_topic_type(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    location: &str,
    topic_type_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_subscription_for_topic_type::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.EventGrid/locations/{}/topicTypes/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        location,
        topic_type_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_regional_by_subscription_for_topic_type::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_regional_by_subscription_for_topic_type::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder
        .body(req_body)
        .map_err(list_regional_by_subscription_for_topic_type::Error::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .map_err(list_regional_by_subscription_for_topic_type::Error::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_regional_by_subscription_for_topic_type::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_regional_by_subscription_for_topic_type::Error::DefaultResponse { status_code }),
    }
}
/// Error type for the `list_regional_by_subscription_for_topic_type` operation above.
pub mod list_regional_by_subscription_for_topic_type {
    use crate::{models, models::*};
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
/// Lists event subscriptions in a location for a topic type, scoped to a
/// resource group:
/// `GET .../resourceGroups/{rg}/providers/Microsoft.EventGrid/locations/{location}/topicTypes/{topic_type}/eventSubscriptions`.
/// (Continues past the end of this chunk.)
pub async fn list_regional_by_resource_group_for_topic_type(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    location: &str,
    topic_type_name: &str,
    filter: Option<&str>,
    top: Option<i32>,
) -> std::result::Result<EventSubscriptionsListResult, list_regional_by_resource_group_for_topic_type::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/locations/{}/topicTypes/{}/eventSubscriptions",
        operation_config.base_path(),
        subscription_id,
        resource_group_name,
        location,
        topic_type_name
    );
    let mut url = url::Url::parse(url_str).map_err(list_regional_by_resource_group_for_topic_type::Error::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .map_err(list_regional_by_resource_group_for_topic_type::Error::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(filter) = filter {
        url.query_pairs_mut().append_pair("$filter", filter);
    }
    if let Some(top) = top {
        url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder =
req_builder.uri(url.as_str()); let req = req_builder .body(req_body) .map_err(list_regional_by_resource_group_for_topic_type::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_regional_by_resource_group_for_topic_type::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_regional_by_resource_group_for_topic_type::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_regional_by_resource_group_for_topic_type::Error::DefaultResponse { status_code }), } } pub mod list_regional_by_resource_group_for_topic_type { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, provider_namespace: &str, resource_type_name: &str, resource_name: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<EventSubscriptionsListResult, list_by_resource::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/{}/{}/{}/providers/Microsoft.EventGrid/eventSubscriptions", operation_config.base_path(), subscription_id, 
resource_group_name, provider_namespace, resource_type_name, resource_name ); let mut url = url::Url::parse(url_str).map_err(list_by_resource::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_resource::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_resource::Error::DefaultResponse { status_code }), } } pub mod list_by_resource { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to 
serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_domain_topic( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, domain_name: &str, topic_name: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<EventSubscriptionsListResult, list_by_domain_topic::Error> { let http_client = operation_config.http_client(); let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/domains/{}/topics/{}/providers/Microsoft.EventGrid/eventSubscriptions" , operation_config . base_path () , subscription_id , resource_group_name , domain_name , topic_name) ; let mut url = url::Url::parse(url_str).map_err(list_by_domain_topic::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_domain_topic::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_domain_topic::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_domain_topic::Error::ExecuteRequestError)?; 
match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: EventSubscriptionsListResult = serde_json::from_slice(rsp_body)
                .map_err(|source| list_by_domain_topic::Error::DeserializeError(source, rsp_body.clone()))?;
            Ok(rsp_value)
        }
        status_code => Err(list_by_domain_topic::Error::DefaultResponse { status_code }),
    }
}
/// Error namespace for [`list_by_domain_topic`].
pub mod list_by_domain_topic {
    use crate::{models, models::*};
    #[derive(Debug, thiserror::Error)]
    pub enum Error {
        #[error("HTTP status code {}", status_code)]
        DefaultResponse { status_code: http::StatusCode },
        #[error("Failed to parse request URL: {0}")]
        ParseUrlError(url::ParseError),
        #[error("Failed to build request: {0}")]
        BuildRequestError(http::Error),
        #[error("Failed to execute request: {0}")]
        ExecuteRequestError(azure_core::HttpError),
        #[error("Failed to serialize request body: {0}")]
        SerializeError(serde_json::Error),
        #[error("Failed to deserialize response: {0}, body: {1:?}")]
        DeserializeError(serde_json::Error, bytes::Bytes),
        #[error("Failed to get access token: {0}")]
        GetTokenError(azure_core::Error),
    }
}
// end of enclosing event-subscriptions operations module
}
/// Operations for the `Microsoft.EventGrid/operations` provider endpoint.
pub mod operations {
    use crate::models::*;
    /// Lists the operations supported by the Microsoft.EventGrid resource provider.
    pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationsListResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/providers/Microsoft.EventGrid/operations", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: OperationsListResult =
                    serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`list`].
    pub mod list {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// CRUD and key-management operations for Event Grid topics.
pub mod topics {
    use crate::models::*;
    /// Gets the properties of an Event Grid topic.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
    ) -> std::result::Result<Topic, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: Topic =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`get`].
    pub mod get {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Asynchronously creates (or updates) a topic via HTTP PUT, serializing
    /// `topic_info` as the JSON request body. Only 201 Created is treated as
    /// success here.
    pub async fn create_or_update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
        topic_info: &Topic,
    ) -> std::result::Result<Topic, create_or_update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(create_or_update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PUT);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(create_or_update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(topic_info).map_err(create_or_update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(create_or_update::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(create_or_update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: Topic = serde_json::from_slice(rsp_body)
                    .map_err(|source| create_or_update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(create_or_update::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`create_or_update`].
    pub mod create_or_update {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Patches a topic with `topic_update_parameters`. 200 OK yields
    /// [`update::Response::Ok200`] (no body parsed); 201 Created yields the
    /// parsed [`Topic`] in [`update::Response::Created201`].
    pub async fn update(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
        topic_update_parameters: &TopicUpdateParameters,
    ) -> std::result::Result<update::Response, update::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::PATCH);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(update::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(topic_update_parameters).map_err(update::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(update::Response::Ok200),
            http::StatusCode::CREATED => {
                let rsp_body = rsp.body();
                let rsp_value: Topic =
                    serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(update::Response::Created201(rsp_value))
            }
            status_code => Err(update::Error::DefaultResponse { status_code }),
        }
    }
    /// Response / error namespace for [`update`].
    pub mod update {
        use crate::{models, models::*};
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            Created201(Topic),
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Deletes a topic. Success is 202 Accepted (deletion started) or
    /// 204 No Content, distinguished via [`delete::Response`].
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(delete::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
        let rsp =
            http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => Err(delete::Error::DefaultResponse { status_code }),
        }
    }
    /// Response / error namespace for [`delete`].
    pub mod delete {
        use crate::{models, models::*};
        #[derive(Debug)]
        pub enum Response {
            Accepted202,
            NoContent204,
        }
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all topics under the given Azure subscription, with optional
    /// `$filter` / `$top` OData query options.
    pub async fn list_by_subscription(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        filter: Option<&str>,
        top: Option<i32>,
    ) -> std::result::Result<TopicsListResult, list_by_subscription::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.EventGrid/topics",
            operation_config.base_path(),
            subscription_id
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_subscription::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_subscription::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}",
token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        if let Some(filter) = filter {
            url.query_pairs_mut().append_pair("$filter", filter);
        }
        if let Some(top) = top {
            url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_by_subscription::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_subscription::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: TopicsListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_subscription::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_by_subscription::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`list_by_subscription`].
    pub mod list_by_subscription {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists all topics under the given resource group, with optional
    /// `$filter` / `$top` OData query options.
    pub async fn list_by_resource_group(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        filter: Option<&str>,
        top: Option<i32>,
    ) -> std::result::Result<TopicsListResult, list_by_resource_group::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics",
            operation_config.base_path(),
            subscription_id,
            resource_group_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_by_resource_group::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_by_resource_group::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        if let Some(filter) = filter {
            url.query_pairs_mut().append_pair("$filter", filter);
        }
        if let Some(top) = top {
            url.query_pairs_mut().append_pair("$top", top.to_string().as_str());
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_by_resource_group::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_by_resource_group::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: TopicsListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_by_resource_group::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_by_resource_group::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`list_by_resource_group`].
    pub mod list_by_resource_group {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Retrieves the shared access keys of a topic via the `listKeys` POST action.
    pub async fn list_shared_access_keys(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
    ) -> std::result::Result<TopicSharedAccessKeys, list_shared_access_keys::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}/listKeys",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_shared_access_keys::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_shared_access_keys::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        // Body-less POST: Content-Length must be set explicitly to 0.
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder
            .body(req_body)
            .map_err(list_shared_access_keys::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_shared_access_keys::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: TopicSharedAccessKeys = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_shared_access_keys::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_shared_access_keys::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`list_shared_access_keys`].
    pub mod list_shared_access_keys {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Regenerates a shared access key for a topic via the `regenerateKey` POST
    /// action; `regenerate_key_request` is sent as the JSON body.
    pub async fn regenerate_key(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        topic_name: &str,
        regenerate_key_request: &TopicRegenerateKeyRequest,
    ) -> std::result::Result<TopicSharedAccessKeys, regenerate_key::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/topics/{}/regenerateKey",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            topic_name
        );
        let mut url = url::Url::parse(url_str).map_err(regenerate_key::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(regenerate_key::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = azure_core::to_json(regenerate_key_request).map_err(regenerate_key::Error::SerializeError)?;
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(regenerate_key::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(regenerate_key::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: TopicSharedAccessKeys =
                    serde_json::from_slice(rsp_body).map_err(|source| regenerate_key::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(regenerate_key::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`regenerate_key`].
    pub mod regenerate_key {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
    /// Lists the event types supported by an arbitrary resource, identified by
    /// provider namespace, resource type, and resource name. No OData options.
    pub async fn list_event_types(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        provider_namespace: &str,
        resource_type_name: &str,
        resource_name: &str,
    ) -> std::result::Result<EventTypesListResult, list_event_types::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/{}/{}/{}/providers/Microsoft.EventGrid/eventTypes",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            provider_namespace,
            resource_type_name,
            resource_name
        );
        let mut url = url::Url::parse(url_str).map_err(list_event_types::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_event_types::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_event_types::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_event_types::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: EventTypesListResult = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_event_types::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(list_event_types::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`list_event_types`].
    pub mod list_event_types {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse { status_code: http::StatusCode },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
/// Operations on private endpoint connections attached to Event Grid parent
/// resources (`parent_type` / `parent_name` select the owning resource).
pub mod private_endpoint_connections {
    use crate::models::*;
    /// Gets a specific private endpoint connection under the given parent resource.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        parent_type: &str,
        parent_name: &str,
        private_endpoint_connection_name: &str,
    ) -> std::result::Result<PrivateEndpointConnection, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateEndpointConnections/{}",
            operation_config.base_path(),
            subscription_id,
            resource_group_name,
            parent_type,
            parent_name,
            private_endpoint_connection_name
        );
        let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(get::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: PrivateEndpointConnection =
                    serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            status_code => Err(get::Error::DefaultResponse { status_code }),
        }
    }
    /// Error namespace for [`get`].
    pub mod get {
        use crate::{models, models::*};
        #[derive(Debug, thiserror::Error)]
        pub enum Error {
#[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parent_type: &str, parent_name: &str, private_endpoint_connection_name: &str, private_endpoint_connection: &PrivateEndpointConnection, ) -> std::result::Result<update::Response, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateEndpointConnections/{}", operation_config.base_path(), subscription_id, resource_group_name, parent_type, parent_name, private_endpoint_connection_name ); let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(update::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = azure_core::to_json(private_endpoint_connection).map_err(update::Error::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = 
req_builder.body(req_body).map_err(update::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateEndpointConnection = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: PrivateEndpointConnection = serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?; Ok(update::Response::Created201(rsp_value)) } status_code => Err(update::Error::DefaultResponse { status_code }), } } pub mod update { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Ok200(PrivateEndpointConnection), Created201(PrivateEndpointConnection), } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parent_type: &str, parent_name: &str, private_endpoint_connection_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( 
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateEndpointConnections/{}", operation_config.base_path(), subscription_id, resource_group_name, parent_type, parent_name, private_endpoint_connection_name ); let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(delete::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => Err(delete::Error::DefaultResponse { status_code }), } } pub mod delete { use crate::{models, models::*}; #[derive(Debug)] pub enum Response { Accepted202, NoContent204, } #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] 
DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parent_type: &str, parent_name: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<PrivateEndpointConnectionListResult, list_by_resource::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateEndpointConnections", operation_config.base_path(), subscription_id, resource_group_name, parent_type, parent_name ); let mut url = url::Url::parse(url_str).map_err(list_by_resource::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_resource::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateEndpointConnectionListResult = serde_json::from_slice(rsp_body) .map_err(|source| 
list_by_resource::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_resource::Error::DefaultResponse { status_code }), } } pub mod list_by_resource { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod private_link_resources { use crate::models::*; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parent_type: &str, parent_name: &str, private_link_resource_name: &str, ) -> std::result::Result<PrivateLinkResource, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateLinkResources/{}", operation_config.base_path(), subscription_id, resource_group_name, parent_type, parent_name, private_link_resource_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", 
token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateLinkResource = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_by_resource( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, parent_type: &str, parent_name: &str, filter: Option<&str>, top: Option<i32>, ) -> std::result::Result<PrivateLinkResourcesListResult, list_by_resource::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.EventGrid/{}/{}/privateLinkResources", operation_config.base_path(), subscription_id, resource_group_name, parent_type, parent_name ); let mut url = 
url::Url::parse(url_str).map_err(list_by_resource::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_by_resource::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); if let Some(filter) = filter { url.query_pairs_mut().append_pair("$filter", filter); } if let Some(top) = top { url.query_pairs_mut().append_pair("$top", top.to_string().as_str()); } let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_by_resource::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_by_resource::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateLinkResourcesListResult = serde_json::from_slice(rsp_body) .map_err(|source| list_by_resource::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_by_resource::Error::DefaultResponse { status_code }), } } pub mod list_by_resource { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to 
deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } } pub mod topic_types { use crate::models::*; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<TopicTypesListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!("{}/providers/Microsoft.EventGrid/topicTypes", operation_config.base_path(),); let mut url = url::Url::parse(url_str).map_err(list::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(list::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: TopicTypesListResult = serde_json::from_slice(rsp_body).map_err(|source| list::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list::Error::DefaultResponse { status_code }), } } pub mod list { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] 
BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn get(operation_config: &crate::OperationConfig, topic_type_name: &str) -> std::result::Result<TopicTypeInfo, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/providers/Microsoft.EventGrid/topicTypes/{}", operation_config.base_path(), topic_type_name ); let mut url = url::Url::parse(url_str).map_err(get::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(get::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(get::Error::BuildRequestError)?; let rsp = http_client.execute_request(req).await.map_err(get::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: TopicTypeInfo = serde_json::from_slice(rsp_body).map_err(|source| get::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(get::Error::DefaultResponse { status_code }), } } pub mod get { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status 
code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } pub async fn list_event_types( operation_config: &crate::OperationConfig, topic_type_name: &str, ) -> std::result::Result<EventTypesListResult, list_event_types::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/providers/Microsoft.EventGrid/topicTypes/{}/eventTypes", operation_config.base_path(), topic_type_name ); let mut url = url::Url::parse(url_str).map_err(list_event_types::Error::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .map_err(list_event_types::Error::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).map_err(list_event_types::Error::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .map_err(list_event_types::Error::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: EventTypesListResult = 
serde_json::from_slice(rsp_body) .map_err(|source| list_event_types::Error::DeserializeError(source, rsp_body.clone()))?; Ok(rsp_value) } status_code => Err(list_event_types::Error::DefaultResponse { status_code }), } } pub mod list_event_types { use crate::{models, models::*}; #[derive(Debug, thiserror :: Error)] pub enum Error { #[error("HTTP status code {}", status_code)] DefaultResponse { status_code: http::StatusCode }, #[error("Failed to parse request URL: {0}")] ParseUrlError(url::ParseError), #[error("Failed to build request: {0}")] BuildRequestError(http::Error), #[error("Failed to execute request: {0}")] ExecuteRequestError(azure_core::HttpError), #[error("Failed to serialize request body: {0}")] SerializeError(serde_json::Error), #[error("Failed to deserialize response: {0}, body: {1:?}")] DeserializeError(serde_json::Error, bytes::Bytes), #[error("Failed to get access token: {0}")] GetTokenError(azure_core::Error), } } }
49.562
277
0.605477
08878f250bf09c148542d0f67b102f172a499df2
21,180
use super::pins::pin_header::PinHeader; use super::timesets::timeset::Timeset; use crate::pins::vec_to_ppin_ids; use origen::core::tester::TesterSource; use origen::error::Error; use origen::testers::SupportedTester; use origen::{Operation, STATUS, TEST}; use pyo3::prelude::*; use pyo3::types::{PyAny, PyDict, PyTuple}; use std::collections::HashMap; #[pymodule] pub fn tester(_py: Python, m: &PyModule) -> PyResult<()> { m.add_class::<PyTester>()?; Ok(()) } #[pyclass(subclass)] #[derive(Debug)] /// Python interface for the tester backend. pub struct PyTester { python_testers: HashMap<SupportedTester, PyObject>, instantiated_testers: HashMap<SupportedTester, PyObject>, metadata: Vec<PyObject>, } #[pymethods] impl PyTester { #[new] fn new() -> PyResult<Self> { origen::tester().init()?; Ok(PyTester { python_testers: HashMap::new(), instantiated_testers: HashMap::new(), metadata: vec![], }) } fn _start_eq_block(&self, testers: Vec<&str>) -> PyResult<(usize, usize, Vec<String>)> { let mut ts: Vec<SupportedTester> = vec![]; let mut clean_testers: Vec<String> = vec![]; for t in testers { let st = SupportedTester::new(t)?; clean_testers.push(st.to_string()); ts.push(st); } let refs = origen::tester().start_tester_eq_block(ts)?; Ok((refs.0, refs.1, clean_testers)) } fn _end_eq_block(&self, pat_ref_id: usize, prog_ref_id: usize) -> PyResult<()> { origen::tester().end_tester_eq_block(pat_ref_id, prog_ref_id)?; Ok(()) } fn _start_neq_block(&self, testers: Vec<&str>) -> PyResult<(usize, usize, Vec<String>)> { let mut ts: Vec<SupportedTester> = vec![]; let mut clean_testers: Vec<String> = vec![]; for t in testers { let st = SupportedTester::new(t)?; clean_testers.push(st.to_string()); ts.push(st); } let refs = origen::tester().start_tester_neq_block(ts)?; Ok((refs.0, refs.1, clean_testers)) } fn _end_neq_block(&self, pat_ref_id: usize, prog_ref_id: usize) -> PyResult<()> { origen::tester().end_tester_neq_block(pat_ref_id, prog_ref_id)?; Ok(()) } /// Prints out the AST for the 
current flow to the console (for debugging) #[getter] fn ast(&self) -> PyResult<()> { if Operation::GenerateFlow == STATUS.operation() { println!("{}", origen::FLOW.to_string()); } else { println!("{}", origen::TEST.to_string()); } Ok(()) } /// Write out the AST to the given file (for debugging) fn ast_to_file(&self, file: &str) -> PyResult<()> { let contents = { if Operation::GenerateFlow == STATUS.operation() { origen::FLOW.to_string() } else { origen::TEST.to_string() } }; std::fs::write(file, contents)?; Ok(()) } /// This resets the tester, clearing all loaded targets and any other state, making /// it ready for a fresh target load. /// This should only be called from Python code for testing, it will be called automatically /// by Origen before loading targets. fn reset(_self: PyRef<Self>) -> PyResult<()> { Ok(origen::tester().reset()?) } /// This is called by Origen at the start of a generate command, it should never be called by /// application code fn _prepare_for_generate(&self) -> PyResult<()> { origen::tester().prepare_for_generate()?; Ok(()) } fn _stats(&self) -> PyResult<Vec<u8>> { Ok(origen::tester().stats.to_pickle()) } #[getter] /// Property for the current :class:`_origen.dut.timesets.Timeset` or None, if no timeset has been set. /// Set to ``None`` to clear the current timeset. 
/// /// Returns: /// :class:`_origen.dut.timesets.Timeset` or ``None`` /// /// >>> # Initially no timeset has been set /// >>> origen.tester.timeset /// None /// >>> origen.tester.timeset = origen.dut.timesets.Timeset['my_timeset'] /// origen.dut.timesets.Timeset['my_timeset'] /// >>> origen.tester.timeset /// origen.dut.timesets.Timeset['my_timeset'] /// >>> # Clear the current timeset /// >>> origen.tester.timeset = None /// None /// >>> origen.tester.timeset /// None /// /// See Also /// -------- /// * :meth:`set_timeset` /// * :class:`_origen.dut.timesets.Timeset` /// * :ref:`Timing <guides/testers/timing:Timing>` fn get_timeset(&self) -> PyResult<PyObject> { let tester = origen::tester(); let dut = origen::dut(); let gil = Python::acquire_gil(); let py = gil.python(); if let Some(t) = tester.get_timeset(&dut) { Ok(Py::new( py, Timeset { name: t.name.clone(), model_id: t.model_id, }, ) .unwrap() .to_object(py)) } else { Ok(py.None()) } } #[setter] // Note - do not add doc strings here. Add to get_timeset above. fn timeset(&self, timeset: &PyAny) -> PyResult<()> { let (model_id, timeset_name); // If the timeset is a string, assume its a timeset name on the DUT. // If not, it should be either None, to clear the timeset, // or a timeset object, in which case we'll look up the name and model ID and go from there. if let Ok(_timeset) = timeset.extract::<String>() { model_id = 0; timeset_name = _timeset; } else { if timeset.get_type().name()?.to_string() == "NoneType" { { let mut tester = origen::TESTER.lock().unwrap(); tester.clear_timeset()?; } self.issue_callbacks("clear_timeset")?; return Ok(()); } else if timeset.get_type().name()?.to_string() == "Timeset" { let gil = Python::acquire_gil(); let py = gil.python(); let obj = timeset.to_object(py); model_id = obj .getattr(py, "__origen__model_id__")? 
.extract::<usize>(py)?; timeset_name = obj.getattr(py, "name")?.extract::<String>(py)?; } else { return type_error!(format!("Could not interpret 'timeset' argument as String or _origen.dut.timesets.Timeset object! (class '{}')", timeset.get_type().name()?)); } } { { let mut tester = origen::TESTER.lock().unwrap(); let dut = origen::DUT.lock().unwrap(); tester.set_timeset(&dut, model_id, &timeset_name)?; } self.issue_callbacks("set_timeset")?; } Ok(()) } /// set_timeset(timeset) /// /// Sets the timeset. /// /// >>> origen.tester.set_timeset(origen.dut.timesets['my_timeset']) /// origen.tester.timesets['my_timeset'] /// /// Parameters: /// timeset (_origen.dut.timesets.Timeset, None): Timeset to set as current, or ``None`` to clear /// /// See Also /// -------- /// * :meth:`timeset` /// * :class:`_origen.dut.timesets.Timeset` /// * :ref:`Timing <guides/testers/timing:Timing>` fn set_timeset(&self, timeset: &PyAny) -> PyResult<PyObject> { self.timeset(timeset)?; self.get_timeset() } #[getter] fn get_pin_header(&self) -> PyResult<PyObject> { let tester = origen::tester(); let dut = origen::dut(); let gil = Python::acquire_gil(); let py = gil.python(); if let Some(header) = tester.get_pin_header(&dut) { Ok(Py::new( py, PinHeader { name: header.name.clone(), model_id: header.model_id, }, ) .unwrap() .to_object(py)) } else { Ok(py.None()) } } #[setter] fn pin_header(&self, pin_header: &PyAny) -> PyResult<()> { let (model_id, pin_header_name); if pin_header.get_type().name()?.to_string() == "NoneType" { { let mut tester = origen::TESTER.lock().unwrap(); tester.clear_pin_header()?; } self.issue_callbacks("clear_pin_header")?; return Ok(()); } else if pin_header.get_type().name()?.to_string() == "PinHeader" { let gil = Python::acquire_gil(); let py = gil.python(); let obj = pin_header.to_object(py); model_id = obj .getattr(py, "__origen__model_id__")? 
.extract::<usize>(py)?; pin_header_name = obj.getattr(py, "name")?.extract::<String>(py)?; } else { return type_error!(format!("Could not interpret 'pin_header' argument as _origen.dut.Pins.PinHeader object! (class '{}')", pin_header.get_type().name()?)); } { { let mut tester = origen::TESTER.lock().unwrap(); let dut = origen::DUT.lock().unwrap(); tester.set_pin_header(&dut, model_id, &pin_header_name)?; } self.issue_callbacks("set_pin_header")?; } Ok(()) } fn set_pin_header(&self, pin_header: &PyAny) -> PyResult<PyObject> { self.pin_header(pin_header)?; self.get_pin_header() } /// cc(comment: str) -> self /// /// Inserts a single-line comment into the AST. /// /// >>> origen.tester.cc("my comment") /// <self> /// >>> origen.tester.cc("my first comment").cc("my second comment") /// <self> /// /// See Also /// -------- /// * {{ link_to('prog-gen:comments', 'Commenting pattern source') }} /// * {{ link_to('pat-gen:comments', 'Commenting program source') }} fn cc(slf: PyRef<Self>, comment: &str) -> PyResult<Py<Self>> { { let mut tester = origen::tester(); tester.cc(&comment)?; } slf.issue_callbacks("cc")?; Ok(slf.into()) } #[pyo3(text_signature = "($self, header_comments)")] pub fn generate_pattern_header(&self, header_comments: &PyDict) -> PyResult<()> { let tester = origen::tester(); Ok(tester.generate_pattern_header( match header_comments.get_item("app") { Some(comments) => Some(comments.extract::<Vec<String>>()?), None => None, }, match header_comments.get_item("pattern") { Some(comments) => Some(comments.extract::<Vec<String>>()?), None => None, }, )?) } fn end_pattern(&self) -> PyResult<()> { let tester = origen::tester(); Ok(tester.end_pattern()?) 
} fn issue_callbacks(&self, func: &str) -> PyResult<()> { // Get the current targeted testers let targets; { let tester = origen::tester(); targets = tester.targets().clone(); } // issue callbacks in the order which they were targeted for (i, t) in targets.iter().enumerate() { match t { TesterSource::External(g) => { // External testers which the backend can't render itself. Need to render them here. match self.instantiated_testers.get(g) { Some(inst) => { // The tester here is a PyObject - a handle on the class itself. // Instantiate it and call its render method with the AST. let gil = Python::acquire_gil(); let py = gil.python(); let last_node = TEST.get(0).unwrap().to_pickle(); let args = PyTuple::new(py, &[func.to_object(py), last_node.to_object(py)]); // The issue callback function is located in origen.generator.tester_api.TesterAPI // Easier to handle the actual calls there and since its all happening in the Python domain, doesn't really matter // whether it happens here or there. 
inst.call_method1(py, "__origen__issue_callback__", args)?; } None => { return Err(PyErr::from(Error::new(&format!( "Something's gone wrong and Python tester {} cannot be found!", g )))) } } } _ => { let mut tester = origen::tester(); tester.issue_callback_at(i)?; } } } Ok(()) } /// cycle(**kwargs) -> self #[args(kwargs = "**")] fn cycle(slf: PyRef<Self>, kwargs: Option<&PyDict>) -> PyResult<Py<Self>> { { let mut tester = origen::tester(); let mut repeat = None; if let Some(_kwargs) = kwargs { if let Some(_kwarg) = _kwargs.get_item("repeat") { repeat = Some(_kwarg.extract::<usize>()?); } } tester.cycle(repeat)?; } slf.issue_callbacks("cycle")?; Ok(slf.into()) } fn repeat(slf: PyRef<Self>, count: usize) -> PyResult<Py<Self>> { let gil = Python::acquire_gil(); let py = gil.python(); let kwargs = PyDict::new(py); kwargs.set_item("repeat", count)?; Self::cycle(slf, Some(&kwargs)) } #[args( label = "None", symbol = "None", cycles = "None", mask = "None", pins = "None" )] fn overlay( slf: PyRef<Self>, label: Option<String>, symbol: Option<String>, pins: Option<Vec<&PyAny>>, cycles: Option<usize>, mask: Option<num_bigint::BigUint>, ) -> PyResult<Py<Self>> { let pin_ids; { if let Some(p) = pins { crate::dut::PyDUT::ensure_pins("dut")?; let dut = origen::dut(); pin_ids = Some(vec_to_ppin_ids(&dut, p)?); } else { pin_ids = None } } { let tester = origen::tester(); tester.overlay(&origen::Overlay::new(label, symbol, cycles, mask, pin_ids)?)?; } slf.issue_callbacks("overlay")?; Ok(slf.into()) } #[args(symbol = "None", cycles = "None", mask = "None", pins = "None")] fn capture( slf: PyRef<Self>, symbol: Option<String>, cycles: Option<usize>, mask: Option<num_bigint::BigUint>, pins: Option<Vec<&PyAny>>, ) -> PyResult<Py<Self>> { let pin_ids; { if let Some(p) = pins { crate::dut::PyDUT::ensure_pins("dut")?; let dut = origen::dut(); pin_ids = Some(vec_to_ppin_ids(&dut, p)?); } else { pin_ids = None } } { let tester = origen::tester(); tester.capture(&origen::Capture::new(symbol, 
cycles, mask, pin_ids)?)?; } slf.issue_callbacks("capture")?; Ok(slf.into()) } fn register_tester(&mut self, g: &PyAny) -> PyResult<()> { let mut tester = origen::tester(); let gil = Python::acquire_gil(); let py = gil.python(); let obj = g.to_object(py); let mut n = obj.getattr(py, "__module__")?.extract::<String>(py)?; n.push_str(&format!( ".{}", obj.getattr(py, "__qualname__")?.extract::<String>(py)? )); let t_id = tester.register_external_tester(&n)?; self.python_testers.insert(t_id, obj); Ok(()) } #[args(testers = "*")] fn target(&mut self, testers: &PyTuple) -> PyResult<Vec<String>> { if testers.len() > 0 { let mut tester = origen::tester(); for g in testers.iter() { // Accept either a string name or the actual class of the tester if let Ok(name) = g.extract::<String>() { tester.target(SupportedTester::new(&name)?)?; } else { let gil = Python::acquire_gil(); let py = gil.python(); let obj = g.to_object(py); let mut n = obj.getattr(py, "__module__")?.extract::<String>(py)?; n.push_str(&format!( ".{}", obj.getattr(py, "__qualname__")?.extract::<String>(py)? )); // Assume a tester loaded via a class is a custom tester let t = tester.target(SupportedTester::new(&format!("CUSTOM::{}", n))?)?; match t { TesterSource::External(gen) => { let klass = self.python_testers.get(gen).unwrap(); let inst = klass.call0(py)?; self.instantiated_testers.insert(gen.to_owned(), inst); } _ => {} } } } } self.targets() } #[getter] fn targets(&self) -> PyResult<Vec<String>> { let tester = origen::tester(); Ok(tester.targets_as_strs().clone()) } /// Attempts to render the pattern on all targeted testers and returns paths to the /// output files that have been created. /// There is no need for the Python side to do anything with those, but they are returned /// in case they are useful in future. /// Continue on fail means that any errors will be logged but Origen will continue, if false /// it will blow up and immediately return an error to Python. 
#[args(continue_on_fail = false)] fn render_pattern(&self, continue_on_fail: bool) -> PyResult<Vec<String>> { if origen::LOGGER.has_keyword("show_unprocessed_ast") { origen::LOGGER.info("Showing Unprocessed AST"); origen::LOGGER.info(&format!("{:?}", origen::TEST)); } let mut rendered_patterns: Vec<String> = vec![]; let targets; { let tester = origen::tester(); targets = tester.targets().clone(); } for (i, t) in targets.iter().enumerate() { match t { TesterSource::External(g) => { // External testers which the backend can't render itself. Need to render them here. match self.instantiated_testers.get(g) { Some(inst) => { // The tester here is a PyObject - a handle on the class itself. // Instantiate it and call its render method with the AST. let gil = Python::acquire_gil(); let py = gil.python(); let _pat = inst.call_method0(py, "render_pattern")?; // TODO - How do we convert this to a path to do the diffing? } None => { // Don't bother masking this type of error, this should be fatal let msg = format!( "Something's gone wrong and Python tester {} cannot be found!", g ); return Err(PyErr::from(Error::new(&msg))); } } } _ => { let mut tester = origen::tester(); let pat = tester.render_pattern_for_target_at(i, true); match pat { Err(e) => { let msg = e.to_string(); if continue_on_fail { STATUS.inc_unhandled_error_count(); log_error!("{}", &msg); } else { return Err(PyErr::from(Error::new(&msg))); } } Ok(paths) => { for path in &paths { rendered_patterns.push(format!("{}", path.display())); } } } } } } Ok(rendered_patterns) } #[getter] fn testers(&self) -> PyResult<Vec<String>> { Ok(SupportedTester::all_names()) } }
36.205128
177
0.493012
716c2cc39e5910acadceca32e536e92a4f108cf5
2,226
pub fn default_generic_type_parameter_and_operator_overloading() { use std::ops::Add; #[derive(Debug, PartialEq)] struct Point { x: i32, y: i32, } impl Add for Point { type Output = Point; fn add(self, other: Point) -> Point { Point { x: self.x + other.x, y: self.y + other.y, } } } struct Millimeters(u32); struct Meters(u32); impl Add<Meters> for Millimeters { type Output = Millimeters; fn add(self, other: Meters) -> Millimeters { Millimeters(self.0 + (other.0 * 1000)) } } assert_eq!( Point { x: 1, y: 0 } + Point { x: 2, y: 3 }, Point { x: 3, y: 3 } ); } pub fn use_fully_qualified_syntax() { trait Animal { fn baby_name() -> String; } struct Dog; impl Dog { fn baby_name() -> String { String::from("Spot") } } impl Animal for Dog { fn baby_name() -> String { String::from("puppy") } } println!("A baby dog is called a {}", Dog::baby_name()); // error[E0283]: type annotations required: cannot resolve `_: Animal` // println!("A baby dog is called a {}", Animal::baby_name()); println!("A baby dog is called a {}", <Dog as Animal>::baby_name()); } pub fn use_super_trait() { use std::fmt; trait OutlinePrint: fmt::Display { fn outline_print(&self) { let output = self.to_string(); let len = output.len(); println!("{}", "*".repeat(len + 4)); println!("*{}*", " ".repeat(len + 2)); println!("* {} *", output); println!("*{}*", " ".repeat(len + 2)); println!("{}", "*".repeat(len + 4)); } } struct Point { x: i32, y: i32, } impl OutlinePrint for Point {} impl fmt::Display for Point { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "({}, {})", self.x, self.y) } } let p = Point { x: 0, y: 12 }; p.outline_print(); } pub fn use_newtype_pattern() { use std::fmt; struct Wrapper(Vec<String>); impl fmt::Display for Wrapper { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{}]", self.0.join(", ")) } } let w = Wrapper(vec![String::from("hello"), String::from("world")]); println!("w = {}", w); }
20.422018
72
0.544924
189027b86bb4cce1843e9c4fbd530a0df1657e29
7,312
use crate::command_args::CommandArgs; use crate::documentation::get_full_help; use crate::evaluate::block::run_block; use crate::evaluation_context::EvaluationContext; use crate::example::Example; use nu_errors::ShellError; use nu_parser::ParserScope; use nu_protocol::hir::Block; use nu_protocol::{ReturnSuccess, Signature, UntaggedValue}; use nu_source::{DbgDocBldr, DebugDocBuilder, PrettyDebugWithSource, Span, Tag}; use nu_stream::{OutputStream, ToOutputStream}; use std::sync::Arc; pub trait WholeStreamCommand: Send + Sync { fn name(&self) -> &str; fn signature(&self) -> Signature { Signature::new(self.name()).desc(self.usage()).filter() } fn usage(&self) -> &str; fn extra_usage(&self) -> &str { "" } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError>; fn is_binary(&self) -> bool { false } // Commands that are not meant to be run by users fn is_internal(&self) -> bool { false } fn examples(&self) -> Vec<Example> { Vec::new() } } // Custom commands are blocks, so we can use the information in the block to also // implement a WholeStreamCommand #[allow(clippy::suspicious_else_formatting)] impl WholeStreamCommand for Block { fn name(&self) -> &str { &self.params.name } fn signature(&self) -> Signature { self.params.clone() } fn usage(&self) -> &str { &self.params.usage } fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { let call_info = args.call_info.clone(); let mut block = self.clone(); block.set_redirect(call_info.args.external_redirection); let ctx = EvaluationContext::from_args(&args); let evaluated = call_info.evaluate(&ctx)?; let input = args.input; ctx.scope.enter_scope(); if let Some(args) = evaluated.args.positional { let mut args_iter = args.into_iter().peekable(); let mut params_iter = self.params.positional.iter(); loop { match (args_iter.peek(), params_iter.next()) { (Some(_), Some(param)) => { let name = param.0.name(); // we just checked the peek above, so this should be infallible if let Some(arg) = 
args_iter.next() { if name.starts_with('$') { ctx.scope.add_var(name.to_string(), arg); } else { ctx.scope.add_var(format!("${}", name), arg); } } } (Some(arg), None) => { if block.params.rest_positional.is_none() { ctx.scope.exit_scope(); return Err(ShellError::labeled_error( "Unexpected argument to command", "unexpected argument", arg.tag.span, )); } else { break; } } _ => break, } } if block.params.rest_positional.is_some() { let elements: Vec<_> = args_iter.collect(); let start = if let Some(first) = elements.first() { first.tag.span.start() } else { 0 }; let end = if let Some(last) = elements.last() { last.tag.span.end() } else { 0 }; ctx.scope.add_var( "$rest", UntaggedValue::Table(elements).into_value(Span::new(start, end)), ); } } if let Some(args) = evaluated.args.named { for named in &block.params.named { let name = named.0; if let Some(value) = args.get(name) { if name.starts_with('$') { ctx.scope.add_var(name, value.clone()); } else { ctx.scope.add_var(format!("${}", name), value.clone()); } } else if name.starts_with('$') { ctx.scope .add_var(name, UntaggedValue::nothing().into_untagged_value()); } else { ctx.scope.add_var( format!("${}", name), UntaggedValue::nothing().into_untagged_value(), ); } } } else { for named in &block.params.named { let name = named.0; if name.starts_with('$') { ctx.scope .add_var(name, UntaggedValue::nothing().into_untagged_value()); } else { ctx.scope.add_var( format!("${}", name), UntaggedValue::nothing().into_untagged_value(), ); } } } let result = run_block(&block, &ctx, input); ctx.scope.exit_scope(); result.map(|x| x.to_output_stream()) } fn is_binary(&self) -> bool { false } fn is_internal(&self) -> bool { false } fn examples(&self) -> Vec<Example> { vec![] } } #[derive(Clone)] pub struct Command(Arc<dyn WholeStreamCommand>); impl PrettyDebugWithSource for Command { fn pretty_debug(&self, source: &str) -> DebugDocBuilder { DbgDocBldr::typed( "whole stream command", DbgDocBldr::description(self.name()) + 
DbgDocBldr::space() + DbgDocBldr::equals() + DbgDocBldr::space() + self.signature().pretty_debug(source), ) } } impl std::fmt::Debug for Command { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "Command({})", self.name()) } } impl Command { pub fn name(&self) -> &str { self.0.name() } pub fn signature(&self) -> Signature { self.0.signature() } pub fn usage(&self) -> &str { self.0.usage() } pub fn examples(&self) -> Vec<Example> { self.0.examples() } pub fn run(&self, args: CommandArgs) -> Result<OutputStream, ShellError> { if args.call_info.switch_present("help") { let cl = self.0.clone(); Ok(OutputStream::one(Ok(ReturnSuccess::Value( UntaggedValue::string(get_full_help(&*cl, &args.scope)).into_value(Tag::unknown()), )))) } else { self.0.run(args) } } pub fn is_binary(&self) -> bool { self.0.is_binary() } pub fn is_internal(&self) -> bool { self.0.is_internal() } pub fn stream_command(&self) -> &dyn WholeStreamCommand { &*self.0 } } pub fn whole_stream_command(command: impl WholeStreamCommand + 'static) -> Command { Command(Arc::new(command)) }
30.722689
99
0.485914
e909114101602ef88b6d2fa83e6c904f1346f2a6
2,764
use std::sync::Arc; use std::{fs, path}; use crate::tfpb::graph::GraphDef; use tract_core::model::{InletId, Model, OutletId}; use tract_core::{ToTract, TractResult, Tractify}; /// Load a SharedTensor protobul model from a file. pub fn for_path<P: AsRef<path::Path>>(p: P) -> TractResult<Model> { for_reader(fs::File::open(p)?) } /// Load a Tract model from a reader. pub fn for_reader<R: ::std::io::Read>(r: R) -> TractResult<Model> { graphdef_for_reader(r)?.tractify() } /// Load a SharedTensor protobuf graph def from a reader. pub fn graphdef_for_reader<R: ::std::io::Read>(mut r: R) -> TractResult<GraphDef> { Ok(::protobuf::parse_from_reader::<GraphDef>(&mut r).map_err(|e| format!("{:?}", e))?) } /// Load a SharedTensor protobuf graph def from a path pub fn graphdef_for_path<P: AsRef<path::Path>>(p: P) -> TractResult<GraphDef> { graphdef_for_reader(fs::File::open(p)?) } pub fn optimize(model: Model) -> TractResult<Model> { let model = model.into_optimized()?; model.into_optimized() } impl Tractify<GraphDef> for Model { fn tractify(graph: &GraphDef) -> TractResult<Model> { let mut model = Model::default().with_context(Arc::new(crate::optim::TensorflowContext)); let op_builder = crate::ops::OpBuilder::new(); for pbnode in graph.get_node().iter() { let name = pbnode.get_name().to_string(); let node_id = model.add_node( name.clone(), op_builder .build(pbnode) .map_err(|e| format!("While building node {}, {}", name, e.description()))?, )?; // From the node_def.proto documentation: // Each input is "node:src_output" with "node" being a string name and // "src_output" indicating which output tensor to use from "node". If // "src_output" is 0 the ":0" suffix can be omitted. Regular inputs may // optionally be followed by control inputs that have the format "^node". 
for (ix, i) in pbnode.get_input().iter().enumerate() { let input: (&str, usize) = if i.starts_with("^") { (&i[1..], 0) } else { let splits: Vec<_> = i.splitn(2, ':').collect(); ( splits[0], if splits.len() > 1 { splits[1].parse::<usize>()? } else { 0 }, ) }; let prec = model.node_by_name(input.0)?.id; model.add_edge(OutletId::new(prec, input.1), InletId::new(node_id, ix))?; } } Ok(model) } }
38.388889
97
0.543777
71fb47b545e0b77b626085d9db8a8fc5228c9dcc
8,489
#![deny(missing_docs)] //! Types subcrate for kitsune-p2p. /// Re-exported dependencies. pub mod dependencies { pub use ::futures; pub use ::ghost_actor; pub use ::lair_keystore_api; pub use ::lair_keystore_api_0_0; pub use ::observability; pub use ::paste; pub use ::rustls; pub use ::serde; pub use ::serde_json; pub use ::thiserror; pub use ::tokio; pub use ::url2; } /// Typedef for result of `proc_count_now()`. /// This value is on the scale of microseconds. pub type ProcCountMicros = i64; /// Monotonically nondecreasing process tick count, backed by tokio::time::Instant /// as an i64 to facilitate reference times that may be less than the first /// call to this function. /// The returned value is on the scale of microseconds. pub fn proc_count_now_us() -> ProcCountMicros { use once_cell::sync::Lazy; use tokio::time::Instant; static PROC_COUNT: Lazy<Instant> = Lazy::new(Instant::now); let r = *PROC_COUNT; Instant::now().saturating_duration_since(r).as_micros() as i64 } /// Get the elapsed process count duration from a captured `ProcCount` to now. /// If the duration would be negative, this fn returns a zero Duration. pub fn proc_count_us_elapsed(pc: ProcCountMicros) -> std::time::Duration { let dur = proc_count_now_us() - pc; let dur = if dur < 0 { 0 } else { dur as u64 }; std::time::Duration::from_micros(dur) } /// Helper function for the common case of returning this nested Unit type. pub fn unit_ok_fut<E1, E2>() -> Result<MustBoxFuture<'static, Result<(), E2>>, E1> { use futures::FutureExt; Ok(async move { Ok(()) }.boxed().into()) } /// Helper function for the common case of returning this boxed future type. pub fn ok_fut<E1, R: Send + 'static>(result: R) -> Result<MustBoxFuture<'static, R>, E1> { use futures::FutureExt; Ok(async move { result }.boxed().into()) } /// Helper function for the common case of returning this boxed future type. 
pub fn box_fut<'a, R: Send + 'a>(result: R) -> MustBoxFuture<'a, R> { use futures::FutureExt; async move { result }.boxed().into() } use ::ghost_actor::dependencies::tracing; use ghost_actor::dependencies::must_future::MustBoxFuture; pub use ::lair_keystore_api_0_0::actor::CertDigest; /// Wrapper around CertDigest that provides some additional debugging helpers. #[derive(Clone)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] pub struct Tx2Cert(pub Arc<(CertDigest, String, String)>); impl Tx2Cert { /// get the tls cert digest pub fn as_digest(&self) -> &CertDigest { self.as_ref() } /// get the cert bytes pub fn as_bytes(&self) -> &[u8] { self.as_ref() } /// get the base64 representation pub fn as_str(&self) -> &str { self.as_ref() } /// get the base64 nickname pub fn as_nick(&self) -> &str { &self.0 .2 } } impl std::fmt::Debug for Tx2Cert { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("Cert(")?; f.write_str(self.as_nick())?; f.write_str(")")?; Ok(()) } } impl PartialEq for Tx2Cert { fn eq(&self, oth: &Self) -> bool { self.0 .0.eq(&oth.0 .0) } } impl Eq for Tx2Cert {} impl PartialOrd for Tx2Cert { fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.0 .0.partial_cmp(&other.0 .0) } } impl Ord for Tx2Cert { fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.0 .0.cmp(&other.0 .0) } } impl std::hash::Hash for Tx2Cert { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.0 .0.hash(state); } } impl std::ops::Deref for Tx2Cert { type Target = CertDigest; fn deref(&self) -> &Self::Target { &self.0 .0 } } impl std::convert::AsRef<CertDigest> for Tx2Cert { fn as_ref(&self) -> &CertDigest { std::ops::Deref::deref(self) } } impl std::convert::AsRef<[u8]> for Tx2Cert { fn as_ref(&self) -> &[u8] { &self.0 .0 } } impl std::convert::AsRef<str> for Tx2Cert { fn as_ref(&self) -> &str { &self.0 .1 } } impl From<Vec<u8>> for Tx2Cert { fn from(v: Vec<u8>) -> Self { let d: CertDigest = v.into(); 
d.into() } } impl From<Arc<Vec<u8>>> for Tx2Cert { fn from(v: Arc<Vec<u8>>) -> Self { let d: CertDigest = v.into(); d.into() } } impl From<CertDigest> for Tx2Cert { fn from(c: CertDigest) -> Self { let b64 = base64::encode_config(&**c, base64::URL_SAFE_NO_PAD); let nick = { let (start, _) = b64.split_at(6); let (_, end) = b64.split_at(b64.len() - 6); format!("{}..{}", start, end) }; Self(Arc::new((c, b64, nick))) } } impl From<&CertDigest> for Tx2Cert { fn from(c: &CertDigest) -> Self { c.clone().into() } } impl From<Tx2Cert> for CertDigest { fn from(d: Tx2Cert) -> Self { d.0 .0.clone() } } impl From<&Tx2Cert> for CertDigest { fn from(d: &Tx2Cert) -> Self { d.0 .0.clone() } } use config::KitsuneP2pTuningParams; use std::sync::Arc; /// Error related to remote communication. #[derive(Debug, thiserror::Error)] #[non_exhaustive] pub enum KitsuneErrorKind { /// Temp error type for internal logic. #[error("Unit")] Unit, /// The operation timed out. #[error("Operation timed out")] TimedOut, /// This object is closed, calls on it are invalid. #[error("This object is closed, calls on it are invalid.")] Closed, /// Unspecified error. #[error(transparent)] Other(Box<dyn std::error::Error + Send + Sync>), } impl PartialEq for KitsuneErrorKind { fn eq(&self, oth: &Self) -> bool { match self { Self::TimedOut => { if let Self::TimedOut = oth { return true; } } Self::Closed => { if let Self::Closed = oth { return true; } } _ => (), } false } } /// Error related to remote communication. 
#[derive(Clone, Debug)] pub struct KitsuneError(pub Arc<KitsuneErrorKind>); impl std::fmt::Display for KitsuneError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } impl std::error::Error for KitsuneError {} impl KitsuneError { /// the "kind" of this KitsuneError pub fn kind(&self) -> &KitsuneErrorKind { &self.0 } /// promote a custom error type to a KitsuneError pub fn other(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> Self { Self(Arc::new(KitsuneErrorKind::Other(e.into()))) } } impl From<KitsuneErrorKind> for KitsuneError { fn from(k: KitsuneErrorKind) -> Self { Self(Arc::new(k)) } } impl From<String> for KitsuneError { fn from(s: String) -> Self { #[derive(Debug, thiserror::Error)] struct OtherError(String); impl std::fmt::Display for OtherError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.0) } } KitsuneError::other(OtherError(s)) } } impl From<&str> for KitsuneError { fn from(s: &str) -> Self { s.to_string().into() } } impl From<KitsuneError> for () { fn from(_: KitsuneError) {} } impl From<()> for KitsuneError { fn from(_: ()) -> Self { KitsuneErrorKind::Unit.into() } } /// Result type for remote communication. 
pub type KitsuneResult<T> = Result<T, KitsuneError>; mod timeout; pub use timeout::*; pub mod agent_info; pub mod async_lazy; mod auto_stream_select; pub use auto_stream_select::*; pub mod bin_types; pub mod bootstrap; pub mod codec; pub mod combinators; pub mod config; pub mod consistency; pub mod metrics; pub mod reverse_semaphore; pub mod task_agg; pub mod tls; pub mod tx2; pub use kitsune_p2p_dht_arc as dht_arc; use metrics::metric_task; #[cfg(test)] mod tests { use super::*; #[tokio::test(flavor = "multi_thread")] async fn test_tx2_digest() { let d: Tx2Cert = vec![0xdb; 32].into(); println!("raw_debug: {:?}", d); println!("as_digest: {:?}", d.as_digest()); println!("as_bytes: {:?}", d.as_bytes()); println!("as_str: {:?}", d.as_str()); println!("as_nick: {:?}", d.as_nick()); } }
24.821637
90
0.595123
b9669dce6d73cdccbaf84c8f39daf9c11837ff06
1,812
/* * DMNTK - Decision Model and Notation Toolkit * * MIT license * * Copyright (c) 2018-2022 Dariusz Depta Engos Software * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * * Apache license, Version 2.0 * * Copyright (c) 2018-2022 Dariusz Depta Engos Software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use super::super::*; use crate::model_evaluator::ModelEvaluator; use std::sync::Arc; lazy_static! { static ref MODEL_EVALUATOR: Arc<ModelEvaluator> = build_model_evaluator(dmntk_examples::DMN_2_0001); } #[test] fn _0001() { let ctx = context(r#"{Full Name: "John Doe"}"#); assert_decision(&MODEL_EVALUATOR, "Greeting Message", &ctx, r#""Hello John Doe""#); } #[test] fn _0002() { let ctx = context(r#"{"Full Name": "George Gershwin"}"#); assert_decision(&MODEL_EVALUATOR, "Greeting Message", &ctx, r#""Hello George Gershwin""#); }
34.846154
102
0.727373
e86aa9a9c21ee3b4104c7838474ce90aae78b6ae
38,377
use crate::{ create_terrain_layer_material, make_save_file_selector, make_scene_file_filter, scene::{ commands::{graph::AddNodeCommand, sound::AddSoundSourceCommand, PasteCommand}, EditorScene, Selection, }, send_sync_message, settings::{Settings, SettingsWindow}, GameEngine, Message, }; use rg3d::gui::message::UiMessage; use rg3d::gui::{UiNode, UserInterface}; use rg3d::{ core::{ algebra::{Matrix4, Vector2}, pool::Handle, scope_profile, }, gui::{ file_browser::FileSelectorBuilder, menu::{MenuBuilder, MenuItemBuilder, MenuItemContent}, message::{ FileSelectorMessage, MenuItemMessage, MessageBoxMessage, MessageDirection, UiMessageData, WidgetMessage, WindowMessage, }, messagebox::{MessageBoxBuilder, MessageBoxButtons}, widget::WidgetBuilder, window::{WindowBuilder, WindowTitle}, Thickness, }, scene::{ base::BaseBuilder, camera::CameraBuilder, decal::DecalBuilder, light::{ directional::DirectionalLightBuilder, point::PointLightBuilder, spot::SpotLightBuilder, BaseLightBuilder, }, mesh::{ surface::{Surface, SurfaceData}, Mesh, MeshBuilder, }, node::Node, particle_system::{ emitter::base::BaseEmitterBuilder, emitter::sphere::SphereEmitterBuilder, ParticleSystemBuilder, }, sprite::SpriteBuilder, terrain::{LayerDefinition, TerrainBuilder}, }, sound::source::{generic::GenericSourceBuilder, spatial::SpatialSourceBuilder}, }; use std::sync::{mpsc::Sender, Arc, RwLock}; pub struct Menu { pub menu: Handle<UiNode>, new_scene: Handle<UiNode>, save: Handle<UiNode>, save_as: Handle<UiNode>, load: Handle<UiNode>, close_scene: Handle<UiNode>, undo: Handle<UiNode>, redo: Handle<UiNode>, copy: Handle<UiNode>, paste: Handle<UiNode>, create_pivot: Handle<UiNode>, create_cube: Handle<UiNode>, create_cone: Handle<UiNode>, create_sphere: Handle<UiNode>, create_cylinder: Handle<UiNode>, create_quad: Handle<UiNode>, create_decal: Handle<UiNode>, create_point_light: Handle<UiNode>, create_spot_light: Handle<UiNode>, create_directional_light: Handle<UiNode>, create_terrain: Handle<UiNode>, 
exit: Handle<UiNode>, message_sender: Sender<Message>, save_file_selector: Handle<UiNode>, load_file_selector: Handle<UiNode>, create_camera: Handle<UiNode>, create_sprite: Handle<UiNode>, create_particle_system: Handle<UiNode>, create_sound_source: Handle<UiNode>, create_spatial_sound_source: Handle<UiNode>, sidebar: Handle<UiNode>, world_outliner: Handle<UiNode>, asset_browser: Handle<UiNode>, open_settings: Handle<UiNode>, configure: Handle<UiNode>, light_panel: Handle<UiNode>, pub settings: SettingsWindow, configure_message: Handle<UiNode>, log_panel: Handle<UiNode>, create: Handle<UiNode>, edit: Handle<UiNode>, open_path_fixer: Handle<UiNode>, } pub struct MenuContext<'a, 'b> { pub engine: &'a mut GameEngine, pub editor_scene: Option<&'b mut EditorScene>, pub sidebar_window: Handle<UiNode>, pub world_outliner_window: Handle<UiNode>, pub asset_window: Handle<UiNode>, pub configurator_window: Handle<UiNode>, pub light_panel: Handle<UiNode>, pub log_panel: Handle<UiNode>, pub settings: &'b mut Settings, pub path_fixer: Handle<UiNode>, } fn switch_window_state(window: Handle<UiNode>, ui: &mut UserInterface, center: bool) { let current_state = ui.node(window).visibility(); ui.send_message(if current_state { WindowMessage::close(window, MessageDirection::ToWidget) } else { WindowMessage::open(window, MessageDirection::ToWidget, center) }) } impl Menu { pub fn new( engine: &mut GameEngine, message_sender: Sender<Message>, settings: &Settings, ) -> Self { let min_size = Vector2::new(120.0, 22.0); let new_scene; let save; let save_as; let close_scene; let load; let redo; let undo; let copy; let paste; let create_cube; let create_cone; let create_sphere; let create_cylinder; let create_quad; let create_point_light; let create_spot_light; let create_directional_light; let exit; let create_camera; let create_sprite; let create_decal; let create_particle_system; let create_terrain; let sidebar; let asset_browser; let world_outliner; let open_settings; let configure; let 
light_panel; let log_panel; let create_pivot; let create_sound_source; let create_spatial_sound_source; let open_path_fixer; let ctx = &mut engine.user_interface.build_ctx(); let configure_message = MessageBoxBuilder::new( WindowBuilder::new(WidgetBuilder::new().with_width(250.0).with_height(150.0)) .open(false) .with_title(WindowTitle::Text("Warning".to_owned())), ) .with_text("Cannot reconfigure editor while scene is open! Close scene first and retry.") .with_buttons(MessageBoxButtons::Ok) .build(ctx); let create = MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0))) .with_content(MenuItemContent::text_with_shortcut("Create", "")) .with_items(vec![ { create_pivot = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Pivot")) .build(ctx); create_pivot }, MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Mesh")) .with_items(vec![ { create_cube = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Cube")) .build(ctx); create_cube }, { create_sphere = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Sphere")) .build(ctx); create_sphere }, { create_cylinder = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Cylinder")) .build(ctx); create_cylinder }, { create_cone = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Cone")) .build(ctx); create_cone }, { create_quad = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Quad")) .build(ctx); create_quad }, ]) .build(ctx), MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Sound")) .with_items(vec![ { create_sound_source = 
MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("2D Source")) .build(ctx); create_sound_source }, { create_spatial_sound_source = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("3D Source")) .build(ctx); create_spatial_sound_source }, ]) .build(ctx), MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Light")) .with_items(vec![ { create_directional_light = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Directional Light")) .build(ctx); create_directional_light }, { create_spot_light = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Spot Light")) .build(ctx); create_spot_light }, { create_point_light = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Point Light")) .build(ctx); create_point_light }, ]) .build(ctx), { create_camera = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Camera")) .build(ctx); create_camera }, { create_sprite = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Sprite")) .build(ctx); create_sprite }, { create_particle_system = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Particle System")) .build(ctx); create_particle_system }, { create_terrain = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Terrain")) .build(ctx); create_terrain }, { create_decal = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Decal")) .build(ctx); create_decal }, ]) .build(ctx); let edit = MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0))) 
.with_content(MenuItemContent::text_with_shortcut("Edit", "")) .with_items(vec![ { undo = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut("Undo", "Ctrl+Z")) .build(ctx); undo }, { redo = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut("Redo", "Ctrl+Y")) .build(ctx); redo }, { copy = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut("Copy", "Ctrl+C")) .build(ctx); copy }, { paste = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut("Paste", "Ctrl+V")) .build(ctx); paste }, ]) .build(ctx); let menu = MenuBuilder::new(WidgetBuilder::new().on_row(0)) .with_items(vec![ MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0))) .with_content(MenuItemContent::text("File")) .with_items(vec![ { new_scene = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "New Scene", "Ctrl+N", )) .build(ctx); new_scene }, { save = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "Save Scene", "Ctrl+S", )) .build(ctx); save }, { save_as = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "Save Scene As...", "Ctrl+Shift+S", )) .build(ctx); save_as }, { load = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "Load Scene...", "Ctrl+L", )) .build(ctx); load }, { close_scene = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "Close Scene", "Ctrl+Q", )) .build(ctx); close_scene }, { open_settings = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) 
.with_content(MenuItemContent::text("Settings...")) .build(ctx); open_settings }, { configure = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Configure...")) .build(ctx); configure }, { exit = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text_with_shortcut( "Exit", "Alt+F4", )) .build(ctx); exit }, ]) .build(ctx), edit, create, MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0))) .with_content(MenuItemContent::text_with_shortcut("View", "")) .with_items(vec![ { sidebar = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Sidebar")) .build(ctx); sidebar }, { asset_browser = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Asset Browser")) .build(ctx); asset_browser }, { world_outliner = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("World Outliner")) .build(ctx); world_outliner }, { light_panel = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Light Panel")) .build(ctx); light_panel }, { log_panel = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Log Panel")) .build(ctx); log_panel }, ]) .build(ctx), MenuItemBuilder::new(WidgetBuilder::new().with_margin(Thickness::right(10.0))) .with_content(MenuItemContent::text_with_shortcut("Utils", "")) .with_items(vec![{ open_path_fixer = MenuItemBuilder::new(WidgetBuilder::new().with_min_size(min_size)) .with_content(MenuItemContent::text("Path Fixer")) .build(ctx); open_path_fixer }]) .build(ctx), ]) .build(ctx); let save_file_selector = make_save_file_selector(ctx); let load_file_selector = FileSelectorBuilder::new( WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0)) .open(false) 
.with_title(WindowTitle::Text("Select a Scene To Load".into())), ) .with_filter(make_scene_file_filter()) .build(ctx); Self { menu, new_scene, save, save_as, close_scene, load, undo, redo, create_cube, create_cone, create_sphere, create_cylinder, create_quad, create_point_light, create_spot_light, create_directional_light, exit, settings: SettingsWindow::new(engine, message_sender.clone(), settings), message_sender, save_file_selector, load_file_selector, create_camera, create_sprite, create_particle_system, sidebar, world_outliner, asset_browser, open_settings, configure, configure_message, light_panel, copy, paste, log_panel, create_pivot, create_terrain, create_sound_source, create_spatial_sound_source, create, edit, open_path_fixer, create_decal, } } pub fn open_load_file_selector(&self, ui: &mut UserInterface) { ui.send_message(WindowMessage::open_modal( self.load_file_selector, MessageDirection::ToWidget, true, )); ui.send_message(FileSelectorMessage::root( self.load_file_selector, MessageDirection::ToWidget, Some(std::env::current_dir().unwrap()), )); } pub fn sync_to_model(&mut self, editor_scene: Option<&EditorScene>, ui: &mut UserInterface) { scope_profile!(); for &widget in [ self.close_scene, self.save, self.save_as, self.create, self.edit, ] .iter() { send_sync_message( ui, WidgetMessage::enabled(widget, MessageDirection::ToWidget, editor_scene.is_some()), ); } } pub fn handle_ui_message(&mut self, message: &UiMessage, ctx: MenuContext) { scope_profile!(); if let Some(scene) = ctx.editor_scene.as_ref() { self.settings .handle_message(message, scene, ctx.engine, ctx.settings); } match message.data() { UiMessageData::FileSelector(FileSelectorMessage::Commit(path)) => { if message.destination() == self.save_file_selector { self.message_sender .send(Message::SaveScene(path.to_owned())) .unwrap(); } else if message.destination() == self.load_file_selector { self.message_sender .send(Message::LoadScene(path.to_owned())) .unwrap(); } } 
UiMessageData::MenuItem(MenuItemMessage::Click) => { if message.destination() == self.create_cube { let mut mesh = Mesh::default(); mesh.set_name("Cube"); mesh.add_surface(Surface::new(Arc::new(RwLock::new(SurfaceData::make_cube( Matrix4::identity(), ))))); let node = Node::Mesh(mesh); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_spot_light { let node = SpotLightBuilder::new(BaseLightBuilder::new( BaseBuilder::new().with_name("SpotLight"), )) .with_distance(10.0) .with_hotspot_cone_angle(45.0f32.to_radians()) .with_falloff_angle_delta(2.0f32.to_radians()) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_pivot { let node = BaseBuilder::new().with_name("Pivot").build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_point_light { let node = PointLightBuilder::new(BaseLightBuilder::new( BaseBuilder::new().with_name("PointLight"), )) .with_radius(10.0) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_directional_light { let node = DirectionalLightBuilder::new(BaseLightBuilder::new( BaseBuilder::new().with_name("DirectionalLight"), )) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_cone { let mesh = MeshBuilder::new(BaseBuilder::new().with_name("Cone")) .with_surfaces(vec![Surface::new(Arc::new(RwLock::new( SurfaceData::make_cone(16, 0.5, 1.0, &Matrix4::identity()), )))]) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(mesh))) .unwrap(); } else if message.destination() == self.create_cylinder { let mesh = 
MeshBuilder::new(BaseBuilder::new().with_name("Cylinder")) .with_surfaces(vec![Surface::new(Arc::new(RwLock::new( SurfaceData::make_cylinder(16, 0.5, 1.0, true, &Matrix4::identity()), )))]) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(mesh))) .unwrap(); } else if message.destination() == self.create_sphere { let mesh = MeshBuilder::new(BaseBuilder::new().with_name("Sphere")) .with_surfaces(vec![Surface::new(Arc::new(RwLock::new( SurfaceData::make_sphere(16, 16, 0.5, &Matrix4::identity()), )))]) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(mesh))) .unwrap(); } else if message.destination() == self.create_quad { let mesh = MeshBuilder::new(BaseBuilder::new().with_name("Quad")) .with_surfaces(vec![Surface::new(Arc::new(RwLock::new( SurfaceData::make_quad(&Matrix4::identity()), )))]) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(mesh))) .unwrap(); } else if message.destination() == self.create_camera { let node = CameraBuilder::new(BaseBuilder::new().with_name("Camera")) .enabled(false) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_sprite { let node = SpriteBuilder::new(BaseBuilder::new().with_name("Sprite")).build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_sound_source { let source = GenericSourceBuilder::new() .with_name("2D Source") .build_source() .unwrap(); self.message_sender .send(Message::do_scene_command(AddSoundSourceCommand::new( source, ))) .unwrap(); } else if message.destination() == self.create_spatial_sound_source { let source = SpatialSourceBuilder::new( GenericSourceBuilder::new() .with_name("3D Source") .build() .unwrap(), ) .build_source(); self.message_sender .send(Message::do_scene_command(AddSoundSourceCommand::new( 
source, ))) .unwrap(); } else if message.destination() == self.create_particle_system { let node = ParticleSystemBuilder::new(BaseBuilder::new().with_name("ParticleSystem")) .with_emitters(vec![SphereEmitterBuilder::new( BaseEmitterBuilder::new() .with_max_particles(100) .resurrect_particles(true), ) .with_radius(1.0) .build()]) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_terrain { let node = TerrainBuilder::new(BaseBuilder::new().with_name("Terrain")) .with_layers(vec![LayerDefinition { material: create_terrain_layer_material(), mask_property_name: "maskTexture".to_owned(), }]) .with_height_map_resolution(4.0) .build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.create_decal { let node = DecalBuilder::new(BaseBuilder::new().with_name("Decal")).build_node(); self.message_sender .send(Message::do_scene_command(AddNodeCommand::new(node))) .unwrap(); } else if message.destination() == self.save { if let Some(scene_path) = ctx.editor_scene.as_ref().map(|s| s.path.as_ref()).flatten() { self.message_sender .send(Message::SaveScene(scene_path.clone())) .unwrap(); } else { // If scene wasn't saved yet - open Save As window. 
ctx.engine .user_interface .send_message(WindowMessage::open_modal( self.save_file_selector, MessageDirection::ToWidget, true, )); ctx.engine .user_interface .send_message(FileSelectorMessage::path( self.save_file_selector, MessageDirection::ToWidget, std::env::current_dir().unwrap(), )); } } else if message.destination() == self.save_as { ctx.engine .user_interface .send_message(WindowMessage::open_modal( self.save_file_selector, MessageDirection::ToWidget, true, )); ctx.engine .user_interface .send_message(FileSelectorMessage::path( self.save_file_selector, MessageDirection::ToWidget, std::env::current_dir().unwrap(), )); } else if message.destination() == self.load { self.open_load_file_selector(&mut ctx.engine.user_interface); } else if message.destination() == self.close_scene { self.message_sender.send(Message::CloseScene).unwrap(); } else if message.destination() == self.copy { if let Some(editor_scene) = ctx.editor_scene { if let Selection::Graph(selection) = &editor_scene.selection { editor_scene.clipboard.fill_from_selection( selection, editor_scene.scene, &editor_scene.physics, ctx.engine, ); } } } else if message.destination() == self.paste { if let Some(editor_scene) = ctx.editor_scene { if !editor_scene.clipboard.is_empty() { self.message_sender .send(Message::do_scene_command(PasteCommand::new())) .unwrap(); } } } else if message.destination() == self.undo { self.message_sender.send(Message::UndoSceneCommand).unwrap(); } else if message.destination() == self.redo { self.message_sender.send(Message::RedoSceneCommand).unwrap(); } else if message.destination() == self.exit { self.message_sender .send(Message::Exit { force: false }) .unwrap(); } else if message.destination() == self.new_scene { self.message_sender.send(Message::NewScene).unwrap(); } else if message.destination() == self.asset_browser { switch_window_state(ctx.asset_window, &mut ctx.engine.user_interface, false); } else if message.destination() == self.light_panel { 
switch_window_state(ctx.light_panel, &mut ctx.engine.user_interface, true); } else if message.destination() == self.world_outliner { switch_window_state( ctx.world_outliner_window, &mut ctx.engine.user_interface, false, ); } else if message.destination() == self.sidebar { switch_window_state(ctx.sidebar_window, &mut ctx.engine.user_interface, false); } else if message.destination() == self.log_panel { switch_window_state(ctx.log_panel, &mut ctx.engine.user_interface, false); } else if message.destination() == self.open_settings { self.settings .open(&ctx.engine.user_interface, ctx.settings, None); } else if message.destination() == self.open_path_fixer { ctx.engine .user_interface .send_message(WindowMessage::open_modal( ctx.path_fixer, MessageDirection::ToWidget, true, )); } else if message.destination() == self.configure { if ctx.editor_scene.is_none() { ctx.engine .user_interface .send_message(WindowMessage::open_modal( ctx.configurator_window, MessageDirection::ToWidget, true, )); } else { ctx.engine .user_interface .send_message(MessageBoxMessage::open( self.configure_message, MessageDirection::ToWidget, None, None, )); } } } _ => (), } } }
44.676368
99
0.448341
38911c48918cb9ca5617dc7e1149451f40caccfd
10,235
//! Types for Assets, Jig and LearningPath. use std::{ collections::HashMap, fmt::{self, Debug}, str::FromStr, }; use chrono::{DateTime, Utc}; // use dyn_clone::DynClone; use serde::{Deserialize, Serialize}; use uuid::Uuid; use crate::domain::{ category::CategoryId, // learning_path::AdditionalResource, meta::{AffiliationId, AgeRangeId}, module::LiteModule, }; use super::{ course::{CourseId, CourseResponse}, jig::{JigId, JigResponse}, }; /// AssetType #[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] pub enum AssetType { /// JIG Jig, /// Resource Resource, /// Course Course, } impl AssetType { /// check if jig pub fn is_jig(&self) -> bool { matches!(self, Self::Jig) } /// check if resource pub fn is_resource(&self) -> bool { matches!(self, Self::Resource) } /// check if course pub fn is_course(&self) -> bool { matches!(self, Self::Course) } } /// AssetId #[derive(Copy, Clone, Eq, PartialEq, Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub enum AssetId { /// JIG ID JigId(JigId), /// Course ID CourseId(CourseId), } impl From<JigId> for AssetId { fn from(jig_id: JigId) -> Self { Self::JigId(jig_id) } } impl From<CourseId> for AssetId { fn from(course_id: CourseId) -> Self { Self::CourseId(course_id) } } impl AssetId { /// get jig id value as ref pub fn unwrap_jig(&self) -> &JigId { match self { Self::JigId(jig_id) => jig_id, _ => panic!(), } } /// get course id value as ref pub fn unwrap_course(&self) -> &CourseId { match self { Self::CourseId(course_id) => course_id, _ => panic!(), } } /// get the id uuid pub fn uuid(&self) -> &Uuid { match self { Self::JigId(jig_id) => &jig_id.0, Self::CourseId(course_id) => &course_id.0, } } /// check if jig pub fn is_jig_id(&self) -> bool { matches!(self, Self::JigId(_)) } /// check if course pub fn is_course_id(&self) -> bool { matches!(self, Self::CourseId(_)) } } /// Asset #[derive(Clone, Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub enum Asset { /// JIG ID 
associated with the module. Jig(JigResponse), /// Course ID associated with the module. Course(CourseResponse), } impl From<JigResponse> for Asset { fn from(jig: JigResponse) -> Self { Self::Jig(jig) } } impl From<CourseResponse> for Asset { fn from(course: CourseResponse) -> Self { Self::Course(course) } } impl Asset { /// get jig value as ref pub fn unwrap_jig(&self) -> &JigResponse { match self { Self::Jig(jig) => jig, _ => panic!(), } } /// get course value as ref pub fn unwrap_course(&self) -> &CourseResponse { match self { Self::Course(course) => course, _ => panic!(), } } /// get id pub fn id(&self) -> AssetId { match self { Self::Jig(jig) => jig.id.into(), Self::Course(course) => course.id.into(), } } /// get id pub fn published_at(&self) -> Option<DateTime<Utc>> { match self { Self::Jig(jig) => jig.published_at, Self::Course(course) => course.published_at, } } /// get display_name pub fn display_name(&self) -> &String { match self { Self::Jig(jig) => &jig.jig_data.display_name, Self::Course(course) => &course.course_data.display_name, } } /// get language pub fn language(&self) -> &String { match self { Self::Jig(jig) => &jig.jig_data.language, Self::Course(course) => &course.course_data.language, } } /// get description pub fn description(&self) -> &String { match self { Self::Jig(jig) => &jig.jig_data.description, Self::Course(course) => &course.course_data.description, } } /// get cover pub fn cover(&self) -> Option<&LiteModule> { match self { Self::Jig(jig) => jig.jig_data.modules.first(), Self::Course(_) => todo!(), } } /// get privacy_level pub fn privacy_level(&self) -> &PrivacyLevel { match self { Self::Jig(jig) => &jig.jig_data.privacy_level, Self::Course(course) => &course.course_data.privacy_level, } } /// get other_keywords pub fn other_keywords(&self) -> &String { match self { Self::Jig(jig) => &jig.jig_data.other_keywords, Self::Course(course) => &course.course_data.other_keywords, } } /// get translated_keywords pub fn 
translated_keywords(&self) -> &String { match self { Self::Jig(jig) => &jig.jig_data.translated_keywords, Self::Course(course) => &course.course_data.translated_keywords, } } /// get age_ranges pub fn age_ranges(&self) -> &Vec<AgeRangeId> { match self { Self::Jig(jig) => &jig.jig_data.age_ranges, Self::Course(course) => &course.course_data.age_ranges, } } /// get affiliations pub fn affiliations(&self) -> &Vec<AffiliationId> { match self { Self::Jig(jig) => &jig.jig_data.affiliations, Self::Course(course) => &course.course_data.affiliations, } } /// get categories pub fn categories(&self) -> &Vec<CategoryId> { match self { Self::Jig(jig) => &jig.jig_data.categories, Self::Course(course) => &course.course_data.categories, } } // pub fn additional_resources(&self) -> &Vec<AdditionalResource> { // match self { // Self::Jig(_) => todo!(), // Self::Course(_) => todo!(), // } // } /// get translated_description pub fn translated_description(&self) -> &HashMap<String, String> { match self { Self::Jig(jig) => &jig.jig_data.translated_description, Self::Course(course) => &course.course_data.translated_description, } } } // dyn_clone::clone_trait_object!(Asset); /// Special parameter for allowing implicit `me` as a user. #[derive(Clone, Eq, PartialEq, Debug)] pub enum UserOrMe { /// We should use the user found in the session auth. Me, /// we should use the provided user. 
User(Uuid), } impl serde::Serialize for UserOrMe { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { match self { UserOrMe::Me => serializer.serialize_str("me"), UserOrMe::User(id) => serializer.collect_str(&id.to_hyphenated()), } } } impl<'de> serde::Deserialize<'de> for UserOrMe { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = UserOrMe; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("`me` or `<uuid>`") } fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: serde::de::Error, { if value == "me" { Ok(UserOrMe::Me) } else { Uuid::from_str(value) .map(UserOrMe::User) .map_err(|e| E::custom(format!("failed to parse id: {}", e))) } } } deserializer.deserialize_str(Visitor) } } /// Access level for the jig. #[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq, Debug)] #[cfg_attr(feature = "backend", derive(sqlx::Type))] #[serde(rename_all = "camelCase")] #[repr(i16)] pub enum PrivacyLevel { /// Publicly available and indexed. Can be shared with others. Public = 0, /// Not indexed, but can be accessed by non-owners if the id is known. "Private" in the front-end Unlisted = 1, /// NOT IMPLEMENTED. Only available to the author. Private = 2, } impl PrivacyLevel { /// Represents the privacy level as a `str`. Relevant for Algolia tag filtering. pub fn as_str(&self) -> &'static str { match self { Self::Public => "public", Self::Unlisted => "unlisted", Self::Private => "private", } } } impl Default for PrivacyLevel { fn default() -> Self { Self::Public } } /// Whether the data is draft or live. 
#[derive(Serialize, Deserialize, Clone, Copy, Debug)] #[cfg_attr(feature = "backend", derive(sqlx::Type))] #[serde(rename_all = "camelCase")] #[repr(i16)] pub enum DraftOrLive { /// Represents a draft copy Draft = 0, /// Represents a live copy Live = 1, } impl DraftOrLive { /// Returns `true` for a [`Self::Live`] value. /// /// ``` /// let x = DraftOrLive::Live; /// assert_eq!(x.is_live(), true); /// /// let x = DraftOrLive::Draft; /// assert_eq!(x.is_live(), false); /// ``` pub fn is_live(&self) -> bool { matches!(*self, DraftOrLive::Live) } /// Returns `true` for a [`Draft`] value. /// /// ``` /// let x = DraftOrLive::Live; /// assert_eq!(x.is_draft(), false); /// /// let x = DraftOrLive::Draft; /// assert_eq!(x.is_draft(), true); /// ``` pub fn is_draft(&self) -> bool { !self.is_live() } } impl From<DraftOrLive> for bool { fn from(draft_or_live: DraftOrLive) -> Self { match draft_or_live { DraftOrLive::Draft => false, DraftOrLive::Live => true, } } } impl From<bool> for DraftOrLive { fn from(draft_or_live: bool) -> Self { match draft_or_live { false => DraftOrLive::Draft, true => DraftOrLive::Live, } } }
24.782082
101
0.546556
d5378eae7abc38f331c2b8f8e57d1858fb1ad131
6,725
use std::sync::Arc;

use druid::{
    piet::{Text, TextLayout, TextLayoutBuilder},
    BoxConstraints, Command, Cursor, Data, Env, Event, EventCtx, FontFamily,
    LayoutCtx, LifeCycle, LifeCycleCtx, MouseEvent, PaintCtx, Point, Rect,
    RenderContext, Size, Target, UpdateCtx, Widget, WidgetId,
};
use lapce_data::{
    command::{LapceUICommand, LAPCE_NEW_COMMAND, LAPCE_UI_COMMAND},
    config::LapceTheme,
    data::LapceWindowData,
    keypress::Alignment,
    menu::MenuData,
};

/// Floating context-menu widget: one fixed-height row per menu item.
pub struct Menu {
    widget_id: WidgetId,
    /// Height in pixels of each menu row; also used for mouse hit-testing.
    line_height: f64,
}

impl Menu {
    /// Creates the widget bound to the shared [`MenuData`]'s widget id.
    pub fn new(data: &MenuData) -> Self {
        Self {
            widget_id: data.widget_id,
            line_height: 30.0,
        }
    }

    /// Grabs keyboard focus so menu key bindings are routed to this widget.
    pub fn request_focus(&self, ctx: &mut EventCtx) {
        ctx.request_focus();
    }

    /// Updates the active (highlighted) row from the pointer position.
    fn mouse_move(
        &self,
        ctx: &mut EventCtx,
        mouse_event: &MouseEvent,
        data: &mut LapceWindowData,
    ) {
        ctx.set_handled();
        ctx.set_cursor(&Cursor::Pointer);
        // Row index is derived purely from the y coordinate.
        let n = (mouse_event.pos.y / self.line_height).floor() as usize;
        if n < data.menu.items.len() {
            Arc::make_mut(&mut data.menu).active = n;
        }
    }

    /// Dispatches the clicked item's command to the widget that had focus
    /// before the menu opened (`data.active_id`).
    fn mouse_down(
        &self,
        ctx: &mut EventCtx,
        mouse_event: &MouseEvent,
        data: &LapceWindowData,
    ) {
        ctx.set_handled();
        let n = (mouse_event.pos.y / self.line_height).floor() as usize;
        if let Some(item) = data.menu.items.get(n) {
            // Restore focus first so the command runs in the right context.
            ctx.submit_command(Command::new(
                LAPCE_UI_COMMAND,
                LapceUICommand::Focus,
                Target::Widget(data.active_id),
            ));
            ctx.submit_command(Command::new(
                LAPCE_NEW_COMMAND,
                item.command.clone(),
                Target::Widget(data.active_id),
            ));
        }
    }
}

impl Widget<LapceWindowData> for Menu {
    fn id(&self) -> Option<WidgetId> {
        Some(self.widget_id)
    }

    fn event(
        &mut self,
        ctx: &mut EventCtx,
        event: &Event,
        data: &mut LapceWindowData,
        env: &Env,
    ) {
        match event {
            Event::KeyDown(key_event) => {
                if data.menu.shown {
                    // Clone the keypress state so we can borrow the handler
                    // and the menu data mutably at the same time, then put
                    // the untouched clone back.
                    let keypress = data.keypress.clone();
                    let mut_keypress = Arc::make_mut(&mut data.keypress);
                    mut_keypress.key_down(
                        ctx,
                        key_event,
                        Arc::make_mut(&mut data.menu),
                        env,
                    );
                    data.keypress = keypress;
                    ctx.set_handled();
                }
            }
            Event::MouseMove(mouse_event) => {
                if data.menu.shown {
                    self.mouse_move(ctx, mouse_event, data);
                }
            }
            Event::MouseDown(mouse_event) => {
                if data.menu.shown {
                    self.mouse_down(ctx, mouse_event, data);
                }
            }
            Event::Command(cmd) if cmd.is(LAPCE_UI_COMMAND) => {
                let command = cmd.get_unchecked(LAPCE_UI_COMMAND);
                if let LapceUICommand::Focus = command {
                    self.request_focus(ctx);
                }
            }
            _ => (),
        }
    }

    fn lifecycle(
        &mut self,
        ctx: &mut LifeCycleCtx,
        event: &LifeCycle,
        _data: &LapceWindowData,
        _env: &Env,
    ) {
        // Losing focus dismisses the menu.
        if let LifeCycle::FocusChanged(is_focused) = event {
            if !is_focused {
                ctx.submit_command(Command::new(
                    LAPCE_UI_COMMAND,
                    LapceUICommand::HideMenu,
                    Target::Auto,
                ));
            }
        }
    }

    fn update(
        &mut self,
        ctx: &mut UpdateCtx,
        old_data: &LapceWindowData,
        data: &LapceWindowData,
        _env: &Env,
    ) {
        if !old_data.menu.items.same(&data.menu.items) {
            ctx.request_layout();
        }
        // BUG FIX: these comparisons previously read
        // `!old_data.menu.shown != data.menu.shown` and
        // `!old_data.menu.active != data.menu.active`. The `!` negated the
        // bool (inverting the check) and bit-inverted the usize (making the
        // check almost always true), so repaints fired when nothing changed
        // and, for `shown`, not when it did.
        if old_data.menu.shown != data.menu.shown {
            ctx.request_paint();
        }
        if old_data.menu.active != data.menu.active {
            ctx.request_paint();
        }
    }

    fn layout(
        &mut self,
        _ctx: &mut LayoutCtx,
        _bc: &BoxConstraints,
        data: &LapceWindowData,
        _env: &Env,
    ) -> Size {
        // Fixed width; height grows with the number of items.
        let height = self.line_height * data.menu.items.len() as f64;
        Size::new(300.0, height)
    }

    fn paint(&mut self, ctx: &mut PaintCtx, data: &LapceWindowData, _env: &Env) {
        if !data.menu.shown {
            return;
        }
        if data.menu.items.is_empty() {
            return;
        }
        let rect = ctx.size().to_rect();
        let shadow_width = 5.0;
        ctx.blurred_rect(
            rect,
            shadow_width,
            data.config
                .get_color_unchecked(LapceTheme::LAPCE_DROPDOWN_SHADOW),
        );
        ctx.fill(
            rect,
            data.config
                .get_color_unchecked(LapceTheme::PANEL_BACKGROUND),
        );

        // Highlight the active row only while the pointer is over the menu.
        if ctx.is_hot() {
            let line_rect = Rect::ZERO
                .with_origin(Point::new(
                    0.0,
                    data.menu.active as f64 * self.line_height,
                ))
                .with_size(Size::new(ctx.size().width, self.line_height));
            ctx.fill(
                line_rect,
                data.config.get_color_unchecked(LapceTheme::PANEL_CURRENT),
            );
        }

        for (i, item) in data.menu.items.iter().enumerate() {
            let text_layout = ctx
                .text()
                .new_text_layout(item.text.clone())
                .font(FontFamily::SYSTEM_UI, 13.0)
                .text_color(
                    data.config
                        .get_color_unchecked(LapceTheme::EDITOR_FOREGROUND)
                        .clone(),
                )
                .build()
                .unwrap();
            // Vertically center the label within its row.
            ctx.draw_text(
                &text_layout,
                Point::new(
                    10.0,
                    self.line_height * i as f64
                        + (self.line_height - text_layout.size().height) / 2.0,
                ),
            );

            if let Some(keymaps) =
                data.keypress.command_keymaps.get(&item.command.cmd)
            {
                // Right-align the first keymap hint; skip painting if the
                // list is unexpectedly empty (previously indexed `[0]`,
                // which would panic).
                if let Some(keymap) = keymaps.first() {
                    let origin = Point::new(
                        rect.x1,
                        self.line_height * i as f64 + self.line_height / 2.0,
                    );
                    keymap.paint(ctx, origin, Alignment::Right, &data.config);
                }
            }
        }
    }
}
28.375527
81
0.474796
e23232835acb6b429c39178551a82d5ca6ce5a28
3,786
use ic_base_types::CanisterId; #[cfg(target_arch = "x86_64")] pub mod ids; // WARNING: The NNS canisters MUST be installed in the NNS subnet, // in the following order, otherwise they won't be able to find // each other. // // These constants are used to write a file with the PB below in // nns/common/build.rs. // // NOTES (IMPORTANT!) // ~~~~~~~~~~~~~~~~~~ // - This is dependent on the implementation of function // `CanisterManager::generate_new_canister_id`. // - Unless you only add at the end, be sure to double check with // `rs/nns/canister_ids.json`. pub const REGISTRY_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 0; pub const GOVERNANCE_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 1; pub const LEDGER_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 2; pub const ROOT_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 3; pub const CYCLES_MINTING_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 4; pub const LIFELINE_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 5; pub const GENESIS_TOKEN_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 6; pub const IDENTITY_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 7; pub const NNS_UI_CANISTER_INDEX_IN_NNS_SUBNET: u64 = 8; /// The names of all expected .wasm files to set up the NNS. pub const NNS_CANISTER_WASMS: [&str; 9] = [ // The lifeline is not present! Because its wasm is embedded in the source code using // include_bytes, it is not provided on the path. We want to change that, though. "registry-canister", "governance-canister", "ledger-canister", "root-canister", "cycles-minting-canister", // The lifeline is built differently, which explains why its wasm has a different name pattern. 
"lifeline", "genesis-token-canister", "identity-canister", "nns-ui-canister", ]; pub const NUM_NNS_CANISTERS: usize = NNS_CANISTER_WASMS.len(); pub const REGISTRY_CANISTER_ID: CanisterId = CanisterId::from_u64(REGISTRY_CANISTER_INDEX_IN_NNS_SUBNET); pub const GOVERNANCE_CANISTER_ID: CanisterId = CanisterId::from_u64(GOVERNANCE_CANISTER_INDEX_IN_NNS_SUBNET); pub const LEDGER_CANISTER_ID: CanisterId = CanisterId::from_u64(LEDGER_CANISTER_INDEX_IN_NNS_SUBNET); pub const ROOT_CANISTER_ID: CanisterId = CanisterId::from_u64(ROOT_CANISTER_INDEX_IN_NNS_SUBNET); pub const CYCLES_MINTING_CANISTER_ID: CanisterId = CanisterId::from_u64(CYCLES_MINTING_CANISTER_INDEX_IN_NNS_SUBNET); pub const LIFELINE_CANISTER_ID: CanisterId = CanisterId::from_u64(LIFELINE_CANISTER_INDEX_IN_NNS_SUBNET); pub const GENESIS_TOKEN_CANISTER_ID: CanisterId = CanisterId::from_u64(GENESIS_TOKEN_CANISTER_INDEX_IN_NNS_SUBNET); pub const IDENTITY_CANISTER_ID: CanisterId = CanisterId::from_u64(IDENTITY_CANISTER_INDEX_IN_NNS_SUBNET); pub const NNS_UI_CANISTER_ID: CanisterId = CanisterId::from_u64(NNS_UI_CANISTER_INDEX_IN_NNS_SUBNET); pub const ALL_NNS_CANISTER_IDS: [&CanisterId; 9] = [ &REGISTRY_CANISTER_ID, &GOVERNANCE_CANISTER_ID, &LEDGER_CANISTER_ID, &ROOT_CANISTER_ID, &CYCLES_MINTING_CANISTER_ID, &LIFELINE_CANISTER_ID, &GENESIS_TOKEN_CANISTER_ID, &IDENTITY_CANISTER_ID, &NNS_UI_CANISTER_ID, ]; // The memory allocation for the ledger, governance and registry canisters // (4GiB) const NNS_MAX_CANISTER_MEMORY_ALLOCATION_IN_BYTES: u64 = 4 * 1024 * 1024 * 1024; // The default memory allocation to set for the remaining NNS canister (1GiB) const NNS_DEFAULT_CANISTER_MEMORY_ALLOCATION_IN_BYTES: u64 = 1024 * 1024 * 1024; /// Returns the memory allocation of the given nns canister. 
/// Returns the memory allocation, in bytes, for the given NNS canister.
///
/// The ledger, governance and registry canisters receive the large
/// allocation; every other canister receives the default allocation.
pub fn memory_allocation_of(canister_id: CanisterId) -> u64 {
    let needs_max_allocation = canister_id == LEDGER_CANISTER_ID
        || canister_id == GOVERNANCE_CANISTER_ID
        || canister_id == REGISTRY_CANISTER_ID;
    if needs_max_allocation {
        NNS_MAX_CANISTER_MEMORY_ALLOCATION_IN_BYTES
    } else {
        NNS_DEFAULT_CANISTER_MEMORY_ALLOCATION_IN_BYTES
    }
}
38.632653
99
0.767036
5d878d30a42f081e7a034db2516966b66e7fc97f
611
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. use hashbrown::{hash_map, HashMap}; use super::lexer::{Lexer, Token}; struct Parser<'a> { buf: &'a [u8], lexer: Lexer<'a>, //types: HashMap<&'a str, } impl<'a> Parser<'a> { pub fn new(buf: &'a [u8]) -> Self { Self { buf, lexer: Lexer::new(buf), } } }
24.44
77
0.603928
0ac57faaccc291a6c0cfc8346501684b39077d96
1,606
// This file is part of nvml. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/nvml/master/COPYRIGHT. No part of predicator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file. // Copyright © 2017 The developers of nvml. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/nvml/master/COPYRIGHT. use ExtendedNonNull; use ToNonNull; use hyper_thread::*; use super::*; use super::arc::CtoStrongArc; use super::arc::CtoStrongArcInner; use super::free_list::FreeList; use super::free_list::FreeListElement; use super::free_list::OwnedFreeListElement; use ::std::cell::UnsafeCell; use ::std::cmp::min; use ::std::fmt; use ::std::fmt::Debug; use ::std::fmt::Formatter; use ::std::marker::PhantomData; use ::std::mem::uninitialized; use ::std::mem::zeroed; use ::std::ops::Deref; use ::std::ops::DerefMut; use ::std::ptr::null_mut; use ::std::ptr::write; use ::std::sync::atomic::AtomicU32; use ::std::sync::atomic::AtomicUsize; use ::std::sync::atomic::AtomicPtr; use ::std::sync::atomic::Ordering::Relaxed; use ::std::sync::atomic::Ordering::Release; use ::std::sync::atomic::Ordering::SeqCst; include!("DoubleCacheAligned.rs"); include!("ExtendedAtomic.rs"); include!("HazardPointerPerHyperThread.rs"); include!("Node.rs"); include!("NodeFullOrDrained.rs"); include!("OutOfMemoryError.rs"); include!("PersistentFetchAndAddArrayQueue.rs");
39.170732
373
0.750311
72b66a4ff95c3e458001e0a4b4e59569519f7a87
1,097
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #![deny(missing_docs)] //! This crate provides utilities for the Fuchsia display-controller API. /// Custom error definitions for `fuchsia.hardware.display` and sysmem API functions. mod error; /// The `types` module defines convenions wrappers for FIDL data types in the /// `fuchsia.hardware.display` library. mod types; /// Stateless representation of a display configuration. mod config; /// Helper functions setting up shared image buffers that can be assigned to display layers. mod image; /// The `Controller` type is a client-side abstraction for the `fuchsia.hardware.display.Controller` /// protocol. mod controller; /// Rust bindings for the Fuchsia-canonical `zx_pixel_format_t` declared in /// //zircon/system/public/zircon/pixelformat.h. mod pixel_format; pub use config::*; pub use controller::{Controller, VsyncEvent}; pub use error::*; pub use image::*; pub use pixel_format::PixelFormat; pub use types::*;
30.472222
100
0.757521
2f0cb2858e84119eadc7568ce4544963a86fa2ba
1,330
// Write accessor for the CIENR register (clear rising-edge/level interrupt
// enable), in the svd2rust generated-code style. NOTE(review): this looks
// machine-generated; hand edits may be lost on regeneration.
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw register value assembled before being committed to hardware.
    bits: u32,
}
impl super::CIENR {
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, let the closure set fields, then
        // commit the assembled bits to the hardware register in one store.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
}
#[doc = r" Proxy"]
pub struct _CENRLW<'a> {
    w: &'a mut W,
}
impl<'a> _CENRLW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // CENRL occupies bits 0:7, hence an 8-bit mask at offset 0.
        const MASK: u8 = 255;
        const OFFSET: u8 = 0;
        // Clear the field, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:7 - Ones written to this address clear bits in the IENR, thus disabling the interrupts. Bit n clears bit n in the IENR register. 0 = No operation. 1 = Disable rising edge or level interrupt."]
    #[inline]
    pub fn cenrl(&mut self) -> _CENRLW {
        _CENRLW { w: self }
    }
}
26.6
212
0.533835
e87991a58347e940ca5f1eae2d10cc7ccbf516b5
6,861
#[doc = "Reader of register TPIUCTRL"] pub type R = crate::R<u32, super::TPIUCTRL>; #[doc = "Writer for register TPIUCTRL"] pub type W = crate::W<u32, super::TPIUCTRL>; #[doc = "Register TPIUCTRL `reset()`'s with value 0"] impl crate::ResetValue for super::TPIUCTRL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "This field selects the frequency of the ARM M4 TPIU port.\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CLKSEL_A { #[doc = "0: Low power state."] LOWPWR = 0, #[doc = "1: Selects HFRC divided by 2 as the source TPIU clock"] HFRCDIV2 = 1, #[doc = "2: Selects HFRC divided by 8 as the source TPIU clock"] HFRCDIV8 = 2, #[doc = "3: Selects HFRC divided by 16 as the source TPIU clock"] HFRCDIV16 = 3, #[doc = "4: Selects HFRC divided by 32 as the source TPIU clock"] HFRCDIV32 = 4, } impl From<CLKSEL_A> for u8 { #[inline(always)] fn from(variant: CLKSEL_A) -> Self { variant as _ } } #[doc = "Reader of field `CLKSEL`"] pub type CLKSEL_R = crate::R<u8, CLKSEL_A>; impl CLKSEL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> crate::Variant<u8, CLKSEL_A> { use crate::Variant::*; match self.bits { 0 => Val(CLKSEL_A::LOWPWR), 1 => Val(CLKSEL_A::HFRCDIV2), 2 => Val(CLKSEL_A::HFRCDIV8), 3 => Val(CLKSEL_A::HFRCDIV16), 4 => Val(CLKSEL_A::HFRCDIV32), i => Res(i), } } #[doc = "Checks if the value of the field is `LOWPWR`"] #[inline(always)] pub fn is_lowpwr(&self) -> bool { *self == CLKSEL_A::LOWPWR } #[doc = "Checks if the value of the field is `HFRCDIV2`"] #[inline(always)] pub fn is_hfrcdiv2(&self) -> bool { *self == CLKSEL_A::HFRCDIV2 } #[doc = "Checks if the value of the field is `HFRCDIV8`"] #[inline(always)] pub fn is_hfrcdiv8(&self) -> bool { *self == CLKSEL_A::HFRCDIV8 } #[doc = "Checks if the value of the field is `HFRCDIV16`"] #[inline(always)] pub fn is_hfrcdiv16(&self) -> bool { *self == CLKSEL_A::HFRCDIV16 } #[doc = "Checks if the value of the field 
is `HFRCDIV32`"] #[inline(always)] pub fn is_hfrcdiv32(&self) -> bool { *self == CLKSEL_A::HFRCDIV32 } } #[doc = "Write proxy for field `CLKSEL`"] pub struct CLKSEL_W<'a> { w: &'a mut W, } impl<'a> CLKSEL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: CLKSEL_A) -> &'a mut W { unsafe { self.bits(variant.into()) } } #[doc = "Low power state."] #[inline(always)] pub fn lowpwr(self) -> &'a mut W { self.variant(CLKSEL_A::LOWPWR) } #[doc = "Selects HFRC divided by 2 as the source TPIU clock"] #[inline(always)] pub fn hfrcdiv2(self) -> &'a mut W { self.variant(CLKSEL_A::HFRCDIV2) } #[doc = "Selects HFRC divided by 8 as the source TPIU clock"] #[inline(always)] pub fn hfrcdiv8(self) -> &'a mut W { self.variant(CLKSEL_A::HFRCDIV8) } #[doc = "Selects HFRC divided by 16 as the source TPIU clock"] #[inline(always)] pub fn hfrcdiv16(self) -> &'a mut W { self.variant(CLKSEL_A::HFRCDIV16) } #[doc = "Selects HFRC divided by 32 as the source TPIU clock"] #[inline(always)] pub fn hfrcdiv32(self) -> &'a mut W { self.variant(CLKSEL_A::HFRCDIV32) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 8)) | (((value as u32) & 0x07) << 8); self.w } } #[doc = "TPIU Enable field. 
When set, the ARM M4 TPIU is enabled and data can be streamed out of the MCU's SWO port using the ARM ITM and TPIU modules.\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ENABLE_A { #[doc = "0: Disable the TPIU."] DIS = 0, #[doc = "1: Enable the TPIU."] EN = 1, } impl From<ENABLE_A> for bool { #[inline(always)] fn from(variant: ENABLE_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `ENABLE`"] pub type ENABLE_R = crate::R<bool, ENABLE_A>; impl ENABLE_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ENABLE_A { match self.bits { false => ENABLE_A::DIS, true => ENABLE_A::EN, } } #[doc = "Checks if the value of the field is `DIS`"] #[inline(always)] pub fn is_dis(&self) -> bool { *self == ENABLE_A::DIS } #[doc = "Checks if the value of the field is `EN`"] #[inline(always)] pub fn is_en(&self) -> bool { *self == ENABLE_A::EN } } #[doc = "Write proxy for field `ENABLE`"] pub struct ENABLE_W<'a> { w: &'a mut W, } impl<'a> ENABLE_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ENABLE_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Disable the TPIU."] #[inline(always)] pub fn dis(self) -> &'a mut W { self.variant(ENABLE_A::DIS) } #[doc = "Enable the TPIU."] #[inline(always)] pub fn en(self) -> &'a mut W { self.variant(ENABLE_A::EN) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl R { #[doc = "Bits 8:10 - This field selects the frequency of the ARM M4 TPIU port."] #[inline(always)] pub fn clksel(&self) -> CLKSEL_R { CLKSEL_R::new(((self.bits >> 8) & 0x07) as u8) } #[doc = "Bit 0 - TPIU Enable field. 
When set, the ARM M4 TPIU is enabled and data can be streamed out of the MCU's SWO port using the ARM ITM and TPIU modules."] #[inline(always)] pub fn enable(&self) -> ENABLE_R { ENABLE_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bits 8:10 - This field selects the frequency of the ARM M4 TPIU port."] #[inline(always)] pub fn clksel(&mut self) -> CLKSEL_W { CLKSEL_W { w: self } } #[doc = "Bit 0 - TPIU Enable field. When set, the ARM M4 TPIU is enabled and data can be streamed out of the MCU's SWO port using the ARM ITM and TPIU modules."] #[inline(always)] pub fn enable(&mut self) -> ENABLE_W { ENABLE_W { w: self } } }
31.617512
174
0.570325
2f6957aec37534326dc83f5ec4d0b997b9935507
1,773
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(warnings)] // not used on emscripten use std::env; use net::{SocketAddr, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr, ToSocketAddrs}; use std::sync::atomic::{AtomicUsize, Ordering}; static mut PORT: u16 = 0; pub fn next_test_ip4() -> SocketAddr { unsafe { PORT = PORT + 1; } // let port = PORT.fetch_add(1, Ordering::SeqCst) as u16 + base_port(); SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), unsafe { PORT + base_port() })) } pub fn next_test_ip6() -> SocketAddr { unsafe { PORT = PORT + 1; } SocketAddr::V6(SocketAddrV6::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1), unsafe { PORT + base_port() }, 0, 0)) } pub fn sa4(a: Ipv4Addr, p: u16) -> SocketAddr { SocketAddr::V4(SocketAddrV4::new(a, p)) } pub fn sa6(a: Ipv6Addr, p: u16) -> SocketAddr { SocketAddr::V6(SocketAddrV6::new(a, p, 0, 0)) } pub fn tsa<A: ToSocketAddrs>(a: A) -> Result<Vec<SocketAddr>, String> { match a.to_socket_addrs() { Ok(a) => Ok(a.collect()), Err(e) => Err(e.to_string()), } } // The bots run multiple builds at the same time, and these builds // all want to use ports. This function figures out which workspace // it is running in and assigns a port range based on it. fn base_port() -> u16 { 19600 }
34.096154
97
0.655386
e8fcb80f44b5432d96edcd4fae12f55ff927a69b
853
use std::io;

/// Reads three whitespace-separated `u16` values from one line of stdin
/// and prints them in ascending order, space-separated, with no trailing
/// newline.
fn main() {
    let mut line = String::new();
    io::stdin().read_line(&mut line).unwrap();

    // Take exactly the first three tokens, as the original did; any
    // extra input on the line is ignored.
    let mut values: Vec<u16> = line
        .split_whitespace()
        .take(3)
        .map(|tok| tok.parse().unwrap())
        .collect();
    // Sorting three u16s produces the same output as the original's
    // hand-written comparison tree for every input, ties included.
    values.sort_unstable();

    print!("{} {} {}", values[0], values[1], values[2]);
}
31.592593
59
0.385698
4b01f54e0b3b283ae6b0d3dd1cb40c2c1e6637ee
15,157
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.

use crate::colors;
use crate::emit::TypeLib;
use crate::errors::get_error_class_name;
use deno_core::error::custom_error;
use deno_core::error::AnyError;
use deno_core::ModuleSpecifier;
use deno_graph::Dependency;
use deno_graph::MediaType;
use deno_graph::ModuleGraph;
use deno_graph::ModuleGraphError;
use deno_graph::ModuleKind;
use deno_graph::Range;
use deno_graph::Resolved;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::sync::Arc;

/// Returns `true` if `specifier` appears among the `(specifier, kind)` root
/// pairs in `v` (the kind is ignored).
pub(crate) fn contains_specifier(
  v: &[(ModuleSpecifier, ModuleKind)],
  specifier: &ModuleSpecifier,
) -> bool {
  v.iter().any(|(s, _)| s == specifier)
}

/// Cached information about one entry of a module graph.
#[derive(Debug, Clone)]
#[allow(clippy::large_enum_variant)]
pub(crate) enum ModuleEntry {
  /// A successfully loaded module with its source and dependency map.
  Module {
    code: Arc<String>,
    dependencies: BTreeMap<String, Dependency>,
    media_type: MediaType,
    /// A set of type libs that the module has passed a type check with this
    /// session. This would consist of window, worker or both.
    checked_libs: HashSet<TypeLib>,
    maybe_types: Option<Resolved>,
  },
  /// A synthetic configuration module; only the type resolutions of its
  /// dependencies are retained.
  Configuration {
    dependencies: BTreeMap<String, Resolved>,
  },
  /// Loading or parsing the module failed.
  Error(ModuleGraphError),
  /// The specifier redirects to another specifier.
  Redirect(ModuleSpecifier),
}

/// Composes data from potentially many `ModuleGraph`s.
#[derive(Debug, Default)]
pub(crate) struct GraphData {
  // Every known specifier, mapped to its cached entry.
  modules: HashMap<ModuleSpecifier, ModuleEntry>,
  /// Map of first known referrer locations for each module. Used to enhance
  /// error messages.
  referrer_map: HashMap<ModuleSpecifier, Range>,
  // Specifiers of synthetic configuration modules; `walk` always seeds its
  // traversal with these in addition to the supplied roots.
  configurations: HashSet<ModuleSpecifier>,
  // Cached CJS->ESM translated sources, keyed by specifier.
  cjs_esm_translations: HashMap<ModuleSpecifier, String>,
}

impl GraphData {
  /// Store data from `graph` into `self`.
  ///
  /// Existing entries are kept unless `reload` is `true`. Redirects are
  /// stored as `ModuleEntry::Redirect`; synthetic modules additionally get a
  /// `ModuleEntry::Configuration` entry. While inserting, the first known
  /// referrer `Range` for each referenced specifier is recorded for later
  /// error enhancement.
  pub(crate) fn add_graph(&mut self, graph: &ModuleGraph, reload: bool) {
    for (specifier, result) in graph.specifiers() {
      if !reload && self.modules.contains_key(&specifier) {
        continue;
      }
      if let Some(found) = graph.redirects.get(&specifier) {
        let module_entry = ModuleEntry::Redirect(found.clone());
        self.modules.insert(specifier.clone(), module_entry);
        continue;
      }
      match result {
        Ok((_, _, media_type)) => {
          let module = graph.get(&specifier).unwrap();
          if module.kind == ModuleKind::Synthetic {
            // Synthetic (configuration) modules: keep only the type
            // resolutions of their dependencies.
            let mut dependencies = BTreeMap::new();
            for (specifier, dependency) in &module.dependencies {
              if !matches!(dependency.maybe_type, Resolved::None) {
                dependencies
                  .insert(specifier.clone(), dependency.maybe_type.clone());
                if let Resolved::Ok {
                  specifier, range, ..
                } = &dependency.maybe_type
                {
                  // Only the first referrer seen for a specifier is kept.
                  let entry = self.referrer_map.entry(specifier.clone());
                  entry.or_insert_with(|| range.clone());
                }
              }
            }
            self.modules.insert(
              module.specifier.clone(),
              ModuleEntry::Configuration { dependencies },
            );
            self.configurations.insert(module.specifier.clone());
          }
          // Modules with no source are skipped entirely.
          let code = match &module.maybe_source {
            Some(source) => source.clone(),
            None => continue,
          };
          let maybe_types = module
            .maybe_types_dependency
            .as_ref()
            .map(|(_, r)| r.clone());
          if let Some(Resolved::Ok {
            specifier, range, ..
          }) = &maybe_types
          {
            // Record the referrer against the redirect target, if any.
            let specifier = graph.redirects.get(specifier).unwrap_or(specifier);
            let entry = self.referrer_map.entry(specifier.clone());
            entry.or_insert_with(|| range.clone());
          }
          for dep in module.dependencies.values() {
            #[allow(clippy::manual_flatten)]
            for resolved in [&dep.maybe_code, &dep.maybe_type] {
              if let Resolved::Ok {
                specifier, range, ..
              } = resolved
              {
                let specifier =
                  graph.redirects.get(specifier).unwrap_or(specifier);
                let entry = self.referrer_map.entry(specifier.clone());
                entry.or_insert_with(|| range.clone());
              }
            }
          }
          let module_entry = ModuleEntry::Module {
            code,
            dependencies: module.dependencies.clone(),
            media_type,
            checked_libs: Default::default(),
            maybe_types,
          };
          self.modules.insert(specifier, module_entry);
        }
        Err(error) => {
          let module_entry = ModuleEntry::Error(error);
          self.modules.insert(specifier, module_entry);
        }
      }
    }
  }

  /// Borrowed view of every known `(specifier, entry)` pair.
  pub(crate) fn entries(&self) -> HashMap<&ModuleSpecifier, &ModuleEntry> {
    self.modules.iter().collect()
  }

  /// Walk dependencies from `roots` and return every encountered specifier.
  /// Return `None` if any modules are not known.
  ///
  /// Configuration modules are always added as extra roots. The traversal is
  /// depth-first: newly discovered specifiers are pushed to the *front* of
  /// the deque that is popped from the front. Dynamic imports are only
  /// followed when `follow_dynamic` is set; type-only dependencies when
  /// `follow_type_only` is set (and, for plain-JS media types, additionally
  /// gated on `check_js`).
  pub(crate) fn walk<'a>(
    &'a self,
    roots: &[(ModuleSpecifier, ModuleKind)],
    follow_dynamic: bool,
    follow_type_only: bool,
    check_js: bool,
  ) -> Option<HashMap<&'a ModuleSpecifier, &'a ModuleEntry>> {
    let mut result = HashMap::<&'a ModuleSpecifier, &'a ModuleEntry>::new();
    let mut seen = HashSet::<&ModuleSpecifier>::new();
    let mut visiting = VecDeque::<&ModuleSpecifier>::new();
    for (root, _) in roots {
      seen.insert(root);
      visiting.push_back(root);
    }
    for root in &self.configurations {
      seen.insert(root);
      visiting.push_back(root);
    }
    while let Some(specifier) = visiting.pop_front() {
      // Any unknown specifier aborts the whole walk.
      let (specifier, entry) = match self.modules.get_key_value(specifier) {
        Some(pair) => pair,
        None => return None,
      };
      result.insert(specifier, entry);
      match entry {
        ModuleEntry::Module {
          dependencies,
          maybe_types,
          media_type,
          ..
        } => {
          // Type resolutions of plain-JS modules are only followed when
          // `check_js` is enabled.
          let check_types = (check_js
            || !matches!(
              media_type,
              MediaType::JavaScript
                | MediaType::Mjs
                | MediaType::Cjs
                | MediaType::Jsx
            ))
            && follow_type_only;
          if check_types {
            if let Some(Resolved::Ok { specifier, .. }) = maybe_types {
              if !seen.contains(specifier) {
                seen.insert(specifier);
                visiting.push_front(specifier);
              }
            }
          }
          // Iterated in reverse — presumably so that, combined with
          // push_front, dependencies are visited in source order; confirm
          // before relying on ordering.
          for (_, dep) in dependencies.iter().rev() {
            if !dep.is_dynamic || follow_dynamic {
              let mut resolutions = vec![&dep.maybe_code];
              if check_types {
                resolutions.push(&dep.maybe_type);
              }
              #[allow(clippy::manual_flatten)]
              for resolved in resolutions {
                if let Resolved::Ok { specifier, .. } = resolved {
                  if !seen.contains(specifier) {
                    seen.insert(specifier);
                    visiting.push_front(specifier);
                  }
                }
              }
            }
          }
        }
        ModuleEntry::Configuration { dependencies } => {
          for resolved in dependencies.values() {
            if let Resolved::Ok { specifier, .. } = resolved {
              if !seen.contains(specifier) {
                seen.insert(specifier);
                visiting.push_front(specifier);
              }
            }
          }
        }
        ModuleEntry::Error(_) => {}
        ModuleEntry::Redirect(specifier) => {
          if !seen.contains(specifier) {
            seen.insert(specifier);
            visiting.push_front(specifier);
          }
        }
      }
    }
    Some(result)
  }

  /// Clone part of `self`, containing only modules which are dependencies of
  /// `roots`. Returns `None` if any roots are not known.
  pub(crate) fn graph_segment(
    &self,
    roots: &[(ModuleSpecifier, ModuleKind)],
  ) -> Option<Self> {
    let mut modules = HashMap::new();
    let mut referrer_map = HashMap::new();
    // Follow everything: dynamic imports, type-only deps, and JS types.
    let entries = match self.walk(roots, true, true, true) {
      Some(entries) => entries,
      None => return None,
    };
    for (specifier, module_entry) in entries {
      modules.insert(specifier.clone(), module_entry.clone());
      if let Some(referrer) = self.referrer_map.get(specifier) {
        referrer_map.insert(specifier.clone(), referrer.clone());
      }
    }
    Some(Self {
      modules,
      referrer_map,
      configurations: self.configurations.clone(),
      // Translations are intentionally not carried over to the segment.
      cjs_esm_translations: Default::default(),
    })
  }

  /// Check if `roots` and their deps are available. Returns `Some(Ok(()))` if
  /// so. Returns `Some(Err(_))` if there is a known module graph or resolution
  /// error statically reachable from `roots`. Returns `None` if any modules are
  /// not known.
  ///
  /// Errors are enhanced with the first known referrer location ("at ...")
  /// unless the referrer specifier contains "$deno" (internal modules).
  /// Dynamic imports are never followed here.
  pub(crate) fn check(
    &self,
    roots: &[(ModuleSpecifier, ModuleKind)],
    follow_type_only: bool,
    check_js: bool,
  ) -> Option<Result<(), AnyError>> {
    let entries = match self.walk(roots, false, follow_type_only, check_js) {
      Some(entries) => entries,
      None => return None,
    };
    for (specifier, module_entry) in entries {
      match module_entry {
        ModuleEntry::Module {
          dependencies,
          maybe_types,
          media_type,
          ..
        } => {
          // Same gating as in `walk`: type resolutions of plain-JS modules
          // only matter when `check_js` is enabled.
          let check_types = (check_js
            || !matches!(
              media_type,
              MediaType::JavaScript
                | MediaType::Mjs
                | MediaType::Cjs
                | MediaType::Jsx
            ))
            && follow_type_only;
          if check_types {
            if let Some(Resolved::Err(error)) = maybe_types {
              let range = error.range();
              if !range.specifier.as_str().contains("$deno") {
                return Some(Err(custom_error(
                  get_error_class_name(&error.clone().into()),
                  format!("{}\n at {}", error, range),
                )));
              }
              return Some(Err(error.clone().into()));
            }
          }
          for (_, dep) in dependencies.iter() {
            // Dynamic import failures are not surfaced by this check.
            if !dep.is_dynamic {
              let mut resolutions = vec![&dep.maybe_code];
              if check_types {
                resolutions.push(&dep.maybe_type);
              }
              #[allow(clippy::manual_flatten)]
              for resolved in resolutions {
                if let Resolved::Err(error) = resolved {
                  let range = error.range();
                  if !range.specifier.as_str().contains("$deno") {
                    return Some(Err(custom_error(
                      get_error_class_name(&error.clone().into()),
                      format!("{}\n at {}", error, range),
                    )));
                  }
                  return Some(Err(error.clone().into()));
                }
              }
            }
          }
        }
        ModuleEntry::Configuration { dependencies } => {
          for resolved_result in dependencies.values() {
            if let Resolved::Err(error) = resolved_result {
              let range = error.range();
              if !range.specifier.as_str().contains("$deno") {
                return Some(Err(custom_error(
                  get_error_class_name(&error.clone().into()),
                  format!("{}\n at {}", error, range),
                )));
              }
              return Some(Err(error.clone().into()));
            }
          }
        }
        ModuleEntry::Error(error) => {
          // Roots report their own error as-is; transitive errors are
          // enhanced with the recorded referrer location when available.
          if !contains_specifier(roots, specifier) {
            if let Some(range) = self.referrer_map.get(specifier) {
              if !range.specifier.as_str().contains("$deno") {
                let message = error.to_string();
                return Some(Err(custom_error(
                  get_error_class_name(&error.clone().into()),
                  format!("{}\n at {}", message, range),
                )));
              }
            }
          }
          return Some(Err(error.clone().into()));
        }
        _ => {}
      }
    }
    Some(Ok(()))
  }

  /// Mark `roots` and all of their dependencies as type checked under `lib`.
  /// Assumes that all of those modules are known.
  pub(crate) fn set_type_checked(
    &mut self,
    roots: &[(ModuleSpecifier, ModuleKind)],
    lib: &TypeLib,
  ) {
    let specifiers: Vec<ModuleSpecifier> =
      match self.walk(roots, true, true, true) {
        Some(entries) => entries.into_keys().cloned().collect(),
        None => unreachable!("contains module not in graph data"),
      };
    for specifier in specifiers {
      // Only `Module` entries carry the checked-libs set; other entry kinds
      // are silently skipped.
      if let ModuleEntry::Module { checked_libs, .. } =
        self.modules.get_mut(&specifier).unwrap()
      {
        checked_libs.insert(lib.clone());
      }
    }
  }

  /// Check if `roots` are all marked as type checked under `lib`.
  pub(crate) fn is_type_checked(
    &self,
    roots: &[(ModuleSpecifier, ModuleKind)],
    lib: &TypeLib,
  ) -> bool {
    roots.iter().all(|(r, _)| {
      let found = self.follow_redirect(r);
      match self.modules.get(&found) {
        Some(ModuleEntry::Module { checked_libs, .. }) => {
          checked_libs.contains(lib)
        }
        _ => false,
      }
    })
  }

  /// If `specifier` is known and a redirect, return the found specifier.
  /// Otherwise return `specifier`.
  pub(crate) fn follow_redirect(
    &self,
    specifier: &ModuleSpecifier,
  ) -> ModuleSpecifier {
    match self.modules.get(specifier) {
      Some(ModuleEntry::Redirect(s)) => s.clone(),
      _ => specifier.clone(),
    }
  }

  /// Look up the entry for `specifier` without following redirects.
  pub(crate) fn get<'a>(
    &'a self,
    specifier: &ModuleSpecifier,
  ) -> Option<&'a ModuleEntry> {
    self.modules.get(specifier)
  }

  // TODO(bartlomieju): after saving translated source
  // it's never removed, potentially leading to excessive
  // memory consumption
  /// Cache the CJS->ESM translation for `specifier`. Panics (via the assert)
  /// if a translation was already stored for that specifier.
  pub(crate) fn add_cjs_esm_translation(
    &mut self,
    specifier: &ModuleSpecifier,
    source: String,
  ) {
    let prev = self
      .cjs_esm_translations
      .insert(specifier.to_owned(), source);
    assert!(prev.is_none());
  }

  /// Fetch a previously cached CJS->ESM translation, if any.
  pub(crate) fn get_cjs_esm_translation<'a>(
    &'a self,
    specifier: &ModuleSpecifier,
  ) -> Option<&'a String> {
    self.cjs_esm_translations.get(specifier)
  }
}

impl From<&ModuleGraph> for GraphData {
  /// Builds a fresh `GraphData` from a single graph (no reload semantics).
  fn from(graph: &ModuleGraph) -> Self {
    let mut graph_data = GraphData::default();
    graph_data.add_graph(graph, false);
    graph_data
  }
}

/// Like `graph.valid()`, but enhanced with referrer info.
///
/// Panics (via `unwrap`) if any of the graph's roots are unknown to the
/// freshly built `GraphData` — `check` only returns `None` in that case.
pub(crate) fn graph_valid(
  graph: &ModuleGraph,
  follow_type_only: bool,
  check_js: bool,
) -> Result<(), AnyError> {
  GraphData::from(graph)
    .check(&graph.roots, follow_type_only, check_js)
    .unwrap()
}

/// Calls `graph.lock()` and exits on errors.
// Exits the whole process with code 10 after logging the lockfile error.
pub(crate) fn graph_lock_or_exit(graph: &ModuleGraph) {
  if let Err(err) = graph.lock() {
    log::error!("{} {}", colors::red("error:"), err);
    std::process::exit(10);
  }
}
32.525751
80
0.556509
2fcce85d3e8666f8d7a6df4cbe2897e5ff8c0b4b
5,119
// Auto-generated register API (svd2rust style) for the USART request
// register (`RQR`). `W` is a write proxy over the register's raw bits; each
// `_<FIELD>W` type exposes one single-bit, write-only request flag
// (RXFRQ = bit 3, MMRQ = bit 2, SBKRQ = bit 1). Generated code — do not
// edit by hand; regenerate from the SVD instead.
#[doc = r" Value to write to the register"]
pub struct W {
    // In-progress value that will be written to the hardware register.
    bits: u32,
}
impl super::RQR {
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Start from the reset value, let the closure set fields, then
        // perform the single store to the register.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
}
#[doc = "Values that can be written to the field `RXFRQ`"]
pub enum RXFRQW {
    #[doc = "clears the RXNE flag. This allows to discard the received data without reading it, and avoid an overrun condition"]
    DISCARD,
}
impl RXFRQW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            RXFRQW::DISCARD => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _RXFRQW<'a> {
    w: &'a mut W,
}
impl<'a> _RXFRQW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: RXFRQW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "clears the RXNE flag. This allows to discard the received data without reading it, and avoid an overrun condition"]
    #[inline]
    pub fn discard(self) -> &'a mut W {
        self.variant(RXFRQW::DISCARD)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // RXFRQ occupies bit 3: clear it, then OR in the requested value.
        const MASK: bool = true;
        const OFFSET: u8 = 3;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `MMRQ`"]
pub enum MMRQW {
    #[doc = "Puts the USART in mute mode and sets the RWU flag"]
    MUTE,
}
impl MMRQW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            MMRQW::MUTE => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _MMRQW<'a> {
    w: &'a mut W,
}
impl<'a> _MMRQW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: MMRQW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "Puts the USART in mute mode and sets the RWU flag"]
    #[inline]
    pub fn mute(self) -> &'a mut W {
        self.variant(MMRQW::MUTE)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // MMRQ occupies bit 2.
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = "Values that can be written to the field `SBKRQ`"]
pub enum SBKRQW {
    #[doc = "sets the SBKF flag and request to send a BREAK on the line, as soon as the transmit machine is available"]
    BREAK,
}
impl SBKRQW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        match *self {
            SBKRQW::BREAK => true,
        }
    }
}
#[doc = r" Proxy"]
pub struct _SBKRQW<'a> {
    w: &'a mut W,
}
impl<'a> _SBKRQW<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: SBKRQW) -> &'a mut W {
        {
            self.bit(variant._bits())
        }
    }
    #[doc = "sets the SBKF flag and request to send a BREAK on the line, as soon as the transmit machine is available"]
    #[inline]
    pub fn break_(self) -> &'a mut W {
        self.variant(SBKRQW::BREAK)
    }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // SBKRQ occupies bit 1.
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 3 - Receive data flush request"]
    #[inline]
    pub fn rxfrq(&mut self) -> _RXFRQW {
        _RXFRQW { w: self }
    }
    #[doc = "Bit 2 - Mute mode request"]
    #[inline]
    pub fn mmrq(&mut self) -> _MMRQW {
        _MMRQW { w: self }
    }
    #[doc = "Bit 1 - Send break request"]
    #[inline]
    pub fn sbkrq(&mut self) -> _SBKRQW {
        _SBKRQW { w: self }
    }
}
26.251282
128
0.524517
3982a8205ee36fdd2ffc1d8163ff5cd197f4a75b
3,629
use crate::imp::history::file_hist::file_history::FileHistory;
use crate::imp::history::file_hist::history_file_data::HistoryFileData;
use std::path::Path;
use crate::imp::common::path::hash_dir_path::hash_dir_path;
use std::ops::Range;
use crate::common::remove_hash_dir;
use crate::error::FsResult;

/// Represents every file history in every hash directory of a project
#[derive(Debug)]
pub struct FileHistories {
    // (hash, history) pairs; the last element is treated as the newest hash
    // throughout this type.
    vec: Vec<(u128, FileHistory)>,
}

impl FileHistories {
    /// Crate-internal constructor from already-collected (hash, history) pairs.
    pub(crate) fn new(vec: Vec<(u128, FileHistory)>) -> FileHistories {
        FileHistories { vec }
    }

    /// list every HistoryFileData chronologically
    pub fn list_files(&self) -> Vec<HistoryFileData> {
        self.vec.iter().flat_map(|(hash, his)| {
            his.list_files().into_iter()
                .map(move |props| HistoryFileData::new(*hash, his, props))
        }).collect()
    }

    /// gets the newest HisotryFileData
    pub fn get_newest_file_data(&self) -> Option<HistoryFileData> {
        // Newest hash is the last vec element; ask it for its newest prop.
        self.vec.last().and_then(|(hash, his)|
            his.get_newest_prop().map(|prop| HistoryFileData::new(*hash, his, prop)))
    }

    /// Iterates over the raw (hash, history) pairs, oldest first.
    pub fn iter(&self) -> impl Iterator<Item = &(u128, FileHistory)> {
        self.vec.iter()
    }

    /// remove old files other than latest n files. This function consumes history data
    pub fn remove_old_files<P: AsRef<Path>>(self, keep_latest: usize, history_dir: P) -> FsResult<()> {
        let mut s = self;
        // Safe because `self` is consumed: the now-inconsistent in-memory
        // data can never be observed afterwards.
        unsafe { s.remove_old_files_us(keep_latest, history_dir) }
    }

    /// remove old files other than latest n files. This function doesn't consume history data,
    /// and the data will be inconsistent with the actual files
    pub unsafe fn remove_old_files_us<P: AsRef<Path>>(&mut self, keep_latest: usize, history_dir: P) -> FsResult<()> {
        let len = self.vec.len();
        let history_dir = history_dir.as_ref();
        if len == 0 {
            return Ok(());
        }
        if keep_latest == 0 {
            // The newest file has control files attached, which are a pain to
            // handle individually, so just delete the whole directories.
            remove_hash_dirs(&mut self.vec, 0..len, history_dir)?;
            return Ok(());
        }
        let (hash, his) = self.vec.last().unwrap();
        let hash_dir_path = hash_dir_path(history_dir, *hash);
        if keep_latest < his.list_files().len() {
            // Not everything in the newest hash is kept: delete every older
            // hash folder entirely and keep only part of the newest hash.
            let len = self.vec.len();
            if 2 <= len {
                remove_hash_dirs(&mut self.vec, 0..len - 1, history_dir)?;
            }
            // Only the newest entry remains at this point, so index 0 is it.
            let (_, last) = self.vec.remove(0);
            // NOTE(review): the result of this call is discarded — confirm
            // that ignoring failures here is intended.
            last.remove_old_files(keep_latest, hash_dir_path);
            return Ok(());
        }
        // Here everything in the newest hash is kept; if any hash folder
        // would be deleted in its entirety, delete only those.
        let mut sum = 0;
        let mut ind = self.vec.len() - 1;
        // Rough deletion at hash-directory granularity: walk newest-to-oldest
        // accumulating file counts; once the kept budget is exceeded, every
        // strictly older hash folder can go.
        while 1 <= ind {
            let index = ind;
            ind -= 1;
            let (_, his) = self.vec.get(index).unwrap();
            let len = his.list_files().len();
            sum += len;
            if keep_latest < sum {
                remove_hash_dirs(&mut self.vec, 0..index, history_dir.as_ref())?;
                return Ok(());
            }
        }
        Ok(())
    }
}

// Removes the hash directories in `range` from disk and drops them from `vec`.
// Uses drain to try to keep the in-memory data consistent with the files, but
// full synchronization was ultimately abandoned due to technical difficulties,
// so this only goes so far.
fn remove_hash_dirs(vec: &mut Vec<(u128, FileHistory)>, range: Range<usize>, history_dir: &Path) -> FsResult<()> {
    for (hash, _) in vec.drain(range) {
        remove_hash_dir(history_dir, hash)?;
    }
    Ok(())
}
37.412371
120
0.587765
d755c163bba0e284f01c3e9b9dc1c084f6472b6b
394
// Miri/compiletest UB test: the `//~^ ERROR` annotation below is a test
// harness directive and must stay directly beneath the offending call.
fn main() {
    // Cast a function pointer such that on a call, the argument gets transmuted
    // from raw ptr to reference. This is ABI-compatible, so it's not the call that
    // should fail, but validation should.
    fn f(_x: &i32) { }

    let g: fn(*const i32) = unsafe { std::mem::transmute(f as fn(&i32)) };
    // Calling with a null pointer makes the transmuted `&i32` argument a
    // null reference, which reference validation must reject.
    g(0usize as *const i32)
    //~^ ERROR encountered a null reference
}
32.833333
83
0.639594
4b78c35a7803a1f7c936dccf889cb4e126e5005c
6,578
//! Fairing implementation
//!
//! Wires CORS handling into Rocket's fairing lifecycle: `on_request`
//! validates the request and caches the outcome in request-local state;
//! `on_response` reads that state and decorates (or fails) the response.
use ::log::{error, info};
use rocket::http::{self, uri::Origin, Status};
use rocket::{self, error_, info_, log_, outcome::Outcome, Request};

use crate::{
    actual_request_response, origin, preflight_response, request_headers, validate, Cors, Error,
};

/// Request Local State to store CORS validation results
enum CorsValidation {
    Success,
    Failure,
}

/// Create a `Handler` for Fairing error handling
#[derive(Clone)]
struct FairingErrorRoute {}

#[rocket::async_trait]
impl rocket::handler::Handler for FairingErrorRoute {
    // Reads the `<status>` path segment and fails the request with that
    // status; falls back to 500 on parse failure or an unknown code.
    async fn handle<'r, 's: 'r>(
        &'s self,
        request: &'r Request<'_>,
        _: rocket::Data,
    ) -> rocket::handler::Outcome<'r> {
        let status = request
            .param::<u16>(0)
            .unwrap_or(Ok(0))
            .unwrap_or_else(|e| {
                error_!("Fairing Error Handling Route error: {:?}", e);
                500
            });
        let status = Status::from_code(status).unwrap_or(Status::InternalServerError);
        Outcome::Failure(status)
    }
}

/// Create a new `Route` for Fairing handling
fn fairing_route(rank: isize) -> rocket::Route {
    rocket::Route::ranked(rank, http::Method::Get, "/<status>", FairingErrorRoute {})
}

/// Modifies a `Request` to route to Fairing error handler
fn route_to_fairing_error_handler(options: &Cors, status: u16, request: &mut Request<'_>) {
    let origin = Origin::parse_owned(format!("{}/{}", options.fairing_route_base, status)).unwrap();

    request.set_method(http::Method::Get);
    request.set_uri(origin);
}

// Applies the CORS headers (preflight or actual) to `response`, based on the
// validation result that `on_request` stored in request-local state.
fn on_response_wrapper(
    options: &Cors,
    request: &Request<'_>,
    response: &mut rocket::Response<'_>,
) -> Result<(), Error> {
    let origin = match origin(request)? {
        None => {
            // Not a CORS request
            return Ok(());
        }
        Some(origin) => origin,
    };

    // `on_request` must already have populated the cache; the closure only
    // runs if that didn't happen, which would be a bug.
    let result = request.local_cache(|| unreachable!("This should not be executed so late"));

    if let CorsValidation::Failure = *result {
        // Nothing else for us to do
        return Ok(());
    }

    let origin = origin.to_string();
    let cors_response = if request.method() == http::Method::Options {
        let headers = request_headers(request)?;
        preflight_response(options, &origin, headers.as_ref())
    } else {
        actual_request_response(options, &origin)
    };

    cors_response.merge(response);

    // If this was an OPTIONS request and no route can be found, we should turn this
    // into a HTTP 204 with no content body.
    // This allows the user to not have to specify an OPTIONS route for everything.
    //
    // TODO: Is there anyway we can make this smarter? Only modify status codes for
    // requests where an actual route exist?
    if request.method() == http::Method::Options && request.route().is_none() {
        info_!(
            "CORS Fairing: Turned missing route {} into an OPTIONS pre-flight request",
            request
        );
        response.set_status(Status::NoContent);
        let _ = response.take_body();
    }
    Ok(())
}

#[rocket::async_trait]
impl rocket::fairing::Fairing for Cors {
    fn info(&self) -> rocket::fairing::Info {
        rocket::fairing::Info {
            name: "CORS",
            kind: rocket::fairing::Kind::Attach
                | rocket::fairing::Kind::Request
                | rocket::fairing::Kind::Response,
        }
    }

    // Mounts the error-handling route under the configured base path.
    async fn on_attach(&self, rocket: rocket::Rocket) -> Result<rocket::Rocket, rocket::Rocket> {
        Ok(rocket.mount(
            &self.fairing_route_base,
            vec![fairing_route(self.fairing_route_rank)],
        ))
    }

    // Validates CORS; on failure, reroutes the request to the error handler
    // route. Either way the outcome is cached for `on_response` to read.
    async fn on_request(&self, request: &mut Request<'_>, _: &mut rocket::Data) {
        let result = match validate(self, request) {
            Ok(_) => CorsValidation::Success,
            Err(err) => {
                error_!("CORS Error: {}", err);
                let status = err.status();
                route_to_fairing_error_handler(self, status.code, request);
                CorsValidation::Failure
            }
        };

        let _ = request.local_cache(|| result);
    }

    // Decorates the response; any wrapper error is turned into an empty 500.
    async fn on_response<'r>(&self, request: &'r Request<'_>, response: &mut rocket::Response<'r>) {
        if let Err(err) = on_response_wrapper(self, request, response) {
            error_!("Fairings on_response error: {}\nMost likely a bug", err);
            response.set_status(Status::InternalServerError);
            let _ = response.take_body();
        }
    }
}

#[cfg(test)]
mod tests {
    use rocket::http::{Method, Status};
    use rocket::local::blocking::Client;
    use rocket::Rocket;

    use crate::{AllowedHeaders, AllowedOrigins, Cors, CorsOptions};

    // Base path the fairing error route is mounted under in these tests.
    const CORS_ROOT: &str = "/my_cors";

    // Minimal CORS configuration exercising the fairing route base.
    fn make_cors_options() -> Cors {
        let allowed_origins = AllowedOrigins::some_exact(&["https://www.acme.com"]);

        CorsOptions {
            allowed_origins,
            allowed_methods: vec![Method::Get].into_iter().map(From::from).collect(),
            allowed_headers: AllowedHeaders::some(&["Authorization", "Accept"]),
            allow_credentials: true,
            fairing_route_base: CORS_ROOT.to_string(),
            ..Default::default()
        }
        .to_cors()
        .expect("Not to fail")
    }

    fn rocket(fairing: Cors) -> Rocket {
        Rocket::ignite().attach(fairing)
    }

    #[test]
    #[allow(non_snake_case)]
    fn FairingErrorRoute_returns_passed_in_status() {
        let client = Client::tracked(rocket(make_cors_options())).expect("to not fail");
        let request = client.get(format!("{}/403", CORS_ROOT));
        let response = request.dispatch();
        assert_eq!(Status::Forbidden, response.status());
    }

    #[test]
    #[allow(non_snake_case)]
    fn FairingErrorRoute_returns_500_for_unknown_status() {
        let client = Client::tracked(rocket(make_cors_options())).expect("to not fail");
        let request = client.get(format!("{}/999", CORS_ROOT));
        let response = request.dispatch();
        assert_eq!(Status::InternalServerError, response.status());
    }

    #[rocket::async_test]
    async fn error_route_is_mounted_on_attach() {
        let rocket = rocket(make_cors_options());
        let expected_uri = format!("{}/<status>", CORS_ROOT);
        let error_route = rocket
            .routes()
            .find(|r| r.method == Method::Get && r.uri.to_string() == expected_uri);
        assert!(error_route.is_some());
    }

    // Rest of the things can only be tested in integration tests
}
32.564356
100
0.604895
8f9cf888b0ff7416573ef082e294563940f0959d
23,759
include!("../src/main.rs"); #[cfg(test)] mod tests { use super::{ config_options, create_header, decode_token, encode_token, is_payload_item, is_timestamp_or_duration, translate_algorithm, OutputFormat, Payload, PayloadItem, SupportedAlgorithms, }; use chrono::{Duration, TimeZone, Utc}; use jsonwebtoken::{Algorithm, Header, TokenData}; use serde_json::{from_value, json}; fn empty_args() -> impl IntoIterator<Item = String> { std::iter::empty() } #[test] fn payload_item_from_string() { let string = Some("this=that"); let result = PayloadItem::from_string(string); let expected = Some(PayloadItem("this".to_string(), json!("that"))); assert_eq!(result, expected); } #[test] fn payload_item_from_string_with_name() { let string = Some("that"); let result = PayloadItem::from_string_with_name(string, "this"); let expected = Some(PayloadItem("this".to_string(), json!("that"))); assert_eq!(result, expected); } #[test] fn payload_item_from_none() { let result = PayloadItem::from_string(None); assert_eq!(result, None); } #[test] fn payload_item_from_none_with_name() { let result = PayloadItem::from_string_with_name(None, "this"); assert_eq!(result, None); } #[test] fn split_payload_item() { let string = "this=that"; let result = PayloadItem::split_payload_item(string); let expected = PayloadItem("this".to_string(), json!("that")); assert_eq!(result, expected); } #[test] fn payload_from_payload_items() { let _matcher = config_options().get_matches_from_safe(empty_args()); let payload_item_one = PayloadItem::from_string(Some("this=that")).unwrap(); let payload_item_two = PayloadItem::from_string(Some("full=yolo")).unwrap(); let payloads = vec![payload_item_one, payload_item_two]; let result = Payload::from_payloads(payloads); let payload = result.0; assert!(payload.contains_key("this")); assert!(payload.contains_key("full")); assert_eq!(payload["this"], json!("that")); assert_eq!(payload["full"], json!("yolo")); } #[test] fn supported_algorithm_from_string() { assert_eq!( 
SupportedAlgorithms::from_string("HS256"), SupportedAlgorithms::HS256 ); assert_eq!( SupportedAlgorithms::from_string("HS384"), SupportedAlgorithms::HS384 ); assert_eq!( SupportedAlgorithms::from_string("HS512"), SupportedAlgorithms::HS512 ); assert_eq!( SupportedAlgorithms::from_string("RS256"), SupportedAlgorithms::RS256 ); assert_eq!( SupportedAlgorithms::from_string("RS384"), SupportedAlgorithms::RS384 ); assert_eq!( SupportedAlgorithms::from_string("RS512"), SupportedAlgorithms::RS512 ); assert_eq!( SupportedAlgorithms::from_string("yolo"), SupportedAlgorithms::HS256 ); } #[test] fn is_valid_payload_item() { assert!(is_payload_item("this=that".to_string()).is_ok()); } #[test] fn is_invalid_payload_item() { assert!(is_payload_item("this".to_string()).is_err()); assert!(is_payload_item("this=that=yolo".to_string()).is_err()); assert!(is_payload_item("this-that_yolo".to_string()).is_err()); } #[test] fn is_valid_timestamp_or_duration() { assert!(is_timestamp_or_duration("2".to_string()).is_ok()); assert!(is_timestamp_or_duration("39874398".to_string()).is_ok()); assert!(is_timestamp_or_duration("12h".to_string()).is_ok()); assert!(is_timestamp_or_duration("1 day -1 hour".to_string()).is_ok()); assert!(is_timestamp_or_duration("+30 min".to_string()).is_ok()); } #[test] fn is_invalid_timestamp_or_duration() { assert!(is_timestamp_or_duration("yolo".to_string()).is_err()); assert!(is_timestamp_or_duration("2398ybdfiud93".to_string()).is_err()); assert!(is_timestamp_or_duration("1 day -1 hourz".to_string()).is_err()); } #[test] fn translates_algorithm() { assert_eq!( translate_algorithm(SupportedAlgorithms::HS256), Algorithm::HS256 ); assert_eq!( translate_algorithm(SupportedAlgorithms::HS384), Algorithm::HS384 ); assert_eq!( translate_algorithm(SupportedAlgorithms::HS512), Algorithm::HS512 ); assert_eq!( translate_algorithm(SupportedAlgorithms::RS256), Algorithm::RS256 ); assert_eq!( translate_algorithm(SupportedAlgorithms::RS384), Algorithm::RS384 ); assert_eq!( 
translate_algorithm(SupportedAlgorithms::RS512), Algorithm::RS512 ); } #[test] fn creates_jwt_header_with_kid() { let algorithm = Algorithm::HS256; let kid = Some("yolo"); let result = create_header(algorithm, kid); let mut expected = Header::new(algorithm); expected.kid = kid.map(|k| k.to_string()); assert_eq!(result, expected); } #[test] fn creates_jwt_header_without_kid() { let algorithm = Algorithm::HS256; let kid = None; let result = create_header(algorithm, kid); let mut expected = Header::new(algorithm); expected.kid = kid.map(|k| k.to_string()); assert_eq!(result, expected); } #[test] fn encodes_a_token() { let exp = (Utc::now() + Duration::minutes(60)).timestamp(); let nbf = Utc::now().timestamp(); let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-S", "1234567890", "-A", "HS256", "-a", "yolo", "-e", &exp.to_string(), "-i", "yolo-service", "-k", "1234", "-n", &nbf.to_string(), "--jti", "yolo-jti", "-P", "this=that", "-P", "number=10", "-P", "array=[1, 2, 3]", "-P", "object={\"foo\": \"bar\"}", "-s", "yolo-subject", "{\"test\":\"json value\",\"bool\":true,\"json_number\":1}", ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header } = decoded_token.unwrap(); assert_eq!(header.alg, Algorithm::HS256); assert_eq!(header.kid, Some("1234".to_string())); assert_eq!(claims.0["aud"], "yolo"); assert_eq!(claims.0["iss"], "yolo-service"); assert_eq!(claims.0["sub"], "yolo-subject"); assert_eq!(claims.0["nbf"], nbf); assert_eq!(claims.0["exp"], exp); assert_eq!(claims.0["jti"], "yolo-jti"); assert_eq!(claims.0["this"], 
"that"); assert_eq!(claims.0["test"], "json value"); assert_eq!(claims.0["bool"], true); assert_eq!(claims.0["json_number"], 1); assert_eq!(claims.0["number"], 10); assert_eq!(claims.0["array"].to_string(), "[1,2,3]"); assert_eq!(claims.0["object"]["foo"], "bar"); } #[test] fn adds_iat_automatically() { let encode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "encode", "--exp", "-S", "1234567890"]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); let iat = from_value::<i64>(claims.0["iat"].clone()); assert!(iat.is_ok()); assert!(iat.unwrap().is_positive()); } #[test] fn stops_exp_from_automatically_being_added() { let encode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "encode", "-S", "1234567890"]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, token_data, _) = decode_token(&decode_matches); assert!(decoded_token.is_err()); let TokenData { claims, header: _ } = token_data.unwrap(); assert!(claims.0.get("exp").is_none()); } #[test] fn adds_default_exp_automatically() { let encode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "encode", "--exp", "-S", "1234567890"]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); 
let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); let exp = from_value::<i64>(claims.0["exp"].clone()); assert!(exp.is_ok()); assert!(exp.unwrap().is_positive()); } #[test] fn stops_iat_from_automatically_being_added() { let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "--no-iat", "--exp", "-S", "1234567890", ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); assert!(claims.0.get("iat").is_none()); } #[test] fn allows_for_a_custom_exp() { let exp = (Utc::now() + Duration::minutes(60)).timestamp(); let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-S", "1234567890", "-e", &exp.to_string(), ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); 
let exp_claim = from_value::<i64>(claims.0["exp"].clone()); assert!(exp_claim.is_ok()); assert_eq!(exp_claim.unwrap(), exp); } #[test] fn allows_for_a_custom_exp_as_systemd_string() { let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-S", "1234567890", "-e", "+10 min -30 sec", ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); let exp_claim = from_value::<i64>(claims.0["exp"].clone()); let iat_claim = from_value::<i64>(claims.0["iat"].clone()); assert!(iat_claim.is_ok()); let iat = iat_claim.unwrap(); assert!(exp_claim.is_ok()); let exp = exp_claim.unwrap(); assert!(iat.is_positive()); assert!(exp.is_positive()); assert_eq!(exp - iat, (10 * 60 - 30)); } #[test] fn allows_for_nbf_as_systemd_string() { let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-S", "1234567890", "--exp", "-n", "+5 min", ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec!["jwt", "decode", "-S", "1234567890", &encoded_token]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, _, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = decoded_token.unwrap(); let nbf_claim = from_value::<i64>(claims.0["nbf"].clone()); let iat_claim = from_value::<i64>(claims.0["iat"].clone()); assert!(iat_claim.is_ok()); let iat = 
iat_claim.unwrap(); assert!(nbf_claim.is_ok()); let nbf = nbf_claim.unwrap(); assert!(iat.is_positive()); assert!(nbf.is_positive()); assert_eq!(nbf - iat, (5 * 60)); } #[test] fn decodes_a_token() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE4OTM0NTYwMDAsImlhdCI6MTU0MjQ5MjMxMywidGhpcyI6InRoYXQifQ.YTWit46_AEMMVv0P48NeJJIqXmMHarGjfRxtR7jLlxE", "-S", "1234567890", "-A", "HS256", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn decodes_a_token_as_json() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0aGlzIjoidGhhdCJ9.AdAECLE_4iRa0uomMEdsMV2hDXv1vhLpym567-AzhrM", "-j", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, format) = decode_token(&decode_matches); assert!(result.is_ok()); assert!(format == OutputFormat::Json); } #[test] fn decodes_a_token_with_invalid_secret() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0aGlzIjoidGhhdCJ9.AdAECLE_4iRa0uomMEdsMV2hDXv1vhLpym567-AzhrM", "-S", "yolo", "-A", "HS256", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_err()); } #[test] fn decodes_a_token_without_a_secret() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0aGlzIjoidGhhdCJ9.AdAECLE_4iRa0uomMEdsMV2hDXv1vhLpym567-AzhrM", "-A", "HS256", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn decodes_a_token_without_an_alg() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", 
"decode", "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ0aGlzIjoidGhhdCJ9.AdAECLE_4iRa0uomMEdsMV2hDXv1vhLpym567-AzhrM", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn decodes_a_token_without_a_typ() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.SEQijh6tEuOOAAKpHPuKxgFqEvlTNP1jj4FUNoBwXaM", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn decodes_a_token_with_leading_and_trailing_whitespace() { let matches = config_options() .get_matches_from_safe(vec![ "jwt", "decode", " eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.SEQijh6tEuOOAAKpHPuKxgFqEvlTNP1jj4FUNoBwXaM ", ]) .unwrap(); let decode_matches = matches.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn encodes_and_decodes_an_rsa_token_using_key_from_file() { let body: String = "{\"field\":\"value\"}".to_string(); let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-A", "RS256", "--exp", "-S", "@./tests/private_rsa_key.der", &body, ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "-S", "@./tests/public_rsa_key.der", "-A", "RS256", &encoded_token, ]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); assert!(result.is_ok()); } #[test] fn encodes_and_decodes_an_ecdsa_token_using_key_from_file() { let body: String = 
"{\"field\":\"value\"}".to_string(); let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "-A", "ES256", "--exp", "-S", "@./tests/private_ecdsa_key.pk8", &body, ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "-S", "@./tests/public_ecdsa_key.pk8", "-A", "ES256", &encoded_token, ]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (result, _, _) = decode_token(&decode_matches); dbg!(&result); assert!(result.is_ok()); } #[test] fn shows_timestamps_as_iso_dates() { let exp = (Utc::now() + Duration::minutes(60)).timestamp(); let nbf = Utc::now().timestamp(); let encode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "encode", "--exp", &exp.to_string(), "--nbf", &nbf.to_string(), "-S", "1234567890", ]) .unwrap(); let encode_matches = encode_matcher.subcommand_matches("encode").unwrap(); let encoded_token = encode_token(&encode_matches).unwrap(); let decode_matcher = config_options() .get_matches_from_safe(vec![ "jwt", "decode", "-S", "1234567890", "--iso8601", &encoded_token, ]) .unwrap(); let decode_matches = decode_matcher.subcommand_matches("decode").unwrap(); let (decoded_token, token_data, _) = decode_token(&decode_matches); assert!(decoded_token.is_ok()); let TokenData { claims, header: _ } = token_data.unwrap(); assert!(claims.0.get("iat").is_some()); assert!(claims.0.get("nbf").is_some()); assert!(claims.0.get("exp").is_some()); assert_eq!( claims.0.get("iat"), Some(&Utc.timestamp(nbf, 0).to_rfc3339().into()) ); assert_eq!( claims.0.get("nbf"), Some(&Utc.timestamp(nbf, 0).to_rfc3339().into()) ); assert_eq!( claims.0.get("exp"), Some(&Utc.timestamp(exp, 0).to_rfc3339().into()) ); } }
34.786237
166
0.551202
0aa0df71ef55efd200a1178def45b879c6ac7390
1,983
use std::env; use std::path::PathBuf; fn main() -> Result<(), ()> { println!("cargo:rerun-if-changed=build.rs"); println!("cargo:rerun-if-changed=wrapper.h"); println!("cargo:rerun-if-changed=Cargo.lock"); // The bindgen::Builder is the main entry point // to bindgen, and lets you build up options for // the resulting bindings. let bindings = bindgen::Builder::default() // The input header we would like to generate // bindings for. .header("wrapper.h") .rustfmt_bindings(true) .whitelist_var("RATCHET_.*") .whitelist_var("SG_.*") .whitelist_var("KEY_EXCHANGE_.*") .whitelist_var("CIPHERTEXT_.*") .whitelist_var("KEY_EXCHANGE_.*") .whitelist_type("signal_.*") .whitelist_type("session_.*") .whitelist_type("sender_.*") .whitelist_type("retchat_.*") .whitelist_type("curve_.*") .whitelist_type("ec_.*") .whitelist_type("ciphertext_.*") .whitelist_function("signal_.*") .whitelist_function("session_.*") .whitelist_function("sender_.*") .whitelist_function("retchat_.*") .whitelist_function("curve_.*") .whitelist_function("ec_.*") .whitelist_function("ciphertext_.*") .whitelist_function("pre_key.*") .whitelist_function("hkdf_.*") .rust_target(bindgen::RustTarget::Nightly) // Finish the builder and generate the bindings. .generate()?; // Write the bindings to the $OUT_DIR/bindings.rs file. let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); bindings .write_to_file(out_path.join("bindings.rs")) .expect("Couldn't write bindings!"); let dst = cmake::build("libsignal-protocol-c"); println!("cargo:rustc-link-search=native={}", dst.display()); println!("cargo:rustc-flags=-L {}/lib", dst.display()); println!("cargo:rustc-link-lib=static=signal-protocol-c"); Ok(()) }
38.134615
65
0.616238
75375af201b9d93dc9666012e385d8a5881762f7
3,269
extern crate sdl2;

use sdl2::pixels::Color;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use sdl2::rect::Rect;

// Rendering mode: either plot the eight mirrored octant points as individual
// squares, or draw vertical spans connecting vertically mirrored points.
#[allow(dead_code)]
enum Mode {
    Points,
    Lines,
}

const MODE: Mode = Mode::Lines;
// When false, previously drawn frames accumulate on screen instead of being
// cleared each iteration.
const CLEAR_FRAMES: bool = false;
// Side length, in pixels, of the filled square drawn for each plotted point.
const k: i32 = 10;

// Animates a midpoint-style circle rasterization one step per frame.
// Controls: Esc / window close quits, R restarts the walk, S toggles pause.
fn main() {
    let sdl_context = sdl2::init().unwrap();
    let video_subsystem = sdl_context.video().unwrap();

    let window = video_subsystem.window("Circle", 800, 600)
        .position_centered()
        .opengl()
        .build()
        .unwrap();

    let mut renderer = window.renderer().build().unwrap();
    let mut event_pump = sdl_context.event_pump().unwrap();

    let mut running = true;

    // Circle centre and radius (in pixels).
    let cx = 200;
    let cy = 200;
    let r = 100;

    // Walk state: current octant coordinates and the accumulated error term.
    // The walk starts at (r, 0) on the positive x axis.
    let mut x = r;
    let mut y = 0;
    let mut err = 0;

    let mut pause = false;

    while running {
        for event in event_pump.poll_iter() {
            match event {
                Event::Quit { .. } |
                Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
                    running = false;
                },
                // R: reset the algorithm back to its starting point.
                Event::KeyDown { keycode: Some(Keycode::R), .. } => {
                    x = r;
                    y = 0;
                    err = 0;
                },
                // S: toggle stepping; drawing continues while paused.
                Event::KeyDown { keycode: Some(Keycode::S), .. } => {
                    pause = !pause;
                }
                _ => ()
            }
        }

        if CLEAR_FRAMES {
            renderer.set_draw_color(Color::RGB(0, 0, 0));
            renderer.clear();
        }

        renderer.set_draw_color(Color::RGB(255, 0, 0));

        // Only one octant is walked (x >= y); the rest of the circle is
        // obtained by mirroring across both axes and the diagonal.
        if x >= y {
            match MODE {
                Mode::Points => {
                    // Eight-way symmetry: one filled square per mirror image.
                    renderer.fill_rect(Rect::new(cx + x - k, cy + y - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx + x - k, cy - y - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx + y - k, cy + x - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx + y - k, cy - x - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx - y - k, cy + x - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx - y - k, cy - x - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx - x - k, cy + y - k, k as u32, k as u32));
                    renderer.fill_rect(Rect::new(cx - x - k, cy - y - k, k as u32, k as u32));
                },
                Mode::Lines => {
                    // Four vertical spans, each joining a point to its mirror
                    // across the horizontal axis (height 2*y or 2*x).
                    renderer.fill_rect(Rect::new(cx + x - k, cy - y - k, k as u32, 2 * y as u32));
                    renderer.fill_rect(Rect::new(cx + y - k, cy - x - k, k as u32, 2 * x as u32));
                    renderer.fill_rect(Rect::new(cx - y - k, cy - x - k, k as u32, 2 * x as u32));
                    renderer.fill_rect(Rect::new(cx - x - k, cy - y - k, k as u32, 2 * y as u32));
                },
            }

            if !pause {
                // Integer error-term step: y always advances; x is dropped
                // once the accumulated error indicates the walk has moved
                // outside the ideal circle.
                y += 1;
                err += 1 + 2 * y;
                if 2 * (err - x) + 1 > 0 {
                    x -= 1;
                    err += 1 - 2 * x;
                }
            }
        }

        renderer.present();

        // Roughly ten algorithm steps per second.
        std::thread::sleep(std::time::Duration::from_millis(100));
    }
}
29.1875
98
0.437749
ffa2d7e5a4e155eca7e0f09f69e0a7b243bbf63d
4,124
use crate::datastruct::{Currency, SqlResult}; use rusqlite::{params, Result, NO_PARAMS}; // SELECT SUM(c.balance) - SUM(d.balance) FROM Credits as c, Debits as d; pub fn check_integrity( conn: r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>, ) -> Result<bool> { let mut stmt = conn.prepare("SELECT SUM(c.balance) - SUM(d.balance) FROM Credits as c, Debits as d")?; let query = stmt .query_map(NO_PARAMS, |row| { Ok(SqlResult { value: row.get(0).unwrap(), }) }) .and_then(|mapped_rows| { Ok(mapped_rows .map(|row| row.unwrap()) .collect::<Vec<SqlResult>>()) })?; let result = if query[0].value == 0 { true } else { false }; Ok(result) } pub fn current_balance( conn: r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>, account: i32, ) -> Result<SqlResult> { let mut stmt = conn.prepare("SELECT (SELECT ifnull(SUM(balance),0) as \"Debits\" FROM Debits WHERE account = ?1) - (SELECT ifnull(SUM(balance),0) as \"Credits\" FROM Credits WHERE account = ?1)")?; stmt.query_row(params![account], |row| { Ok(SqlResult { value: row.get(0).unwrap(), }) }) } pub fn list_currencies( conn: r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>, ) -> Result<Vec<Currency>> { let mut stmt = conn.prepare("SELECT code, numeric_code, minor_unit, name FROM Currency")?; let result = stmt .query_map(NO_PARAMS, |row| { Ok(Currency { code: row.get(0).unwrap(), numeric_code: row.get(1).unwrap(), minor_unit: row.get(2).unwrap(), name: row.get(3).unwrap(), }) }) .and_then(|mapped_rows| { Ok(mapped_rows .map(|row| row.unwrap()) .collect::<Vec<Currency>>()) })?; Ok(result) } #[cfg(test)] mod tests { use super::*; use r2d2_sqlite::SqliteConnectionManager; use rusqlite::params; #[test] fn lists_currencies_returns_struct() { let manager = SqliteConnectionManager::memory(); let pool = r2d2::Pool::new(manager).unwrap(); let conn = pool.get().unwrap(); let _ = conn.execute( "CREATE TABLE \"Currency\" ( \"code\" TEXT NOT NULL UNIQUE, \"numeric_code\" INTEGER NOT NULL UNIQUE, 
\"minor_unit\" INTEGER NOT NULL DEFAULT 2, \"name\" TEXT NOT NULL UNIQUE, PRIMARY KEY(\"code\") )", params![], ); let num = conn.execute( "INSERT INTO Currency (code, numeric_code, minor_unit, name) VALUES ('GBP', '826', '2', 'Pound Sterling');", params![], ); assert_eq!(num.unwrap(), 1); let expected = Currency { code: String::from("GBP"), numeric_code: 826, minor_unit: 2, name: String::from("Pound Sterling"), }; let result = list_currencies(conn).unwrap(); assert_eq!(expected, result[0]); } #[test] fn list_currencies_can_return_multiple_currencies() { let manager = SqliteConnectionManager::memory(); let pool = r2d2::Pool::new(manager).unwrap(); let conn = pool.get().unwrap(); let _ = conn.execute( "CREATE TABLE \"Currency\" ( \"code\" TEXT NOT NULL UNIQUE, \"numeric_code\" INTEGER NOT NULL UNIQUE, \"minor_unit\" INTEGER NOT NULL DEFAULT 2, \"name\" TEXT NOT NULL UNIQUE, PRIMARY KEY(\"code\") )", params![], ); let num = conn.execute( "INSERT INTO Currency (code, numeric_code, minor_unit, name) VALUES ('GBP', '826', '2', 'Pound Sterling'), ('EUR', '978', '2', 'Euro'), ('PLN', '985', '2', 'Zloty');", params![], ); assert_eq!(num.unwrap(), 3); let result = list_currencies(conn).unwrap(); assert_eq!(result.len(), 3) } }
30.10219
201
0.541222
91236fb88e0914ee4abd67f207116a1dd610a8d0
2,143
#[allow(unused_imports)] use futures::stream::futures_unordered::FuturesUnordered; #[allow(unused_imports)] use futures::stream::StreamExt; use std::error::Error; use std::thread::sleep; use std::time::Instant; #[allow(unused_imports)] use tokio::net::TcpStream; use tokio::prelude::*; #[tokio::main] async fn main() -> Result<(), Box<dyn Error + Send + Sync>> { let now = Instant::now(); // Equivalent to FuturesUnordered, but without allocation, less wieldy for many futures match futures::future::join3( tokio::spawn(task("task1", now.clone())), tokio::spawn(task("task2", now.clone())), tokio::spawn(task("task3", now.clone())) ).await { (x, y, z) => { // dbg!("{:?}", (&x, &y, &z)); x??; y??; z? } } } async fn task( label: &'static str, now: std::time::Instant, ) -> Result<(), Box<dyn Error + Send + Sync>> { // Simulate network delay using Tokio async delay for 2 seconds println!( "OS Thread {:?} - {} started: {:?}", std::thread::current().id(), label, now.elapsed(), ); tokio::time::delay_for(tokio::time::Duration::from_secs(2)).await; // Write to server - server will echo this back to us with 8 second delay let mut stream = TcpStream::connect("127.0.0.1:6142").await?; stream.write_all(label.as_bytes()).await?; println!( "OS Thread {:?} - {} written: {:?}", std::thread::current().id(), label, now.elapsed() ); // Read 5 chars we expect (to avoid dealing with EOF, etc.) let mut buffer = [0; 5]; stream.read_exact(&mut buffer).await?; stream.shutdown(std::net::Shutdown::Both)?; println!( "OS Thread {:?} - {} read: {:?}", std::thread::current().id(), label, now.elapsed() ); // Simulate computation work by sleeping actual thread for 4 seconds sleep(std::time::Duration::from_secs(4)); println!( "OS Thread {:?} - {} finished: {:?}", std::thread::current().id(), std::str::from_utf8(&buffer)?, now.elapsed() ); Ok(()) }
29.356164
91
0.564162
1cfb2415a748851f6b23a4c720876c0015cd59e2
4,114
//! Prefixed-integer codec: the first byte carries `8 - size` flag bits in its
//! high positions and a `size`-bit value prefix; when the prefix saturates,
//! the remainder follows as 7-bit continuation bytes (MSB = "more follows").

use std::fmt;

use bytes::{Buf, BufMut};

use crate::proto::coding::{self, BufExt, BufMutExt};

/// Decoding failures: the continuation chain overflowed `usize`, or the
/// buffer ran out mid-value.
#[derive(Debug, PartialEq)]
pub enum Error {
    Overflow,
    UnexpectedEnd,
}

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::Overflow => write!(f, "value overflow"),
            Error::UnexpectedEnd => write!(f, "unexpected end"),
        }
    }
}

/// Decodes one prefixed integer with a `size`-bit prefix (1..=8), returning
/// the flag bits from the first byte together with the decoded value.
pub fn decode<B: Buf>(size: u8, buf: &mut B) -> Result<(u8, usize), Error> {
    assert!(size <= 8);
    let mut first = buf.get::<u8>()?;

    // NOTE: following casts to u8 intend to trim the most significant bits, they are used as a
    // workaround for shiftoverflow errors when size == 8.
    let flags = ((first as usize) >> size) as u8;
    let mask = 0xFF >> (8 - size);
    first &= mask;

    // if first < 2usize.pow(size) - 1
    // A non-saturated prefix encodes the whole value in the first byte.
    if first < mask {
        return Ok((flags, first as usize));
    }

    // Saturated prefix: accumulate 7-bit continuation groups, little-endian
    // (each byte's low 7 bits shifted up by the running `power`).
    let mut value = mask as usize;
    let mut power = 0usize;
    loop {
        let byte = buf.get::<u8>()? as usize;
        value += (byte & 127) << power;
        power += 7;
        // High bit clear terminates the continuation chain.
        if byte & 128 == 0 {
            break;
        }

        // Bail out before a shift could exceed usize's width.
        if power >= MAX_POWER {
            return Err(Error::Overflow);
        }
    }
    Ok((flags, value))
}

/// Encodes `value` with a `size`-bit prefix (1..=8), placing `flags` in the
/// remaining high bits of the first byte. Inverse of [`decode`].
pub fn encode<B: BufMut>(size: u8, flags: u8, value: usize, buf: &mut B) {
    assert!(size <= 8);
    // NOTE: following casts to u8 intend to trim the most significant bits, they are used as a
    // workaround for shiftoverflow errors when size == 8.
    let mask = !(0xFF << size) as u8;
    let flags = ((flags as usize) << size) as u8;

    // if value < 2usize.pow(size) - 1
    // Small values fit entirely in the prefix.
    if value < (mask as usize) {
        buf.write(flags | value as u8);
        return;
    }

    // Otherwise saturate the prefix and emit the remainder as 7-bit groups,
    // high bit set on every byte except the last.
    buf.write(mask | flags);
    let mut remaining = value - mask as usize;
    while remaining >= 128 {
        let rest = (remaining % 128) as u8;
        buf.write(rest + 128);
        remaining /= 128;
    }
    buf.write(remaining as u8);
}

// Maximum shift amount before the next 7-bit group would overflow usize.
#[cfg(target_pointer_width = "64")]
const MAX_POWER: usize = 10 * 7;
#[cfg(target_pointer_width = "32")]
const MAX_POWER: usize = 5 * 7;

impl From<coding::UnexpectedEnd> for Error {
    fn from(_: coding::UnexpectedEnd) -> Self {
        Error::UnexpectedEnd
    }
}

#[cfg(test)]
mod test {
    use std::io::Cursor;

    // Round-trip helper: encode must produce `data` exactly, and decoding
    // `data` must yield (`flags`, `value`) back.
    fn check_codec(size: u8, flags: u8, value: usize, data: &[u8]) {
        let mut buf = Vec::new();
        super::encode(size, flags, value, &mut buf);
        assert_eq!(buf, data);

        let mut read = Cursor::new(&buf);
        assert_eq!((flags, value), super::decode(size, &mut read).unwrap());
    }

    #[test]
    fn codec_5_bits() {
        check_codec(5, 0b101, 10, &[0b1010_1010]);
        check_codec(5, 0b101, 0, &[0b1010_0000]);
        check_codec(5, 0b010, 1337, &[0b0101_1111, 154, 10]);
        check_codec(5, 0b010, 31, &[0b0101_1111, 0]);
        check_codec(
            5,
            0b010,
            usize::max_value(),
            &[95, 224, 255, 255, 255, 255, 255, 255, 255, 255, 1],
        );
    }

    #[test]
    fn codec_8_bits() {
        check_codec(8, 0, 42, &[0b0010_1010]);
        check_codec(8, 0, 424_242, &[255, 179, 240, 25]);
        check_codec(
            8,
            0,
            usize::max_value(),
            &[255, 128, 254, 255, 255, 255, 255, 255, 255, 255, 1],
        );
    }

    #[test]
    #[should_panic]
    fn size_too_big_value() {
        let mut buf = vec![];
        super::encode(9, 1, 1, &mut buf);
    }

    #[test]
    #[should_panic]
    fn size_too_big_of_size() {
        let buf = vec![];
        let mut read = Cursor::new(&buf);
        super::decode(9, &mut read).unwrap();
    }

    #[cfg(target_pointer_width = "64")]
    #[test]
    fn overflow() {
        let buf = vec![255, 128, 254, 255, 255, 255, 255, 255, 255, 255, 255, 1];
        let mut read = Cursor::new(&buf);
        assert!(super::decode(8, &mut read).is_err());
    }

    #[test]
    fn number_never_ends_with_0x80() {
        check_codec(4, 0b0001, 143, &[31, 128, 1]);
    }
}
26.037975
95
0.535002
336a61708683e0d6b05bea15cc3b490a54d67632
8,526
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use rustc::ty;

use std::cell::Cell;
use syntax_pos::Span;

/// Per-parameter details recorded when lifetime elision fails, for use in
/// diagnostics. (NOTE(review): field semantics inferred from names — confirm
/// against the code that constructs this.)
#[derive(Clone)]
pub struct ElisionFailureInfo {
    pub name: String,
    pub lifetime_count: usize,
    pub have_bound_regions: bool
}

/// Either a successfully elided region, or (on failure) optional diagnostic
/// details for each candidate parameter.
pub type ElidedLifetime = Result<ty::Region, Option<Vec<ElisionFailureInfo>>>;

/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of
/// this reference has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
    fn anon_regions(&self,
                    span: Span,
                    count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>>;

    /// If an object omits any explicit lifetime bound, and none can
    /// be derived from the object traits, what should we use? If
    /// `None` is returned, an explicit annotation is required.
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region>;

    /// The "base" default is the initial default for a scope. This is
    /// 'static except for in fn bodies, where it is a fresh inference
    /// variable. You shouldn't call this except for as part of
    /// computing `object_lifetime_default` (in particular, in legacy
    /// modes, it may not be relevant).
    fn base_object_lifetime_default(&self, span: Span) -> ty::Region;
}

// A scope in which all regions must be explicitly named. This is used
// for types that appear in structs and so on.
#[derive(Copy, Clone)]
pub struct ExplicitRscope;

impl RegionScope for ExplicitRscope {
    // Anonymous regions are never legal here, and no extra diagnostic
    // information is available.
    fn anon_regions(&self,
                    _span: Span,
                    _count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        Err(None)
    }

    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        Some(self.base_object_lifetime_default(span))
    }

    fn base_object_lifetime_default(&self, _span: Span) -> ty::Region {
        ty::ReStatic
    }
}

// Same as `ExplicitRscope`, but provides some extra information for diagnostics
pub struct UnelidableRscope(Option<Vec<ElisionFailureInfo>>);

impl UnelidableRscope {
    pub fn new(v: Option<Vec<ElisionFailureInfo>>) -> UnelidableRscope {
        UnelidableRscope(v)
    }
}

impl RegionScope for UnelidableRscope {
    // Elision still fails, but the stored diagnostic details are forwarded.
    fn anon_regions(&self,
                    _span: Span,
                    _count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        let UnelidableRscope(ref v) = *self;
        Err(v.clone())
    }

    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        Some(self.base_object_lifetime_default(span))
    }

    fn base_object_lifetime_default(&self, _span: Span) -> ty::Region {
        ty::ReStatic
    }
}

// A scope in which omitted anonymous region defaults to
// `default`. This is used after the `->` in function signatures. The
// latter use may go away. Note that object-lifetime defaults work a
// bit differently, as specified in RFC #599.
pub struct ElidableRscope {
    default: ty::Region,
}

impl ElidableRscope {
    pub fn new(r: ty::Region) -> ElidableRscope {
        ElidableRscope { default: r }
    }
}

impl RegionScope for ElidableRscope {
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        // Per RFC #599, object-lifetimes default to 'static unless
        // overridden by context, and this takes precedence over
        // lifetime elision.
        Some(self.base_object_lifetime_default(span))
    }

    fn base_object_lifetime_default(&self, _span: Span) -> ty::Region {
        ty::ReStatic
    }

    // Every omitted region resolves to the same configured default.
    fn anon_regions(&self,
                    _span: Span,
                    count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        Ok(vec![self.default; count])
    }
}

/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
    // Counter of anonymous regions handed out so far, used to number
    // fresh `BrAnon` regions sequentially.
    anon_bindings: Cell<u32>,
}

impl BindingRscope {
    pub fn new() -> BindingRscope {
        BindingRscope {
            anon_bindings: Cell::new(0),
        }
    }

    // Produces the next fresh late-bound region at Debruijn depth 1.
    fn next_region(&self) -> ty::Region {
        let idx = self.anon_bindings.get();
        self.anon_bindings.set(idx + 1);
        ty::ReLateBound(ty::DebruijnIndex::new(1), ty::BrAnon(idx))
    }
}

impl RegionScope for BindingRscope {
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        // Per RFC #599, object-lifetimes default to 'static unless
        // overridden by context, and this takes precedence over the
        // binding defaults in a fn signature.
        Some(self.base_object_lifetime_default(span))
    }

    fn base_object_lifetime_default(&self, _span: Span) -> ty::Region {
        ty::ReStatic
    }

    fn anon_regions(&self,
                    _: Span,
                    count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        Ok((0..count).map(|_| self.next_region()).collect())
    }
}

/// A scope which overrides the default object lifetime but has no other effect.
pub struct ObjectLifetimeDefaultRscope<'r> {
    base_scope: &'r (RegionScope+'r),
    default: ty::ObjectLifetimeDefault,
}

impl<'r> ObjectLifetimeDefaultRscope<'r> {
    pub fn new(base_scope: &'r (RegionScope+'r),
               default: ty::ObjectLifetimeDefault)
               -> ObjectLifetimeDefaultRscope<'r>
    {
        ObjectLifetimeDefaultRscope {
            base_scope: base_scope,
            default: default,
        }
    }
}

impl<'r> RegionScope for ObjectLifetimeDefaultRscope<'r> {
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        match self.default {
            ty::ObjectLifetimeDefault::Ambiguous =>
                None,

            ty::ObjectLifetimeDefault::BaseDefault =>
                // NB: This behavior changed in Rust 1.3.
                Some(self.base_object_lifetime_default(span)),

            ty::ObjectLifetimeDefault::Specific(r) =>
                Some(r),
        }
    }

    fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
        self.base_scope.base_object_lifetime_default(span)
    }

    fn anon_regions(&self,
                    span: Span,
                    count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        self.base_scope.anon_regions(span, count)
    }
}

/// A scope which simply shifts the Debruijn index of other scopes
/// to account for binding levels.
pub struct ShiftedRscope<'r> {
    base_scope: &'r (RegionScope+'r)
}

impl<'r> ShiftedRscope<'r> {
    pub fn new(base_scope: &'r (RegionScope+'r)) -> ShiftedRscope<'r> {
        ShiftedRscope { base_scope: base_scope }
    }
}

impl<'r> RegionScope for ShiftedRscope<'r> {
    fn object_lifetime_default(&self, span: Span) -> Option<ty::Region> {
        self.base_scope.object_lifetime_default(span)
            .map(|r| ty::fold::shift_region(r, 1))
    }

    fn base_object_lifetime_default(&self, span: Span) -> ty::Region {
        ty::fold::shift_region(self.base_scope.base_object_lifetime_default(span), 1)
    }

    fn anon_regions(&self,
                    span: Span,
                    count: usize)
                    -> Result<Vec<ty::Region>, Option<Vec<ElisionFailureInfo>>> {
        match self.base_scope.anon_regions(span, count) {
            Ok(mut v) => {
                // Shift each region from the base scope one binder deeper.
                for r in &mut v {
                    *r = ty::fold::shift_region(*r, 1);
                }
                Ok(v)
            }
            Err(errs) => {
                Err(errs)
            }
        }
    }
}
32.052632
85
0.62327
1a3058608a50f03e74cb4771726b46efcfaa365c
1,458
use std::fmt::Write; use termion; use termion::color; use xrl::Style; use failure::Error; fn get_color(argb_color: u32) -> color::Rgb { let r = ((argb_color & 0x00ff_0000) >> 16) as u8; let g = ((argb_color & 0x0000_ff00) >> 8) as u8; let b = (argb_color & 0x0000_00ff) as u8; color::Rgb(r, g, b) } pub fn set_style(style: &Style) -> Result<String, Error> { if style.id == 0 { return Ok(format!("{}", termion::style::Invert)); } let mut s = String::new(); if let Some(fg_color) = style.fg_color { write!(&mut s, "{}", color::Fg(get_color(fg_color)))?; } if style.bg_color != 0 { write!(&mut s, "{}", color::Bg(get_color(style.bg_color)))?; } if style.italic { write!(&mut s, "{}", termion::style::Italic)?; } if style.underline { write!(&mut s, "{}", termion::style::Underline)?; } Ok(s) } pub fn reset_style(style: &Style) -> Result<String, Error> { if style.id == 0 { return Ok(format!("{}", termion::style::NoInvert)); } let mut s = String::new(); if style.fg_color.is_some() { write!(&mut s, "{}", color::Fg(color::Reset))?; } if style.bg_color != 0 { write!(&mut s, "{}", color::Bg(color::Reset))?; } if style.italic { write!(&mut s, "{}", termion::style::NoItalic)?; } if style.underline { write!(&mut s, "{}", termion::style::NoUnderline)?; } Ok(s) }
25.137931
68
0.541152
393420e5726cb63bd736f6f68f0fea4a0b46460c
4,254
#![deny(clippy::all)] #![deny(clippy::pedantic)] #![allow(clippy::cast_possible_truncation)] #![allow(clippy::cast_precision_loss)] #![allow(clippy::cast_sign_loss)] #![forbid(unsafe_code)] use std::fs::File; use std::io::{self, Read}; use std::process; use arrayvec::ArrayVec; use byteorder::{ByteOrder, NativeEndian}; use colored::Colorize; use cpal::traits::{DeviceTrait, EventLoopTrait, HostTrait}; use cpal::{StreamData, UnknownTypeOutputBuffer}; use error_iter::ErrorIter; use thiserror::Error; use sonant::{Error as SonantError, Song, Synth}; #[derive(Debug, Error)] pub enum Error { #[error("Missing filename argument")] MissingFilename, #[error("Sonant error")] Sonant(#[from] SonantError), #[error("I/O error")] IO(#[from] io::Error), } impl ErrorIter for Error {} fn main() { handle_errors(player()); } fn player() -> Result<(), Error> { let mut args = std::env::args().skip(1); let filename = args.next().ok_or(Error::MissingFilename)?; // cpal boilerplate let host = cpal::default_host(); let event_loop = host.event_loop(); let device = host .default_output_device() .expect("no output device available"); let mut supported_formats_range = device .supported_output_formats() .expect("error while querying formats"); let format = supported_formats_range .next() .expect("no supported format?!") .with_max_sample_rate(); let stream_id = event_loop.build_output_stream(&device, &format).unwrap(); event_loop .play_stream(stream_id) .expect("failed to play_stream"); // Read the file let mut file = File::open(filename)?; let mut data = Vec::new(); file.read_to_end(&mut data)?; // Create a seed for the PRNG let mut seed = [0_u8; 16]; getrandom::getrandom(&mut seed).expect("failed to getrandom"); let seed = ( NativeEndian::read_u64(&seed[0..8]), NativeEndian::read_u64(&seed[8..16]), ); // Load a sonant song and create a synth let song = Song::from_slice(&data)?; let mut synth = Synth::new(&song, seed, format.sample_rate.0 as f32) .flat_map(ArrayVec::from) .peekable(); // cpal event 
loop; this is the actual audio player event_loop.run(move |stream_id, stream_result| { let stream_data = match stream_result { Ok(data) => data, Err(err) => { eprintln!("an error occurred on stream {:?}: {}", stream_id, err); return; } }; match stream_data { StreamData::Output { buffer: UnknownTypeOutputBuffer::U16(mut buffer), } => { let max = f32::from(i16::max_value()); for (elem, sample) in buffer.iter_mut().zip(synth.by_ref()) { *elem = sample.mul_add(max, max).round() as u16; } if synth.peek() == None { process::exit(0); } } StreamData::Output { buffer: UnknownTypeOutputBuffer::I16(mut buffer), } => { for (elem, sample) in buffer.iter_mut().zip(synth.by_ref()) { *elem = (sample * f32::from(i16::max_value())).round() as i16; } if synth.peek() == None { process::exit(0); } } StreamData::Output { buffer: UnknownTypeOutputBuffer::F32(mut buffer), } => { for (elem, sample) in buffer.iter_mut().zip(synth.by_ref()) { *elem = sample; } if synth.peek() == None { process::exit(0); } } _ => (), } }); } pub fn handle_errors<E>(result: Result<(), E>) where E: std::error::Error + ErrorIter + 'static, { match result { Err(e) => { eprintln!("{} {}", "error:".red(), e); for cause in e.chain().skip(1) { eprintln!("{} {}", "caused by:".bright_red(), cause); } process::exit(1); } Ok(()) => (), }; }
28.938776
82
0.537377
abf169676ed6547643d4a10c2b283994de5eaa1b
8,909
use std::collections::HashMap; use std::io::BufWriter; use std::iter::FromIterator; use image::codecs::bmp::BmpEncoder; use image::codecs::farbfeld::FarbfeldEncoder; use image::codecs::jpeg::JpegEncoder; use image::codecs::png::PngEncoder; use image::codecs::pnm::PnmEncoder; use image::codecs::tga::TgaEncoder; use image::ColorType; use lockbook_models::drawing::{ColorAlias, ColorRGB, Drawing, Stroke}; use raqote::{ DrawOptions, DrawTarget, LineCap, LineJoin, PathBuilder, SolidSource, Source, StrokeStyle, }; use serde::Deserialize; use serde_json::error::Category; use crate::model::errors::core_err_unexpected; use crate::CoreError; pub fn parse_drawing(drawing_bytes: &[u8]) -> Result<Drawing, CoreError> { // represent an empty string as an empty drawing, rather than returning an error if drawing_bytes.is_empty() { return Ok(Drawing::default()); } match serde_json::from_slice::<Drawing>(drawing_bytes) { Ok(d) => Ok(d), Err(e) => match e.classify() { Category::Io => Err(CoreError::Unexpected(String::from("json io"))), Category::Syntax | Category::Data | Category::Eof => Err(CoreError::DrawingInvalid), }, } } #[derive(Deserialize, Debug)] pub enum SupportedImageFormats { Png, Jpeg, Pnm, Tga, Farbfeld, Bmp, } impl std::str::FromStr for SupportedImageFormats { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s.to_lowercase().as_str() { "png" => Ok(Self::Png), "jpeg" | "jpg" => Ok(Self::Jpeg), "bmp" => Ok(Self::Bmp), "tga" => Ok(Self::Tga), "pnm" => Ok(Self::Pnm), "farbfeld" => Ok(Self::Farbfeld), unsupp => Err(format!("unsupported image format '{}'", unsupp)), } } } pub fn export_drawing( drawing_bytes: &[u8], format: SupportedImageFormats, render_theme: Option<HashMap<ColorAlias, ColorRGB>>, ) -> Result<Vec<u8>, CoreError> { let drawing = parse_drawing(drawing_bytes)?; let theme = match render_theme { Some(theme) => theme, None => match drawing.theme { None => HashMap::<_, _>::from_iter(IntoIterator::into_iter([ (ColorAlias::White, ColorRGB { 
r: 0xFF, g: 0xFF, b: 0xFF }), (ColorAlias::Black, ColorRGB { r: 0x00, g: 0x00, b: 0x00 }), (ColorAlias::Red, ColorRGB { r: 0xFF, g: 0x00, b: 0x00 }), (ColorAlias::Green, ColorRGB { r: 0x00, g: 0xFF, b: 0x00 }), (ColorAlias::Yellow, ColorRGB { r: 0xFF, g: 0xFF, b: 0x00 }), (ColorAlias::Blue, ColorRGB { r: 0x00, g: 0x00, b: 0xFF }), (ColorAlias::Magenta, ColorRGB { r: 0xFF, g: 0x00, b: 0xFF }), (ColorAlias::Cyan, ColorRGB { r: 0x00, g: 0xFF, b: 0xFF }), ])), Some(theme) => theme, }, }; let (width, height) = get_drawing_bounds(drawing.strokes.as_slice()); let mut draw_target = DrawTarget::new(width as i32, height as i32); for stroke in drawing.strokes { let color_rgb = theme .get(&stroke.color) .ok_or_else(|| CoreError::Unexpected(String::from("unable to get color from alias")))?; if stroke.points_x.len() != stroke.points_y.len() || stroke.points_y.len() != stroke.points_girth.len() { return Err(CoreError::Unexpected(String::from("unequal points and girth metrics"))); } if stroke.alpha > 1.0 || stroke.alpha < 0.0 { return Err(CoreError::Unexpected(String::from("invalid alpha value"))); } for point_index in 0..stroke.points_x.len() - 1 { let mut pb = PathBuilder::new(); let x1 = stroke .points_x .get(point_index) .ok_or_else(|| { CoreError::Unexpected(String::from("unable to get color from alias")) })? .to_owned(); let y1 = stroke .points_y .get(point_index) .ok_or_else(|| { CoreError::Unexpected(String::from("unable to get color from alias")) })? .to_owned(); let x2 = stroke .points_x .get(point_index + 1) .ok_or_else(|| { CoreError::Unexpected(String::from("unable to get color from alias")) })? .to_owned(); let y2 = stroke .points_y .get(point_index + 1) .ok_or_else(|| { CoreError::Unexpected(String::from("unable to get color from alias")) })? 
.to_owned(); pb.move_to(x1, y1); pb.line_to(x2, y2); pb.close(); let path = pb.finish(); draw_target.stroke( &path, &Source::Solid(SolidSource { r: color_rgb.r, g: color_rgb.g, b: color_rgb.b, a: (stroke.alpha * 255.0) as u8, }), &StrokeStyle { cap: LineCap::Round, join: LineJoin::Round, width: stroke .points_girth .get(point_index) .ok_or_else(|| { CoreError::Unexpected(String::from("unable to get stroke girth")) })? .to_owned(), miter_limit: 10.0, dash_array: Vec::new(), dash_offset: 0.0, }, &DrawOptions::new(), ); } } let mut buffer = Vec::<u8>::new(); let mut buf_writer = BufWriter::new(&mut buffer); let mut drawing_bytes: Vec<u8> = Vec::new(); for pixel in draw_target.into_vec().iter() { let (r, g, b, a) = u32_byte_to_u8_bytes(pixel.to_owned()); drawing_bytes.push(r); drawing_bytes.push(g); drawing_bytes.push(b); drawing_bytes.push(a); } match format { SupportedImageFormats::Png => PngEncoder::new(&mut buf_writer).encode( drawing_bytes.as_slice(), width, height, ColorType::Rgba8, ), SupportedImageFormats::Pnm => PnmEncoder::new(&mut buf_writer).encode( drawing_bytes.as_slice(), width, height, ColorType::Rgba8, ), SupportedImageFormats::Jpeg => JpegEncoder::new(&mut buf_writer).encode( drawing_bytes.as_slice(), width, height, ColorType::Rgba8, ), SupportedImageFormats::Tga => TgaEncoder::new(&mut buf_writer).encode( drawing_bytes.as_slice(), width, height, ColorType::Rgba8, ), SupportedImageFormats::Farbfeld => { FarbfeldEncoder::new(&mut buf_writer).encode(drawing_bytes.as_slice(), width, height) } SupportedImageFormats::Bmp => BmpEncoder::new(&mut buf_writer).encode( drawing_bytes.as_slice(), width, height, ColorType::Rgba8, ), } .map_err(core_err_unexpected)?; std::mem::drop(buf_writer); Ok(buffer) } fn u32_byte_to_u8_bytes(u32_byte: u32) -> (u8, u8, u8, u8) { let mut byte_1 = (u32_byte >> 16) & 0xffu32; let mut byte_2 = (u32_byte >> 8) & 0xffu32; let mut byte_3 = u32_byte & 0xffu32; let byte_4 = (u32_byte >> 24) & 0xffu32; if byte_4 > 0u32 { byte_1 = byte_1 
* 255u32 / byte_4; byte_2 = byte_2 * 255u32 / byte_4; byte_3 = byte_3 * 255u32 / byte_4; } (byte_1 as u8, byte_2 as u8, byte_3 as u8, byte_4 as u8) } pub fn get_drawing_bounds(strokes: &[Stroke]) -> (u32, u32) { let stroke_to_max_x = |stroke: &Stroke| { stroke .points_x .iter() .zip(stroke.points_girth.clone()) .map(|(x, girth)| x + girth) .map(|num| num as u32) .max() .unwrap_or(0) }; let stroke_to_max_y = |stroke: &Stroke| { stroke .points_y .iter() .zip(stroke.points_girth.clone()) .map(|(y, girth)| y + girth) .map(|num| num as u32) .max() .unwrap_or(0) }; let max_x_and_girth = strokes .iter() .map(|stroke| stroke_to_max_x(stroke)) .max() .unwrap_or(0); let max_y_and_girth = strokes .iter() .map(|stroke| stroke_to_max_y(stroke)) .max() .unwrap_or(0); (max_x_and_girth + 20, max_y_and_girth + 20) }
32.278986
99
0.521046
d792dbf5037ae8a911ab879093be733cbaae3fb4
1,180
/* * Cosmos SDK - Legacy REST and gRPC Gateway docs * * A REST interface for state queries, legacy transactions * * The version of the OpenAPI document: 1.0.0 * * Generated by: https://openapi-generator.tech */ #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct IbcCoreChannelV1QueryPacketAcknowledgementsResponse { #[serde(rename = "acknowledgements", skip_serializing_if = "Option::is_none")] pub acknowledgements: Option<Vec<crate::models::QueryPacketAcknowledgemetsResponseIsTheRequestTypeForTheQueryQueryPacketAcknowledgementsRpcMethodAcknowledgements>>, #[serde(rename = "pagination", skip_serializing_if = "Option::is_none")] pub pagination: Option<crate::models::PaginationResponse>, #[serde(rename = "height", skip_serializing_if = "Option::is_none")] pub height: Option<crate::models::QueryBlockHeight>, } impl IbcCoreChannelV1QueryPacketAcknowledgementsResponse { pub fn new() -> IbcCoreChannelV1QueryPacketAcknowledgementsResponse { IbcCoreChannelV1QueryPacketAcknowledgementsResponse { acknowledgements: None, pagination: None, height: None, } } }
33.714286
168
0.737288
eb30886de2b4fb3217d7048df7c25e1bb0b3e3a3
1,080
// Generated by `scripts/generate.js` use utils::vk_traits::*; /// Wrapper for [VkDriverId](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkDriverId.html). #[repr(i32)] #[derive(Debug, PartialEq, Copy, Clone)] pub enum VkDriverId { AmdProprietary = 1, AmdOpenSource = 2, MesaRadv = 3, NvidiaProprietary = 4, IntelProprietaryWindows = 5, IntelOpenSourceMesa = 6, ImaginationProprietary = 7, QualcommProprietary = 8, ArmProprietary = 9, GoogleSwiftshader = 10, GgpProprietary = 11, BroadcomProprietary = 12, } #[doc(hidden)] pub type RawVkDriverId = i32; impl VkWrappedType<RawVkDriverId> for VkDriverId { fn vk_to_raw(src: &VkDriverId, dst: &mut RawVkDriverId) { *dst = *src as i32 } } impl VkRawType<VkDriverId> for RawVkDriverId { fn vk_to_wrapped(src: &RawVkDriverId) -> VkDriverId { unsafe { *((src as *const i32) as *const VkDriverId) } } } impl Default for VkDriverId { fn default() -> VkDriverId { VkDriverId::AmdProprietary } }
24.545455
116
0.661111
f90ede35a13b5d5afb96d557e961a64c4c107a42
4,115
//! Variable data length implementation of the [`Registers`] trait using the //! [`embedded-hal`] blocking SPI trait, and an infallible GPIO pin. //! //! This uses the W5500 variable data length mode (VDM). //! In VDM mode the SPI frame data length is determined by the chip select pin. //! This is the preferred blocking implementation if your W5500 has an //! infallible chip select pin. //! //! [`embedded-hal`]: https://github.com/rust-embedded/embedded-hal //! [`Registers`]: crate::Registers use crate::spi::{vdm_header, AccessMode}; use embedded_hal::digital::v2::OutputPin; /// W5500 blocking variable data length implementation. #[derive(Debug)] #[cfg_attr(feature = "defmt", derive(defmt::Format))] pub struct W5500<SPI, CS> { /// SPI bus. spi: SPI, /// GPIO for chip select. cs: CS, } impl<SPI, CS, SpiError> W5500<SPI, CS> where SPI: embedded_hal::blocking::spi::Transfer<u8, Error = SpiError> + embedded_hal::blocking::spi::Write<u8, Error = SpiError>, CS: OutputPin<Error = core::convert::Infallible>, { /// Creates a new `W5500` driver from a SPI peripheral and a chip select /// digital I/O pin. /// /// # Safety /// /// The chip select pin must be high before being passed to this function. /// /// # Example /// /// ``` /// # use embedded_hal_mock as hal; /// # let spi = hal::spi::Mock::new(&[]); /// # struct Pin {}; /// # impl embedded_hal::digital::v2::OutputPin for Pin { /// # type Error = core::convert::Infallible; /// # fn set_low(&mut self) -> Result<(), Self::Error> { Ok(()) } /// # fn set_high(&mut self) -> Result<(), Self::Error> { Ok(()) } /// # } /// # let mut pin = Pin {}; /// use embedded_hal::digital::v2::OutputPin; /// use w5500_ll::blocking::vdm_infallible_gpio::W5500; /// /// pin.set_high().unwrap(); /// let mut w5500: W5500<_, _> = W5500::new(spi, pin); /// # Ok::<(), hal::MockError>(()) /// ``` pub fn new(spi: SPI, cs: CS) -> Self { W5500 { spi, cs } } /// Free the SPI bus and CS pin from the W5500. 
/// /// # Example /// /// ``` /// # use embedded_hal_mock as hal; /// # let spi = hal::spi::Mock::new(&[]); /// # struct Pin {}; /// # impl embedded_hal::digital::v2::OutputPin for Pin { /// # type Error = core::convert::Infallible; /// # fn set_low(&mut self) -> Result<(), Self::Error> { Ok(()) } /// # fn set_high(&mut self) -> Result<(), Self::Error> { Ok(()) } /// # } /// # let mut pin = Pin {}; /// use w5500_ll::blocking::vdm_infallible_gpio::W5500; /// /// let mut w5500 = W5500::new(spi, pin); /// let (spi, pin) = w5500.free(); /// ``` pub fn free(self) -> (SPI, CS) { (self.spi, self.cs) } #[inline] fn with_chip_enable<T, F>(&mut self, mut f: F) -> Result<T, SpiError> where F: FnMut(&mut SPI) -> Result<T, SpiError>, { self.cs.set_low().unwrap(); let result = f(&mut self.spi); self.cs.set_high().unwrap(); result } } impl<SPI, CS, SpiError> crate::Registers for W5500<SPI, CS> where SPI: embedded_hal::blocking::spi::Transfer<u8, Error = SpiError> + embedded_hal::blocking::spi::Write<u8, Error = SpiError>, CS: OutputPin<Error = core::convert::Infallible>, { /// SPI IO error type. type Error = SpiError; /// Read from the W5500. #[inline] fn read(&mut self, address: u16, block: u8, data: &mut [u8]) -> Result<(), Self::Error> { let header = vdm_header(address, block, AccessMode::Read); self.with_chip_enable(|spi| { spi.write(&header)?; spi.transfer(data)?; Ok(()) }) } /// Write to the W5500. #[inline] fn write(&mut self, address: u16, block: u8, data: &[u8]) -> Result<(), Self::Error> { let header = vdm_header(address, block, AccessMode::Write); self.with_chip_enable(|spi| { spi.write(&header)?; spi.write(data)?; Ok(()) }) } }
32.401575
93
0.559174
f9904ec7887dd18abdda275d6cafb1f9ba201189
43
#[cfg(test)] mod extended_line_method_test;
21.5
30
0.813953
38ab85d88265cbf673799cc69406af542293430f
3,330
use crate::manifest::State; use clap::ArgMatches; use log::{debug, info}; use snafu::{ResultExt, Snafu}; #[derive(Debug, Snafu)] pub enum PackageJogError { #[snafu(display("No supported package manager detected! Please check the docs for all supported package managers."))] NoManager { source: std::io::Error }, #[snafu(display( "The package {} does not have an install name for {}. Please add one!", package, manager ))] PackageNoManager { package: String, manager: String, source: std::io::Error, }, #[snafu(display( "The great package {} failed to install with manager {}: {}", package, manager, source ))] PackageInstallFail { package: String, manager: String, source: subprocess::PopenError, }, } pub fn jog(_matches: &ArgMatches, state: &mut State) -> Result<(), Box<dyn std::error::Error>> { let manager = match super::get_manager(state) { Some(m) => m, None => { Err(std::io::Error::from(std::io::ErrorKind::InvalidInput)).context(NoManager {})? } }; // TODO: Option to install all packages at once if let Some(packages) = &state.data.packages { for package in packages { let mut command = state .package_context .package_install_prefix .get_key_value(&manager) .unwrap() .0 .clone(); let package_name = package.package.clone(); let mut args = state .package_context .package_install_prefix .get(&manager) .unwrap() .2 .clone(); // Runs if we have overloads to deal with if package.package_overloads.len() != 0 { let mut to_use: (u8, String) = (0, "".into()); for overload in &package.package_overloads { let x = state .package_context .package_install_prefix .get_key_value(overload.0) .unwrap(); if x.1 .1 > to_use.0 { to_use = (x.1 .1, x.0.clone()); } } command = to_use.1; } // Runs if we need to run the command as root. if state .package_context .package_install_prefix .get(&command) .unwrap() .0 { args.insert(0, command); command = "sudo".into(); // TODO: Support doas. 
}; args.push(package_name.clone()); info!( "Installing package great {} with manager {}....", &package_name, &manager ); debug!("{} {:?}", &command, &args); subprocess::Exec::cmd(command) .args(&args) .popen() .context(PackageInstallFail { package: &package_name, manager: &manager, })?; } } else { info!("No work to do!"); } Ok(()) }
29.469027
121
0.466667
f8afc930aaa7acf6b5ae578e67447d15799ad6b6
6,692
//! Fully integrated benchmarks for rust-analyzer, which load real cargo //! projects. //! //! The benchmark here is used to debug specific performance regressions. If you //! notice that, eg, completion is slow in some specific case, you can modify //! code here exercise this specific completion, and thus have a fast //! edit/compile/test cycle. //! //! Note that "Rust Analyzer: Run" action does not allow running a single test //! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line" //! which you can use to paste the command in terminal and add `--release` manually. use std::{convert::TryFrom, sync::Arc}; use ide::{Change, CompletionConfig, FilePosition, TextSize}; use ide_db::helpers::{ insert_use::{ImportGranularity, InsertUseConfig}, SnippetCap, }; use test_utils::project_root; use vfs::{AbsPathBuf, VfsPath}; use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig}; #[test] fn integrated_highlighting_benchmark() { if std::env::var("RUN_SLOW_BENCHES").is_err() { return; } // Load rust-analyzer itself. 
let workspace_to_load = project_root(); let file = "./crates/ide_db/src/apply_change.rs"; let cargo_config = Default::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, wrap_rustc: false, with_proc_macro: false, prefill_caches: false, }; let (mut host, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() }; let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) }; { let _it = stdx::timeit("initial"); let analysis = host.analysis(); analysis.highlight_as_html(file_id, false).unwrap(); } profile::init_from("*>100"); // let _s = profile::heartbeat_span(); { let _it = stdx::timeit("change"); let mut text = host.analysis().file_text(file_id).unwrap().to_string(); text.push_str("\npub fn _dummy() {}\n"); let mut change = Change::new(); change.change_file(file_id, Some(Arc::new(text))); host.apply_change(change); } { let _it = stdx::timeit("after change"); let _span = profile::cpu_span(); let analysis = host.analysis(); analysis.highlight_as_html(file_id, false).unwrap(); } } #[test] fn integrated_completion_benchmark() { if std::env::var("RUN_SLOW_BENCHES").is_err() { return; } // Load rust-analyzer itself. 
let workspace_to_load = project_root(); let file = "./crates/hir/src/lib.rs"; let cargo_config = Default::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, wrap_rustc: false, with_proc_macro: false, prefill_caches: true, }; let (mut host, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() }; let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) }; { let _it = stdx::timeit("initial"); let analysis = host.analysis(); analysis.highlight_as_html(file_id, false).unwrap(); } profile::init_from("*>5"); // let _s = profile::heartbeat_span(); let completion_offset = { let _it = stdx::timeit("change"); let mut text = host.analysis().file_text(file_id).unwrap().to_string(); let completion_offset = patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)") + "sel".len(); let mut change = Change::new(); change.change_file(file_id, Some(Arc::new(text))); host.apply_change(change); completion_offset }; { let _p = profile::span("unqualified path completion"); let _span = profile::cpu_span(); let analysis = host.analysis(); let config = CompletionConfig { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), insert_use: InsertUseConfig { granularity: ImportGranularity::Crate, prefix_kind: hir::PrefixKind::ByCrate, enforce_granularity: true, group: true, skip_glob_imports: true, }, }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; analysis.completions(&config, position).unwrap(); } let completion_offset = { let _it = stdx::timeit("change"); let mut text = 
host.analysis().file_text(file_id).unwrap().to_string(); let completion_offset = patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)") + "self.".len(); let mut change = Change::new(); change.change_file(file_id, Some(Arc::new(text))); host.apply_change(change); completion_offset }; { let _p = profile::span("dot completion"); let _span = profile::cpu_span(); let analysis = host.analysis(); let config = CompletionConfig { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), insert_use: InsertUseConfig { granularity: ImportGranularity::Crate, prefix_kind: hir::PrefixKind::ByCrate, enforce_granularity: true, group: true, skip_glob_imports: true, }, }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; analysis.completions(&config, position).unwrap(); } } fn patch(what: &mut String, from: &str, to: &str) -> usize { let idx = what.find(from).unwrap(); *what = what.replacen(from, to, 1); idx }
34.142857
98
0.611775
726009feb5f52d16f8d81be6284f9c60022898d4
1,681
#![allow(dead_code)] fn main() { let x = 25; println!("Is x greater than 5?, {}", greater_than_five(x)); is_divisible(x); // let is_positive = if x >= 0 { true } else { false }; let is_positive = x >= 0; println!("Is x positive? {}", is_positive); count_to_10(); countdown_from(10); countdown_from_five(); countdown_from_three(); } fn greater_than_five(x: i32) -> bool { x > 5 // if x > 5 { // true // } else { // false // } } fn is_divisible(x: i32) { if x % 4 == 0 { println!("{} is divisible by 4", x); } else if x % 3 == 0 { println!("{} is divisible by 3", x); } else if x % 2 == 0 { println!("{} is divisible by 2", x); } else { println!("{} is not divisible by 2, 3 or 4", x); } } fn infinite_loop() { loop { println!("Again!"); } } fn count_to_10() { let mut counter = 0; let result = loop { counter += 1; if counter == 10 { break counter * 2; } }; println!("The result is: {}", result) } fn countdown_from(x: u32) { let mut number = x; while number != 0 { println!("{}!", number); number -= 1; } println!("Happy New Year!"); } fn countdown_from_five() { let mut numbers = [1,2,3,4,5]; numbers.reverse(); for number in numbers.iter() { println!("{}!", number); } println!("LIFTOFF!!!") } fn countdown_from_three() { for number in (1..=3).rev() { match number { 3 => println!("Ready..."), 2 => println!("Set..."), _ => println!("GO!!!"), } } }
18.88764
63
0.477692
e51f234d2ab7e6a27a7d29d664412c94190ff234
539
use super::{default_copy_getter, default_copy_setter, Control, ControlFieldGenerator}; use crate::ast::{AudioExtractField, ControlField, ControlType}; pub struct AudioExtractControl; impl Control for AudioExtractControl { fn control_type() -> ControlType { ControlType::AudioExtract } fn gen_fields(generator: &ControlFieldGenerator) { generator.generate( ControlField::AudioExtract(AudioExtractField::Value), &default_copy_getter, &default_copy_setter, ); } }
29.944444
86
0.699443
71c074a4dd7a06da15b06f816277e967432f7146
326
fn main() { let mut input = include_str!("../input.txt") .lines() .map(|i| i.parse::<usize>().unwrap()); let mut prev = input.next().unwrap(); let mut counter = 0; for i in input { if prev < i{ counter += 1; } prev = i } println!("{}", counter); }
19.176471
48
0.447853
e4d1203ebf810631d2a1b0a1c73a24ea8ebe0027
30,960
#![doc = "Peripheral access API for LPC178X_7X microcontrollers (generated using svd2rust v0.17.0)\n\nYou can find an overview of the API [here].\n\n[here]: https://docs.rs/svd2rust/0.17.0/svd2rust/#peripheral-api"] #![deny(const_err)] #![deny(dead_code)] #![deny(improper_ctypes)] #![deny(missing_docs)] #![deny(no_mangle_generic_items)] #![deny(non_shorthand_field_patterns)] #![deny(overflowing_literals)] #![deny(path_statements)] #![deny(patterns_in_fns_without_body)] #![deny(private_in_public)] #![deny(unconditional_recursion)] #![deny(unused_allocation)] #![deny(unused_comparisons)] #![deny(unused_parens)] #![deny(while_true)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![no_std] extern crate bare_metal; extern crate cortex_m; #[cfg(feature = "rt")] extern crate cortex_m_rt; extern crate vcell; use core::marker::PhantomData; use core::ops::Deref; #[doc = r"Number available in the NVIC for configuring priority"] pub const NVIC_PRIO_BITS: u8 = 3; #[cfg(feature = "rt")] extern "C" { fn WWDT(); fn TIMER0(); fn TIMER1(); fn TIMER2(); fn TIMER3(); fn UART0(); fn UART1(); fn UART2(); fn UART3(); fn PWM1(); fn I2C0(); fn I2C1(); fn I2C2(); fn SSP0(); fn SSP1(); fn RTC(); fn EINT0(); fn EINT1(); fn EINT2(); fn EINT3(); fn ADC(); fn BOD(); fn USB(); fn CAN(); fn GPDMA(); fn I2S(); fn ETHERNET(); fn SDMMC(); fn MCPWM(); fn QEI(); fn USB_NEED_CLK(); fn UART4(); fn SSP2(); fn LCD(); fn GPIOINT(); fn PWM0(); fn EEPROM(); } #[doc(hidden)] pub union Vector { _handler: unsafe extern "C" fn(), _reserved: u32, } #[cfg(feature = "rt")] #[doc(hidden)] #[link_section = ".vector_table.interrupts"] #[no_mangle] pub static __INTERRUPTS: [Vector; 41] = [ Vector { _handler: WWDT }, Vector { _handler: TIMER0 }, Vector { _handler: TIMER1 }, Vector { _handler: TIMER2 }, Vector { _handler: TIMER3 }, Vector { _handler: UART0 }, Vector { _handler: UART1 }, Vector { _handler: UART2 }, Vector { _handler: UART3 }, Vector { _handler: PWM1 }, Vector { _handler: I2C0 }, Vector { 
_handler: I2C1 }, Vector { _handler: I2C2 }, Vector { _reserved: 0 }, Vector { _handler: SSP0 }, Vector { _handler: SSP1 }, Vector { _reserved: 0 }, Vector { _handler: RTC }, Vector { _handler: EINT0 }, Vector { _handler: EINT1 }, Vector { _handler: EINT2 }, Vector { _handler: EINT3 }, Vector { _handler: ADC }, Vector { _handler: BOD }, Vector { _handler: USB }, Vector { _handler: CAN }, Vector { _handler: GPDMA }, Vector { _handler: I2S }, Vector { _handler: ETHERNET }, Vector { _handler: SDMMC }, Vector { _handler: MCPWM }, Vector { _handler: QEI }, Vector { _reserved: 0 }, Vector { _handler: USB_NEED_CLK, }, Vector { _reserved: 0 }, Vector { _handler: UART4 }, Vector { _handler: SSP2 }, Vector { _handler: LCD }, Vector { _handler: GPIOINT }, Vector { _handler: PWM0 }, Vector { _handler: EEPROM }, ]; #[doc = r"Enumeration of all the interrupts"] #[derive(Copy, Clone, Debug)] #[repr(u8)] pub enum Interrupt { #[doc = "0 - WWDT"] WWDT = 0, #[doc = "1 - TIMER0"] TIMER0 = 1, #[doc = "2 - TIMER1"] TIMER1 = 2, #[doc = "3 - TIMER2"] TIMER2 = 3, #[doc = "4 - TIMER3"] TIMER3 = 4, #[doc = "5 - UART0"] UART0 = 5, #[doc = "6 - UART1"] UART1 = 6, #[doc = "7 - UART2"] UART2 = 7, #[doc = "8 - UART3"] UART3 = 8, #[doc = "9 - PWM1"] PWM1 = 9, #[doc = "10 - I2C0"] I2C0 = 10, #[doc = "11 - I2C1"] I2C1 = 11, #[doc = "12 - I2C2"] I2C2 = 12, #[doc = "14 - SSP0"] SSP0 = 14, #[doc = "15 - SSP1"] SSP1 = 15, #[doc = "17 - RTC"] RTC = 17, #[doc = "18 - EINT0"] EINT0 = 18, #[doc = "19 - EINT1"] EINT1 = 19, #[doc = "20 - EINT2"] EINT2 = 20, #[doc = "21 - EINT3"] EINT3 = 21, #[doc = "22 - ADC"] ADC = 22, #[doc = "23 - BOD"] BOD = 23, #[doc = "24 - USB"] USB = 24, #[doc = "25 - CAN"] CAN = 25, #[doc = "26 - GPDMA"] GPDMA = 26, #[doc = "27 - I2S"] I2S = 27, #[doc = "28 - ETHERNET"] ETHERNET = 28, #[doc = "29 - SDMMC"] SDMMC = 29, #[doc = "30 - MCPWM"] MCPWM = 30, #[doc = "31 - QEI"] QEI = 31, #[doc = "33 - USB_NEED_CLK"] USB_NEED_CLK = 33, #[doc = "35 - UART4"] UART4 = 35, #[doc = "36 - SSP2"] 
SSP2 = 36, #[doc = "37 - LCD"] LCD = 37, #[doc = "38 - GPIOINT"] GPIOINT = 38, #[doc = "39 - PWM0"] PWM0 = 39, #[doc = "40 - EEPROM"] EEPROM = 40, } unsafe impl bare_metal::Nr for Interrupt { #[inline(always)] fn nr(&self) -> u8 { *self as u8 } } #[cfg(feature = "rt")] pub use self::Interrupt as interrupt; pub use cortex_m::peripheral::Peripherals as CorePeripherals; pub use cortex_m::peripheral::{CBP, CPUID, DCB, DWT, FPB, ITM, MPU, NVIC, SCB, SYST, TPIU}; #[cfg(feature = "rt")] pub use cortex_m_rt::interrupt; #[allow(unused_imports)] use generic::*; #[doc = r"Common register and bit access and modify traits"] pub mod generic; #[doc = "Flash control block"] pub struct FLASHCTRL { _marker: PhantomData<*const ()>, } unsafe impl Send for FLASHCTRL {} impl FLASHCTRL { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const flashctrl::RegisterBlock { 0x0020_0000 as *const _ } } impl Deref for FLASHCTRL { type Target = flashctrl::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*FLASHCTRL::ptr() } } } #[doc = "Flash control block"] pub mod flashctrl; #[doc = "General purpose DMA controller"] pub struct GPDMA { _marker: PhantomData<*const ()>, } unsafe impl Send for GPDMA {} impl GPDMA { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const gpdma::RegisterBlock { 0x2008_0000 as *const _ } } impl Deref for GPDMA { type Target = gpdma::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*GPDMA::ptr() } } } #[doc = "General purpose DMA controller"] pub mod gpdma; #[doc = "Ethernet"] pub struct ETHERNET { _marker: PhantomData<*const ()>, } unsafe impl Send for ETHERNET {} impl ETHERNET { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const ethernet::RegisterBlock { 0x2008_4000 as *const _ } } impl Deref for ETHERNET { type Target = ethernet::RegisterBlock; #[inline(always)] fn deref(&self) 
-> &Self::Target { unsafe { &*ETHERNET::ptr() } } } #[doc = "Ethernet"] pub mod ethernet; #[doc = "LCD controller"] pub struct LCD { _marker: PhantomData<*const ()>, } unsafe impl Send for LCD {} impl LCD { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const lcd::RegisterBlock { 0x2008_8000 as *const _ } } impl Deref for LCD { type Target = lcd::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*LCD::ptr() } } } #[doc = "LCD controller"] pub mod lcd; #[doc = "USB device/host/OTG controller"] pub struct USB { _marker: PhantomData<*const ()>, } unsafe impl Send for USB {} impl USB { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const usb::RegisterBlock { 0x2008_c000 as *const _ } } impl Deref for USB { type Target = usb::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*USB::ptr() } } } #[doc = "USB device/host/OTG controller"] pub mod usb; #[doc = "CRC engine"] pub struct CRC { _marker: PhantomData<*const ()>, } unsafe impl Send for CRC {} impl CRC { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const crc::RegisterBlock { 0x2009_0000 as *const _ } } impl Deref for CRC { type Target = crc::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CRC::ptr() } } } #[doc = "CRC engine"] pub mod crc; #[doc = "General Purpose I/O"] pub struct GPIO { _marker: PhantomData<*const ()>, } unsafe impl Send for GPIO {} impl GPIO { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const gpio::RegisterBlock { 0x2009_8000 as *const _ } } impl Deref for GPIO { type Target = gpio::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*GPIO::ptr() } } } #[doc = "General Purpose I/O"] pub mod gpio; #[doc = "ExternalMemory Controller (EMC)"] pub struct EMC { _marker: PhantomData<*const ()>, } unsafe impl 
Send for EMC {} impl EMC { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const emc::RegisterBlock { 0x2009_c000 as *const _ } } impl Deref for EMC { type Target = emc::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*EMC::ptr() } } } #[doc = "ExternalMemory Controller (EMC)"] pub mod emc; #[doc = "Windowed Watchdog Timer (WWDT)"] pub struct WWDT { _marker: PhantomData<*const ()>, } unsafe impl Send for WWDT {} impl WWDT { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const wwdt::RegisterBlock { 0x4000_0000 as *const _ } } impl Deref for WWDT { type Target = wwdt::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*WWDT::ptr() } } } #[doc = "Windowed Watchdog Timer (WWDT)"] pub mod wwdt; #[doc = "Timer0/1/2/3"] pub struct TIMER0 { _marker: PhantomData<*const ()>, } unsafe impl Send for TIMER0 {} impl TIMER0 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const timer0::RegisterBlock { 0x4000_4000 as *const _ } } impl Deref for TIMER0 { type Target = timer0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*TIMER0::ptr() } } } #[doc = "Timer0/1/2/3"] pub mod timer0; #[doc = "Timer0/1/2/3"] pub struct TIMER1 { _marker: PhantomData<*const ()>, } unsafe impl Send for TIMER1 {} impl TIMER1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const timer0::RegisterBlock { 0x4000_8000 as *const _ } } impl Deref for TIMER1 { type Target = timer0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*TIMER1::ptr() } } } #[doc = "UART0/2/3"] pub struct UART0 { _marker: PhantomData<*const ()>, } unsafe impl Send for UART0 {} impl UART0 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const uart0::RegisterBlock { 0x4000_c000 as *const _ } } impl 
Deref for UART0 { type Target = uart0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*UART0::ptr() } } } #[doc = "UART0/2/3"] pub mod uart0; #[doc = "UART1"] pub struct UART1 { _marker: PhantomData<*const ()>, } unsafe impl Send for UART1 {} impl UART1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const uart1::RegisterBlock { 0x4001_0000 as *const _ } } impl Deref for UART1 { type Target = uart1::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*UART1::ptr() } } } #[doc = "UART1"] pub mod uart1; #[doc = "Pulse Width Modulators (PWM0/1)"] pub struct PWM0 { _marker: PhantomData<*const ()>, } unsafe impl Send for PWM0 {} impl PWM0 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const pwm0::RegisterBlock { 0x4001_4000 as *const _ } } impl Deref for PWM0 { type Target = pwm0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*PWM0::ptr() } } } #[doc = "Pulse Width Modulators (PWM0/1)"] pub mod pwm0; #[doc = "Pulse Width Modulators (PWM0/1)"] pub struct PWM1 { _marker: PhantomData<*const ()>, } unsafe impl Send for PWM1 {} impl PWM1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const pwm0::RegisterBlock { 0x4001_8000 as *const _ } } impl Deref for PWM1 { type Target = pwm0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*PWM1::ptr() } } } #[doc = "I2C bus interface"] pub struct I2C0 { _marker: PhantomData<*const ()>, } unsafe impl Send for I2C0 {} impl I2C0 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const i2c0::RegisterBlock { 0x4001_c000 as *const _ } } impl Deref for I2C0 { type Target = i2c0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*I2C0::ptr() } } } #[doc = "I2C bus interface"] pub mod i2c0; #[doc = "Real Time Clock 
(RTC)"] pub struct RTC { _marker: PhantomData<*const ()>, } unsafe impl Send for RTC {} impl RTC { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const rtc::RegisterBlock { 0x4002_4000 as *const _ } } impl Deref for RTC { type Target = rtc::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*RTC::ptr() } } } #[doc = "Real Time Clock (RTC)"] pub mod rtc; #[doc = "GPIO"] pub struct GPIOINT { _marker: PhantomData<*const ()>, } unsafe impl Send for GPIOINT {} impl GPIOINT { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const gpioint::RegisterBlock { 0x4002_8080 as *const _ } } impl Deref for GPIOINT { type Target = gpioint::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*GPIOINT::ptr() } } } #[doc = "GPIO"] pub mod gpioint; #[doc = "IOCON pin configuration"] pub struct IOCON { _marker: PhantomData<*const ()>, } unsafe impl Send for IOCON {} impl IOCON { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const iocon::RegisterBlock { 0x4002_c000 as *const _ } } impl Deref for IOCON { type Target = iocon::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*IOCON::ptr() } } } #[doc = "IOCON pin configuration"] pub mod iocon; #[doc = "SSP1 controller"] pub struct SSP1 { _marker: PhantomData<*const ()>, } unsafe impl Send for SSP1 {} impl SSP1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const ssp1::RegisterBlock { 0x4003_0000 as *const _ } } impl Deref for SSP1 { type Target = ssp1::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*SSP1::ptr() } } } #[doc = "SSP1 controller"] pub mod ssp1; #[doc = "Analog-to-Digital Converter (ADC)"] pub struct ADC { _marker: PhantomData<*const ()>, } unsafe impl Send for ADC {} impl ADC { #[doc = r"Returns a pointer to the register block"] 
#[inline(always)] pub const fn ptr() -> *const adc::RegisterBlock { 0x4003_4000 as *const _ } } impl Deref for ADC { type Target = adc::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*ADC::ptr() } } } #[doc = "Analog-to-Digital Converter (ADC)"] pub mod adc; #[doc = "CAN acceptance filter RAM"] pub struct CANAFRAM { _marker: PhantomData<*const ()>, } unsafe impl Send for CANAFRAM {} impl CANAFRAM { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const canafram::RegisterBlock { 0x4003_8000 as *const _ } } impl Deref for CANAFRAM { type Target = canafram::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CANAFRAM::ptr() } } } #[doc = "CAN acceptance filter RAM"] pub mod canafram; #[doc = "CAN controller acceptance filter"] pub struct CANAF { _marker: PhantomData<*const ()>, } unsafe impl Send for CANAF {} impl CANAF { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const canaf::RegisterBlock { 0x4003_c000 as *const _ } } impl Deref for CANAF { type Target = canaf::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CANAF::ptr() } } } #[doc = "CAN controller acceptance filter"] pub mod canaf; #[doc = "Central CAN controller"] pub struct CCAN { _marker: PhantomData<*const ()>, } unsafe impl Send for CCAN {} impl CCAN { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const ccan::RegisterBlock { 0x4004_0000 as *const _ } } impl Deref for CCAN { type Target = ccan::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CCAN::ptr() } } } #[doc = "Central CAN controller"] pub mod ccan; #[doc = "CAN1 controller"] pub struct CAN1 { _marker: PhantomData<*const ()>, } unsafe impl Send for CAN1 {} impl CAN1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const can1::RegisterBlock { 
0x4004_4000 as *const _ } } impl Deref for CAN1 { type Target = can1::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CAN1::ptr() } } } #[doc = "CAN1 controller"] pub mod can1; #[doc = "CAN1 controller"] pub struct CAN2 { _marker: PhantomData<*const ()>, } unsafe impl Send for CAN2 {} impl CAN2 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const can1::RegisterBlock { 0x4004_8000 as *const _ } } impl Deref for CAN2 { type Target = can1::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*CAN2::ptr() } } } #[doc = "I2C bus interface"] pub struct I2C1 { _marker: PhantomData<*const ()>, } unsafe impl Send for I2C1 {} impl I2C1 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const i2c0::RegisterBlock { 0x4005_c000 as *const _ } } impl Deref for I2C1 { type Target = i2c0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*I2C1::ptr() } } } #[doc = "SSP controller"] pub struct SSP0 { _marker: PhantomData<*const ()>, } unsafe impl Send for SSP0 {} impl SSP0 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const ssp1::RegisterBlock { 0x4008_8000 as *const _ } } impl Deref for SSP0 { type Target = ssp1::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*SSP0::ptr() } } } #[doc = "Digital-to-Analog Converter (DAC)"] pub struct DAC { _marker: PhantomData<*const ()>, } unsafe impl Send for DAC {} impl DAC { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const dac::RegisterBlock { 0x4008_c000 as *const _ } } impl Deref for DAC { type Target = dac::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*DAC::ptr() } } } #[doc = "Digital-to-Analog Converter (DAC)"] pub mod dac; #[doc = "Timer0/1/2/3"] pub struct TIMER2 { _marker: PhantomData<*const ()>, } unsafe 
impl Send for TIMER2 {} impl TIMER2 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const timer0::RegisterBlock { 0x4009_0000 as *const _ } } impl Deref for TIMER2 { type Target = timer0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*TIMER2::ptr() } } } #[doc = "Timer0/1/2/3"] pub struct TIMER3 { _marker: PhantomData<*const ()>, } unsafe impl Send for TIMER3 {} impl TIMER3 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const timer0::RegisterBlock { 0x4009_4000 as *const _ } } impl Deref for TIMER3 { type Target = timer0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*TIMER3::ptr() } } } #[doc = "UART0/2/3"] pub struct UART2 { _marker: PhantomData<*const ()>, } unsafe impl Send for UART2 {} impl UART2 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const uart0::RegisterBlock { 0x4009_8000 as *const _ } } impl Deref for UART2 { type Target = uart0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*UART2::ptr() } } } #[doc = "UART0/2/3"] pub struct UART3 { _marker: PhantomData<*const ()>, } unsafe impl Send for UART3 {} impl UART3 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const uart0::RegisterBlock { 0x4009_c000 as *const _ } } impl Deref for UART3 { type Target = uart0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*UART3::ptr() } } } #[doc = "I2C bus interface"] pub struct I2C2 { _marker: PhantomData<*const ()>, } unsafe impl Send for I2C2 {} impl I2C2 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const i2c0::RegisterBlock { 0x400a_0000 as *const _ } } impl Deref for I2C2 { type Target = i2c0::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*I2C2::ptr() } } } #[doc = 
"UART4"] pub struct UART4 { _marker: PhantomData<*const ()>, } unsafe impl Send for UART4 {} impl UART4 { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const uart4::RegisterBlock { 0x400a_4000 as *const _ } } impl Deref for UART4 { type Target = uart4::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*UART4::ptr() } } } #[doc = "UART4"] pub mod uart4; #[doc = "I2S interface"] pub struct I2S { _marker: PhantomData<*const ()>, } unsafe impl Send for I2S {} impl I2S { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const i2s::RegisterBlock { 0x400a_8000 as *const _ } } impl Deref for I2S { type Target = i2s::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*I2S::ptr() } } } #[doc = "I2S interface"] pub mod i2s; #[doc = "Motor Control PWM"] pub struct MCPWM { _marker: PhantomData<*const ()>, } unsafe impl Send for MCPWM {} impl MCPWM { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const mcpwm::RegisterBlock { 0x400b_8000 as *const _ } } impl Deref for MCPWM { type Target = mcpwm::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*MCPWM::ptr() } } } #[doc = "Motor Control PWM"] pub mod mcpwm; #[doc = "Quadrature Encoder Interface (QEI)"] pub struct QEI { _marker: PhantomData<*const ()>, } unsafe impl Send for QEI {} impl QEI { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const qei::RegisterBlock { 0x400b_c000 as *const _ } } impl Deref for QEI { type Target = qei::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*QEI::ptr() } } } #[doc = "Quadrature Encoder Interface (QEI)"] pub mod qei; #[doc = "SD card interface"] pub struct SDMMC { _marker: PhantomData<*const ()>, } unsafe impl Send for SDMMC {} impl SDMMC { #[doc = r"Returns a pointer to the register block"] 
#[inline(always)] pub const fn ptr() -> *const sdmmc::RegisterBlock { 0x400c_0000 as *const _ } } impl Deref for SDMMC { type Target = sdmmc::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*SDMMC::ptr() } } } #[doc = "SD card interface"] pub mod sdmmc; #[doc = "System and clock control"] pub struct SYSCON { _marker: PhantomData<*const ()>, } unsafe impl Send for SYSCON {} impl SYSCON { #[doc = r"Returns a pointer to the register block"] #[inline(always)] pub const fn ptr() -> *const syscon::RegisterBlock { 0x400f_c000 as *const _ } } impl Deref for SYSCON { type Target = syscon::RegisterBlock; #[inline(always)] fn deref(&self) -> &Self::Target { unsafe { &*SYSCON::ptr() } } } #[doc = "System and clock control"] pub mod syscon; #[no_mangle] static mut DEVICE_PERIPHERALS: bool = false; #[doc = r"All the peripherals"] #[allow(non_snake_case)] pub struct Peripherals { #[doc = "FLASHCTRL"] pub FLASHCTRL: FLASHCTRL, #[doc = "GPDMA"] pub GPDMA: GPDMA, #[doc = "ETHERNET"] pub ETHERNET: ETHERNET, #[doc = "LCD"] pub LCD: LCD, #[doc = "USB"] pub USB: USB, #[doc = "CRC"] pub CRC: CRC, #[doc = "GPIO"] pub GPIO: GPIO, #[doc = "EMC"] pub EMC: EMC, #[doc = "WWDT"] pub WWDT: WWDT, #[doc = "TIMER0"] pub TIMER0: TIMER0, #[doc = "TIMER1"] pub TIMER1: TIMER1, #[doc = "UART0"] pub UART0: UART0, #[doc = "UART1"] pub UART1: UART1, #[doc = "PWM0"] pub PWM0: PWM0, #[doc = "PWM1"] pub PWM1: PWM1, #[doc = "I2C0"] pub I2C0: I2C0, #[doc = "RTC"] pub RTC: RTC, #[doc = "GPIOINT"] pub GPIOINT: GPIOINT, #[doc = "IOCON"] pub IOCON: IOCON, #[doc = "SSP1"] pub SSP1: SSP1, #[doc = "ADC"] pub ADC: ADC, #[doc = "CANAFRAM"] pub CANAFRAM: CANAFRAM, #[doc = "CANAF"] pub CANAF: CANAF, #[doc = "CCAN"] pub CCAN: CCAN, #[doc = "CAN1"] pub CAN1: CAN1, #[doc = "CAN2"] pub CAN2: CAN2, #[doc = "I2C1"] pub I2C1: I2C1, #[doc = "SSP0"] pub SSP0: SSP0, #[doc = "DAC"] pub DAC: DAC, #[doc = "TIMER2"] pub TIMER2: TIMER2, #[doc = "TIMER3"] pub TIMER3: TIMER3, #[doc = "UART2"] pub UART2: 
UART2, #[doc = "UART3"] pub UART3: UART3, #[doc = "I2C2"] pub I2C2: I2C2, #[doc = "UART4"] pub UART4: UART4, #[doc = "I2S"] pub I2S: I2S, #[doc = "MCPWM"] pub MCPWM: MCPWM, #[doc = "QEI"] pub QEI: QEI, #[doc = "SDMMC"] pub SDMMC: SDMMC, #[doc = "SYSCON"] pub SYSCON: SYSCON, } impl Peripherals { #[doc = r"Returns all the peripherals *once*"] #[inline] pub fn take() -> Option<Self> { cortex_m::interrupt::free(|_| { if unsafe { DEVICE_PERIPHERALS } { None } else { Some(unsafe { Peripherals::steal() }) } }) } #[doc = r"Unchecked version of `Peripherals::take`"] #[inline] pub unsafe fn steal() -> Self { DEVICE_PERIPHERALS = true; Peripherals { FLASHCTRL: FLASHCTRL { _marker: PhantomData, }, GPDMA: GPDMA { _marker: PhantomData, }, ETHERNET: ETHERNET { _marker: PhantomData, }, LCD: LCD { _marker: PhantomData, }, USB: USB { _marker: PhantomData, }, CRC: CRC { _marker: PhantomData, }, GPIO: GPIO { _marker: PhantomData, }, EMC: EMC { _marker: PhantomData, }, WWDT: WWDT { _marker: PhantomData, }, TIMER0: TIMER0 { _marker: PhantomData, }, TIMER1: TIMER1 { _marker: PhantomData, }, UART0: UART0 { _marker: PhantomData, }, UART1: UART1 { _marker: PhantomData, }, PWM0: PWM0 { _marker: PhantomData, }, PWM1: PWM1 { _marker: PhantomData, }, I2C0: I2C0 { _marker: PhantomData, }, RTC: RTC { _marker: PhantomData, }, GPIOINT: GPIOINT { _marker: PhantomData, }, IOCON: IOCON { _marker: PhantomData, }, SSP1: SSP1 { _marker: PhantomData, }, ADC: ADC { _marker: PhantomData, }, CANAFRAM: CANAFRAM { _marker: PhantomData, }, CANAF: CANAF { _marker: PhantomData, }, CCAN: CCAN { _marker: PhantomData, }, CAN1: CAN1 { _marker: PhantomData, }, CAN2: CAN2 { _marker: PhantomData, }, I2C1: I2C1 { _marker: PhantomData, }, SSP0: SSP0 { _marker: PhantomData, }, DAC: DAC { _marker: PhantomData, }, TIMER2: TIMER2 { _marker: PhantomData, }, TIMER3: TIMER3 { _marker: PhantomData, }, UART2: UART2 { _marker: PhantomData, }, UART3: UART3 { _marker: PhantomData, }, I2C2: I2C2 { _marker: PhantomData, }, UART4: UART4 
{ _marker: PhantomData, }, I2S: I2S { _marker: PhantomData, }, MCPWM: MCPWM { _marker: PhantomData, }, QEI: QEI { _marker: PhantomData, }, SDMMC: SDMMC { _marker: PhantomData, }, SYSCON: SYSCON { _marker: PhantomData, }, } } }
24.493671
215
0.559981
916a14631ad4662e0dc68f03dc43ea47767a5555
16,995
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.

use std::borrow::Borrow;
use std::fmt::{self, Debug, Display, Formatter};
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use std::time::Duration;

use engine_rocks::raw::{ColumnFamilyOptions, DBIterator, SeekKey as DBSeekKey, DB};
use engine_rocks::raw_util::CFOptions;
use engine_rocks::{RocksEngine as BaseRocksEngine, RocksEngineIterator};
use engine_traits::{CfName, CF_DEFAULT, CF_LOCK, CF_RAFT, CF_WRITE};
use engine_traits::{
    IterOptions, Iterable, Iterator, KvEngine, KvEngines, Mutable, Peekable, ReadOptions, SeekKey,
    WriteBatchExt,
};
use kvproto::kvrpcpb::Context;
use tempfile::{Builder, TempDir};
use txn_types::{Key, Value};

use crate::storage::config::BlockCacheConfig;
use tikv_util::escape;
use tikv_util::time::ThreadReadId;
use tikv_util::worker::{Runnable, Scheduler, Worker};

use super::{
    Callback, CbContext, Cursor, Engine, Error, ErrorInner, Iterator as EngineIterator, Modify,
    Result, ScanMode, Snapshot, WriteData,
};

pub use engine_rocks::RocksSnapshot;

// Sentinel path value meaning "use a throwaway temp directory" (see `RocksEngine::new`).
const TEMP_DIR: &str = "";

/// Work items handed to the background engine thread.
enum Task {
    // Apply a batch of modifications, then invoke the callback with the result.
    Write(Vec<Modify>, Callback<()>),
    // Take a snapshot of the KV engine and hand it to the callback.
    Snapshot(Callback<Arc<RocksSnapshot>>),
    // Sleep the worker thread for the given duration (used to simulate a stalled engine).
    Pause(Duration),
}

impl Display for Task {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match *self {
            Task::Write(..) => write!(f, "write task"),
            Task::Snapshot(_) => write!(f, "snapshot task"),
            Task::Pause(_) => write!(f, "pause"),
        }
    }
}

/// Background worker that executes `Task`s against the wrapped engine pair.
struct Runner(KvEngines<BaseRocksEngine, BaseRocksEngine>);

impl Runnable<Task> for Runner {
    fn run(&mut self, t: Task) {
        match t {
            Task::Write(modifies, cb) => {
                cb((CbContext::new(), write_modifies(&self.0.kv, modifies)))
            }
            Task::Snapshot(cb) => cb((CbContext::new(), Ok(Arc::new(self.0.kv.snapshot())))),
            // Blocks this worker thread only; callers observe it as a stalled engine.
            Task::Pause(dur) => std::thread::sleep(dur),
        }
    }
}

/// Shared internals of `RocksEngine`: the worker thread plus the optional
/// temp directory that keeps on-disk data alive for memory-mode engines.
struct RocksEngineCore {
    // Only used for memory mode (TEMP_DIR); dropping it deletes the data dir.
    temp_dir: Option<TempDir>,
    worker: Worker<Task>,
}

impl Drop for RocksEngineCore {
    // Stop the background worker and join its thread so the engine shuts down cleanly.
    fn drop(&mut self) {
        if let Some(h) = self.worker.stop() {
            if let Err(e) = h.join() {
                safe_panic!("RocksEngineCore engine thread panicked: {:?}", e);
            }
        }
    }
}

/// The RocksEngine is based on `RocksDB`.
///
/// This is intended for **testing use only**.
#[derive(Clone)]
pub struct RocksEngine {
    core: Arc<Mutex<RocksEngineCore>>,
    sched: Scheduler<Task>,
    engines: KvEngines<BaseRocksEngine, BaseRocksEngine>,
    // When set, async_snapshot fails with a NotLeader error (see trigger_not_leader).
    not_leader: Arc<AtomicBool>,
}

impl RocksEngine {
    /// Opens (or creates) a RocksDB at `path` with the given column families and
    /// options, and spawns the background worker thread.
    ///
    /// Passing `TEMP_DIR` (the empty string) creates the DB in a fresh temporary
    /// directory that is removed when the engine is dropped.
    pub fn new(
        path: &str,
        cfs: &[CfName],
        cfs_opts: Option<Vec<CFOptions<'_>>>,
        shared_block_cache: bool,
    ) -> Result<RocksEngine> {
        info!("RocksEngine: creating for path"; "path" => path);
        let (path, temp_dir) = match path {
            TEMP_DIR => {
                let td = Builder::new().prefix("temp-rocksdb").tempdir().unwrap();
                (td.path().to_str().unwrap().to_owned(), Some(td))
            }
            _ => (path.to_owned(), None),
        };
        let mut worker = Worker::new("engine-rocksdb");
        let db = Arc::new(engine_rocks::raw_util::new_engine(
            &path,
            None,
            cfs,
            cfs_opts,
        )?);
        // It does not use the raft_engine, so it is ok to fill with the same
        // rocksdb.
        let engines = KvEngines::new(
            BaseRocksEngine::from_db(db.clone()),
            BaseRocksEngine::from_db(db),
            shared_block_cache,
        );
        box_try!(worker.start(Runner(engines.clone())));
        Ok(RocksEngine {
            sched: worker.scheduler(),
            core: Arc::new(Mutex::new(RocksEngineCore { temp_dir, worker })),
            not_leader: Arc::new(AtomicBool::new(false)),
            engines,
        })
    }

    /// Makes subsequent `async_snapshot` calls fail with a NotLeader error.
    pub fn trigger_not_leader(&self) {
        self.not_leader.store(true, Ordering::SeqCst);
    }

    /// Stalls the background worker for `dur` (queued behind pending tasks).
    pub fn pause(&self, dur: Duration) {
        self.sched.schedule(Task::Pause(dur)).unwrap();
    }

    /// Returns a clone of the underlying engine pair.
    pub fn engines(&self) -> KvEngines<BaseRocksEngine, BaseRocksEngine> {
        self.engines.clone()
    }

    /// Returns the KV engine handle.
    pub fn get_rocksdb(&self) -> BaseRocksEngine {
        self.engines.kv.clone()
    }

    /// Stops the background worker and joins its thread.
    pub fn stop(&self) {
        let mut core = self.core.lock().unwrap();
        if let Some(h) = core.worker.stop() {
            h.join().unwrap();
        }
    }
}

impl Display for RocksEngine {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "RocksDB")
    }
}

impl Debug for RocksEngine {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "RocksDB [is_temp: {}]",
            self.core.lock().unwrap().temp_dir.is_some()
        )
    }
}

/// A builder to build a temporary `RocksEngine`.
///
/// Only used for test purpose.
#[must_use]
pub struct TestEngineBuilder {
    path: Option<PathBuf>,
    cfs: Option<Vec<CfName>>,
}

impl TestEngineBuilder {
    pub fn new() -> Self {
        Self {
            path: None,
            cfs: None,
        }
    }

    /// Customize the data directory of the temporary engine.
    ///
    /// By default, TEMP_DIR will be used.
    pub fn path(mut self, path: impl AsRef<Path>) -> Self {
        self.path = Some(path.as_ref().to_path_buf());
        self
    }

    /// Customize the CFs that engine will have.
    ///
    /// By default, engine will have all CFs.
    pub fn cfs(mut self, cfs: impl AsRef<[CfName]>) -> Self {
        self.cfs = Some(cfs.as_ref().to_vec());
        self
    }

    /// Build a `RocksEngine`.
    pub fn build(self) -> Result<RocksEngine> {
        let path = match self.path {
            None => TEMP_DIR.to_owned(),
            Some(p) => p.to_str().unwrap().to_owned(),
        };
        let cfs = self.cfs.unwrap_or_else(|| crate::storage::ALL_CFS.to_vec());
        let cfg_rocksdb = crate::config::DbConfig::default();
        let cache = BlockCacheConfig::default().build_shared_cache();
        // Map each requested CF to its configured options; unknown CFs get defaults.
        let cfs_opts = cfs
            .iter()
            .map(|cf| match *cf {
                CF_DEFAULT => CFOptions::new(CF_DEFAULT, cfg_rocksdb.defaultcf.build_opt(&cache)),
                CF_LOCK => CFOptions::new(CF_LOCK, cfg_rocksdb.lockcf.build_opt(&cache)),
                CF_WRITE => CFOptions::new(CF_WRITE, cfg_rocksdb.writecf.build_opt(&cache)),
                CF_RAFT => CFOptions::new(CF_RAFT, cfg_rocksdb.raftcf.build_opt(&cache)),
                _ => CFOptions::new(*cf, ColumnFamilyOptions::new()),
            })
            .collect();
        RocksEngine::new(&path, &cfs, Some(cfs_opts), cache.is_some())
    }
}

/// Write modifications into a `BaseRocksEngine` instance.
///
/// All modifications are collected into a single write batch and committed
/// with one engine write, so they apply atomically.
pub fn write_modifies(kv_engine: &BaseRocksEngine, modifies: Vec<Modify>) -> Result<()> {
    fail_point!("rockskv_write_modifies", |_| Err(box_err!("write failed")));
    let mut wb = kv_engine.write_batch();
    for rev in modifies {
        let res = match rev {
            Modify::Delete(cf, k) => {
                if cf == CF_DEFAULT {
                    trace!("RocksEngine: delete"; "key" => %k);
                    wb.delete(k.as_encoded())
                } else {
                    trace!("RocksEngine: delete_cf"; "cf" => cf, "key" => %k);
                    wb.delete_cf(cf, k.as_encoded())
                }
            }
            Modify::Put(cf, k, v) => {
                if cf == CF_DEFAULT {
                    trace!("RocksEngine: put"; "key" => %k, "value" => escape(&v));
                    wb.put(k.as_encoded(), &v)
                } else {
                    trace!("RocksEngine: put_cf"; "cf" => cf, "key" => %k, "value" => escape(&v));
                    wb.put_cf(cf, k.as_encoded(), &v)
                }
            }
            Modify::DeleteRange(cf, start_key, end_key, notify_only) => {
                trace!(
                    "RocksEngine: delete_range_cf";
                    "cf" => cf,
                    "start_key" => %start_key,
                    "end_key" => %end_key,
                    "notify_only" => notify_only,
                );
                // notify_only ranges are informational and produce no actual delete.
                if !notify_only {
                    wb.delete_range_cf(cf, start_key.as_encoded(), end_key.as_encoded())
                } else {
                    Ok(())
                }
            }
        };
        // TODO: turn the error into an engine error.
        if let Err(msg) = res {
            return Err(box_err!("{}", msg));
        }
    }
    kv_engine.write(&wb)?;
    Ok(())
}

impl Engine for RocksEngine {
    type Snap = Arc<RocksSnapshot>;

    fn kv_engine(&self) -> BaseRocksEngine {
        self.engines.kv.clone()
    }

    // The key-range arguments are ignored: this engine has a single region.
    fn snapshot_on_kv_engine(&self, _: &[u8], _: &[u8]) -> Result<Self::Snap> {
        self.snapshot(&Context::default())
    }

    fn modify_on_kv_engine(&self, modifies: Vec<Modify>) -> Result<()> {
        write_modifies(&self.engines.kv, modifies)
    }

    /// Schedules the write on the background worker; `cb` receives the result.
    fn async_write(&self, _: &Context, batch: WriteData, cb: Callback<()>) -> Result<()> {
        fail_point!("rockskv_async_write", |_| Err(box_err!("write failed")))
        if batch.modifies.is_empty() {
            return Err(Error::from(ErrorInner::EmptyRequest));
        }
        box_try!(self.sched.schedule(Task::Write(batch.modifies, cb)));
        Ok(())
    }

    /// Schedules a snapshot on the background worker, unless a failpoint or
    /// `trigger_not_leader` forces a NotLeader error first.
    fn async_snapshot(
        &self,
        _: &Context,
        _: Option<ThreadReadId>,
        cb: Callback<Self::Snap>,
    ) -> Result<()> {
        fail_point!("rockskv_async_snapshot", |_| Err(box_err!(
            "snapshot failed"
        )));
        let not_leader = {
            let mut header = kvproto::errorpb::Error::default();
            header.mut_not_leader().set_region_id(100);
            header
        };
        fail_point!("rockskv_async_snapshot_not_leader", |_| {
            Err(Error::from(ErrorInner::Request(not_leader.clone())))
        });
        if self.not_leader.load(Ordering::SeqCst) {
            return Err(Error::from(ErrorInner::Request(not_leader)));
        }
        box_try!(self.sched.schedule(Task::Snapshot(cb)));
        Ok(())
    }
}

impl Snapshot for Arc<RocksSnapshot> {
    type Iter = RocksEngineIterator;

    fn get(&self, key: &Key) -> Result<Option<Value>> {
        trace!("RocksSnapshot: get"; "key" => %key);
        let v = box_try!(self.get_value(key.as_encoded()));
        Ok(v.map(|v| v.to_vec()))
    }

    fn get_cf(&self, cf: CfName, key: &Key) -> Result<Option<Value>> {
        trace!("RocksSnapshot: get_cf"; "cf" => cf, "key" => %key);
        let v = box_try!(self.get_value_cf(cf, key.as_encoded()));
        Ok(v.map(|v| v.to_vec()))
    }

    fn get_cf_opt(&self, opts: ReadOptions, cf: CfName, key: &Key) -> Result<Option<Value>> {
        // NOTE(review): trace tag says "get_cf" even though this is get_cf_opt;
        // looks like a copy-paste — confirm before relying on trace output.
        trace!("RocksSnapshot: get_cf"; "cf" => cf, "key" => %key);
        let v = box_try!(self.get_value_cf_opt(&opts, cf, key.as_encoded()));
        Ok(v.map(|v| v.to_vec()))
    }

    fn iter(&self, iter_opt: IterOptions, mode: ScanMode) -> Result<Cursor<Self::Iter>> {
        trace!("RocksSnapshot: create iterator");
        let iter = self.iterator_opt(iter_opt)?;
        Ok(Cursor::new(iter, mode))
    }

    fn iter_cf(
        &self,
        cf: CfName,
        iter_opt: IterOptions,
        mode: ScanMode,
    ) -> Result<Cursor<Self::Iter>> {
        trace!("RocksSnapshot: create cf iterator");
        let iter = self.iterator_cf_opt(cf, iter_opt)?;
        Ok(Cursor::new(iter, mode))
    }
}

// Adapt the engine_traits iterator to the storage-layer EngineIterator trait,
// converting errors into the storage Error type.
impl EngineIterator for RocksEngineIterator {
    fn next(&mut self) -> Result<bool> {
        Iterator::next(self).map_err(Error::from)
    }

    fn prev(&mut self) -> Result<bool> {
        Iterator::prev(self).map_err(Error::from)
    }

    fn seek(&mut self, key: &Key) -> Result<bool> {
        Iterator::seek(self, key.as_encoded().as_slice().into()).map_err(Error::from)
    }

    fn seek_for_prev(&mut self, key: &Key) -> Result<bool> {
        Iterator::seek_for_prev(self, key.as_encoded().as_slice().into()).map_err(Error::from)
    }

    fn seek_to_first(&mut self) -> Result<bool> {
        Iterator::seek(self, SeekKey::Start).map_err(Error::from)
    }

    fn seek_to_last(&mut self) -> Result<bool> {
        Iterator::seek(self, SeekKey::End).map_err(Error::from)
    }

    fn valid(&self) -> Result<bool> {
        Iterator::valid(self).map_err(Error::from)
    }

    fn key(&self) -> &[u8] {
        Iterator::key(self)
    }

    fn value(&self) -> &[u8] {
        Iterator::value(self)
    }
}

// Same adaptation for a raw RocksDB iterator (errors boxed into storage errors).
impl<D: Borrow<DB> + Send> EngineIterator for DBIterator<D> {
    fn next(&mut self) -> Result<bool> {
        DBIterator::next(self).map_err(|e| box_err!(e))
    }

    fn prev(&mut self) -> Result<bool> {
        DBIterator::prev(self).map_err(|e| box_err!(e))
    }

    fn seek(&mut self, key: &Key) -> Result<bool> {
        DBIterator::seek(self, key.as_encoded().as_slice().into()).map_err(|e| box_err!(e))
    }

    fn seek_for_prev(&mut self, key: &Key) -> Result<bool> {
        DBIterator::seek_for_prev(self, key.as_encoded().as_slice().into()).map_err(|e| box_err!(e))
    }

    fn seek_to_first(&mut self) -> Result<bool> {
        DBIterator::seek(self, DBSeekKey::Start).map_err(|e| box_err!(e))
    }

    fn seek_to_last(&mut self) -> Result<bool> {
        DBIterator::seek(self, DBSeekKey::End).map_err(|e| box_err!(e))
    }

    fn valid(&self) -> Result<bool> {
        DBIterator::valid(self).map_err(|e| box_err!(e))
    }

    fn key(&self) -> &[u8] {
        DBIterator::key(self)
    }

    fn value(&self) -> &[u8] {
        DBIterator::value(self)
    }
}

#[cfg(test)]
mod tests {
    use super::super::perf_context::PerfStatisticsInstant;
    use super::super::tests::*;
    use super::super::CfStatistics;
    use super::*;

    #[test]
    fn test_rocksdb() {
        let engine = TestEngineBuilder::new()
            .cfs(TEST_ENGINE_CFS)
            .build()
            .unwrap();
        test_base_curd_options(&engine)
    }

    #[test]
    fn test_rocksdb_linear() {
        let engine = TestEngineBuilder::new()
            .cfs(TEST_ENGINE_CFS)
            .build()
            .unwrap();
        test_linear(&engine);
    }

    #[test]
    fn test_rocksdb_statistic() {
        let engine = TestEngineBuilder::new()
            .cfs(TEST_ENGINE_CFS)
            .build()
            .unwrap();
        test_cfs_statistics(&engine);
    }

    // Verify data written through one engine instance survives reopen from the same path.
    #[test]
    fn rocksdb_reopen() {
        let dir = tempfile::Builder::new()
            .prefix("rocksdb_test")
            .tempdir()
            .unwrap();
        {
            let engine = TestEngineBuilder::new()
                .path(dir.path())
                .cfs(TEST_ENGINE_CFS)
                .build()
                .unwrap();
            must_put_cf(&engine, "cf", b"k", b"v1");
        }
        {
            let engine = TestEngineBuilder::new()
                .path(dir.path())
                .cfs(TEST_ENGINE_CFS)
                .build()
                .unwrap();
            assert_has_cf(&engine, "cf", b"k", b"v1");
        }
    }

    #[test]
    fn test_rocksdb_perf_statistics() {
        let engine = TestEngineBuilder::new()
            .cfs(TEST_ENGINE_CFS)
            .build()
            .unwrap();
        test_perf_statistics(&engine);
    }

    // Checks internal_delete_skipped_count perf counters around deleted keys.
    fn test_perf_statistics<E: Engine>(engine: &E) {
        must_put(engine, b"foo", b"bar1");
        must_put(engine, b"foo2", b"bar2");
        must_put(engine, b"foo3", b"bar3"); // deleted
        must_put(engine, b"foo4", b"bar4");
        must_put(engine, b"foo42", b"bar42"); // deleted
        must_put(engine, b"foo5", b"bar5"); // deleted
        must_put(engine, b"foo6", b"bar6");
        must_delete(engine, b"foo3");
        must_delete(engine, b"foo42");
        must_delete(engine, b"foo5");
        let snapshot = engine.snapshot(&Context::default()).unwrap();
        let mut iter = snapshot
            .iter(IterOptions::default(), ScanMode::Forward)
            .unwrap();

        let mut statistics = CfStatistics::default();

        let perf_statistics = PerfStatisticsInstant::new();
        iter.seek(&Key::from_raw(b"foo30"), &mut statistics)
            .unwrap();
        assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 0);

        let perf_statistics = PerfStatisticsInstant::new();
        iter.near_seek(&Key::from_raw(b"foo55"), &mut statistics)
            .unwrap();
        assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 2);

        let perf_statistics = PerfStatisticsInstant::new();
        iter.prev(&mut statistics);
        assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 2);

        iter.prev(&mut statistics);
        assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 3);

        iter.prev(&mut statistics);
        assert_eq!(perf_statistics.delta().0.internal_delete_skipped_count, 3);
    }
}
31.183486
100
0.557811
08ab2faf40133862282ecdb814130ae166657e5a
1,455
// Copyright (c) The Starcoin Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::cli_state::CliState; use crate::StarcoinOpt; use anyhow::Result; use scmd::{CommandAction, ExecContext}; use starcoin_account_api::AccountInfo; use starcoin_vm_types::account_address::AccountAddress; use structopt::StructOpt; /// Change account password, should unlock the account before change password. #[derive(Debug, StructOpt)] #[structopt(name = "change-password")] pub struct ChangePasswordOpt { #[structopt( name = "account_address", help = "The wallet account address which to change password." )] account_address: AccountAddress, #[structopt(short, name = "password")] password: String, } pub struct ChangePasswordCmd; impl CommandAction for ChangePasswordCmd { type State = CliState; type GlobalOpt = StarcoinOpt; type Opt = ChangePasswordOpt; type ReturnItem = AccountInfo; fn run( &self, ctx: &ExecContext<Self::State, Self::GlobalOpt, Self::Opt>, ) -> Result<Self::ReturnItem> { let opt: &ChangePasswordOpt = ctx.opt(); let account_client = ctx.state().account_client(); let account_info = account_client.change_account_password(opt.account_address, opt.password.clone())?; Ok(account_info) } fn skip_history(&self, _ctx: &ExecContext<Self::State, Self::GlobalOpt, Self::Opt>) -> bool { true } }
29.693878
97
0.690034
0e6eba0e6cae281e55063eb6db0948458130e96f
2,049
use anyhow::{Context, Error, Result}; use tokio::sync::mpsc::channel; use crate::providers::http::Report as HttpReport; use crate::report::Report; use crate::tasks::adapters::from_file; use crate::utils::timestamp::current_timestamp; use crate::worker::{Squad, Worker}; /// This is likely to change in the future in order /// to be able to define where to gather the config /// from. Default for now is "nasu.json" const CONFIG_FILE_NAME: &str = "nasu.json"; pub async fn run() -> Result<()> { let tasks = from_file(CONFIG_FILE_NAME).context(format!("Failed to parse \"{}\"", CONFIG_FILE_NAME))?; let workers: Vec<Worker> = tasks.into_iter().map(|task| Worker::from(task)).collect(); let (tx, mut rx) = channel::<Report>(1024); let squad = Squad::new(workers, tx); let print_proc = tokio::spawn(async move { println!( "{0: <15} | {1: <15} | {2: <20} | {3: <15} | {4: <15}", "Log Time", "Task", "HTTP. Status Code", "Req. Time", "Res. Time" ); println!("=========================================================================================="); while let Some(report) = rx.recv().await { match report { Report::Http(HttpReport { id, req_end, req_start, status_code, .. }) => { println!( "{0: <15} | {1: <15} | {2: <20} | {3: <15} | {4: <15}", current_timestamp(), id, status_code, req_start, req_end ); } } } }); let run_proc = tokio::spawn(async move { squad.start().await; }); tokio::select! { _ = print_proc => { Err(Error::msg("Output process stopped")) }, _ = run_proc => { Err(Error::msg("Run process stopped")) }, } }
32.52381
111
0.457296
4b2980bc3a6957211a1e286003d84910203ed527
26,275
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::collections::HashMap; use std::env; use std::ffi::OsString; use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; use std::panic::{self, AssertUnwindSafe}; use std::process::Command; use std::rc::Rc; use std::str; use std::sync::{Arc, Mutex}; use testing; use rustc_lint; use rustc::hir; use rustc::hir::intravisit; use rustc::session::{self, CompileIncomplete, config}; use rustc::session::config::{OutputType, OutputTypes, Externs}; use rustc::session::search_paths::{SearchPaths, PathKind}; use rustc_back::dynamic_lib::DynamicLibrary; use rustc_back::tempdir::TempDir; use rustc_driver::{self, driver, Compilation}; use rustc_driver::driver::phase_2_configure_and_expand; use rustc_driver::pretty::ReplaceBodyWithLoop; use rustc_metadata::cstore::CStore; use rustc_resolve::MakeGlobMap; use rustc_trans; use rustc_trans::back::link; use syntax::ast; use syntax::codemap::CodeMap; use syntax::feature_gate::UnstableFeatures; use syntax::fold::Folder; use syntax_pos::{BytePos, DUMMY_SP, Pos, Span}; use errors; use errors::emitter::ColorConfig; use clean::Attributes; use html::markdown::{self, RenderType}; #[derive(Clone, Default)] pub struct TestOptions { pub no_crate_inject: bool, pub attrs: Vec<String>, } pub fn run(input: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs, mut test_args: Vec<String>, crate_name: Option<String>, maybe_sysroot: Option<PathBuf>, render_type: RenderType, display_warnings: bool) -> isize { let input_path = PathBuf::from(input); let input = 
config::Input::File(input_path.clone()); let sessopts = config::Options { maybe_sysroot: maybe_sysroot.clone().or_else( || Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())), search_paths: libs.clone(), crate_types: vec![config::CrateTypeDylib], externs: externs.clone(), unstable_features: UnstableFeatures::from_environment(), lint_cap: Some(::rustc::lint::Level::Allow), actually_rustdoc: true, ..config::basic_options().clone() }; let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping())); let handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone())); let cstore = Rc::new(CStore::new(box rustc_trans::LlvmMetadataLoader)); let mut sess = session::build_session_( sessopts, Some(input_path.clone()), handler, codemap.clone(), ); rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); sess.parse_sess.config = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); let krate = panictry!(driver::phase_1_parse_input(&driver::CompileController::basic(), &sess, &input)); let krate = ReplaceBodyWithLoop::new().fold_crate(krate); let driver::ExpansionResult { defs, mut hir_forest, .. 
} = { phase_2_configure_and_expand( &sess, &cstore, krate, None, "rustdoc-test", None, MakeGlobMap::No, |_| Ok(()), ).expect("phase_2_configure_and_expand aborted in rustdoc!") }; let crate_name = crate_name.unwrap_or_else(|| { link::find_crate_name(None, &hir_forest.krate().attrs, &input) }); let opts = scrape_test_config(hir_forest.krate()); let mut collector = Collector::new(crate_name, cfgs, libs, externs, false, opts, maybe_sysroot, Some(codemap), None, render_type); { let map = hir::map::map_crate(&sess, &*cstore, &mut hir_forest, &defs); let krate = map.krate(); let mut hir_collector = HirCollector { sess: &sess, collector: &mut collector, map: &map }; hir_collector.visit_testable("".to_string(), &krate.attrs, |this| { intravisit::walk_crate(this, krate); }); } test_args.insert(0, "rustdoctest".to_string()); testing::test_main(&test_args, collector.tests.into_iter().collect(), testing::Options::new().display_output(display_warnings)); 0 } // Look for #![doc(test(no_crate_inject))], used by crates in the std facade fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions { use syntax::print::pprust; let mut opts = TestOptions { no_crate_inject: false, attrs: Vec::new(), }; let test_attrs: Vec<_> = krate.attrs.iter() .filter(|a| a.check_name("doc")) .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new)) .filter(|a| a.check_name("test")) .collect(); let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[])); for attr in attrs { if attr.check_name("no_crate_inject") { opts.no_crate_inject = true; } if attr.check_name("attr") { if let Some(l) = attr.meta_item_list() { for item in l { opts.attrs.push(pprust::meta_list_item_to_string(item)); } } } } opts } fn run_test(test: &str, cratename: &str, filename: &str, cfgs: Vec<String>, libs: SearchPaths, externs: Externs, should_panic: bool, no_run: bool, as_test_harness: bool, compile_fail: bool, mut error_codes: Vec<String>, opts: &TestOptions, maybe_sysroot: Option<PathBuf>) { // the 
test harness wants its own `main` & top level functions, so // never wrap the test in `fn main() { ... }` let test = make_test(test, Some(cratename), as_test_harness, opts); // FIXME(#44940): if doctests ever support path remapping, then this filename // needs to be the result of CodeMap::span_to_unmapped_path let input = config::Input::Str { name: filename.to_owned(), input: test.to_owned(), }; let outputs = OutputTypes::new(&[(OutputType::Exe, None)]); let sessopts = config::Options { maybe_sysroot: maybe_sysroot.or_else( || Some(env::current_exe().unwrap().parent().unwrap().parent().unwrap().to_path_buf())), search_paths: libs, crate_types: vec![config::CrateTypeExecutable], output_types: outputs, externs, cg: config::CodegenOptions { prefer_dynamic: true, .. config::basic_codegen_options() }, test: as_test_harness, unstable_features: UnstableFeatures::from_environment(), ..config::basic_options().clone() }; // Shuffle around a few input and output handles here. We're going to pass // an explicit handle into rustc to collect output messages, but we also // want to catch the error message that rustc prints when it fails. // // We take our thread-local stderr (likely set by the test runner) and replace // it with a sink that is also passed to rustc itself. When this function // returns the output of the sink is copied onto the output of our own thread. // // The basic idea is to not use a default Handler for rustc, and then also // not print things by default to the actual stderr. 
struct Sink(Arc<Mutex<Vec<u8>>>); impl Write for Sink { fn write(&mut self, data: &[u8]) -> io::Result<usize> { Write::write(&mut *self.0.lock().unwrap(), data) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } struct Bomb(Arc<Mutex<Vec<u8>>>, Box<Write+Send>); impl Drop for Bomb { fn drop(&mut self) { let _ = self.1.write_all(&self.0.lock().unwrap()); } } let data = Arc::new(Mutex::new(Vec::new())); let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping())); let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), Some(codemap.clone())); let old = io::set_panic(Some(box Sink(data.clone()))); let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout())); // Compile the code let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter); let cstore = Rc::new(CStore::new(box rustc_trans::LlvmMetadataLoader)); let mut sess = session::build_session_( sessopts, None, diagnostic_handler, codemap, ); rustc_trans::init(&sess); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir")); let libdir = sess.target_filesearch(PathKind::All).get_lib_path(); let mut control = driver::CompileController::basic(); sess.parse_sess.config = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); let out = Some(outdir.lock().unwrap().path().to_path_buf()); if no_run { control.after_analysis.stop = Compilation::Stop; } let res = panic::catch_unwind(AssertUnwindSafe(|| { driver::compile_input(&sess, &cstore, &input, &out, &None, None, &control) })); let compile_result = match res { Ok(Ok(())) | Ok(Err(CompileIncomplete::Stopped)) => Ok(()), Err(_) | Ok(Err(CompileIncomplete::Errored(_))) => Err(()) }; match (compile_result, compile_fail) { (Ok(()), true) => { panic!("test compiled while it wasn't supposed to") } (Ok(()), false) => {} (Err(()), true) => { if error_codes.len() > 0 { let out = 
String::from_utf8(data.lock().unwrap().to_vec()).unwrap(); error_codes.retain(|err| !out.contains(err)); } } (Err(()), false) => { panic!("couldn't compile the test") } } if error_codes.len() > 0 { panic!("Some expected error codes were not found: {:?}", error_codes); } if no_run { return } // Run the code! // // We're careful to prepend the *target* dylib search path to the child's // environment to ensure that the target loads the right libraries at // runtime. It would be a sad day if the *host* libraries were loaded as a // mistake. let mut cmd = Command::new(&outdir.lock().unwrap().path().join("rust_out")); let var = DynamicLibrary::envvar(); let newpath = { let path = env::var_os(var).unwrap_or(OsString::new()); let mut path = env::split_paths(&path).collect::<Vec<_>>(); path.insert(0, libdir.clone()); env::join_paths(path).unwrap() }; cmd.env(var, &newpath); match cmd.output() { Err(e) => panic!("couldn't run the test: {}{}", e, if e.kind() == io::ErrorKind::PermissionDenied { " - maybe your tempdir is mounted with noexec?" } else { "" }), Ok(out) => { if should_panic && out.status.success() { panic!("test executable succeeded when it should have failed"); } else if !should_panic && !out.status.success() { panic!("test executable failed:\n{}\n{}\n", str::from_utf8(&out.stdout).unwrap_or(""), str::from_utf8(&out.stderr).unwrap_or("")); } } } } pub fn make_test(s: &str, cratename: Option<&str>, dont_insert_main: bool, opts: &TestOptions) -> String { let (crate_attrs, everything_else) = partition_source(s); let mut prog = String::new(); // First push any outer attributes from the example, assuming they // are intended to be crate attributes. prog.push_str(&crate_attrs); // Next, any attributes for other aspects such as lints. for attr in &opts.attrs { prog.push_str(&format!("#![{}]\n", attr)); } // Don't inject `extern crate std` because it's already injected by the // compiler. 
if !s.contains("extern crate") && !opts.no_crate_inject && cratename != Some("std") { if let Some(cratename) = cratename { if s.contains(cratename) { prog.push_str(&format!("extern crate {};\n", cratename)); } } } // FIXME (#21299): prefer libsyntax or some other actual parser over this // best-effort ad hoc approach let already_has_main = s.lines() .map(|line| { let comment = line.find("//"); if let Some(comment_begins) = comment { &line[0..comment_begins] } else { line } }) .any(|code| code.contains("fn main")); if dont_insert_main || already_has_main { prog.push_str(&everything_else); } else { prog.push_str("fn main() {\n"); prog.push_str(&everything_else); prog = prog.trim().into(); prog.push_str("\n}"); } info!("final test program: {}", prog); prog } // FIXME(aburka): use a real parser to deal with multiline attributes fn partition_source(s: &str) -> (String, String) { use std_unicode::str::UnicodeStr; let mut after_header = false; let mut before = String::new(); let mut after = String::new(); for line in s.lines() { let trimline = line.trim(); let header = trimline.is_whitespace() || trimline.starts_with("#!["); if !header || after_header { after_header = true; after.push_str(line); after.push_str("\n"); } else { before.push_str(line); before.push_str("\n"); } } (before, after) } pub struct Collector { pub tests: Vec<testing::TestDescAndFn>, // to be removed when hoedown will be definitely gone pub old_tests: HashMap<String, Vec<String>>, // The name of the test displayed to the user, separated by `::`. // // In tests from Rust source, this is the path to the item // e.g. `["std", "vec", "Vec", "push"]`. // // In tests from a markdown file, this is the titles of all headers (h1~h6) // of the sections that contain the code block, e.g. if the markdown file is // written as: // // ``````markdown // # Title // // ## Subtitle // // ```rust // assert!(true); // ``` // `````` // // the `names` vector of that test will be `["Title", "Subtitle"]`. 
names: Vec<String>, cfgs: Vec<String>, libs: SearchPaths, externs: Externs, use_headers: bool, cratename: String, opts: TestOptions, maybe_sysroot: Option<PathBuf>, position: Span, codemap: Option<Rc<CodeMap>>, filename: Option<String>, // to be removed when hoedown will be removed as well pub render_type: RenderType, } impl Collector { pub fn new(cratename: String, cfgs: Vec<String>, libs: SearchPaths, externs: Externs, use_headers: bool, opts: TestOptions, maybe_sysroot: Option<PathBuf>, codemap: Option<Rc<CodeMap>>, filename: Option<String>, render_type: RenderType) -> Collector { Collector { tests: Vec::new(), old_tests: HashMap::new(), names: Vec::new(), cfgs, libs, externs, use_headers, cratename, opts, maybe_sysroot, position: DUMMY_SP, codemap, filename, render_type, } } fn generate_name(&self, line: usize, filename: &str) -> String { format!("{} - {} (line {})", filename, self.names.join("::"), line) } // to be removed once hoedown is gone fn generate_name_beginning(&self, filename: &str) -> String { format!("{} - {} (line", filename, self.names.join("::")) } pub fn add_old_test(&mut self, test: String, filename: String) { let name_beg = self.generate_name_beginning(&filename); let entry = self.old_tests.entry(name_beg) .or_insert(Vec::new()); entry.push(test.trim().to_owned()); } pub fn add_test(&mut self, test: String, should_panic: bool, no_run: bool, should_ignore: bool, as_test_harness: bool, compile_fail: bool, error_codes: Vec<String>, line: usize, filename: String, allow_fail: bool) { let name = self.generate_name(line, &filename); // to be removed when hoedown is removed if self.render_type == RenderType::Pulldown { let name_beg = self.generate_name_beginning(&filename); let mut found = false; let test = test.trim().to_owned(); if let Some(entry) = self.old_tests.get_mut(&name_beg) { found = entry.remove_item(&test).is_some(); } if !found { eprintln!("WARNING: {} Code block is not currently run as a test, but will \ in future versions of rustdoc. 
Please ensure this code block is \ a runnable test, or use the `ignore` directive.", name); return } } let cfgs = self.cfgs.clone(); let libs = self.libs.clone(); let externs = self.externs.clone(); let cratename = self.cratename.to_string(); let opts = self.opts.clone(); let maybe_sysroot = self.maybe_sysroot.clone(); debug!("Creating test {}: {}", name, test); self.tests.push(testing::TestDescAndFn { desc: testing::TestDesc { name: testing::DynTestName(name), ignore: should_ignore, // compiler failures are test failures should_panic: testing::ShouldPanic::No, allow_fail, }, testfn: testing::DynTestFn(box move |()| { let panic = io::set_panic(None); let print = io::set_print(None); match { rustc_driver::in_rustc_thread(move || { io::set_panic(panic); io::set_print(print); run_test(&test, &cratename, &filename, cfgs, libs, externs, should_panic, no_run, as_test_harness, compile_fail, error_codes, &opts, maybe_sysroot) }) } { Ok(()) => (), Err(err) => panic::resume_unwind(err), } }), }); } pub fn get_line(&self) -> usize { if let Some(ref codemap) = self.codemap { let line = self.position.lo().to_usize(); let line = codemap.lookup_char_pos(BytePos(line as u32)).line; if line > 0 { line - 1 } else { line } } else { 0 } } pub fn set_position(&mut self, position: Span) { self.position = position; } pub fn get_filename(&self) -> String { if let Some(ref codemap) = self.codemap { let filename = codemap.span_to_filename(self.position); if let Ok(cur_dir) = env::current_dir() { if let Ok(path) = Path::new(&filename).strip_prefix(&cur_dir) { if let Some(path) = path.to_str() { return path.to_owned(); } } } filename } else if let Some(ref filename) = self.filename { filename.clone() } else { "<input>".to_owned() } } pub fn register_header(&mut self, name: &str, level: u32) { if self.use_headers { // we use these headings as test names, so it's good if // they're valid identifiers. 
let name = name.chars().enumerate().map(|(i, c)| { if (i == 0 && c.is_xid_start()) || (i != 0 && c.is_xid_continue()) { c } else { '_' } }).collect::<String>(); // Here we try to efficiently assemble the header titles into the // test name in the form of `h1::h2::h3::h4::h5::h6`. // // Suppose originally `self.names` contains `[h1, h2, h3]`... let level = level as usize; if level <= self.names.len() { // ... Consider `level == 2`. All headers in the lower levels // are irrelevant in this new level. So we should reset // `self.names` to contain headers until <h2>, and replace that // slot with the new name: `[h1, name]`. self.names.truncate(level); self.names[level - 1] = name; } else { // ... On the other hand, consider `level == 5`. This means we // need to extend `self.names` to contain five headers. We fill // in the missing level (<h4>) with `_`. Thus `self.names` will // become `[h1, h2, h3, "_", name]`. if level - 1 > self.names.len() { self.names.resize(level - 1, "_".to_owned()); } self.names.push(name); } } } } struct HirCollector<'a, 'hir: 'a> { sess: &'a session::Session, collector: &'a mut Collector, map: &'a hir::map::Map<'hir> } impl<'a, 'hir> HirCollector<'a, 'hir> { fn visit_testable<F: FnOnce(&mut Self)>(&mut self, name: String, attrs: &[ast::Attribute], nested: F) { let mut attrs = Attributes::from_ast(self.sess.diagnostic(), attrs); if let Some(ref cfg) = attrs.cfg { if !cfg.matches(&self.sess.parse_sess, Some(&self.sess.features.borrow())) { return; } } let has_name = !name.is_empty(); if has_name { self.collector.names.push(name); } attrs.collapse_doc_comments(); attrs.unindent_doc_comments(); if let Some(doc) = attrs.doc_value() { if self.collector.render_type == RenderType::Pulldown { markdown::old_find_testable_code(doc, self.collector, attrs.span.unwrap_or(DUMMY_SP)); markdown::find_testable_code(doc, self.collector, attrs.span.unwrap_or(DUMMY_SP)); } else { markdown::old_find_testable_code(doc, self.collector, 
attrs.span.unwrap_or(DUMMY_SP)); } } nested(self); if has_name { self.collector.names.pop(); } } } impl<'a, 'hir> intravisit::Visitor<'hir> for HirCollector<'a, 'hir> { fn nested_visit_map<'this>(&'this mut self) -> intravisit::NestedVisitorMap<'this, 'hir> { intravisit::NestedVisitorMap::All(&self.map) } fn visit_item(&mut self, item: &'hir hir::Item) { let name = if let hir::ItemImpl(.., ref ty, _) = item.node { self.map.node_to_pretty_string(ty.id) } else { item.name.to_string() }; self.visit_testable(name, &item.attrs, |this| { intravisit::walk_item(this, item); }); } fn visit_trait_item(&mut self, item: &'hir hir::TraitItem) { self.visit_testable(item.name.to_string(), &item.attrs, |this| { intravisit::walk_trait_item(this, item); }); } fn visit_impl_item(&mut self, item: &'hir hir::ImplItem) { self.visit_testable(item.name.to_string(), &item.attrs, |this| { intravisit::walk_impl_item(this, item); }); } fn visit_foreign_item(&mut self, item: &'hir hir::ForeignItem) { self.visit_testable(item.name.to_string(), &item.attrs, |this| { intravisit::walk_foreign_item(this, item); }); } fn visit_variant(&mut self, v: &'hir hir::Variant, g: &'hir hir::Generics, item_id: ast::NodeId) { self.visit_testable(v.node.name.to_string(), &v.node.attrs, |this| { intravisit::walk_variant(this, v, g, item_id); }); } fn visit_struct_field(&mut self, f: &'hir hir::StructField) { self.visit_testable(f.name.to_string(), &f.attrs, |this| { intravisit::walk_struct_field(this, f); }); } fn visit_macro_def(&mut self, macro_def: &'hir hir::MacroDef) { self.visit_testable(macro_def.name.to_string(), &macro_def.attrs, |_| ()); } }
36.442441
100
0.544091
168d74459ac272ecf2ebe355d28bc3d5577bb1e9
19,394
//! `cargo upgrade` #![warn( missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unstable_features, unused_import_braces, unused_qualifications )] #[macro_use] extern crate error_chain; use crate::errors::*; use cargo_edit::{ find, get_latest_dependency, manifest_from_pkgid, registry_url, update_registry_index, CrateName, Dependency, LocalManifest, }; use std::collections::{HashMap, HashSet}; use std::io::Write; use std::path::{Path, PathBuf}; use std::process; use structopt::{clap::AppSettings, StructOpt}; use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor}; use url::Url; mod errors { error_chain! { links { CargoEditLib(::cargo_edit::Error, ::cargo_edit::ErrorKind); } foreign_links { CargoMetadata(::cargo_metadata::Error)#[doc = "An error from the cargo_metadata crate"]; } } } #[derive(Debug, StructOpt)] #[structopt(bin_name = "cargo")] enum Command { /// Upgrade dependencies as specified in the local manifest file (i.e. Cargo.toml). #[structopt(name = "upgrade")] #[structopt(after_help = "\ This command differs from `cargo update`, which updates the dependency versions recorded in the \ local lock file (Cargo.lock). If `<dependency>`(s) are provided, only the specified dependencies will be upgraded. The version \ to upgrade to for each can be specified with e.g. `docopt@0.8.0` or `serde@>=0.9,<2.0`. Dev, build, and all target dependencies will also be upgraded. Only dependencies from crates.io \ are supported. Git/path dependencies will be ignored. All packages in the workspace will be upgraded if the `--workspace` flag is supplied. The \ `--workspace` flag may be supplied in the presence of a virtual manifest. If the '--to-lockfile' flag is supplied, all dependencies will be upgraded to the currently locked \ version as recorded in the Cargo.lock file. This flag requires that the Cargo.lock file is \ up-to-date. 
If the lock file is missing, or it needs to be updated, cargo-upgrade will exit with \ an error. If the '--to-lockfile' flag is supplied then the network won't be accessed.")] Upgrade(Args), } #[derive(Debug, StructOpt)] #[structopt(setting = AppSettings::ColoredHelp)] struct Args { /// Crates to be upgraded. dependency: Vec<String>, /// Path to the manifest to upgrade #[structopt(long = "manifest-path", value_name = "path", conflicts_with = "pkgid")] manifest_path: Option<PathBuf>, /// Package id of the crate to add this dependency to. #[structopt( long = "package", short = "p", value_name = "pkgid", conflicts_with = "path", conflicts_with = "all", conflicts_with = "workspace" )] pkgid: Option<String>, /// Upgrade all packages in the workspace. #[structopt( long = "all", help = "[deprecated in favor of `--workspace`]", conflicts_with = "workspace", conflicts_with = "pkgid" )] all: bool, /// Upgrade all packages in the workspace. #[structopt(long = "workspace", conflicts_with = "all", conflicts_with = "pkgid")] workspace: bool, /// Include prerelease versions when fetching from crates.io (e.g. 0.6.0-alpha'). #[structopt(long = "allow-prerelease")] allow_prerelease: bool, /// Print changes to be made without making them. #[structopt(long = "dry-run")] dry_run: bool, /// Only update a dependency if the new version is semver incompatible. #[structopt(long = "skip-compatible", conflicts_with = "to_lockfile")] skip_compatible: bool, /// Run without accessing the network #[structopt(long = "offline")] pub offline: bool, /// Upgrade all packages to the version in the lockfile. #[structopt(long = "to-lockfile", conflicts_with = "dependency")] pub to_lockfile: bool, /// Crates to exclude and not upgrade. #[structopt(long)] exclude: Vec<String>, } /// A collection of manifests. struct Manifests(Vec<(LocalManifest, cargo_metadata::Package)>); /// Helper function to check whether a `cargo_metadata::Dependency` is a version dependency. 
fn is_version_dep(dependency: &cargo_metadata::Dependency) -> bool { match dependency.source { // This is the criterion cargo uses (in `SourceId::from_url`) to decide whether a // dependency has the 'registry' kind. Some(ref s) => s.split_once('+').map(|(x, _)| x) == Some("registry"), _ => false, } } fn deprecated_message(message: &str) -> Result<()> { let bufwtr = BufferWriter::stderr(ColorChoice::Always); let mut buffer = bufwtr.buffer(); buffer .set_color(ColorSpec::new().set_fg(Some(Color::Red)).set_bold(true)) .chain_err(|| "Failed to set output colour")?; writeln!(&mut buffer, "{}", message).chain_err(|| "Failed to write deprecated message")?; buffer .set_color(&ColorSpec::new()) .chain_err(|| "Failed to clear output colour")?; bufwtr .print(&buffer) .chain_err(|| "Failed to print deprecated message") } fn dry_run_message() -> Result<()> { let bufwtr = BufferWriter::stderr(ColorChoice::Always); let mut buffer = bufwtr.buffer(); buffer .set_color(ColorSpec::new().set_fg(Some(Color::Cyan)).set_bold(true)) .chain_err(|| "Failed to set output colour")?; write!(&mut buffer, "Starting dry run. ").chain_err(|| "Failed to write dry run message")?; buffer .set_color(&ColorSpec::new()) .chain_err(|| "Failed to clear output colour")?; writeln!(&mut buffer, "Changes will not be saved.") .chain_err(|| "Failed to write dry run message")?; bufwtr .print(&buffer) .chain_err(|| "Failed to print dry run message") } impl Manifests { /// Get all manifests in the workspace. 
fn get_all(manifest_path: &Option<PathBuf>) -> Result<Self> { let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd .exec() .chain_err(|| "Failed to get workspace metadata")?; result .packages .into_iter() .map(|package| { Ok(( LocalManifest::try_new(Path::new(&package.manifest_path))?, package, )) }) .collect::<Result<Vec<_>>>() .map(Manifests) } fn get_pkgid(manifest_path: Option<&Path>, pkgid: &str) -> Result<Self> { let package = manifest_from_pkgid(manifest_path, pkgid)?; let manifest = LocalManifest::try_new(Path::new(&package.manifest_path))?; Ok(Manifests(vec![(manifest, package)])) } /// Get the manifest specified by the manifest path. Try to make an educated guess if no path is /// provided. fn get_local_one(manifest_path: &Option<PathBuf>) -> Result<Self> { let resolved_manifest_path: String = find(manifest_path)?.to_string_lossy().into(); let manifest = LocalManifest::find(manifest_path)?; let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd.exec().chain_err(|| "Invalid manifest")?; let packages = result.packages; let package = packages .iter() .find(|p| p.manifest_path == resolved_manifest_path) // If we have successfully got metadata, but our manifest path does not correspond to a // package, we must have been called against a virtual manifest. .chain_err(|| { "Found virtual manifest, but this command requires running against an \ actual package in this workspace. Try adding `--workspace`." })?; Ok(Manifests(vec![(manifest, package.to_owned())])) } /// Get the the combined set of dependencies to upgrade. If the user has specified /// per-dependency desired versions, extract those here. 
fn get_dependencies( &self, only_update: Vec<String>, exclude: Vec<String>, ) -> Result<DesiredUpgrades> { // Map the names of user-specified dependencies to the (optionally) requested version. let selected_dependencies = only_update .into_iter() .map(|name| { if let Some(dependency) = CrateName::new(&name).parse_as_version()? { Ok(( dependency.name.clone(), dependency.version().map(String::from), )) } else { Ok((name, None)) } }) .collect::<Result<HashMap<_, _>>>()?; Ok(DesiredUpgrades( self.0 .iter() .flat_map(|&(_, ref package)| package.dependencies.clone()) .filter(is_version_dep) .filter(|dependency| !exclude.contains(&dependency.name)) // Exclude renamed dependecies aswell .filter(|dependency| { dependency .rename .as_ref() .map_or(true, |rename| !exclude.contains(rename)) }) .filter_map(|dependency| { let is_prerelease = dependency.req.to_string().contains('-'); if selected_dependencies.is_empty() { // User hasn't asked for any specific dependencies to be upgraded, // so upgrade all the dependencies. let mut dep = Dependency::new(&dependency.name); if let Some(rename) = dependency.rename { dep = dep.set_rename(&rename); } Some(( dep, UpgradeMetadata { registry: dependency.registry, version: None, is_prerelease, }, )) } else { // User has asked for specific dependencies. Check if this dependency // was specified, populating the registry from the lockfile metadata. match selected_dependencies.get(&dependency.name) { Some(version) => Some(( Dependency::new(&dependency.name), UpgradeMetadata { registry: dependency.registry, version: version.clone(), is_prerelease, }, )), None => None, } } }) .collect(), )) } /// Upgrade the manifests on disk following the previously-determined upgrade schema. 
fn upgrade(
    self,
    upgraded_deps: &ActualUpgrades,
    dry_run: bool,
    skip_compatible: bool,
) -> Result<()> {
    if dry_run {
        dry_run_message()?;
    }

    for (mut manifest, package) in self.0 {
        println!("{}:", package.name);

        for (dep, version) in &upgraded_deps.0 {
            let mut new_dep = Dependency::new(&dep.name).set_version(version);
            if let Some(rename) = dep.rename() {
                new_dep = new_dep.set_rename(rename);
            }
            manifest.upgrade(&new_dep, dry_run, skip_compatible)?;
        }
    }

    Ok(())
}

/// Update dependencies in Cargo.toml file(s) to match the corresponding
/// version in Cargo.lock.
fn sync_to_lockfile(self, dry_run: bool, skip_compatible: bool) -> Result<()> {
    // Get locked dependencies. For workspaces with multiple Cargo.toml
    // files, there is only a single lockfile, so it suffices to get
    // metadata for any one of Cargo.toml files.
    let (manifest, _package) = self.0.first().ok_or_else(|| {
        ErrorKind::CargoEditLib(::cargo_edit::ErrorKind::InvalidCargoConfig)
    })?;

    let mut cmd = cargo_metadata::MetadataCommand::new();
    cmd.manifest_path(manifest.path.clone());
    cmd.features(cargo_metadata::CargoOpt::AllFeatures);
    // `--locked` makes cargo take the lockfile as-is instead of regenerating it.
    cmd.other_options(vec!["--locked".to_string()]);

    let result = cmd.exec().chain_err(|| "Invalid manifest")?;
    let locked = result
        .packages
        .into_iter()
        .filter(|p| p.source.is_some()) // Source is none for local packages
        .collect::<Vec<_>>();

    if dry_run {
        dry_run_message()?;
    }

    for (mut manifest, package) in self.0 {
        println!("{}:", package.name);

        // Upgrade the manifests one at a time, as multiple manifests may
        // request the same dependency at differing versions.
        for (name, version) in package
            .dependencies
            .clone()
            .into_iter()
            .filter(is_version_dep)
            .filter_map(|d| {
                for p in &locked {
                    // The requested dependency may be present in the lock file with different versions,
                    // but only one will be semver-compatible with the requested version.
                    if d.name == p.name && d.req.matches(&p.version) {
                        return Some((d.name, p.version.to_string()));
                    }
                }
                None
            })
        {
            manifest.upgrade(
                &Dependency::new(&name).set_version(&version),
                dry_run,
                skip_compatible,
            )?;
        }
    }

    Ok(())
}
}

// Some metadata about the dependency
// we're trying to upgrade.
struct UpgradeMetadata {
    registry: Option<String>,
    // `Some` if the user has specified an explicit
    // version to upgrade to.
    version: Option<String>,
    is_prerelease: bool,
}

/// The set of dependencies to be upgraded, alongside the registries returned from cargo metadata, and
/// the desired versions, if specified by the user.
struct DesiredUpgrades(HashMap<Dependency, UpgradeMetadata>);

/// The complete specification of the upgrades that will be performed. Map of the dependency names
/// to the new versions.
struct ActualUpgrades(HashMap<Dependency, String>);

impl DesiredUpgrades {
    /// Transform the dependencies into their upgraded forms. If a version is specified, all
    /// dependencies will get that version.
    fn get_upgraded(self, allow_prerelease: bool, manifest_path: &Path) -> Result<ActualUpgrades> {
        self.0
            .into_iter()
            .map(
                |(
                    dep,
                    UpgradeMetadata {
                        registry,
                        version,
                        is_prerelease,
                    },
                )| {
                    if let Some(v) = version {
                        // The user pinned an explicit version: use it verbatim.
                        Ok((dep, v))
                    } else {
                        let registry_url = match registry {
                            Some(x) => Some(Url::parse(&x).map_err(|_| {
                                ErrorKind::CargoEditLib(::cargo_edit::ErrorKind::InvalidCargoConfig)
                            })?),
                            None => None,
                        };
                        let allow_prerelease = allow_prerelease || is_prerelease;
                        get_latest_dependency(
                            &dep.name,
                            allow_prerelease,
                            manifest_path,
                            registry_url.as_ref(),
                        )
                        .map(|new_dep| {
                            (
                                dep,
                                new_dep
                                    .version()
                                    .expect("Invalid dependency type")
                                    .to_string(),
                            )
                        })
                        .chain_err(|| "Failed to get new version")
                    }
                },
            )
            .collect::<Result<_>>()
            .map(ActualUpgrades)
    }
}

/// Main processing function. Allows us to return a `Result` so that `main` can print pretty error
/// messages.
fn process(args: Args) -> Result<()> {
    let Args {
        dependency,
        manifest_path,
        pkgid,
        all,
        allow_prerelease,
        dry_run,
        skip_compatible,
        to_lockfile,
        workspace,
        exclude,
        ..
    } = args;

    if all {
        deprecated_message("The flag `--all` has been deprecated in favor of `--workspace`")?;
    }

    // `--workspace` and the deprecated `--all` flag are treated identically.
    let all = workspace || all;

    // Refresh the default registry index up front, unless offline, syncing to
    // the lockfile (no network needed), or running under the test harness.
    if !args.offline && !to_lockfile && std::env::var("CARGO_IS_TEST").is_err() {
        let url = registry_url(&find(&manifest_path)?, None)?;
        update_registry_index(&url, false)?;
    }

    // Decide which manifests this invocation operates on: the whole
    // workspace, a specific pkgid, or the single local manifest.
    let manifests = if all {
        Manifests::get_all(&manifest_path)
    } else if let Some(ref pkgid) = pkgid {
        Manifests::get_pkgid(manifest_path.as_deref(), pkgid)
    } else {
        Manifests::get_local_one(&manifest_path)
    }?;

    if to_lockfile {
        manifests.sync_to_lockfile(dry_run, skip_compatible)
    } else {
        let existing_dependencies = manifests.get_dependencies(dependency, exclude)?;

        // Update indices for any alternative registries, unless
        // we're offline.
        if !args.offline && std::env::var("CARGO_IS_TEST").is_err() {
            // Deduplicate registry URLs so each index is updated once.
            for registry_url in existing_dependencies
                .0
                .values()
                .filter_map(|UpgradeMetadata { registry, .. }| registry.as_ref())
                .collect::<HashSet<_>>()
            {
                update_registry_index(
                    &Url::parse(registry_url).map_err(|_| {
                        ErrorKind::CargoEditLib(::cargo_edit::ErrorKind::InvalidCargoConfig)
                    })?,
                    false,
                )?;
            }
        }
        let upgraded_dependencies =
            existing_dependencies.get_upgraded(allow_prerelease, &find(&manifest_path)?)?;

        manifests.upgrade(&upgraded_dependencies, dry_run, skip_compatible)
    }
}

fn main() {
    let args: Command = Command::from_args();
    // Irrefutable destructuring: `Upgrade` is the only subcommand variant here.
    let Command::Upgrade(args) = args;

    if let Err(err) = process(args) {
        eprintln!("Command failed due to unhandled error: {}\n", err);

        // Print the error-chain causes beneath the top-level message.
        for e in err.iter().skip(1) {
            eprintln!("Caused by: {}", e);
        }

        if let Some(backtrace) = err.backtrace() {
            eprintln!("Backtrace: {:?}", backtrace);
        }

        process::exit(1);
    }
}
36.318352
108
0.546664
d973e12aa3004f3fdda4f1f6b3fb2e29b45900a8
4,837
use std::fs;
use std::env;
use std::path;

use console::style;
use url::{Url};
use indicatif::{ProgressBar, ProgressStyle};
use clap::{ArgMatches};
use gitlfs::lfs;

use crate::gpm;
use crate::gpm::command::{Command, CommandError, CommandResult};
use crate::gpm::package::Package;

/// Implements the `download` subcommand: fetch a package archive out of its
/// git repository (resolving git-LFS links when present) into the current
/// working directory.
pub struct DownloadPackageCommand {
}

impl DownloadPackageCommand {
    /// Download `package` into the current directory.
    ///
    /// Returns `Ok(true)` on success, `Ok(false)` when the target file already
    /// exists and `force` is not set, and an error for git/LFS/IO failures.
    fn run_download(
        &self,
        package : &Package,
        force : bool,
    ) -> Result<bool, CommandError> {
        info!("running the \"download\" command for package {}", package);

        println!(
            "{} package {}",
            gpm::style::command(&String::from("Downloading")),
            package,
        );

        println!(
            "{} Resolving package",
            style("[1/2]").bold().dim(),
        );

        // Locate (or clone) the repository holding the package and the
        // refspec matching the requested version.
        let (repo, refspec) = gpm::git::find_or_init_repo(package)?;
        let remote = repo.find_remote("origin")?.url().unwrap().to_owned();

        info!("{} found as refspec {} in repository {}", package, &refspec, remote);

        let oid = repo.refname_to_id(&refspec).map_err(CommandError::GitError)?;

        package.print_message(oid, &repo);

        // Check out the resolved commit (detached HEAD) so the archive file
        // in the work tree corresponds to the requested version.
        let mut builder = git2::build::CheckoutBuilder::new();
        builder.force();

        debug!("move repository HEAD to {}", refspec);
        repo.set_head_detached(oid).map_err(CommandError::GitError)?;
        repo.checkout_head(Some(&mut builder)).map_err(CommandError::GitError)?;

        let package_path = package.get_archive_path(Some(path::PathBuf::from(repo.workdir().unwrap())));
        let cwd_package_path = env::current_dir().unwrap().join(&package.get_archive_filename());

        // Refuse to overwrite an existing file unless --force was given.
        if cwd_package_path.exists() && !force {
            error!("path {} already exist, use --force to override", cwd_package_path.display());
            return Ok(false);
        }

        let parsed_lfs_link_data = lfs::parse_lfs_link_file(&package_path);

        if parsed_lfs_link_data.is_ok() {
            // The archive in the work tree is an LFS pointer file: fetch the
            // real object from the LFS server.
            // NOTE(review): the inner unwrap() would panic if the parser ever
            // returns Ok(None) — presumably it returns Err for non-LFS files;
            // confirm against lfs::parse_lfs_link_file.
            let (oid, size) = parsed_lfs_link_data.unwrap().unwrap();
            let size = size.parse::<usize>().unwrap();

            info!("start downloading archive {:?} from LFS", cwd_package_path);
            println!(
                "{} Downloading package",
                style("[2/2]").bold().dim(),
            );

            let file = fs::OpenOptions::new()
                .write(true)
                .create(true)
                .truncate(true)
                .open(&cwd_package_path)?;

            let pb = ProgressBar::new(size as u64);
            pb.set_style(ProgressStyle::default_bar()
                .template("  [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({eta})")
                .progress_chars("#>-"));

            // Stream the LFS object into the destination file, authenticating
            // over SSH with a key looked up per repository host.
            lfs::resolve_lfs_link(
                remote.parse().unwrap(),
                Some(refspec.clone()),
                &package_path,
                &mut pb.wrap_write(file),
                &|repository: Url| {
                    let (k, p) = gpm::ssh::get_ssh_key_and_passphrase(
                        &String::from(repository.host_str().unwrap())
                    );

                    (k.unwrap(), p)
                },
                Some(format!("gpm/{}", env!("VERGEN_BUILD_SEMVER"))),
            ).map_err(CommandError::GitLFSError)?;

            // Re-open the downloaded file and verify its content hash against
            // the OID recorded in the LFS pointer.
            let mut file = fs::OpenOptions::new()
                .read(true)
                .open(&cwd_package_path)?;
            let archive_oid = lfs::get_oid(&mut file);

            if archive_oid != oid {
                return Err(CommandError::InvalidLFSObjectSignature {
                    expected: oid,
                    got: archive_oid,
                })
            }

            pb.finish();
        } else {
            // Not an LFS pointer: the work-tree file is the archive itself.
            fs::copy(package_path, cwd_package_path).map_err(CommandError::IOError)?;
        }

        // ? FIXME: reset back to HEAD?

        println!("{}", style("Done!").green());

        Ok(true)
    }
}

impl Command for DownloadPackageCommand {
    /// Match when the CLI invocation used the `download` subcommand.
    fn matched_args<'a, 'b>(&self, args : &'a ArgMatches<'b>) -> Option<&'a ArgMatches<'b>> {
        args.subcommand_matches("download")
    }

    /// Parse CLI arguments and delegate to [`DownloadPackageCommand::run_download`].
    fn run(&self, args: &ArgMatches) -> CommandResult {
        let force = args.is_present("force");
        let package = Package::parse(&String::from(args.value_of("package").unwrap()));

        debug!("parsed package: {:?}", &package);

        match self.run_download(&package, force) {
            Ok(success) => {
                if success {
                    info!("package {} successfully downloaded", &package);

                    Ok(true)
                } else {
                    error!("package {} has not been downloaded, check the logs for warnings/errors", package);

                    Ok(false)
                }
            },
            Err(e) => Err(e)
        }
    }
}
32.033113
110
0.528427
50c5b7403189c7bba7075a1f8986638e843ef563
639
use reqwest::Certificate;

/// Settings controlling the REST layer.
///
/// `Default` yields plain HTTP, no debug output, no certificate and no CORS
/// origin — identical to the previous hand-written `Default` impl, now derived.
#[derive(Debug, Clone, Default)]
pub struct RestSettings {
    /// Enable verbose debug behaviour.
    pub enable_debug: bool,
    /// Use HTTPS instead of plain HTTP.
    pub use_https: bool,
    /// TLS certificate used when `use_https` is set.
    pub certificate: Option<Certificate>,
    /// CORS origin value, if any.
    pub cors: Option<String>,
}

impl RestSettings {
    /// Settings with HTTPS enabled and every other field at its default.
    pub fn new_use_https() -> Self {
        RestSettings {
            use_https: true,
            ..Default::default()
        }
    }
}
19.96875
41
0.549296
ac03b58e3b3e427721cf0af2f1b5300e2f7c795f
79
//! Geometry primitives: 2D points and grid utilities.

mod point2d;
mod grid;

// Re-export the public surface so callers can use these types directly
// from this module instead of naming the submodules.
pub use self::point2d::Point2D;
pub use self::grid::*;
13.166667
31
0.696203
4856074a62bd01d0824226e28fc7341285f2d1a5
36,727
//! Pre-computed tables powers-of-5 for extended-precision representations. //! //! These tables enable fast scaling of the significant digits //! of a float to the decimal exponent, with minimal rounding //! errors, in a 128 or 192-bit representation. //! //! DO NOT MODIFY: Generated by `src/etc/dec2flt_table.py` pub const SMALLEST_POWER_OF_FIVE: i32 = -342; pub const LARGEST_POWER_OF_FIVE: i32 = 308; pub const N_POWERS_OF_FIVE: usize = (LARGEST_POWER_OF_FIVE - SMALLEST_POWER_OF_FIVE + 1) as usize; // Use static to avoid long compile times: Rust compiler errors // can have the entire table compiled multiple times, and then // emit code multiple times, even if it's stripped out in // the final binary. #[rustfmt::skip] pub static POWER_OF_FIVE_128: [(u64, u64); N_POWERS_OF_FIVE] = [ (0xeef453d6923bd65a, 0x113faa2906a13b3f), // 5^-342 (0x9558b4661b6565f8, 0x4ac7ca59a424c507), // 5^-341 (0xbaaee17fa23ebf76, 0x5d79bcf00d2df649), // 5^-340 (0xe95a99df8ace6f53, 0xf4d82c2c107973dc), // 5^-339 (0x91d8a02bb6c10594, 0x79071b9b8a4be869), // 5^-338 (0xb64ec836a47146f9, 0x9748e2826cdee284), // 5^-337 (0xe3e27a444d8d98b7, 0xfd1b1b2308169b25), // 5^-336 (0x8e6d8c6ab0787f72, 0xfe30f0f5e50e20f7), // 5^-335 (0xb208ef855c969f4f, 0xbdbd2d335e51a935), // 5^-334 (0xde8b2b66b3bc4723, 0xad2c788035e61382), // 5^-333 (0x8b16fb203055ac76, 0x4c3bcb5021afcc31), // 5^-332 (0xaddcb9e83c6b1793, 0xdf4abe242a1bbf3d), // 5^-331 (0xd953e8624b85dd78, 0xd71d6dad34a2af0d), // 5^-330 (0x87d4713d6f33aa6b, 0x8672648c40e5ad68), // 5^-329 (0xa9c98d8ccb009506, 0x680efdaf511f18c2), // 5^-328 (0xd43bf0effdc0ba48, 0x212bd1b2566def2), // 5^-327 (0x84a57695fe98746d, 0x14bb630f7604b57), // 5^-326 (0xa5ced43b7e3e9188, 0x419ea3bd35385e2d), // 5^-325 (0xcf42894a5dce35ea, 0x52064cac828675b9), // 5^-324 (0x818995ce7aa0e1b2, 0x7343efebd1940993), // 5^-323 (0xa1ebfb4219491a1f, 0x1014ebe6c5f90bf8), // 5^-322 (0xca66fa129f9b60a6, 0xd41a26e077774ef6), // 5^-321 (0xfd00b897478238d0, 0x8920b098955522b4), // 5^-320 
(0x9e20735e8cb16382, 0x55b46e5f5d5535b0), // 5^-319 (0xc5a890362fddbc62, 0xeb2189f734aa831d), // 5^-318 (0xf712b443bbd52b7b, 0xa5e9ec7501d523e4), // 5^-317 (0x9a6bb0aa55653b2d, 0x47b233c92125366e), // 5^-316 (0xc1069cd4eabe89f8, 0x999ec0bb696e840a), // 5^-315 (0xf148440a256e2c76, 0xc00670ea43ca250d), // 5^-314 (0x96cd2a865764dbca, 0x380406926a5e5728), // 5^-313 (0xbc807527ed3e12bc, 0xc605083704f5ecf2), // 5^-312 (0xeba09271e88d976b, 0xf7864a44c633682e), // 5^-311 (0x93445b8731587ea3, 0x7ab3ee6afbe0211d), // 5^-310 (0xb8157268fdae9e4c, 0x5960ea05bad82964), // 5^-309 (0xe61acf033d1a45df, 0x6fb92487298e33bd), // 5^-308 (0x8fd0c16206306bab, 0xa5d3b6d479f8e056), // 5^-307 (0xb3c4f1ba87bc8696, 0x8f48a4899877186c), // 5^-306 (0xe0b62e2929aba83c, 0x331acdabfe94de87), // 5^-305 (0x8c71dcd9ba0b4925, 0x9ff0c08b7f1d0b14), // 5^-304 (0xaf8e5410288e1b6f, 0x7ecf0ae5ee44dd9), // 5^-303 (0xdb71e91432b1a24a, 0xc9e82cd9f69d6150), // 5^-302 (0x892731ac9faf056e, 0xbe311c083a225cd2), // 5^-301 (0xab70fe17c79ac6ca, 0x6dbd630a48aaf406), // 5^-300 (0xd64d3d9db981787d, 0x92cbbccdad5b108), // 5^-299 (0x85f0468293f0eb4e, 0x25bbf56008c58ea5), // 5^-298 (0xa76c582338ed2621, 0xaf2af2b80af6f24e), // 5^-297 (0xd1476e2c07286faa, 0x1af5af660db4aee1), // 5^-296 (0x82cca4db847945ca, 0x50d98d9fc890ed4d), // 5^-295 (0xa37fce126597973c, 0xe50ff107bab528a0), // 5^-294 (0xcc5fc196fefd7d0c, 0x1e53ed49a96272c8), // 5^-293 (0xff77b1fcbebcdc4f, 0x25e8e89c13bb0f7a), // 5^-292 (0x9faacf3df73609b1, 0x77b191618c54e9ac), // 5^-291 (0xc795830d75038c1d, 0xd59df5b9ef6a2417), // 5^-290 (0xf97ae3d0d2446f25, 0x4b0573286b44ad1d), // 5^-289 (0x9becce62836ac577, 0x4ee367f9430aec32), // 5^-288 (0xc2e801fb244576d5, 0x229c41f793cda73f), // 5^-287 (0xf3a20279ed56d48a, 0x6b43527578c1110f), // 5^-286 (0x9845418c345644d6, 0x830a13896b78aaa9), // 5^-285 (0xbe5691ef416bd60c, 0x23cc986bc656d553), // 5^-284 (0xedec366b11c6cb8f, 0x2cbfbe86b7ec8aa8), // 5^-283 (0x94b3a202eb1c3f39, 0x7bf7d71432f3d6a9), // 5^-282 (0xb9e08a83a5e34f07, 
0xdaf5ccd93fb0cc53), // 5^-281 (0xe858ad248f5c22c9, 0xd1b3400f8f9cff68), // 5^-280 (0x91376c36d99995be, 0x23100809b9c21fa1), // 5^-279 (0xb58547448ffffb2d, 0xabd40a0c2832a78a), // 5^-278 (0xe2e69915b3fff9f9, 0x16c90c8f323f516c), // 5^-277 (0x8dd01fad907ffc3b, 0xae3da7d97f6792e3), // 5^-276 (0xb1442798f49ffb4a, 0x99cd11cfdf41779c), // 5^-275 (0xdd95317f31c7fa1d, 0x40405643d711d583), // 5^-274 (0x8a7d3eef7f1cfc52, 0x482835ea666b2572), // 5^-273 (0xad1c8eab5ee43b66, 0xda3243650005eecf), // 5^-272 (0xd863b256369d4a40, 0x90bed43e40076a82), // 5^-271 (0x873e4f75e2224e68, 0x5a7744a6e804a291), // 5^-270 (0xa90de3535aaae202, 0x711515d0a205cb36), // 5^-269 (0xd3515c2831559a83, 0xd5a5b44ca873e03), // 5^-268 (0x8412d9991ed58091, 0xe858790afe9486c2), // 5^-267 (0xa5178fff668ae0b6, 0x626e974dbe39a872), // 5^-266 (0xce5d73ff402d98e3, 0xfb0a3d212dc8128f), // 5^-265 (0x80fa687f881c7f8e, 0x7ce66634bc9d0b99), // 5^-264 (0xa139029f6a239f72, 0x1c1fffc1ebc44e80), // 5^-263 (0xc987434744ac874e, 0xa327ffb266b56220), // 5^-262 (0xfbe9141915d7a922, 0x4bf1ff9f0062baa8), // 5^-261 (0x9d71ac8fada6c9b5, 0x6f773fc3603db4a9), // 5^-260 (0xc4ce17b399107c22, 0xcb550fb4384d21d3), // 5^-259 (0xf6019da07f549b2b, 0x7e2a53a146606a48), // 5^-258 (0x99c102844f94e0fb, 0x2eda7444cbfc426d), // 5^-257 (0xc0314325637a1939, 0xfa911155fefb5308), // 5^-256 (0xf03d93eebc589f88, 0x793555ab7eba27ca), // 5^-255 (0x96267c7535b763b5, 0x4bc1558b2f3458de), // 5^-254 (0xbbb01b9283253ca2, 0x9eb1aaedfb016f16), // 5^-253 (0xea9c227723ee8bcb, 0x465e15a979c1cadc), // 5^-252 (0x92a1958a7675175f, 0xbfacd89ec191ec9), // 5^-251 (0xb749faed14125d36, 0xcef980ec671f667b), // 5^-250 (0xe51c79a85916f484, 0x82b7e12780e7401a), // 5^-249 (0x8f31cc0937ae58d2, 0xd1b2ecb8b0908810), // 5^-248 (0xb2fe3f0b8599ef07, 0x861fa7e6dcb4aa15), // 5^-247 (0xdfbdcece67006ac9, 0x67a791e093e1d49a), // 5^-246 (0x8bd6a141006042bd, 0xe0c8bb2c5c6d24e0), // 5^-245 (0xaecc49914078536d, 0x58fae9f773886e18), // 5^-244 (0xda7f5bf590966848, 0xaf39a475506a899e), // 
5^-243 (0x888f99797a5e012d, 0x6d8406c952429603), // 5^-242 (0xaab37fd7d8f58178, 0xc8e5087ba6d33b83), // 5^-241 (0xd5605fcdcf32e1d6, 0xfb1e4a9a90880a64), // 5^-240 (0x855c3be0a17fcd26, 0x5cf2eea09a55067f), // 5^-239 (0xa6b34ad8c9dfc06f, 0xf42faa48c0ea481e), // 5^-238 (0xd0601d8efc57b08b, 0xf13b94daf124da26), // 5^-237 (0x823c12795db6ce57, 0x76c53d08d6b70858), // 5^-236 (0xa2cb1717b52481ed, 0x54768c4b0c64ca6e), // 5^-235 (0xcb7ddcdda26da268, 0xa9942f5dcf7dfd09), // 5^-234 (0xfe5d54150b090b02, 0xd3f93b35435d7c4c), // 5^-233 (0x9efa548d26e5a6e1, 0xc47bc5014a1a6daf), // 5^-232 (0xc6b8e9b0709f109a, 0x359ab6419ca1091b), // 5^-231 (0xf867241c8cc6d4c0, 0xc30163d203c94b62), // 5^-230 (0x9b407691d7fc44f8, 0x79e0de63425dcf1d), // 5^-229 (0xc21094364dfb5636, 0x985915fc12f542e4), // 5^-228 (0xf294b943e17a2bc4, 0x3e6f5b7b17b2939d), // 5^-227 (0x979cf3ca6cec5b5a, 0xa705992ceecf9c42), // 5^-226 (0xbd8430bd08277231, 0x50c6ff782a838353), // 5^-225 (0xece53cec4a314ebd, 0xa4f8bf5635246428), // 5^-224 (0x940f4613ae5ed136, 0x871b7795e136be99), // 5^-223 (0xb913179899f68584, 0x28e2557b59846e3f), // 5^-222 (0xe757dd7ec07426e5, 0x331aeada2fe589cf), // 5^-221 (0x9096ea6f3848984f, 0x3ff0d2c85def7621), // 5^-220 (0xb4bca50b065abe63, 0xfed077a756b53a9), // 5^-219 (0xe1ebce4dc7f16dfb, 0xd3e8495912c62894), // 5^-218 (0x8d3360f09cf6e4bd, 0x64712dd7abbbd95c), // 5^-217 (0xb080392cc4349dec, 0xbd8d794d96aacfb3), // 5^-216 (0xdca04777f541c567, 0xecf0d7a0fc5583a0), // 5^-215 (0x89e42caaf9491b60, 0xf41686c49db57244), // 5^-214 (0xac5d37d5b79b6239, 0x311c2875c522ced5), // 5^-213 (0xd77485cb25823ac7, 0x7d633293366b828b), // 5^-212 (0x86a8d39ef77164bc, 0xae5dff9c02033197), // 5^-211 (0xa8530886b54dbdeb, 0xd9f57f830283fdfc), // 5^-210 (0xd267caa862a12d66, 0xd072df63c324fd7b), // 5^-209 (0x8380dea93da4bc60, 0x4247cb9e59f71e6d), // 5^-208 (0xa46116538d0deb78, 0x52d9be85f074e608), // 5^-207 (0xcd795be870516656, 0x67902e276c921f8b), // 5^-206 (0x806bd9714632dff6, 0xba1cd8a3db53b6), // 5^-205 
(0xa086cfcd97bf97f3, 0x80e8a40eccd228a4), // 5^-204 (0xc8a883c0fdaf7df0, 0x6122cd128006b2cd), // 5^-203 (0xfad2a4b13d1b5d6c, 0x796b805720085f81), // 5^-202 (0x9cc3a6eec6311a63, 0xcbe3303674053bb0), // 5^-201 (0xc3f490aa77bd60fc, 0xbedbfc4411068a9c), // 5^-200 (0xf4f1b4d515acb93b, 0xee92fb5515482d44), // 5^-199 (0x991711052d8bf3c5, 0x751bdd152d4d1c4a), // 5^-198 (0xbf5cd54678eef0b6, 0xd262d45a78a0635d), // 5^-197 (0xef340a98172aace4, 0x86fb897116c87c34), // 5^-196 (0x9580869f0e7aac0e, 0xd45d35e6ae3d4da0), // 5^-195 (0xbae0a846d2195712, 0x8974836059cca109), // 5^-194 (0xe998d258869facd7, 0x2bd1a438703fc94b), // 5^-193 (0x91ff83775423cc06, 0x7b6306a34627ddcf), // 5^-192 (0xb67f6455292cbf08, 0x1a3bc84c17b1d542), // 5^-191 (0xe41f3d6a7377eeca, 0x20caba5f1d9e4a93), // 5^-190 (0x8e938662882af53e, 0x547eb47b7282ee9c), // 5^-189 (0xb23867fb2a35b28d, 0xe99e619a4f23aa43), // 5^-188 (0xdec681f9f4c31f31, 0x6405fa00e2ec94d4), // 5^-187 (0x8b3c113c38f9f37e, 0xde83bc408dd3dd04), // 5^-186 (0xae0b158b4738705e, 0x9624ab50b148d445), // 5^-185 (0xd98ddaee19068c76, 0x3badd624dd9b0957), // 5^-184 (0x87f8a8d4cfa417c9, 0xe54ca5d70a80e5d6), // 5^-183 (0xa9f6d30a038d1dbc, 0x5e9fcf4ccd211f4c), // 5^-182 (0xd47487cc8470652b, 0x7647c3200069671f), // 5^-181 (0x84c8d4dfd2c63f3b, 0x29ecd9f40041e073), // 5^-180 (0xa5fb0a17c777cf09, 0xf468107100525890), // 5^-179 (0xcf79cc9db955c2cc, 0x7182148d4066eeb4), // 5^-178 (0x81ac1fe293d599bf, 0xc6f14cd848405530), // 5^-177 (0xa21727db38cb002f, 0xb8ada00e5a506a7c), // 5^-176 (0xca9cf1d206fdc03b, 0xa6d90811f0e4851c), // 5^-175 (0xfd442e4688bd304a, 0x908f4a166d1da663), // 5^-174 (0x9e4a9cec15763e2e, 0x9a598e4e043287fe), // 5^-173 (0xc5dd44271ad3cdba, 0x40eff1e1853f29fd), // 5^-172 (0xf7549530e188c128, 0xd12bee59e68ef47c), // 5^-171 (0x9a94dd3e8cf578b9, 0x82bb74f8301958ce), // 5^-170 (0xc13a148e3032d6e7, 0xe36a52363c1faf01), // 5^-169 (0xf18899b1bc3f8ca1, 0xdc44e6c3cb279ac1), // 5^-168 (0x96f5600f15a7b7e5, 0x29ab103a5ef8c0b9), // 5^-167 (0xbcb2b812db11a5de, 
0x7415d448f6b6f0e7), // 5^-166 (0xebdf661791d60f56, 0x111b495b3464ad21), // 5^-165 (0x936b9fcebb25c995, 0xcab10dd900beec34), // 5^-164 (0xb84687c269ef3bfb, 0x3d5d514f40eea742), // 5^-163 (0xe65829b3046b0afa, 0xcb4a5a3112a5112), // 5^-162 (0x8ff71a0fe2c2e6dc, 0x47f0e785eaba72ab), // 5^-161 (0xb3f4e093db73a093, 0x59ed216765690f56), // 5^-160 (0xe0f218b8d25088b8, 0x306869c13ec3532c), // 5^-159 (0x8c974f7383725573, 0x1e414218c73a13fb), // 5^-158 (0xafbd2350644eeacf, 0xe5d1929ef90898fa), // 5^-157 (0xdbac6c247d62a583, 0xdf45f746b74abf39), // 5^-156 (0x894bc396ce5da772, 0x6b8bba8c328eb783), // 5^-155 (0xab9eb47c81f5114f, 0x66ea92f3f326564), // 5^-154 (0xd686619ba27255a2, 0xc80a537b0efefebd), // 5^-153 (0x8613fd0145877585, 0xbd06742ce95f5f36), // 5^-152 (0xa798fc4196e952e7, 0x2c48113823b73704), // 5^-151 (0xd17f3b51fca3a7a0, 0xf75a15862ca504c5), // 5^-150 (0x82ef85133de648c4, 0x9a984d73dbe722fb), // 5^-149 (0xa3ab66580d5fdaf5, 0xc13e60d0d2e0ebba), // 5^-148 (0xcc963fee10b7d1b3, 0x318df905079926a8), // 5^-147 (0xffbbcfe994e5c61f, 0xfdf17746497f7052), // 5^-146 (0x9fd561f1fd0f9bd3, 0xfeb6ea8bedefa633), // 5^-145 (0xc7caba6e7c5382c8, 0xfe64a52ee96b8fc0), // 5^-144 (0xf9bd690a1b68637b, 0x3dfdce7aa3c673b0), // 5^-143 (0x9c1661a651213e2d, 0x6bea10ca65c084e), // 5^-142 (0xc31bfa0fe5698db8, 0x486e494fcff30a62), // 5^-141 (0xf3e2f893dec3f126, 0x5a89dba3c3efccfa), // 5^-140 (0x986ddb5c6b3a76b7, 0xf89629465a75e01c), // 5^-139 (0xbe89523386091465, 0xf6bbb397f1135823), // 5^-138 (0xee2ba6c0678b597f, 0x746aa07ded582e2c), // 5^-137 (0x94db483840b717ef, 0xa8c2a44eb4571cdc), // 5^-136 (0xba121a4650e4ddeb, 0x92f34d62616ce413), // 5^-135 (0xe896a0d7e51e1566, 0x77b020baf9c81d17), // 5^-134 (0x915e2486ef32cd60, 0xace1474dc1d122e), // 5^-133 (0xb5b5ada8aaff80b8, 0xd819992132456ba), // 5^-132 (0xe3231912d5bf60e6, 0x10e1fff697ed6c69), // 5^-131 (0x8df5efabc5979c8f, 0xca8d3ffa1ef463c1), // 5^-130 (0xb1736b96b6fd83b3, 0xbd308ff8a6b17cb2), // 5^-129 (0xddd0467c64bce4a0, 0xac7cb3f6d05ddbde), // 
5^-128 (0x8aa22c0dbef60ee4, 0x6bcdf07a423aa96b), // 5^-127 (0xad4ab7112eb3929d, 0x86c16c98d2c953c6), // 5^-126 (0xd89d64d57a607744, 0xe871c7bf077ba8b7), // 5^-125 (0x87625f056c7c4a8b, 0x11471cd764ad4972), // 5^-124 (0xa93af6c6c79b5d2d, 0xd598e40d3dd89bcf), // 5^-123 (0xd389b47879823479, 0x4aff1d108d4ec2c3), // 5^-122 (0x843610cb4bf160cb, 0xcedf722a585139ba), // 5^-121 (0xa54394fe1eedb8fe, 0xc2974eb4ee658828), // 5^-120 (0xce947a3da6a9273e, 0x733d226229feea32), // 5^-119 (0x811ccc668829b887, 0x806357d5a3f525f), // 5^-118 (0xa163ff802a3426a8, 0xca07c2dcb0cf26f7), // 5^-117 (0xc9bcff6034c13052, 0xfc89b393dd02f0b5), // 5^-116 (0xfc2c3f3841f17c67, 0xbbac2078d443ace2), // 5^-115 (0x9d9ba7832936edc0, 0xd54b944b84aa4c0d), // 5^-114 (0xc5029163f384a931, 0xa9e795e65d4df11), // 5^-113 (0xf64335bcf065d37d, 0x4d4617b5ff4a16d5), // 5^-112 (0x99ea0196163fa42e, 0x504bced1bf8e4e45), // 5^-111 (0xc06481fb9bcf8d39, 0xe45ec2862f71e1d6), // 5^-110 (0xf07da27a82c37088, 0x5d767327bb4e5a4c), // 5^-109 (0x964e858c91ba2655, 0x3a6a07f8d510f86f), // 5^-108 (0xbbe226efb628afea, 0x890489f70a55368b), // 5^-107 (0xeadab0aba3b2dbe5, 0x2b45ac74ccea842e), // 5^-106 (0x92c8ae6b464fc96f, 0x3b0b8bc90012929d), // 5^-105 (0xb77ada0617e3bbcb, 0x9ce6ebb40173744), // 5^-104 (0xe55990879ddcaabd, 0xcc420a6a101d0515), // 5^-103 (0x8f57fa54c2a9eab6, 0x9fa946824a12232d), // 5^-102 (0xb32df8e9f3546564, 0x47939822dc96abf9), // 5^-101 (0xdff9772470297ebd, 0x59787e2b93bc56f7), // 5^-100 (0x8bfbea76c619ef36, 0x57eb4edb3c55b65a), // 5^-99 (0xaefae51477a06b03, 0xede622920b6b23f1), // 5^-98 (0xdab99e59958885c4, 0xe95fab368e45eced), // 5^-97 (0x88b402f7fd75539b, 0x11dbcb0218ebb414), // 5^-96 (0xaae103b5fcd2a881, 0xd652bdc29f26a119), // 5^-95 (0xd59944a37c0752a2, 0x4be76d3346f0495f), // 5^-94 (0x857fcae62d8493a5, 0x6f70a4400c562ddb), // 5^-93 (0xa6dfbd9fb8e5b88e, 0xcb4ccd500f6bb952), // 5^-92 (0xd097ad07a71f26b2, 0x7e2000a41346a7a7), // 5^-91 (0x825ecc24c873782f, 0x8ed400668c0c28c8), // 5^-90 (0xa2f67f2dfa90563b, 
0x728900802f0f32fa), // 5^-89 (0xcbb41ef979346bca, 0x4f2b40a03ad2ffb9), // 5^-88 (0xfea126b7d78186bc, 0xe2f610c84987bfa8), // 5^-87 (0x9f24b832e6b0f436, 0xdd9ca7d2df4d7c9), // 5^-86 (0xc6ede63fa05d3143, 0x91503d1c79720dbb), // 5^-85 (0xf8a95fcf88747d94, 0x75a44c6397ce912a), // 5^-84 (0x9b69dbe1b548ce7c, 0xc986afbe3ee11aba), // 5^-83 (0xc24452da229b021b, 0xfbe85badce996168), // 5^-82 (0xf2d56790ab41c2a2, 0xfae27299423fb9c3), // 5^-81 (0x97c560ba6b0919a5, 0xdccd879fc967d41a), // 5^-80 (0xbdb6b8e905cb600f, 0x5400e987bbc1c920), // 5^-79 (0xed246723473e3813, 0x290123e9aab23b68), // 5^-78 (0x9436c0760c86e30b, 0xf9a0b6720aaf6521), // 5^-77 (0xb94470938fa89bce, 0xf808e40e8d5b3e69), // 5^-76 (0xe7958cb87392c2c2, 0xb60b1d1230b20e04), // 5^-75 (0x90bd77f3483bb9b9, 0xb1c6f22b5e6f48c2), // 5^-74 (0xb4ecd5f01a4aa828, 0x1e38aeb6360b1af3), // 5^-73 (0xe2280b6c20dd5232, 0x25c6da63c38de1b0), // 5^-72 (0x8d590723948a535f, 0x579c487e5a38ad0e), // 5^-71 (0xb0af48ec79ace837, 0x2d835a9df0c6d851), // 5^-70 (0xdcdb1b2798182244, 0xf8e431456cf88e65), // 5^-69 (0x8a08f0f8bf0f156b, 0x1b8e9ecb641b58ff), // 5^-68 (0xac8b2d36eed2dac5, 0xe272467e3d222f3f), // 5^-67 (0xd7adf884aa879177, 0x5b0ed81dcc6abb0f), // 5^-66 (0x86ccbb52ea94baea, 0x98e947129fc2b4e9), // 5^-65 (0xa87fea27a539e9a5, 0x3f2398d747b36224), // 5^-64 (0xd29fe4b18e88640e, 0x8eec7f0d19a03aad), // 5^-63 (0x83a3eeeef9153e89, 0x1953cf68300424ac), // 5^-62 (0xa48ceaaab75a8e2b, 0x5fa8c3423c052dd7), // 5^-61 (0xcdb02555653131b6, 0x3792f412cb06794d), // 5^-60 (0x808e17555f3ebf11, 0xe2bbd88bbee40bd0), // 5^-59 (0xa0b19d2ab70e6ed6, 0x5b6aceaeae9d0ec4), // 5^-58 (0xc8de047564d20a8b, 0xf245825a5a445275), // 5^-57 (0xfb158592be068d2e, 0xeed6e2f0f0d56712), // 5^-56 (0x9ced737bb6c4183d, 0x55464dd69685606b), // 5^-55 (0xc428d05aa4751e4c, 0xaa97e14c3c26b886), // 5^-54 (0xf53304714d9265df, 0xd53dd99f4b3066a8), // 5^-53 (0x993fe2c6d07b7fab, 0xe546a8038efe4029), // 5^-52 (0xbf8fdb78849a5f96, 0xde98520472bdd033), // 5^-51 (0xef73d256a5c0f77c, 
0x963e66858f6d4440), // 5^-50 (0x95a8637627989aad, 0xdde7001379a44aa8), // 5^-49 (0xbb127c53b17ec159, 0x5560c018580d5d52), // 5^-48 (0xe9d71b689dde71af, 0xaab8f01e6e10b4a6), // 5^-47 (0x9226712162ab070d, 0xcab3961304ca70e8), // 5^-46 (0xb6b00d69bb55c8d1, 0x3d607b97c5fd0d22), // 5^-45 (0xe45c10c42a2b3b05, 0x8cb89a7db77c506a), // 5^-44 (0x8eb98a7a9a5b04e3, 0x77f3608e92adb242), // 5^-43 (0xb267ed1940f1c61c, 0x55f038b237591ed3), // 5^-42 (0xdf01e85f912e37a3, 0x6b6c46dec52f6688), // 5^-41 (0x8b61313bbabce2c6, 0x2323ac4b3b3da015), // 5^-40 (0xae397d8aa96c1b77, 0xabec975e0a0d081a), // 5^-39 (0xd9c7dced53c72255, 0x96e7bd358c904a21), // 5^-38 (0x881cea14545c7575, 0x7e50d64177da2e54), // 5^-37 (0xaa242499697392d2, 0xdde50bd1d5d0b9e9), // 5^-36 (0xd4ad2dbfc3d07787, 0x955e4ec64b44e864), // 5^-35 (0x84ec3c97da624ab4, 0xbd5af13bef0b113e), // 5^-34 (0xa6274bbdd0fadd61, 0xecb1ad8aeacdd58e), // 5^-33 (0xcfb11ead453994ba, 0x67de18eda5814af2), // 5^-32 (0x81ceb32c4b43fcf4, 0x80eacf948770ced7), // 5^-31 (0xa2425ff75e14fc31, 0xa1258379a94d028d), // 5^-30 (0xcad2f7f5359a3b3e, 0x96ee45813a04330), // 5^-29 (0xfd87b5f28300ca0d, 0x8bca9d6e188853fc), // 5^-28 (0x9e74d1b791e07e48, 0x775ea264cf55347e), // 5^-27 (0xc612062576589dda, 0x95364afe032a819e), // 5^-26 (0xf79687aed3eec551, 0x3a83ddbd83f52205), // 5^-25 (0x9abe14cd44753b52, 0xc4926a9672793543), // 5^-24 (0xc16d9a0095928a27, 0x75b7053c0f178294), // 5^-23 (0xf1c90080baf72cb1, 0x5324c68b12dd6339), // 5^-22 (0x971da05074da7bee, 0xd3f6fc16ebca5e04), // 5^-21 (0xbce5086492111aea, 0x88f4bb1ca6bcf585), // 5^-20 (0xec1e4a7db69561a5, 0x2b31e9e3d06c32e6), // 5^-19 (0x9392ee8e921d5d07, 0x3aff322e62439fd0), // 5^-18 (0xb877aa3236a4b449, 0x9befeb9fad487c3), // 5^-17 (0xe69594bec44de15b, 0x4c2ebe687989a9b4), // 5^-16 (0x901d7cf73ab0acd9, 0xf9d37014bf60a11), // 5^-15 (0xb424dc35095cd80f, 0x538484c19ef38c95), // 5^-14 (0xe12e13424bb40e13, 0x2865a5f206b06fba), // 5^-13 (0x8cbccc096f5088cb, 0xf93f87b7442e45d4), // 5^-12 (0xafebff0bcb24aafe, 
0xf78f69a51539d749), // 5^-11 (0xdbe6fecebdedd5be, 0xb573440e5a884d1c), // 5^-10 (0x89705f4136b4a597, 0x31680a88f8953031), // 5^-9 (0xabcc77118461cefc, 0xfdc20d2b36ba7c3e), // 5^-8 (0xd6bf94d5e57a42bc, 0x3d32907604691b4d), // 5^-7 (0x8637bd05af6c69b5, 0xa63f9a49c2c1b110), // 5^-6 (0xa7c5ac471b478423, 0xfcf80dc33721d54), // 5^-5 (0xd1b71758e219652b, 0xd3c36113404ea4a9), // 5^-4 (0x83126e978d4fdf3b, 0x645a1cac083126ea), // 5^-3 (0xa3d70a3d70a3d70a, 0x3d70a3d70a3d70a4), // 5^-2 (0xcccccccccccccccc, 0xcccccccccccccccd), // 5^-1 (0x8000000000000000, 0x0), // 5^0 (0xa000000000000000, 0x0), // 5^1 (0xc800000000000000, 0x0), // 5^2 (0xfa00000000000000, 0x0), // 5^3 (0x9c40000000000000, 0x0), // 5^4 (0xc350000000000000, 0x0), // 5^5 (0xf424000000000000, 0x0), // 5^6 (0x9896800000000000, 0x0), // 5^7 (0xbebc200000000000, 0x0), // 5^8 (0xee6b280000000000, 0x0), // 5^9 (0x9502f90000000000, 0x0), // 5^10 (0xba43b74000000000, 0x0), // 5^11 (0xe8d4a51000000000, 0x0), // 5^12 (0x9184e72a00000000, 0x0), // 5^13 (0xb5e620f480000000, 0x0), // 5^14 (0xe35fa931a0000000, 0x0), // 5^15 (0x8e1bc9bf04000000, 0x0), // 5^16 (0xb1a2bc2ec5000000, 0x0), // 5^17 (0xde0b6b3a76400000, 0x0), // 5^18 (0x8ac7230489e80000, 0x0), // 5^19 (0xad78ebc5ac620000, 0x0), // 5^20 (0xd8d726b7177a8000, 0x0), // 5^21 (0x878678326eac9000, 0x0), // 5^22 (0xa968163f0a57b400, 0x0), // 5^23 (0xd3c21bcecceda100, 0x0), // 5^24 (0x84595161401484a0, 0x0), // 5^25 (0xa56fa5b99019a5c8, 0x0), // 5^26 (0xcecb8f27f4200f3a, 0x0), // 5^27 (0x813f3978f8940984, 0x4000000000000000), // 5^28 (0xa18f07d736b90be5, 0x5000000000000000), // 5^29 (0xc9f2c9cd04674ede, 0xa400000000000000), // 5^30 (0xfc6f7c4045812296, 0x4d00000000000000), // 5^31 (0x9dc5ada82b70b59d, 0xf020000000000000), // 5^32 (0xc5371912364ce305, 0x6c28000000000000), // 5^33 (0xf684df56c3e01bc6, 0xc732000000000000), // 5^34 (0x9a130b963a6c115c, 0x3c7f400000000000), // 5^35 (0xc097ce7bc90715b3, 0x4b9f100000000000), // 5^36 (0xf0bdc21abb48db20, 0x1e86d40000000000), // 5^37 
(0x96769950b50d88f4, 0x1314448000000000), // 5^38 (0xbc143fa4e250eb31, 0x17d955a000000000), // 5^39 (0xeb194f8e1ae525fd, 0x5dcfab0800000000), // 5^40 (0x92efd1b8d0cf37be, 0x5aa1cae500000000), // 5^41 (0xb7abc627050305ad, 0xf14a3d9e40000000), // 5^42 (0xe596b7b0c643c719, 0x6d9ccd05d0000000), // 5^43 (0x8f7e32ce7bea5c6f, 0xe4820023a2000000), // 5^44 (0xb35dbf821ae4f38b, 0xdda2802c8a800000), // 5^45 (0xe0352f62a19e306e, 0xd50b2037ad200000), // 5^46 (0x8c213d9da502de45, 0x4526f422cc340000), // 5^47 (0xaf298d050e4395d6, 0x9670b12b7f410000), // 5^48 (0xdaf3f04651d47b4c, 0x3c0cdd765f114000), // 5^49 (0x88d8762bf324cd0f, 0xa5880a69fb6ac800), // 5^50 (0xab0e93b6efee0053, 0x8eea0d047a457a00), // 5^51 (0xd5d238a4abe98068, 0x72a4904598d6d880), // 5^52 (0x85a36366eb71f041, 0x47a6da2b7f864750), // 5^53 (0xa70c3c40a64e6c51, 0x999090b65f67d924), // 5^54 (0xd0cf4b50cfe20765, 0xfff4b4e3f741cf6d), // 5^55 (0x82818f1281ed449f, 0xbff8f10e7a8921a4), // 5^56 (0xa321f2d7226895c7, 0xaff72d52192b6a0d), // 5^57 (0xcbea6f8ceb02bb39, 0x9bf4f8a69f764490), // 5^58 (0xfee50b7025c36a08, 0x2f236d04753d5b4), // 5^59 (0x9f4f2726179a2245, 0x1d762422c946590), // 5^60 (0xc722f0ef9d80aad6, 0x424d3ad2b7b97ef5), // 5^61 (0xf8ebad2b84e0d58b, 0xd2e0898765a7deb2), // 5^62 (0x9b934c3b330c8577, 0x63cc55f49f88eb2f), // 5^63 (0xc2781f49ffcfa6d5, 0x3cbf6b71c76b25fb), // 5^64 (0xf316271c7fc3908a, 0x8bef464e3945ef7a), // 5^65 (0x97edd871cfda3a56, 0x97758bf0e3cbb5ac), // 5^66 (0xbde94e8e43d0c8ec, 0x3d52eeed1cbea317), // 5^67 (0xed63a231d4c4fb27, 0x4ca7aaa863ee4bdd), // 5^68 (0x945e455f24fb1cf8, 0x8fe8caa93e74ef6a), // 5^69 (0xb975d6b6ee39e436, 0xb3e2fd538e122b44), // 5^70 (0xe7d34c64a9c85d44, 0x60dbbca87196b616), // 5^71 (0x90e40fbeea1d3a4a, 0xbc8955e946fe31cd), // 5^72 (0xb51d13aea4a488dd, 0x6babab6398bdbe41), // 5^73 (0xe264589a4dcdab14, 0xc696963c7eed2dd1), // 5^74 (0x8d7eb76070a08aec, 0xfc1e1de5cf543ca2), // 5^75 (0xb0de65388cc8ada8, 0x3b25a55f43294bcb), // 5^76 (0xdd15fe86affad912, 0x49ef0eb713f39ebe), // 5^77 
(0x8a2dbf142dfcc7ab, 0x6e3569326c784337), // 5^78 (0xacb92ed9397bf996, 0x49c2c37f07965404), // 5^79 (0xd7e77a8f87daf7fb, 0xdc33745ec97be906), // 5^80 (0x86f0ac99b4e8dafd, 0x69a028bb3ded71a3), // 5^81 (0xa8acd7c0222311bc, 0xc40832ea0d68ce0c), // 5^82 (0xd2d80db02aabd62b, 0xf50a3fa490c30190), // 5^83 (0x83c7088e1aab65db, 0x792667c6da79e0fa), // 5^84 (0xa4b8cab1a1563f52, 0x577001b891185938), // 5^85 (0xcde6fd5e09abcf26, 0xed4c0226b55e6f86), // 5^86 (0x80b05e5ac60b6178, 0x544f8158315b05b4), // 5^87 (0xa0dc75f1778e39d6, 0x696361ae3db1c721), // 5^88 (0xc913936dd571c84c, 0x3bc3a19cd1e38e9), // 5^89 (0xfb5878494ace3a5f, 0x4ab48a04065c723), // 5^90 (0x9d174b2dcec0e47b, 0x62eb0d64283f9c76), // 5^91 (0xc45d1df942711d9a, 0x3ba5d0bd324f8394), // 5^92 (0xf5746577930d6500, 0xca8f44ec7ee36479), // 5^93 (0x9968bf6abbe85f20, 0x7e998b13cf4e1ecb), // 5^94 (0xbfc2ef456ae276e8, 0x9e3fedd8c321a67e), // 5^95 (0xefb3ab16c59b14a2, 0xc5cfe94ef3ea101e), // 5^96 (0x95d04aee3b80ece5, 0xbba1f1d158724a12), // 5^97 (0xbb445da9ca61281f, 0x2a8a6e45ae8edc97), // 5^98 (0xea1575143cf97226, 0xf52d09d71a3293bd), // 5^99 (0x924d692ca61be758, 0x593c2626705f9c56), // 5^100 (0xb6e0c377cfa2e12e, 0x6f8b2fb00c77836c), // 5^101 (0xe498f455c38b997a, 0xb6dfb9c0f956447), // 5^102 (0x8edf98b59a373fec, 0x4724bd4189bd5eac), // 5^103 (0xb2977ee300c50fe7, 0x58edec91ec2cb657), // 5^104 (0xdf3d5e9bc0f653e1, 0x2f2967b66737e3ed), // 5^105 (0x8b865b215899f46c, 0xbd79e0d20082ee74), // 5^106 (0xae67f1e9aec07187, 0xecd8590680a3aa11), // 5^107 (0xda01ee641a708de9, 0xe80e6f4820cc9495), // 5^108 (0x884134fe908658b2, 0x3109058d147fdcdd), // 5^109 (0xaa51823e34a7eede, 0xbd4b46f0599fd415), // 5^110 (0xd4e5e2cdc1d1ea96, 0x6c9e18ac7007c91a), // 5^111 (0x850fadc09923329e, 0x3e2cf6bc604ddb0), // 5^112 (0xa6539930bf6bff45, 0x84db8346b786151c), // 5^113 (0xcfe87f7cef46ff16, 0xe612641865679a63), // 5^114 (0x81f14fae158c5f6e, 0x4fcb7e8f3f60c07e), // 5^115 (0xa26da3999aef7749, 0xe3be5e330f38f09d), // 5^116 (0xcb090c8001ab551c, 
0x5cadf5bfd3072cc5), // 5^117 (0xfdcb4fa002162a63, 0x73d9732fc7c8f7f6), // 5^118 (0x9e9f11c4014dda7e, 0x2867e7fddcdd9afa), // 5^119 (0xc646d63501a1511d, 0xb281e1fd541501b8), // 5^120 (0xf7d88bc24209a565, 0x1f225a7ca91a4226), // 5^121 (0x9ae757596946075f, 0x3375788de9b06958), // 5^122 (0xc1a12d2fc3978937, 0x52d6b1641c83ae), // 5^123 (0xf209787bb47d6b84, 0xc0678c5dbd23a49a), // 5^124 (0x9745eb4d50ce6332, 0xf840b7ba963646e0), // 5^125 (0xbd176620a501fbff, 0xb650e5a93bc3d898), // 5^126 (0xec5d3fa8ce427aff, 0xa3e51f138ab4cebe), // 5^127 (0x93ba47c980e98cdf, 0xc66f336c36b10137), // 5^128 (0xb8a8d9bbe123f017, 0xb80b0047445d4184), // 5^129 (0xe6d3102ad96cec1d, 0xa60dc059157491e5), // 5^130 (0x9043ea1ac7e41392, 0x87c89837ad68db2f), // 5^131 (0xb454e4a179dd1877, 0x29babe4598c311fb), // 5^132 (0xe16a1dc9d8545e94, 0xf4296dd6fef3d67a), // 5^133 (0x8ce2529e2734bb1d, 0x1899e4a65f58660c), // 5^134 (0xb01ae745b101e9e4, 0x5ec05dcff72e7f8f), // 5^135 (0xdc21a1171d42645d, 0x76707543f4fa1f73), // 5^136 (0x899504ae72497eba, 0x6a06494a791c53a8), // 5^137 (0xabfa45da0edbde69, 0x487db9d17636892), // 5^138 (0xd6f8d7509292d603, 0x45a9d2845d3c42b6), // 5^139 (0x865b86925b9bc5c2, 0xb8a2392ba45a9b2), // 5^140 (0xa7f26836f282b732, 0x8e6cac7768d7141e), // 5^141 (0xd1ef0244af2364ff, 0x3207d795430cd926), // 5^142 (0x8335616aed761f1f, 0x7f44e6bd49e807b8), // 5^143 (0xa402b9c5a8d3a6e7, 0x5f16206c9c6209a6), // 5^144 (0xcd036837130890a1, 0x36dba887c37a8c0f), // 5^145 (0x802221226be55a64, 0xc2494954da2c9789), // 5^146 (0xa02aa96b06deb0fd, 0xf2db9baa10b7bd6c), // 5^147 (0xc83553c5c8965d3d, 0x6f92829494e5acc7), // 5^148 (0xfa42a8b73abbf48c, 0xcb772339ba1f17f9), // 5^149 (0x9c69a97284b578d7, 0xff2a760414536efb), // 5^150 (0xc38413cf25e2d70d, 0xfef5138519684aba), // 5^151 (0xf46518c2ef5b8cd1, 0x7eb258665fc25d69), // 5^152 (0x98bf2f79d5993802, 0xef2f773ffbd97a61), // 5^153 (0xbeeefb584aff8603, 0xaafb550ffacfd8fa), // 5^154 (0xeeaaba2e5dbf6784, 0x95ba2a53f983cf38), // 5^155 (0x952ab45cfa97a0b2, 
0xdd945a747bf26183), // 5^156 (0xba756174393d88df, 0x94f971119aeef9e4), // 5^157 (0xe912b9d1478ceb17, 0x7a37cd5601aab85d), // 5^158 (0x91abb422ccb812ee, 0xac62e055c10ab33a), // 5^159 (0xb616a12b7fe617aa, 0x577b986b314d6009), // 5^160 (0xe39c49765fdf9d94, 0xed5a7e85fda0b80b), // 5^161 (0x8e41ade9fbebc27d, 0x14588f13be847307), // 5^162 (0xb1d219647ae6b31c, 0x596eb2d8ae258fc8), // 5^163 (0xde469fbd99a05fe3, 0x6fca5f8ed9aef3bb), // 5^164 (0x8aec23d680043bee, 0x25de7bb9480d5854), // 5^165 (0xada72ccc20054ae9, 0xaf561aa79a10ae6a), // 5^166 (0xd910f7ff28069da4, 0x1b2ba1518094da04), // 5^167 (0x87aa9aff79042286, 0x90fb44d2f05d0842), // 5^168 (0xa99541bf57452b28, 0x353a1607ac744a53), // 5^169 (0xd3fa922f2d1675f2, 0x42889b8997915ce8), // 5^170 (0x847c9b5d7c2e09b7, 0x69956135febada11), // 5^171 (0xa59bc234db398c25, 0x43fab9837e699095), // 5^172 (0xcf02b2c21207ef2e, 0x94f967e45e03f4bb), // 5^173 (0x8161afb94b44f57d, 0x1d1be0eebac278f5), // 5^174 (0xa1ba1ba79e1632dc, 0x6462d92a69731732), // 5^175 (0xca28a291859bbf93, 0x7d7b8f7503cfdcfe), // 5^176 (0xfcb2cb35e702af78, 0x5cda735244c3d43e), // 5^177 (0x9defbf01b061adab, 0x3a0888136afa64a7), // 5^178 (0xc56baec21c7a1916, 0x88aaa1845b8fdd0), // 5^179 (0xf6c69a72a3989f5b, 0x8aad549e57273d45), // 5^180 (0x9a3c2087a63f6399, 0x36ac54e2f678864b), // 5^181 (0xc0cb28a98fcf3c7f, 0x84576a1bb416a7dd), // 5^182 (0xf0fdf2d3f3c30b9f, 0x656d44a2a11c51d5), // 5^183 (0x969eb7c47859e743, 0x9f644ae5a4b1b325), // 5^184 (0xbc4665b596706114, 0x873d5d9f0dde1fee), // 5^185 (0xeb57ff22fc0c7959, 0xa90cb506d155a7ea), // 5^186 (0x9316ff75dd87cbd8, 0x9a7f12442d588f2), // 5^187 (0xb7dcbf5354e9bece, 0xc11ed6d538aeb2f), // 5^188 (0xe5d3ef282a242e81, 0x8f1668c8a86da5fa), // 5^189 (0x8fa475791a569d10, 0xf96e017d694487bc), // 5^190 (0xb38d92d760ec4455, 0x37c981dcc395a9ac), // 5^191 (0xe070f78d3927556a, 0x85bbe253f47b1417), // 5^192 (0x8c469ab843b89562, 0x93956d7478ccec8e), // 5^193 (0xaf58416654a6babb, 0x387ac8d1970027b2), // 5^194 (0xdb2e51bfe9d0696a, 
0x6997b05fcc0319e), // 5^195 (0x88fcf317f22241e2, 0x441fece3bdf81f03), // 5^196 (0xab3c2fddeeaad25a, 0xd527e81cad7626c3), // 5^197 (0xd60b3bd56a5586f1, 0x8a71e223d8d3b074), // 5^198 (0x85c7056562757456, 0xf6872d5667844e49), // 5^199 (0xa738c6bebb12d16c, 0xb428f8ac016561db), // 5^200 (0xd106f86e69d785c7, 0xe13336d701beba52), // 5^201 (0x82a45b450226b39c, 0xecc0024661173473), // 5^202 (0xa34d721642b06084, 0x27f002d7f95d0190), // 5^203 (0xcc20ce9bd35c78a5, 0x31ec038df7b441f4), // 5^204 (0xff290242c83396ce, 0x7e67047175a15271), // 5^205 (0x9f79a169bd203e41, 0xf0062c6e984d386), // 5^206 (0xc75809c42c684dd1, 0x52c07b78a3e60868), // 5^207 (0xf92e0c3537826145, 0xa7709a56ccdf8a82), // 5^208 (0x9bbcc7a142b17ccb, 0x88a66076400bb691), // 5^209 (0xc2abf989935ddbfe, 0x6acff893d00ea435), // 5^210 (0xf356f7ebf83552fe, 0x583f6b8c4124d43), // 5^211 (0x98165af37b2153de, 0xc3727a337a8b704a), // 5^212 (0xbe1bf1b059e9a8d6, 0x744f18c0592e4c5c), // 5^213 (0xeda2ee1c7064130c, 0x1162def06f79df73), // 5^214 (0x9485d4d1c63e8be7, 0x8addcb5645ac2ba8), // 5^215 (0xb9a74a0637ce2ee1, 0x6d953e2bd7173692), // 5^216 (0xe8111c87c5c1ba99, 0xc8fa8db6ccdd0437), // 5^217 (0x910ab1d4db9914a0, 0x1d9c9892400a22a2), // 5^218 (0xb54d5e4a127f59c8, 0x2503beb6d00cab4b), // 5^219 (0xe2a0b5dc971f303a, 0x2e44ae64840fd61d), // 5^220 (0x8da471a9de737e24, 0x5ceaecfed289e5d2), // 5^221 (0xb10d8e1456105dad, 0x7425a83e872c5f47), // 5^222 (0xdd50f1996b947518, 0xd12f124e28f77719), // 5^223 (0x8a5296ffe33cc92f, 0x82bd6b70d99aaa6f), // 5^224 (0xace73cbfdc0bfb7b, 0x636cc64d1001550b), // 5^225 (0xd8210befd30efa5a, 0x3c47f7e05401aa4e), // 5^226 (0x8714a775e3e95c78, 0x65acfaec34810a71), // 5^227 (0xa8d9d1535ce3b396, 0x7f1839a741a14d0d), // 5^228 (0xd31045a8341ca07c, 0x1ede48111209a050), // 5^229 (0x83ea2b892091e44d, 0x934aed0aab460432), // 5^230 (0xa4e4b66b68b65d60, 0xf81da84d5617853f), // 5^231 (0xce1de40642e3f4b9, 0x36251260ab9d668e), // 5^232 (0x80d2ae83e9ce78f3, 0xc1d72b7c6b426019), // 5^233 (0xa1075a24e4421730, 
0xb24cf65b8612f81f), // 5^234 (0xc94930ae1d529cfc, 0xdee033f26797b627), // 5^235 (0xfb9b7cd9a4a7443c, 0x169840ef017da3b1), // 5^236 (0x9d412e0806e88aa5, 0x8e1f289560ee864e), // 5^237 (0xc491798a08a2ad4e, 0xf1a6f2bab92a27e2), // 5^238 (0xf5b5d7ec8acb58a2, 0xae10af696774b1db), // 5^239 (0x9991a6f3d6bf1765, 0xacca6da1e0a8ef29), // 5^240 (0xbff610b0cc6edd3f, 0x17fd090a58d32af3), // 5^241 (0xeff394dcff8a948e, 0xddfc4b4cef07f5b0), // 5^242 (0x95f83d0a1fb69cd9, 0x4abdaf101564f98e), // 5^243 (0xbb764c4ca7a4440f, 0x9d6d1ad41abe37f1), // 5^244 (0xea53df5fd18d5513, 0x84c86189216dc5ed), // 5^245 (0x92746b9be2f8552c, 0x32fd3cf5b4e49bb4), // 5^246 (0xb7118682dbb66a77, 0x3fbc8c33221dc2a1), // 5^247 (0xe4d5e82392a40515, 0xfabaf3feaa5334a), // 5^248 (0x8f05b1163ba6832d, 0x29cb4d87f2a7400e), // 5^249 (0xb2c71d5bca9023f8, 0x743e20e9ef511012), // 5^250 (0xdf78e4b2bd342cf6, 0x914da9246b255416), // 5^251 (0x8bab8eefb6409c1a, 0x1ad089b6c2f7548e), // 5^252 (0xae9672aba3d0c320, 0xa184ac2473b529b1), // 5^253 (0xda3c0f568cc4f3e8, 0xc9e5d72d90a2741e), // 5^254 (0x8865899617fb1871, 0x7e2fa67c7a658892), // 5^255 (0xaa7eebfb9df9de8d, 0xddbb901b98feeab7), // 5^256 (0xd51ea6fa85785631, 0x552a74227f3ea565), // 5^257 (0x8533285c936b35de, 0xd53a88958f87275f), // 5^258 (0xa67ff273b8460356, 0x8a892abaf368f137), // 5^259 (0xd01fef10a657842c, 0x2d2b7569b0432d85), // 5^260 (0x8213f56a67f6b29b, 0x9c3b29620e29fc73), // 5^261 (0xa298f2c501f45f42, 0x8349f3ba91b47b8f), // 5^262 (0xcb3f2f7642717713, 0x241c70a936219a73), // 5^263 (0xfe0efb53d30dd4d7, 0xed238cd383aa0110), // 5^264 (0x9ec95d1463e8a506, 0xf4363804324a40aa), // 5^265 (0xc67bb4597ce2ce48, 0xb143c6053edcd0d5), // 5^266 (0xf81aa16fdc1b81da, 0xdd94b7868e94050a), // 5^267 (0x9b10a4e5e9913128, 0xca7cf2b4191c8326), // 5^268 (0xc1d4ce1f63f57d72, 0xfd1c2f611f63a3f0), // 5^269 (0xf24a01a73cf2dccf, 0xbc633b39673c8cec), // 5^270 (0x976e41088617ca01, 0xd5be0503e085d813), // 5^271 (0xbd49d14aa79dbc82, 0x4b2d8644d8a74e18), // 5^272 (0xec9c459d51852ba2, 
0xddf8e7d60ed1219e), // 5^273 (0x93e1ab8252f33b45, 0xcabb90e5c942b503), // 5^274 (0xb8da1662e7b00a17, 0x3d6a751f3b936243), // 5^275 (0xe7109bfba19c0c9d, 0xcc512670a783ad4), // 5^276 (0x906a617d450187e2, 0x27fb2b80668b24c5), // 5^277 (0xb484f9dc9641e9da, 0xb1f9f660802dedf6), // 5^278 (0xe1a63853bbd26451, 0x5e7873f8a0396973), // 5^279 (0x8d07e33455637eb2, 0xdb0b487b6423e1e8), // 5^280 (0xb049dc016abc5e5f, 0x91ce1a9a3d2cda62), // 5^281 (0xdc5c5301c56b75f7, 0x7641a140cc7810fb), // 5^282 (0x89b9b3e11b6329ba, 0xa9e904c87fcb0a9d), // 5^283 (0xac2820d9623bf429, 0x546345fa9fbdcd44), // 5^284 (0xd732290fbacaf133, 0xa97c177947ad4095), // 5^285 (0x867f59a9d4bed6c0, 0x49ed8eabcccc485d), // 5^286 (0xa81f301449ee8c70, 0x5c68f256bfff5a74), // 5^287 (0xd226fc195c6a2f8c, 0x73832eec6fff3111), // 5^288 (0x83585d8fd9c25db7, 0xc831fd53c5ff7eab), // 5^289 (0xa42e74f3d032f525, 0xba3e7ca8b77f5e55), // 5^290 (0xcd3a1230c43fb26f, 0x28ce1bd2e55f35eb), // 5^291 (0x80444b5e7aa7cf85, 0x7980d163cf5b81b3), // 5^292 (0xa0555e361951c366, 0xd7e105bcc332621f), // 5^293 (0xc86ab5c39fa63440, 0x8dd9472bf3fefaa7), // 5^294 (0xfa856334878fc150, 0xb14f98f6f0feb951), // 5^295 (0x9c935e00d4b9d8d2, 0x6ed1bf9a569f33d3), // 5^296 (0xc3b8358109e84f07, 0xa862f80ec4700c8), // 5^297 (0xf4a642e14c6262c8, 0xcd27bb612758c0fa), // 5^298 (0x98e7e9cccfbd7dbd, 0x8038d51cb897789c), // 5^299 (0xbf21e44003acdd2c, 0xe0470a63e6bd56c3), // 5^300 (0xeeea5d5004981478, 0x1858ccfce06cac74), // 5^301 (0x95527a5202df0ccb, 0xf37801e0c43ebc8), // 5^302 (0xbaa718e68396cffd, 0xd30560258f54e6ba), // 5^303 (0xe950df20247c83fd, 0x47c6b82ef32a2069), // 5^304 (0x91d28b7416cdd27e, 0x4cdc331d57fa5441), // 5^305 (0xb6472e511c81471d, 0xe0133fe4adf8e952), // 5^306 (0xe3d8f9e563a198e5, 0x58180fddd97723a6), // 5^307 (0x8e679c2f5e44ff8f, 0x570f09eaa7ea7648), // 5^308 ];
54.734724
98
0.706864
ed8bc70a7bdd31519f5286454b5b257273e1da76
188
// Helpers shared by every database backend below.
mod common;
// Each backend is compiled only when its corresponding Cargo feature is enabled,
// so consumers pay only for the databases they opt into.
#[cfg(feature = "mssql")] pub mod mssql;
#[cfg(feature = "mysql")] pub mod mysql;
#[cfg(feature = "postgres")] pub mod postgres;
#[cfg(feature = "sqlite")] pub mod sqlite;
13.428571
28
0.643617
0877efba1ac01372605d5c0026ab54bf3c81f690
4,112
//! Button/SimpleButton prototype

use crate::avm1::activation::Activation;
use crate::avm1::error::Error;
use crate::avm1::function::{Executable, FunctionObject};
use crate::avm1::globals::display_object;
use crate::avm1::property::Attribute;
use crate::avm1::{Object, ScriptObject, TObject, Value};
use crate::display_object::{Button, TDisplayObject};
use gc_arena::MutationContext;

/// Declares virtual properties on the Button prototype object.
///
/// For each `"name" => [getter]` or `"name" => [getter, setter]` entry this
/// expands to an `add_property` call whose native getter/setter closures
/// downcast `this` to a `Button` display object before delegating to the
/// named Rust functions below. Every property is added with
/// `DONT_DELETE | DONT_ENUM`.
macro_rules! with_button_props {
    ($obj:ident, $gc:ident, $fn_proto:ident, $($name:literal => [$get:ident $(, $set:ident)*],)*) => {
        $(
            $obj.add_property(
                $gc,
                $name,
                with_button_props!(getter $gc, $fn_proto, $get),
                with_button_props!(setter $gc, $fn_proto, $($set),*),
                Attribute::DONT_DELETE | Attribute::DONT_ENUM,
            );
        )*
    };

    // Internal rule: wrap a getter fn in a native FunctionObject. When
    // `this` is not a Button display object the getter yields `undefined`.
    (getter $gc:ident, $fn_proto:ident, $get:ident) => {
        FunctionObject::function(
            $gc,
            Executable::Native(
                |activation: &mut Activation<'_, 'gc, '_>, this, _args| -> Result<Value<'gc>, Error<'gc>> {
                    if let Some(display_object) = this.as_display_object() {
                        if let Some(button) = display_object.as_button() {
                            return $get(button, activation);
                        }
                    }
                    Ok(Value::Undefined)
                } as crate::avm1::function::NativeFunction<'gc>
            ),
            Some($fn_proto),
            $fn_proto
        )
    };

    // Internal rule: wrap a setter fn. The first call argument (or
    // `undefined` when absent) is cloned and forwarded as the new value.
    (setter $gc:ident, $fn_proto:ident, $set:ident) => {
        Some(FunctionObject::function(
            $gc,
            Executable::Native(
                |activation: &mut Activation<'_, 'gc, '_>, this, args| -> Result<Value<'gc>, Error<'gc>> {
                    if let Some(display_object) = this.as_display_object() {
                        if let Some(button) = display_object.as_button() {
                            let value = args
                                .get(0)
                                .unwrap_or(&Value::Undefined)
                                .clone();
                            $set(button, activation, value)?;
                        }
                    }
                    Ok(Value::Undefined)
                } as crate::avm1::function::NativeFunction<'gc>
            ),
            Some($fn_proto),
            $fn_proto)
        )
    };

    // Internal rule: no setter was listed, so the property is read-only.
    (setter $gc:ident, $fn_proto:ident,) => {
        None
    };
}

/// Builds the `Button` prototype object: the shared display-object
/// properties plus the button-specific `enabled` and `useHandCursor`
/// virtual properties.
pub fn create_proto<'gc>(
    gc_context: MutationContext<'gc, '_>,
    proto: Object<'gc>,
    fn_proto: Object<'gc>,
) -> Object<'gc> {
    let object = ScriptObject::object(gc_context, Some(proto));
    display_object::define_display_object_proto(gc_context, object, fn_proto);
    with_button_props!(
        object,
        gc_context,
        fn_proto,
        "enabled" => [enabled, set_enabled],
        "useHandCursor" => [use_hand_cursor, set_use_hand_cursor],
    );
    object.into()
}

/// Implements `Button` constructor.
pub fn constructor<'gc>(
    _activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    // The constructor performs no initialization; it just returns `this`.
    Ok(this.into())
}

/// Getter for the `enabled` property.
fn enabled<'gc>(
    this: Button<'gc>,
    _activation: &mut Activation<'_, 'gc, '_>,
) -> Result<Value<'gc>, Error<'gc>> {
    Ok(this.enabled().into())
}

/// Setter for the `enabled` property. The incoming value is coerced to a
/// boolean using the active SWF version's coercion rules.
fn set_enabled<'gc>(
    this: Button<'gc>,
    activation: &mut Activation<'_, 'gc, '_>,
    value: Value<'gc>,
) -> Result<(), Error<'gc>> {
    let enabled = value.as_bool(activation.swf_version());
    this.set_enabled(&mut activation.context, enabled);
    Ok(())
}

/// Getter for the `useHandCursor` property.
fn use_hand_cursor<'gc>(
    this: Button<'gc>,
    _activation: &mut Activation<'_, 'gc, '_>,
) -> Result<Value<'gc>, Error<'gc>> {
    Ok(this.use_hand_cursor().into())
}

/// Setter for the `useHandCursor` property; coerced to a boolean the same
/// way as `enabled`.
fn set_use_hand_cursor<'gc>(
    this: Button<'gc>,
    activation: &mut Activation<'_, 'gc, '_>,
    value: Value<'gc>,
) -> Result<(), Error<'gc>> {
    let use_hand_cursor = value.as_bool(activation.swf_version());
    this.set_use_hand_cursor(&mut activation.context, use_hand_cursor);
    Ok(())
}
31.630769
107
0.535992
1a76545011a306c2d1bd1a0c2b59e9716982c6e7
506
use {crate::common::*, maud::html};

/// Converts a handler result into a response, rendering failures as HTML.
///
/// A successful response is passed through untouched. On failure the error's
/// backtrace and message are written to `stderr` (write failures are
/// deliberately ignored), and an HTML error page carrying the error's status
/// code is returned instead.
pub(crate) fn map_error(
  mut stderr: Stderr,
  result: Result<Response<Body>, Error>,
) -> Response<Body> {
  match result {
    Ok(response) => response,
    Err(error) => {
      // Emit diagnostics first; `.ok()` discards any stderr write error.
      error.print_backtrace(&mut stderr);
      writeln!(stderr, "{}", error).ok();
      // Title the page with the status's canonical reason phrase, falling
      // back to a generic label when the status has none.
      let mut response = html::wrap_body(
        error.status().canonical_reason().unwrap_or("Error"),
        html! { h1 { (error.status()) } },
      );
      *response.status_mut() = error.status();
      response
    }
  }
}
23
59
0.579051
f78a32e5069b87b79438da6e2661937bedbdc4f9
12,066
//! Field-arithmetic tests for the BN254 field tower (`Fr`, `Fq`, `Fq2`,
//! `Fq6`, `Fq12`) plus `BigInteger256` representation checks.

use ark_ff::{
    biginteger::{BigInteger, BigInteger256},
    fields::{
        fp6_3over2::Fp6Parameters, FftField, FftParameters, Field, FpParameters, PrimeField,
        SquareRootField,
    },
    One, UniformRand, Zero,
};
use ark_serialize::{buffer_bit_byte_size, CanonicalSerialize};
use ark_std::rand::Rng;
use ark_std::test_rng;
use core::{
    cmp::Ordering,
    ops::{AddAssign, MulAssign, SubAssign},
};

use crate::{Fq, Fq12, Fq2, Fq6, Fq6Parameters, FqParameters, Fr};

use ark_algebra_test_templates::fields::*;

// Number of random samples fed to the generic template tests below.
pub(crate) const ITERATIONS: usize = 5;

// Generic field / prime-field / sqrt / serialization checks on the scalar field.
#[test]
fn test_fr() {
    let mut rng = test_rng();
    for _ in 0..ITERATIONS {
        let a: Fr = rng.gen();
        let b: Fr = rng.gen();
        field_test(a, b);
        primefield_test::<Fr>();
        sqrt_field_test(b);
        let byte_size = a.serialized_size();
        field_serialization_test::<Fr>(byte_size);
    }
}

// Same battery on the base field, additionally checking that the serialized
// size matches the size computed from the modulus bit length.
#[test]
fn test_fq() {
    let mut rng = test_rng();
    for _ in 0..ITERATIONS {
        let a: Fq = rng.gen();
        let b: Fq = rng.gen();
        field_test(a, b);
        primefield_test::<Fq>();
        sqrt_field_test(a);
        let byte_size = a.serialized_size();
        let (_, buffer_size) = buffer_bit_byte_size(Fq::size_in_bits());
        assert_eq!(byte_size, buffer_size);
        field_serialization_test::<Fq>(byte_size);
    }
}

// Quadratic extension: field axioms, square roots, Frobenius, serialization.
#[test]
fn test_fq2() {
    let mut rng = test_rng();
    for _ in 0..ITERATIONS {
        let a: Fq2 = rng.gen();
        let b: Fq2 = rng.gen();
        field_test(a, b);
        sqrt_field_test(a);
    }
    frobenius_test::<Fq2, _>(Fq::characteristic(), 13);
    let byte_size = Fq2::zero().serialized_size();
    field_serialization_test::<Fq2>(byte_size);
}

// Sextic extension: field axioms, Frobenius, serialization.
#[test]
fn test_fq6() {
    let mut rng = test_rng();
    for _ in 0..ITERATIONS {
        let g: Fq6 = rng.gen();
        let h: Fq6 = rng.gen();
        field_test(g, h);
    }
    frobenius_test::<Fq6, _>(Fq::characteristic(), 13);
    let byte_size = Fq6::zero().serialized_size();
    field_serialization_test::<Fq6>(byte_size);
}

// Degree-12 extension: field axioms, Frobenius, serialization.
#[test]
fn test_fq12() {
    let mut rng = test_rng();
    for _ in 0..ITERATIONS {
        let g: Fq12 = rng.gen();
        let h: Fq12 = rng.gen();
        field_test(g, h);
    }
    frobenius_test::<Fq12, _>(Fq::characteristic(), 13);
    let byte_size = Fq12::zero().serialized_size();
    field_serialization_test::<Fq12>(byte_size);
}

// A small u64 lands in the lowest limb of the 4-limb big integer.
#[test]
fn test_fq_repr_from() {
    assert_eq!(BigInteger256::from(100), BigInteger256([100, 0, 0, 0]));
}

// Parity predicates agree with the numeric value and with each other.
#[test]
fn test_fq_repr_is_odd() {
    assert!(!BigInteger256::from(0).is_odd());
    assert!(BigInteger256::from(0).is_even());
    assert!(BigInteger256::from(1).is_odd());
    assert!(!BigInteger256::from(1).is_even());
    assert!(!BigInteger256::from(324834872).is_odd());
    assert!(BigInteger256::from(324834872).is_even());
    assert!(BigInteger256::from(324834873).is_odd());
    assert!(!BigInteger256::from(324834873).is_even());
}

// is_zero must inspect every limb, not just the lowest one.
#[test]
fn test_fq_repr_is_zero() {
    assert!(BigInteger256::from(0).is_zero());
    assert!(!BigInteger256::from(1).is_zero());
    assert!(!BigInteger256([0, 0, 1, 0]).is_zero());
}

// num_bits tracks repeated doubling; shifting past 256 bits wraps to zero.
#[test]
fn test_fq_repr_num_bits() {
    let mut a = BigInteger256::from(0);
    assert_eq!(0, a.num_bits());
    a = BigInteger256::from(1);
    for i in 1..257 {
        assert_eq!(i, a.num_bits());
        a.mul2();
    }
    assert_eq!(0, a.num_bits());
}

#[test]
fn test_fq_add_assign() {
    // Test associativity
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        // Generate a, b, c and ensure (a + b) + c == a + (b + c).
        let a = Fq::rand(&mut rng);
        let b = Fq::rand(&mut rng);
        let c = Fq::rand(&mut rng);

        let mut tmp1 = a;
        tmp1.add_assign(&b);
        tmp1.add_assign(&c);

        let mut tmp2 = b;
        tmp2.add_assign(&c);
        tmp2.add_assign(&a);

        assert_eq!(tmp1, tmp2);
    }
}

#[test]
fn test_fq_sub_assign() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        // Ensure that (a - b) + (b - a) = 0.
        let a = Fq::rand(&mut rng);
        let b = Fq::rand(&mut rng);

        let mut tmp1 = a;
        tmp1.sub_assign(&b);

        let mut tmp2 = b;
        tmp2.sub_assign(&a);

        tmp1.add_assign(&tmp2);
        assert!(tmp1.is_zero());
    }
}

#[test]
fn test_fq_mul_assign() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000000 {
        // Ensure that (a * b) * c = a * (b * c)
        let a = Fq::rand(&mut rng);
        let b = Fq::rand(&mut rng);
        let c = Fq::rand(&mut rng);

        let mut tmp1 = a;
        tmp1.mul_assign(&b);
        tmp1.mul_assign(&c);

        let mut tmp2 = b;
        tmp2.mul_assign(&c);
        tmp2.mul_assign(&a);

        assert_eq!(tmp1, tmp2);
    }

    for _ in 0..1000000 {
        // Ensure that r * (a + b + c) = r*a + r*b + r*c
        let r = Fq::rand(&mut rng);
        let mut a = Fq::rand(&mut rng);
        let mut b = Fq::rand(&mut rng);
        let mut c = Fq::rand(&mut rng);

        let mut tmp1 = a;
        tmp1.add_assign(&b);
        tmp1.add_assign(&c);
        tmp1.mul_assign(&r);

        a.mul_assign(&r);
        b.mul_assign(&r);
        c.mul_assign(&r);

        a.add_assign(&b);
        a.add_assign(&c);

        assert_eq!(tmp1, a);
    }
}

#[test]
fn test_fq_squaring() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000000 {
        // Ensure that (a * a) = a^2
        let a = Fq::rand(&mut rng);

        let mut tmp = a;
        tmp.square_in_place();

        let mut tmp2 = a;
        tmp2.mul_assign(&a);

        assert_eq!(tmp, tmp2);
    }
}

#[test]
fn test_fq_inverse() {
    // Zero has no inverse.
    assert!(Fq::zero().inverse().is_none());

    let mut rng = ark_std::test_rng();
    let one = Fq::one();
    for _ in 0..1000 {
        // Ensure that a * a^-1 = 1
        let mut a = Fq::rand(&mut rng);
        let ainv = a.inverse().unwrap();
        a.mul_assign(&ainv);
        assert_eq!(a, one);
    }
}

#[test]
fn test_fq_double_in_place() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        // Ensure doubling a is equivalent to adding a to itself.
        let mut a = Fq::rand(&mut rng);
        let mut b = a;
        b.add_assign(&a);
        a.double_in_place();
        assert_eq!(a, b);
    }
}

#[test]
fn test_fq_negate() {
    // Negating zero must yield zero.
    {
        let a = -Fq::zero();
        assert!(a.is_zero());
    }

    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        // Ensure (a - (-a)) = 0.
        let mut a = Fq::rand(&mut rng);
        let b = -a;
        a.add_assign(&b);
        assert!(a.is_zero());
    }
}

#[test]
fn test_fq_pow() {
    let mut rng = ark_std::test_rng();
    for i in 0..1000 {
        // Exponentiate by various small numbers and ensure it is consistent
        // with repeated multiplication.
        let a = Fq::rand(&mut rng);
        let target = a.pow(&[i]);
        let mut c = Fq::one();
        for _ in 0..i {
            c.mul_assign(&a);
        }
        assert_eq!(c, target);
    }

    for _ in 0..1000 {
        // Exponentiating by the modulus should have no effect in a prime field.
        let a = Fq::rand(&mut rng);
        assert_eq!(a, a.pow(Fq::characteristic()));
    }
}

#[test]
fn test_fq_sqrt() {
    let mut rng = ark_std::test_rng();

    assert_eq!(Fq::zero().sqrt().unwrap(), Fq::zero());

    for _ in 0..1000 {
        // Ensure sqrt(a^2) = a or -a
        let a = Fq::rand(&mut rng);
        let nega = -a;
        let mut b = a;
        b.square_in_place();

        let b = b.sqrt().unwrap();

        assert!(a == b || nega == b);
    }

    for _ in 0..1000 {
        // Ensure sqrt(a)^2 = a for random a
        let a = Fq::rand(&mut rng);

        if let Some(mut tmp) = a.sqrt() {
            tmp.square_in_place();

            assert_eq!(a, tmp);
        }
    }
}

// Modulus bit-length constants for the BN254 base field.
#[test]
fn test_fq_num_bits() {
    assert_eq!(FqParameters::MODULUS_BITS, 254);
    assert_eq!(FqParameters::CAPACITY, 253);
}

// The declared 2-adic root of unity is consistent with the generator and
// has the expected multiplicative order.
#[test]
fn test_fq_root_of_unity() {
    assert_eq!(FqParameters::TWO_ADICITY, 1);
    assert_eq!(
        Fq::multiplicative_generator().pow([
            0x9e10460b6c3e7ea3,
            0xcbc0b548b438e546,
            0xdc2822db40c0ac2e,
            0x183227397098d014,
        ]),
        Fq::two_adic_root_of_unity()
    );
    assert_eq!(
        Fq::two_adic_root_of_unity().pow([1 << FqParameters::TWO_ADICITY]),
        Fq::one()
    );
    assert!(Fq::multiplicative_generator().sqrt().is_none());
}

#[test]
fn test_fq_ordering() {
    // BigInteger256's ordering is well-tested, but we still need to make sure the
    // Fq elements aren't being compared in Montgomery form.
    for i in 0..100 {
        assert!(Fq::from(BigInteger256::from(i + 1)) > Fq::from(BigInteger256::from(i)));
    }
}

// Spot-check the Legendre symbol on known residues and non-residues.
#[test]
fn test_fq_legendre() {
    use ark_ff::fields::LegendreSymbol::*;

    assert_eq!(QuadraticResidue, Fq::one().legendre());
    assert_eq!(Zero, Fq::zero().legendre());
    assert_eq!(
        QuadraticResidue,
        Fq::from(BigInteger256::from(4)).legendre()
    );
    assert_eq!(
        QuadraticNonResidue,
        Fq::from(BigInteger256::from(5)).legendre()
    );
}

// Fq2 ordering is lexicographic with c1 more significant than c0.
#[test]
fn test_fq2_ordering() {
    let mut a = Fq2::new(Fq::zero(), Fq::zero());
    let mut b = a.clone();

    assert!(a.cmp(&b) == Ordering::Equal);
    b.c0.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Less);
    a.c0.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Equal);
    b.c1.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Less);
    a.c0.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Less);
    a.c1.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Greater);
    b.c0.add_assign(&Fq::one());
    assert!(a.cmp(&b) == Ordering::Equal);
}

// Constructor / zero / one sanity checks for Fq2.
#[test]
fn test_fq2_basics() {
    assert_eq!(Fq2::new(Fq::zero(), Fq::zero(),), Fq2::zero());
    assert_eq!(Fq2::new(Fq::one(), Fq::zero(),), Fq2::one());
    assert!(Fq2::zero().is_zero());
    assert!(!Fq2::one().is_zero());
    assert!(!Fq2::new(Fq::zero(), Fq::one(),).is_zero());
}

#[test]
fn test_fq2_legendre() {
    use ark_ff::fields::LegendreSymbol::*;

    assert_eq!(Zero, Fq2::zero().legendre());
    // i^2 = -1
    let mut m1 = -Fq2::one();
    assert_eq!(QuadraticResidue, m1.legendre());
    m1 = Fq6Parameters::mul_fp2_by_nonresidue(&m1);
    assert_eq!(QuadraticNonResidue, m1.legendre());
}

// The sparse mul_by_1 shortcut agrees with a full Fq6 multiplication.
#[test]
fn test_fq6_mul_by_1() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        let c1 = Fq2::rand(&mut rng);
        let mut a = Fq6::rand(&mut rng);
        let mut b = a;

        a.mul_by_1(&c1);
        b.mul_assign(&Fq6::new(Fq2::zero(), c1, Fq2::zero()));

        assert_eq!(a, b);
    }
}

// The sparse mul_by_01 shortcut agrees with a full Fq6 multiplication.
#[test]
fn test_fq6_mul_by_01() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        let c0 = Fq2::rand(&mut rng);
        let c1 = Fq2::rand(&mut rng);
        let mut a = Fq6::rand(&mut rng);
        let mut b = a;

        a.mul_by_01(&c0, &c1);
        b.mul_assign(&Fq6::new(c0, c1, Fq2::zero()));

        assert_eq!(a, b);
    }
}

// The sparse mul_by_014 shortcut agrees with a full Fq12 multiplication.
#[test]
fn test_fq12_mul_by_014() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        let c0 = Fq2::rand(&mut rng);
        let c1 = Fq2::rand(&mut rng);
        let c5 = Fq2::rand(&mut rng);
        let mut a = Fq12::rand(&mut rng);
        let mut b = a;

        a.mul_by_014(&c0, &c1, &c5);
        b.mul_assign(&Fq12::new(
            Fq6::new(c0, c1, Fq2::zero()),
            Fq6::new(Fq2::zero(), c5, Fq2::zero()),
        ));

        assert_eq!(a, b);
    }
}

// The sparse mul_by_034 shortcut agrees with a full Fq12 multiplication.
#[test]
fn test_fq12_mul_by_034() {
    let mut rng = ark_std::test_rng();
    for _ in 0..1000 {
        let c0 = Fq2::rand(&mut rng);
        let c3 = Fq2::rand(&mut rng);
        let c4 = Fq2::rand(&mut rng);
        let mut a = Fq12::rand(&mut rng);
        let mut b = a;

        a.mul_by_034(&c0, &c3, &c4);
        b.mul_assign(&Fq12::new(
            Fq6::new(c0, Fq2::zero(), Fq2::zero()),
            Fq6::new(c3, c4, Fq2::zero()),
        ));

        assert_eq!(a, b);
    }
}
23.893069
92
0.540941
e4440fc46753ea5b68aa432b66e51c5bca84883c
34,570
// This module provides a data structure, `Ignore`, that connects "directory // traversal" with "ignore matchers." Specifically, it knows about gitignore // semantics and precedence, and is organized based on directory hierarchy. // Namely, every matcher logically corresponds to ignore rules from a single // directory, and points to the matcher for its corresponding parent directory. // In this sense, `Ignore` is a *persistent* data structure. // // This design was specifically chosen to make it possible to use this data // structure in a parallel directory iterator. // // My initial intention was to expose this module as part of this crate's // public API, but I think the data structure's public API is too complicated // with non-obvious failure modes. Alas, such things haven't been documented // well. use std::collections::HashMap; use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use std::sync::{Arc, RwLock}; use gitignore::{self, Gitignore, GitignoreBuilder}; use overrides::{self, Override}; use pathutil::{is_hidden, strip_prefix}; use types::{self, Types}; use walk::DirEntry; use {Error, Match, PartialErrorBuilder}; /// IgnoreMatch represents information about where a match came from when using /// the `Ignore` matcher. #[derive(Clone, Debug)] pub struct IgnoreMatch<'a>(IgnoreMatchInner<'a>); /// IgnoreMatchInner describes precisely where the match information came from. /// This is private to allow expansion to more matchers in the future. 
#[derive(Clone, Debug)] enum IgnoreMatchInner<'a> { Override(overrides::Glob<'a>), Gitignore(&'a gitignore::Glob), Types(types::Glob<'a>), Hidden, } impl<'a> IgnoreMatch<'a> { fn overrides(x: overrides::Glob<'a>) -> IgnoreMatch<'a> { IgnoreMatch(IgnoreMatchInner::Override(x)) } fn gitignore(x: &'a gitignore::Glob) -> IgnoreMatch<'a> { IgnoreMatch(IgnoreMatchInner::Gitignore(x)) } fn types(x: types::Glob<'a>) -> IgnoreMatch<'a> { IgnoreMatch(IgnoreMatchInner::Types(x)) } fn hidden() -> IgnoreMatch<'static> { IgnoreMatch(IgnoreMatchInner::Hidden) } } /// Options for the ignore matcher, shared between the matcher itself and the /// builder. #[derive(Clone, Copy, Debug)] struct IgnoreOptions { /// Whether to ignore hidden file paths or not. hidden: bool, /// Whether to read .ignore files. ignore: bool, /// Whether to respect any ignore files in parent directories. parents: bool, /// Whether to read git's global gitignore file. git_global: bool, /// Whether to read .gitignore files. git_ignore: bool, /// Whether to read .git/info/exclude files. git_exclude: bool, /// Whether to ignore files case insensitively ignore_case_insensitive: bool, } /// Ignore is a matcher useful for recursively walking one or more directories. #[derive(Clone, Debug)] pub struct Ignore(Arc<IgnoreInner>); #[derive(Clone, Debug)] struct IgnoreInner { /// A map of all existing directories that have already been /// compiled into matchers. /// /// Note that this is never used during matching, only when adding new /// parent directory matchers. This avoids needing to rebuild glob sets for /// parent directories if many paths are being searched. compiled: Arc<RwLock<HashMap<OsString, Ignore>>>, /// The path to the directory that this matcher was built from. dir: PathBuf, /// An override matcher (default is empty). overrides: Arc<Override>, /// A file type matcher. types: Arc<Types>, /// The parent directory to match next. 
/// /// If this is the root directory or there are otherwise no more /// directories to match, then `parent` is `None`. parent: Option<Ignore>, /// Whether this is an absolute parent matcher, as added by add_parent. is_absolute_parent: bool, /// The absolute base path of this matcher. Populated only if parent /// directories are added. absolute_base: Option<Arc<PathBuf>>, /// Explicit global ignore matchers specified by the caller. explicit_ignores: Arc<Vec<Gitignore>>, /// Ignore files used in addition to `.ignore` custom_ignore_filenames: Arc<Vec<OsString>>, /// The matcher for custom ignore files custom_ignore_matcher: Gitignore, /// The matcher for .ignore files. ignore_matcher: Gitignore, /// A global gitignore matcher, usually from $XDG_CONFIG_HOME/git/ignore. git_global_matcher: Arc<Gitignore>, /// The matcher for .gitignore files. git_ignore_matcher: Gitignore, /// Special matcher for `.git/info/exclude` files. git_exclude_matcher: Gitignore, /// Whether this directory contains a .git sub-directory. has_git: bool, /// Ignore config. opts: IgnoreOptions, } impl Ignore { /// Return the directory path of this matcher. pub fn path(&self) -> &Path { &self.0.dir } /// Return true if this matcher has no parent. pub fn is_root(&self) -> bool { self.0.parent.is_none() } /// Returns true if this matcher was added via the `add_parents` method. pub fn is_absolute_parent(&self) -> bool { self.0.is_absolute_parent } /// Return this matcher's parent, if one exists. pub fn parent(&self) -> Option<Ignore> { self.0.parent.clone() } /// Create a new `Ignore` matcher with the parent directories of `dir`. /// /// Note that this can only be called on an `Ignore` matcher with no /// parents (i.e., `is_root` returns `true`). This will panic otherwise. 
pub fn add_parents<P: AsRef<Path>>(&self, path: P) -> (Ignore, Option<Error>) { if !self.0.opts.parents && !self.0.opts.git_ignore && !self.0.opts.git_exclude && !self.0.opts.git_global { // If we never need info from parent directories, then don't do // anything. return (self.clone(), None); } if !self.is_root() { panic!("Ignore::add_parents called on non-root matcher"); } let absolute_base = match path.as_ref().canonicalize() { Ok(path) => Arc::new(path), Err(_) => { // There's not much we can do here, so just return our // existing matcher. We drop the error to be consistent // with our general pattern of ignoring I/O errors when // processing ignore files. return (self.clone(), None); } }; // List of parents, from child to root. let mut parents = vec![]; let mut path = &**absolute_base; while let Some(parent) = path.parent() { parents.push(parent); path = parent; } let mut errs = PartialErrorBuilder::default(); let mut ig = self.clone(); for parent in parents.into_iter().rev() { let mut compiled = self.0.compiled.write().unwrap(); if let Some(prebuilt) = compiled.get(parent.as_os_str()) { ig = prebuilt.clone(); continue; } let (mut igtmp, err) = ig.add_child_path(parent); errs.maybe_push(err); igtmp.is_absolute_parent = true; igtmp.absolute_base = Some(absolute_base.clone()); igtmp.has_git = if self.0.opts.git_ignore { parent.join(".git").exists() } else { false }; ig = Ignore(Arc::new(igtmp)); compiled.insert(parent.as_os_str().to_os_string(), ig.clone()); } (ig, errs.into_error_option()) } /// Create a new `Ignore` matcher for the given child directory. /// /// Since building the matcher may require reading from multiple /// files, it's possible that this method partially succeeds. Therefore, /// a matcher is always returned (which may match nothing) and an error is /// returned if it exists. /// /// Note that all I/O errors are completely ignored. 
pub fn add_child<P: AsRef<Path>>(&self, dir: P) -> (Ignore, Option<Error>) { let (ig, err) = self.add_child_path(dir.as_ref()); (Ignore(Arc::new(ig)), err) } /// Like add_child, but takes a full path and returns an IgnoreInner. fn add_child_path(&self, dir: &Path) -> (IgnoreInner, Option<Error>) { let mut errs = PartialErrorBuilder::default(); let custom_ig_matcher = if self.0.custom_ignore_filenames.is_empty() { Gitignore::empty() } else { let (m, err) = create_gitignore( &dir, &self.0.custom_ignore_filenames, self.0.opts.ignore_case_insensitive, ); errs.maybe_push(err); m }; let ig_matcher = if !self.0.opts.ignore { Gitignore::empty() } else { let (m, err) = create_gitignore(&dir, &[".ignore"], self.0.opts.ignore_case_insensitive); errs.maybe_push(err); m }; let gi_matcher = if !self.0.opts.git_ignore { Gitignore::empty() } else { let (m, err) = create_gitignore(&dir, &[".gitignore"], self.0.opts.ignore_case_insensitive); errs.maybe_push(err); m }; let gi_exclude_matcher = if !self.0.opts.git_exclude { Gitignore::empty() } else { let (m, err) = create_gitignore( &dir, &[".git/info/exclude"], self.0.opts.ignore_case_insensitive, ); errs.maybe_push(err); m }; let has_git = if self.0.opts.git_ignore { dir.join(".git").exists() } else { false }; let ig = IgnoreInner { compiled: self.0.compiled.clone(), dir: dir.to_path_buf(), overrides: self.0.overrides.clone(), types: self.0.types.clone(), parent: Some(self.clone()), is_absolute_parent: false, absolute_base: self.0.absolute_base.clone(), explicit_ignores: self.0.explicit_ignores.clone(), custom_ignore_filenames: self.0.custom_ignore_filenames.clone(), custom_ignore_matcher: custom_ig_matcher, ignore_matcher: ig_matcher, git_global_matcher: self.0.git_global_matcher.clone(), git_ignore_matcher: gi_matcher, git_exclude_matcher: gi_exclude_matcher, has_git: has_git, opts: self.0.opts, }; (ig, errs.into_error_option()) } /// Returns true if at least one type of ignore rule should be matched. 
fn has_any_ignore_rules(&self) -> bool { let opts = self.0.opts; let has_custom_ignore_files = !self.0.custom_ignore_filenames.is_empty(); let has_explicit_ignores = !self.0.explicit_ignores.is_empty(); opts.ignore || opts.git_global || opts.git_ignore || opts.git_exclude || has_custom_ignore_files || has_explicit_ignores } /// Like `matched`, but works with a directory entry instead. pub fn matched_dir_entry<'a>(&'a self, dent: &DirEntry) -> Match<IgnoreMatch<'a>> { let m = self.matched(dent.path(), dent.is_dir()); if m.is_none() && self.0.opts.hidden && is_hidden(dent) { return Match::Ignore(IgnoreMatch::hidden()); } m } /// Returns a match indicating whether the given file path should be /// ignored or not. /// /// The match contains information about its origin. fn matched<'a, P: AsRef<Path>>(&'a self, path: P, is_dir: bool) -> Match<IgnoreMatch<'a>> { // We need to be careful with our path. If it has a leading ./, then // strip it because it causes nothing but trouble. let mut path = path.as_ref(); if let Some(p) = strip_prefix("./", path) { path = p; } // Match against the override patterns. If an override matches // regardless of whether it's whitelist/ignore, then we quit and // return that result immediately. Overrides have the highest // precedence. if !self.0.overrides.is_empty() { let mat = self .0 .overrides .matched(path, is_dir) .map(IgnoreMatch::overrides); if !mat.is_none() { return mat; } } let mut whitelisted = Match::None; if self.has_any_ignore_rules() { let mat = self.matched_ignore(path, is_dir); if mat.is_ignore() { return mat; } else if mat.is_whitelist() { whitelisted = mat; } } if !self.0.types.is_empty() { let mat = self.0.types.matched(path, is_dir).map(IgnoreMatch::types); if mat.is_ignore() { return mat; } else if mat.is_whitelist() { whitelisted = mat; } } whitelisted } /// Performs matching only on the ignore files for this directory and /// all parent directories. 
fn matched_ignore<'a>(&'a self, path: &Path, is_dir: bool) -> Match<IgnoreMatch<'a>> { let (mut m_custom_ignore, mut m_ignore, mut m_gi, mut m_gi_exclude, mut m_explicit) = ( Match::None, Match::None, Match::None, Match::None, Match::None, ); let any_git = self.parents().any(|ig| ig.0.has_git); let mut saw_git = false; for ig in self.parents().take_while(|ig| !ig.0.is_absolute_parent) { if m_custom_ignore.is_none() { m_custom_ignore = ig.0.custom_ignore_matcher .matched(path, is_dir) .map(IgnoreMatch::gitignore); } if m_ignore.is_none() { m_ignore = ig.0.ignore_matcher .matched(path, is_dir) .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi.is_none() { m_gi = ig.0.git_ignore_matcher .matched(path, is_dir) .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi_exclude.is_none() { m_gi_exclude = ig.0.git_exclude_matcher .matched(path, is_dir) .map(IgnoreMatch::gitignore); } saw_git = saw_git || ig.0.has_git; } if self.0.opts.parents { if let Some(abs_parent_path) = self.absolute_base() { let path = abs_parent_path.join(path); for ig in self.parents().skip_while(|ig| !ig.0.is_absolute_parent) { if m_custom_ignore.is_none() { m_custom_ignore = ig.0.custom_ignore_matcher .matched(&path, is_dir) .map(IgnoreMatch::gitignore); } if m_ignore.is_none() { m_ignore = ig.0.ignore_matcher .matched(&path, is_dir) .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi.is_none() { m_gi = ig.0.git_ignore_matcher .matched(&path, is_dir) .map(IgnoreMatch::gitignore); } if any_git && !saw_git && m_gi_exclude.is_none() { m_gi_exclude = ig.0.git_exclude_matcher .matched(&path, is_dir) .map(IgnoreMatch::gitignore); } saw_git = saw_git || ig.0.has_git; } } } for gi in self.0.explicit_ignores.iter().rev() { if !m_explicit.is_none() { break; } m_explicit = gi.matched(&path, is_dir).map(IgnoreMatch::gitignore); } let m_global = if any_git { self.0 .git_global_matcher .matched(&path, is_dir) .map(IgnoreMatch::gitignore) } else { Match::None }; m_custom_ignore 
.or(m_ignore) .or(m_gi) .or(m_gi_exclude) .or(m_global) .or(m_explicit) } /// Returns an iterator over parent ignore matchers, including this one. pub fn parents(&self) -> Parents { Parents(Some(self)) } /// Returns the first absolute path of the first absolute parent, if /// one exists. fn absolute_base(&self) -> Option<&Path> { self.0.absolute_base.as_ref().map(|p| &***p) } } /// An iterator over all parents of an ignore matcher, including itself. /// /// The lifetime `'a` refers to the lifetime of the initial `Ignore` matcher. pub struct Parents<'a>(Option<&'a Ignore>); impl<'a> Iterator for Parents<'a> { type Item = &'a Ignore; fn next(&mut self) -> Option<&'a Ignore> { match self.0.take() { None => None, Some(ig) => { self.0 = ig.0.parent.as_ref(); Some(ig) } } } } /// A builder for creating an Ignore matcher. #[derive(Clone, Debug)] pub struct IgnoreBuilder { /// The root directory path for this ignore matcher. dir: PathBuf, /// An override matcher (default is empty). overrides: Arc<Override>, /// A type matcher (default is empty). types: Arc<Types>, /// Explicit global ignore matchers. explicit_ignores: Vec<Gitignore>, /// Ignore files in addition to .ignore. custom_ignore_filenames: Vec<OsString>, /// Ignore config. opts: IgnoreOptions, } impl IgnoreBuilder { /// Create a new builder for an `Ignore` matcher. /// /// All relative file paths are resolved with respect to the current /// working directory. pub fn new() -> IgnoreBuilder { IgnoreBuilder { dir: Path::new("").to_path_buf(), overrides: Arc::new(Override::empty()), types: Arc::new(Types::empty()), explicit_ignores: vec![], custom_ignore_filenames: vec![], opts: IgnoreOptions { hidden: true, ignore: true, parents: true, git_global: true, git_ignore: true, git_exclude: true, ignore_case_insensitive: false, }, } } /// Builds a new `Ignore` matcher. /// /// The matcher returned won't match anything until ignore rules from /// directories are added to it. 
pub fn build(&self) -> Ignore { let git_global_matcher = if !self.opts.git_global { Gitignore::empty() } else { let mut builder = GitignoreBuilder::new(""); builder .case_insensitive(self.opts.ignore_case_insensitive) .unwrap(); let (gi, err) = builder.build_global(); if let Some(err) = err { debug!("{}", err); } gi }; Ignore(Arc::new(IgnoreInner { compiled: Arc::new(RwLock::new(HashMap::new())), dir: self.dir.clone(), overrides: self.overrides.clone(), types: self.types.clone(), parent: None, is_absolute_parent: true, absolute_base: None, explicit_ignores: Arc::new(self.explicit_ignores.clone()), custom_ignore_filenames: Arc::new(self.custom_ignore_filenames.clone()), custom_ignore_matcher: Gitignore::empty(), ignore_matcher: Gitignore::empty(), git_global_matcher: Arc::new(git_global_matcher), git_ignore_matcher: Gitignore::empty(), git_exclude_matcher: Gitignore::empty(), has_git: false, opts: self.opts, })) } /// Add an override matcher. /// /// By default, no override matcher is used. /// /// This overrides any previous setting. pub fn overrides(&mut self, overrides: Override) -> &mut IgnoreBuilder { self.overrides = Arc::new(overrides); self } /// Add a file type matcher. /// /// By default, no file type matcher is used. /// /// This overrides any previous setting. pub fn types(&mut self, types: Types) -> &mut IgnoreBuilder { self.types = Arc::new(types); self } /// Adds a new global ignore matcher from the ignore file path given. pub fn add_ignore(&mut self, ig: Gitignore) -> &mut IgnoreBuilder { self.explicit_ignores.push(ig); self } /// Add a custom ignore file name /// /// These ignore files have higher precedence than all other ignore files. /// /// When specifying multiple names, earlier names have lower precedence than /// later names. pub fn add_custom_ignore_filename<S: AsRef<OsStr>>( &mut self, file_name: S, ) -> &mut IgnoreBuilder { self.custom_ignore_filenames .push(file_name.as_ref().to_os_string()); self } /// Enables ignoring hidden files. 
/// /// This is enabled by default. pub fn hidden(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.hidden = yes; self } /// Enables reading `.ignore` files. /// /// `.ignore` files have the same semantics as `gitignore` files and are /// supported by search tools such as ripgrep and The Silver Searcher. /// /// This is enabled by default. pub fn ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.ignore = yes; self } /// Enables reading ignore files from parent directories. /// /// If this is enabled, then .gitignore files in parent directories of each /// file path given are respected. Otherwise, they are ignored. /// /// This is enabled by default. pub fn parents(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.parents = yes; self } /// Add a global gitignore matcher. /// /// Its precedence is lower than both normal `.gitignore` files and /// `.git/info/exclude` files. /// /// This overwrites any previous global gitignore setting. /// /// This is enabled by default. pub fn git_global(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.git_global = yes; self } /// Enables reading `.gitignore` files. /// /// `.gitignore` files have match semantics as described in the `gitignore` /// man page. /// /// This is enabled by default. pub fn git_ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.git_ignore = yes; self } /// Enables reading `.git/info/exclude` files. /// /// `.git/info/exclude` files have match semantics as described in the /// `gitignore` man page. /// /// This is enabled by default. pub fn git_exclude(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.git_exclude = yes; self } /// Process ignore files case insensitively /// /// This is disabled by default. pub fn ignore_case_insensitive(&mut self, yes: bool) -> &mut IgnoreBuilder { self.opts.ignore_case_insensitive = yes; self } } /// Creates a new gitignore matcher for the directory given. 
/// /// Ignore globs are extracted from each of the file names in `dir` in the /// order given (earlier names have lower precedence than later names). /// /// I/O errors are ignored. pub fn create_gitignore<T: AsRef<OsStr>>( dir: &Path, names: &[T], case_insensitive: bool, ) -> (Gitignore, Option<Error>) { let mut builder = GitignoreBuilder::new(dir); let mut errs = PartialErrorBuilder::default(); builder.case_insensitive(case_insensitive).unwrap(); for name in names { let gipath = dir.join(name.as_ref()); errs.maybe_push_ignore_io(builder.add(gipath)); } let gi = match builder.build() { Ok(gi) => gi, Err(err) => { errs.push(err); GitignoreBuilder::new(dir).build().unwrap() } }; (gi, errs.into_error_option()) } #[cfg(test)] mod tests { use std::fs::{self, File}; use std::io::Write; use std::path::Path; use dir::IgnoreBuilder; use gitignore::Gitignore; use tests::TempDir; use Error; fn wfile<P: AsRef<Path>>(path: P, contents: &str) { let mut file = File::create(path).unwrap(); file.write_all(contents.as_bytes()).unwrap(); } fn mkdirp<P: AsRef<Path>>(path: P) { fs::create_dir_all(path).unwrap(); } fn partial(err: Error) -> Vec<Error> { match err { Error::Partial(errs) => errs, _ => panic!("expected partial error but got {:?}", err), } } fn tmpdir() -> TempDir { TempDir::new().unwrap() } #[test] fn explicit_ignore() { let td = tmpdir(); wfile(td.path().join("not-an-ignore"), "foo\n!bar"); let (gi, err) = Gitignore::new(td.path().join("not-an-ignore")); assert!(err.is_none()); let (ig, err) = IgnoreBuilder::new() .add_ignore(gi) .build() .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); assert!(ig.matched("baz", false).is_none()); } #[test] fn git_exclude() { let td = tmpdir(); mkdirp(td.path().join(".git/info")); wfile(td.path().join(".git/info/exclude"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); 
assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); assert!(ig.matched("baz", false).is_none()); } #[test] fn gitignore() { let td = tmpdir(); mkdirp(td.path().join(".git")); wfile(td.path().join(".gitignore"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); assert!(ig.matched("baz", false).is_none()); } #[test] fn gitignore_no_git() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_none()); assert!(ig.matched("bar", false).is_none()); assert!(ig.matched("baz", false).is_none()); } #[test] fn ignore() { let td = tmpdir(); wfile(td.path().join(".ignore"), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); assert!(ig.matched("baz", false).is_none()); } #[test] fn custom_ignore() { let td = tmpdir(); let custom_ignore = ".customignore"; wfile(td.path().join(custom_ignore), "foo\n!bar"); let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore) .build() .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_ignore()); assert!(ig.matched("bar", false).is_whitelist()); assert!(ig.matched("baz", false).is_none()); } // Tests that a custom ignore file will override an .ignore. 
#[test] fn custom_ignore_over_ignore() { let td = tmpdir(); let custom_ignore = ".customignore"; wfile(td.path().join(".ignore"), "foo"); wfile(td.path().join(custom_ignore), "!foo"); let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore) .build() .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_whitelist()); } // Tests that earlier custom ignore files have lower precedence than later. #[test] fn custom_ignore_precedence() { let td = tmpdir(); let custom_ignore1 = ".customignore1"; let custom_ignore2 = ".customignore2"; wfile(td.path().join(custom_ignore1), "foo"); wfile(td.path().join(custom_ignore2), "!foo"); let (ig, err) = IgnoreBuilder::new() .add_custom_ignore_filename(custom_ignore1) .add_custom_ignore_filename(custom_ignore2) .build() .add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_whitelist()); } // Tests that an .ignore will override a .gitignore. #[test] fn ignore_over_gitignore() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "foo"); wfile(td.path().join(".ignore"), "!foo"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("foo", false).is_whitelist()); } // Tests that exclude has lower precedent than both .ignore and .gitignore. 
#[test] fn exclude_lowest() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "!foo"); wfile(td.path().join(".ignore"), "!bar"); mkdirp(td.path().join(".git/info")); wfile(td.path().join(".git/info/exclude"), "foo\nbar\nbaz"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); assert!(ig.matched("baz", false).is_ignore()); assert!(ig.matched("foo", false).is_whitelist()); assert!(ig.matched("bar", false).is_whitelist()); } #[test] fn errored() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo"); let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_some()); } #[test] fn errored_both() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo"); wfile(td.path().join(".ignore"), "{bar"); let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); assert_eq!(2, partial(err.expect("an error")).len()); } #[test] fn errored_partial() { let td = tmpdir(); mkdirp(td.path().join(".git")); wfile(td.path().join(".gitignore"), "{foo\nbar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_some()); assert!(ig.matched("bar", false).is_ignore()); } #[test] fn errored_partial_and_ignore() { let td = tmpdir(); wfile(td.path().join(".gitignore"), "{foo\nbar"); wfile(td.path().join(".ignore"), "!bar"); let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_some()); assert!(ig.matched("bar", false).is_whitelist()); } #[test] fn not_present_empty() { let td = tmpdir(); let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); assert!(err.is_none()); } #[test] fn stops_at_git_dir() { // This tests that .gitignore files beyond a .git barrier aren't // matched, but .ignore files are. 
let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("foo/.git")); wfile(td.path().join(".gitignore"), "foo"); wfile(td.path().join(".ignore"), "bar"); let ig0 = IgnoreBuilder::new().build(); let (ig1, err) = ig0.add_child(td.path()); assert!(err.is_none()); let (ig2, err) = ig1.add_child(ig1.path().join("foo")); assert!(err.is_none()); assert!(ig1.matched("foo", false).is_ignore()); assert!(ig2.matched("foo", false).is_none()); assert!(ig1.matched("bar", false).is_ignore()); assert!(ig2.matched("bar", false).is_ignore()); } #[test] fn absolute_parent() { let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("foo")); wfile(td.path().join(".gitignore"), "bar"); // First, check that the parent gitignore file isn't detected if the // parent isn't added. This establishes a baseline. let ig0 = IgnoreBuilder::new().build(); let (ig1, err) = ig0.add_child(td.path().join("foo")); assert!(err.is_none()); assert!(ig1.matched("bar", false).is_none()); // Second, check that adding a parent directory actually works. let ig0 = IgnoreBuilder::new().build(); let (ig1, err) = ig0.add_parents(td.path().join("foo")); assert!(err.is_none()); let (ig2, err) = ig1.add_child(td.path().join("foo")); assert!(err.is_none()); assert!(ig2.matched("bar", false).is_ignore()); } #[test] fn absolute_parent_anchored() { let td = tmpdir(); mkdirp(td.path().join(".git")); mkdirp(td.path().join("src/llvm")); wfile(td.path().join(".gitignore"), "/llvm/\nfoo"); let ig0 = IgnoreBuilder::new().build(); let (ig1, err) = ig0.add_parents(td.path().join("src")); assert!(err.is_none()); let (ig2, err) = ig1.add_child("src"); assert!(err.is_none()); assert!(ig1.matched("llvm", true).is_none()); assert!(ig2.matched("llvm", true).is_none()); assert!(ig2.matched("src/llvm", true).is_none()); assert!(ig2.matched("foo", false).is_ignore()); assert!(ig2.matched("src/foo", false).is_ignore()); } }
34.743719
95
0.562019
5d7c40a82f6965bd88d34ff367c33cec73a9ab75
9,977
// encoder.rs // // Copyright (c) 2019 Minnesota Department of Transportation // //! Encoder for Mapbox Vector Tile (MVT) geometry. //! use std::vec::Vec; use crate::error::Error; use crate::geom::{Transform, Vec2}; #[derive(Clone, Debug)] enum Command { MoveTo = 1, LineTo = 2, ClosePath = 7, } struct CommandInt { id: Command, count: u32, } struct ParamInt { value: i32, } /// Geometry types for [Features](struct.Feature.html). #[derive(Clone, Copy, Debug)] pub enum GeomType { /// Point or multipoint Point, /// Linestring or Multilinestring Linestring, /// Polygon or Multipolygon Polygon, } /// Encoder for [Feature](struct.Feature.html) geometry. This can consist of /// Point, Linestring or Polygon data. /// /// # Example /// ``` /// # use mvt::{Error,GeomEncoder,GeomType,Transform}; /// # fn main() -> Result<(), Error> { /// let geom_data = GeomEncoder::new(GeomType::Point, Transform::new()) /// .point(0.0, 0.0) /// .point(10.0, 0.0) /// .encode()?; /// # Ok(()) } /// ``` pub struct GeomEncoder { geom_tp: GeomType, transform: Transform, x: i32, y: i32, cmd_offset: usize, count: u32, data: Vec<u32>, } /// Validated geometry data for [Feature](struct.Feature.html)s. Use /// [GeomEncoder](struct.GeomEncoder.html) to encode. /// /// # Example /// ``` /// # use mvt::{Error,GeomEncoder,GeomType,Transform}; /// # fn main() -> Result<(), Error> { /// let geom_data = GeomEncoder::new(GeomType::Point, Transform::new()) /// .point(0.0, 0.0) /// .point(10.0, 0.0) /// .encode()?; /// # Ok(()) } /// ``` pub struct GeomData { geom_tp: GeomType, data: Vec<u32>, } impl CommandInt { fn new(id: Command, count: u32) -> Self { CommandInt { id, count } } fn encode(&self) -> u32 { ((self.id.clone() as u32) & 0x7) | (self.count << 3) } } impl ParamInt { fn new(value: i32) -> Self { ParamInt { value } } fn encode(&self) -> u32 { ((self.value << 1) ^ (self.value >> 31)) as u32 } } impl GeomEncoder { /// Create a new geometry encoder. /// /// * `geom_tp` Geometry type. 
/// * `transform` Transform to apply to geometry. pub fn new(geom_tp: GeomType, transform: Transform) -> Self { GeomEncoder { geom_tp, transform, x: 0, y: 0, count: 0, cmd_offset: 0, data: vec![], } } /// Add a Command fn command(&mut self, cmd: Command, count: u32) { self.cmd_offset = self.data.len(); debug!("command: {:?}", &cmd); self.data.push(CommandInt::new(cmd, count).encode()); } /// Set count of the most recent Command. fn set_command(&mut self, cmd: Command, count: u32) { let off = self.cmd_offset; self.data[off] = CommandInt::new(cmd, count).encode(); } /// Push one point with relative coörindates. fn push_point(&mut self, x: f64, y: f64) { let p = self.transform * Vec2::new(x, y); let x = p.x as i32; let y = p.y as i32; self.data .push(ParamInt::new(x.saturating_sub(self.x)).encode()); self.data .push(ParamInt::new(y.saturating_sub(self.y)).encode()); debug!("point: {},{}", x, y); self.x = x; self.y = y; } /// Add a point. pub fn add_point(&mut self, x: f64, y: f64) { match self.geom_tp { GeomType::Point => { if self.count == 0 { self.command(Command::MoveTo, 1); } } GeomType::Linestring => match self.count { 0 => self.command(Command::MoveTo, 1), 1 => self.command(Command::LineTo, 1), _ => (), }, GeomType::Polygon => match self.count { 0 => self.command(Command::MoveTo, 1), 1 => self.command(Command::LineTo, 1), _ => (), }, } self.push_point(x, y); self.count += 1; } /// Add a point, taking ownership (for method chaining). pub fn point(mut self, x: f64, y: f64) -> Self { self.add_point(x, y); self } /// Complete the current geometry (for multilinestring / multipolygon). 
pub fn complete_geom(&mut self) -> Result<(), Error> { // FIXME: return Error::InvalidGeometry // if "MUST" rules in the spec are violated match self.geom_tp { GeomType::Point => (), GeomType::Linestring => { if self.count > 1 { self.set_command(Command::LineTo, self.count - 1); } self.count = 0; } GeomType::Polygon => { if self.count > 1 { self.set_command(Command::LineTo, self.count - 1); self.command(Command::ClosePath, 1); } self.count = 0; } } Ok(()) } /// Complete the current geometry (for multilinestring / multipolygon). pub fn complete(mut self) -> Result<Self, Error> { self.complete_geom()?; Ok(self) } /// Encode the geometry data, consuming the encoder. pub fn encode(mut self) -> Result<GeomData, Error> { // FIXME: return Error::InvalidGeometry // if "MUST" rules in the spec are violated self = if let GeomType::Point = self.geom_tp { if self.count > 1 { self.set_command(Command::MoveTo, self.count); } self } else { self.complete()? }; Ok(GeomData::new(self.geom_tp, self.data)) } } impl GeomData { /// Create new geometry data. /// /// * `geom_tp` Geometry type. /// * `data` Validated geometry. 
fn new(geom_tp: GeomType, data: Vec<u32>) -> Self { GeomData { geom_tp, data } } /// Get the geometry type pub(crate) fn geom_type(&self) -> GeomType { self.geom_tp } /// Get the geometry data pub(crate) fn into_vec(self) -> Vec<u32> { self.data } } #[cfg(test)] mod test { use super::*; // Examples from MVT spec: #[test] fn test_point() { let v = GeomEncoder::new(GeomType::Point, Transform::new()) .point(25.0, 17.0) .encode() .unwrap() .into_vec(); assert_eq!(v, vec!(9, 50, 34)); } #[test] fn test_multipoint() { let v = GeomEncoder::new(GeomType::Point, Transform::new()) .point(5.0, 7.0) .point(3.0, 2.0) .encode() .unwrap() .into_vec(); assert_eq!(v, vec!(17, 10, 14, 3, 9)); } #[test] fn test_linestring() { let v = GeomEncoder::new(GeomType::Linestring, Transform::new()) .point(2.0, 2.0) .point(2.0, 10.0) .point(10.0, 10.0) .encode() .unwrap() .into_vec(); assert_eq!(v, vec!(9, 4, 4, 18, 0, 16, 16, 0)); } #[test] fn test_multilinestring() { let v = GeomEncoder::new(GeomType::Linestring, Transform::new()) .point(2.0, 2.0) .point(2.0, 10.0) .point(10.0, 10.0) .complete() .unwrap() .point(1.0, 1.0) .point(3.0, 5.0) .encode() .unwrap() .into_vec(); assert_eq!(v, vec!(9, 4, 4, 18, 0, 16, 16, 0, 9, 17, 17, 10, 4, 8)); } #[test] fn test_polygon() { let v = GeomEncoder::new(GeomType::Polygon, Transform::new()) .point(3.0, 6.0) .point(8.0, 12.0) .point(20.0, 34.0) .encode() .unwrap() .into_vec(); assert_eq!(v, vec!(9, 6, 12, 18, 10, 12, 24, 44, 15)); } #[test] fn test_multipolygon() { let v = GeomEncoder::new(GeomType::Polygon, Transform::new()) // positive area => exterior ring .point(0.0, 0.0) .point(10.0, 0.0) .point(10.0, 10.0) .point(0.0, 10.0) .complete() .unwrap() // positive area => exterior ring .point(11.0, 11.0) .point(20.0, 11.0) .point(20.0, 20.0) .point(11.0, 20.0) .complete() .unwrap() // negative area => interior ring .point(13.0, 13.0) .point(13.0, 17.0) .point(17.0, 17.0) .point(17.0, 13.0) .encode() .unwrap() .into_vec(); assert_eq!( v, vec!( 9, 0, 0, 26, 
20, 0, 0, 20, 19, 0, 15, 9, 22, 2, 26, 18, 0, 0, 18, 17, 0, 15, 9, 4, 13, 26, 0, 8, 8, 0, 0, 7, 15 ) ); } }
30.233333
77
0.438609
9133e31a86e40cd050ef38ceecbf8db78395c8e7
6,877
use crate::common::get_xpub_data; use crate::Result; use common::error::CoinError; use core::result; use transport::message::send_apdu; use bch_addr::Converter; use bitcoin::util::address::Error as BtcAddressError; use bitcoin::{Address as BtcAddress, Network, PublicKey, Script}; use common::apdu::{Apdu, ApduCheck, BtcApdu}; use common::constants::BTC_AID; use common::path::check_path_validity; use common::utility; use device::device_binding::KEY_MANAGER; use transport::message; use std::fmt::{Display, Formatter}; use std::str::FromStr; fn legacy_to_bch(addr: &str) -> Result<String> { let convert = Converter::new(); let bch_addr = if convert.is_legacy_addr(&addr) { convert .to_cash_addr(&addr) .map_err(|_| CoinError::ConvertToCashAddressFailed)? } else { addr.to_string() }; Ok(remove_bch_prefix(&bch_addr)) } fn bch_to_legacy(addr: &str) -> Result<String> { let convert = Converter::new(); if !convert.is_legacy_addr(&addr) { convert .to_legacy_addr(&addr) .map_err(|_| CoinError::ConvertToLegacyAddressFailed.into()) } else { Ok(addr.to_string()) } } fn remove_bch_prefix(addr: &str) -> String { if let Some(sep) = addr.rfind(':') { if addr.len() > sep + 1 { return addr.split_at(sep + 1).1.to_owned(); } } return addr.to_owned(); } #[derive(Debug, Clone, PartialEq)] pub struct BchAddress(pub BtcAddress); impl BchAddress { pub fn convert_to_legacy_if_need(addr: &str) -> Result<String> { if Converter::default().is_cash_addr(addr) { bch_to_legacy(addr) } else { Ok(addr.to_string()) } } pub fn get_pub_key(network: Network, path: &str) -> Result<String> { //path check check_path_validity(path)?; let select_apdu = Apdu::select_applet(BTC_AID); let select_response = message::send_apdu(select_apdu)?; ApduCheck::check_response(&select_response)?; //get xpub data let res_msg_pubkey = get_xpub_data(path, true)?; let sign_source_val = &res_msg_pubkey[..194]; let sign_result = &res_msg_pubkey[194..res_msg_pubkey.len() - 4]; let key_manager_obj = KEY_MANAGER.lock(); let 
sign_verify_result = utility::secp256k1_sign_verify( &key_manager_obj.se_pub_key, hex::decode(sign_result).unwrap().as_slice(), hex::decode(sign_source_val).unwrap().as_slice(), )?; if !sign_verify_result { return Err(CoinError::ImkeySignatureVerifyFail.into()); } let uncomprs_pubkey: String = res_msg_pubkey.chars().take(130).collect(); Ok(uncomprs_pubkey) } /** get btc address by path */ pub fn get_address(network: Network, path: &str) -> Result<String> { //path check check_path_validity(path)?; //get pub key let pub_key = Self::get_pub_key(network, path)?; let mut pub_key_obj = PublicKey::from_str(&pub_key)?; pub_key_obj.compressed = true; let addr = BtcAddress::p2pkh(&pub_key_obj, network).to_string(); legacy_to_bch(&addr) } pub fn display_address(network: Network, path: &str) -> Result<String> { //path check check_path_validity(path)?; let address_str = Self::get_address(network, path)?; let apdu_res = send_apdu(BtcApdu::register_name_address( "BCH".as_bytes(), &address_str.clone().into_bytes().to_vec(), ))?; ApduCheck::check_response(apdu_res.as_str())?; Ok(address_str) } pub fn script_pubkey(target_addr: &str) -> Result<Script> { let target_addr = BchAddress::convert_to_legacy_if_need(target_addr)?; let addr = BtcAddress::from_str(&target_addr)?; Ok(addr.script_pubkey()) } pub fn is_valid(address: &str) -> bool { let converter = Converter::default(); if converter.is_legacy_addr(address) || converter.is_cash_addr(address) { return true; } else { return false; } } } impl FromStr for BchAddress { type Err = BtcAddressError; fn from_str(s: &str) -> result::Result<BchAddress, BtcAddressError> { let legacy = bch_to_legacy(s).expect("_bch_to_legacy"); let btc_addr = BtcAddress::from_str(&legacy)?; Ok(BchAddress(btc_addr)) } } impl Display for BchAddress { fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { let legacy = self.0.to_string(); let baddr = legacy_to_bch(&legacy).expect("legacy_to_bch"); std::fmt::Display::fmt(&baddr, f) } } #[cfg(test)] mod tests { 
use crate::address::BchAddress; use bitcoin::Network; use device::device_binding::bind_test; #[test] pub fn test_convert() { assert_eq!( BchAddress::convert_to_legacy_if_need("2N54wJxopnWTvBfqgAPVWqXVEdaqoH7Suvf").unwrap(), "2N54wJxopnWTvBfqgAPVWqXVEdaqoH7Suvf" ); assert_eq!( BchAddress::convert_to_legacy_if_need("qqyta3mqzeaxe8hqcdsgpy4srwd4f0fc0gj0njf885") .unwrap(), "1oEx5Ztg2DUDYJDxb1AeaiG5TYesikMVU" ); } #[test] fn get_address_test() { bind_test(); let network: Network = Network::Bitcoin; let path: &str = "m/44'/145'/0'/0/0"; let get_btc_address_result = BchAddress::get_address(network, path); assert!(get_btc_address_result.is_ok()); let btc_address = get_btc_address_result.ok().unwrap(); assert_eq!("qzld7dav7d2sfjdl6x9snkvf6raj8lfxjcj5fa8y2r", btc_address); let network: Network = Network::Bitcoin; let path: &str = "m/44'/145'/0'/1/0"; let get_btc_address_result = BchAddress::get_address(network, path); assert!(get_btc_address_result.is_ok()); let btc_address = get_btc_address_result.ok().unwrap(); assert_eq!("qq5jyy9vmsznss93gmt8m2v2fep7wvpdwsn2hrjgsg", btc_address); let network: Network = Network::Testnet; let path: &str = "m/44'/145'/0'/0/0"; let get_btc_address_result = BchAddress::get_address(network, path); assert!(get_btc_address_result.is_ok()); let btc_address = get_btc_address_result.ok().unwrap(); assert_eq!("qzld7dav7d2sfjdl6x9snkvf6raj8lfxjckxd69ndl", btc_address); } #[test] fn display_address_test() { bind_test(); let version: Network = Network::Bitcoin; let path: &str = "m/44'/145'/0'/0/0"; let get_btc_address_result = BchAddress::display_address(version, path); assert!(get_btc_address_result.is_ok()); let btc_address = get_btc_address_result.ok().unwrap(); assert_eq!("qzld7dav7d2sfjdl6x9snkvf6raj8lfxjcj5fa8y2r", btc_address); } }
32.438679
98
0.633125
61905658d5e6d671ba23e0cd511ddf358fe523fe
5,995
//! Navigator backend for web use js_sys::{Array, ArrayBuffer, Uint8Array}; use ruffle_core::backend::navigator::{ url_from_relative_url, NavigationMethod, NavigatorBackend, OwnedFuture, RequestOptions, }; use ruffle_core::loader::Error; use std::borrow::Cow; use std::collections::HashMap; use std::time::Duration; use wasm_bindgen::JsCast; use wasm_bindgen_futures::{spawn_local, JsFuture}; use web_sys::{window, Blob, BlobPropertyBag, Performance, Request, RequestInit, Response}; pub struct WebNavigatorBackend { performance: Performance, start_time: f64, } impl WebNavigatorBackend { pub fn new() -> Self { let window = web_sys::window().expect("window()"); let performance = window.performance().expect("window.performance()"); WebNavigatorBackend { start_time: performance.now(), performance, } } } impl NavigatorBackend for WebNavigatorBackend { fn navigate_to_url( &self, url: String, window_spec: Option<String>, vars_method: Option<(NavigationMethod, HashMap<String, String>)>, ) { if let Some(window) = window() { //TODO: Should we return a result for failed opens? Does Flash care? 
#[allow(unused_must_use)] match (vars_method, window_spec) { (Some((navmethod, formvars)), window_spec) => { let document = match window.document() { Some(document) => document, None => return, }; let form = document .create_element("form") .unwrap() .dyn_into::<web_sys::HtmlFormElement>() .unwrap(); form.set_attribute( "method", match navmethod { NavigationMethod::GET => "get", NavigationMethod::POST => "post", }, ); form.set_attribute("action", &url); if let Some(target) = window_spec { form.set_attribute("target", &target); } for (k, v) in formvars.iter() { let hidden = document.create_element("hidden").unwrap(); hidden.set_attribute("type", "hidden"); hidden.set_attribute("name", k); hidden.set_attribute("value", v); form.append_child(&hidden); } document.body().unwrap().append_child(&form); form.submit(); } (_, Some(ref window_name)) if window_name != "" => { window.open_with_url_and_target(&url, window_name); } _ => { window.location().assign(&url); } }; } } fn time_since_launch(&mut self) -> Duration { let dt = self.performance.now() - self.start_time; Duration::from_millis(dt as u64) } fn fetch(&self, url: &str, options: RequestOptions) -> OwnedFuture<Vec<u8>, Error> { let url = url.to_string(); Box::pin(async move { let mut init = RequestInit::new(); init.method(match options.method() { NavigationMethod::GET => "GET", NavigationMethod::POST => "POST", }); if let Some((data, mime)) = options.body() { let arraydata = ArrayBuffer::new(data.len() as u32); let u8data = Uint8Array::new(&arraydata); for (i, byte) in data.iter().enumerate() { u8data.fill(*byte, i as u32, i as u32 + 1); } let blobparts = Array::new(); blobparts.push(&arraydata); let mut blobprops = BlobPropertyBag::new(); blobprops.type_(mime); let datablob = Blob::new_with_buffer_source_sequence_and_options(&blobparts, &blobprops) .unwrap() .dyn_into() .unwrap(); init.body(Some(&datablob)); } let request = Request::new_with_str_and_init(&url, &init).unwrap(); let window = 
web_sys::window().unwrap(); let fetchval = JsFuture::from(window.fetch_with_request(&request)).await; if fetchval.is_err() { return Err(Error::NetworkError(std::io::Error::new( std::io::ErrorKind::Other, "Could not fetch, got JS Error", ))); } let resp: Response = fetchval.unwrap().dyn_into().unwrap(); let data: ArrayBuffer = JsFuture::from(resp.array_buffer().unwrap()) .await .unwrap() .dyn_into() .unwrap(); let jsarray = Uint8Array::new(&data); let mut rust_array = vec![0; jsarray.length() as usize]; jsarray.copy_to(&mut rust_array); Ok(rust_array) }) } fn spawn_future(&mut self, future: OwnedFuture<(), Error>) { spawn_local(async move { if let Err(e) = future.await { log::error!("Asynchronous error occured: {}", e); } }) } fn resolve_relative_url<'a>(&mut self, url: &'a str) -> Cow<'a, str> { let window = web_sys::window().expect("window()"); let document = window.document().expect("document()"); if let Ok(Some(base_uri)) = document.base_uri() { if let Ok(new_url) = url_from_relative_url(&base_uri, url) { return new_url.into_string().into(); } } url.into() } }
34.0625
93
0.497581
abf32af45e9453a3c22212a49a683b09162abde9
73
pub fn init() { wasm_logger::init(wasm_logger::Config::default()); }
18.25
54
0.657534
d6e0bd5cb0dcccd702a44dc3ba07ad6c9d49213a
15,574
#[doc = "Register `DORMANT_WAKE_INTS3` reader"] pub struct R(crate::R<DORMANT_WAKE_INTS3_SPEC>); impl core::ops::Deref for R { type Target = crate::R<DORMANT_WAKE_INTS3_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<DORMANT_WAKE_INTS3_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<DORMANT_WAKE_INTS3_SPEC>) -> Self { R(reader) } } #[doc = "Field `GPIO29_EDGE_HIGH` reader - "] pub struct GPIO29_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO29_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO29_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO29_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO29_EDGE_LOW` reader - "] pub struct GPIO29_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl GPIO29_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO29_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO29_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO29_LEVEL_HIGH` reader - "] pub struct GPIO29_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO29_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO29_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO29_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO29_LEVEL_LOW` reader - "] pub struct GPIO29_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO29_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO29_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO29_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO28_EDGE_HIGH` reader - "] pub struct 
GPIO28_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO28_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO28_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO28_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO28_EDGE_LOW` reader - "] pub struct GPIO28_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl GPIO28_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO28_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO28_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO28_LEVEL_HIGH` reader - "] pub struct GPIO28_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO28_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO28_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO28_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO28_LEVEL_LOW` reader - "] pub struct GPIO28_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO28_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO28_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO28_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO27_EDGE_HIGH` reader - "] pub struct GPIO27_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO27_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO27_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO27_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO27_EDGE_LOW` reader - "] pub struct GPIO27_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl 
GPIO27_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO27_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO27_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO27_LEVEL_HIGH` reader - "] pub struct GPIO27_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO27_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO27_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO27_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO27_LEVEL_LOW` reader - "] pub struct GPIO27_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO27_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO27_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO27_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO26_EDGE_HIGH` reader - "] pub struct GPIO26_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO26_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO26_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO26_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO26_EDGE_LOW` reader - "] pub struct GPIO26_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl GPIO26_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO26_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO26_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO26_LEVEL_HIGH` reader - "] pub struct GPIO26_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO26_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { 
GPIO26_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO26_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO26_LEVEL_LOW` reader - "] pub struct GPIO26_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO26_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO26_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO26_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO25_EDGE_HIGH` reader - "] pub struct GPIO25_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO25_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO25_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO25_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO25_EDGE_LOW` reader - "] pub struct GPIO25_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl GPIO25_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO25_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO25_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO25_LEVEL_HIGH` reader - "] pub struct GPIO25_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO25_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO25_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO25_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO25_LEVEL_LOW` reader - "] pub struct GPIO25_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO25_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO25_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl 
core::ops::Deref for GPIO25_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO24_EDGE_HIGH` reader - "] pub struct GPIO24_EDGE_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO24_EDGE_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO24_EDGE_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO24_EDGE_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO24_EDGE_LOW` reader - "] pub struct GPIO24_EDGE_LOW_R(crate::FieldReader<bool, bool>); impl GPIO24_EDGE_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO24_EDGE_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO24_EDGE_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO24_LEVEL_HIGH` reader - "] pub struct GPIO24_LEVEL_HIGH_R(crate::FieldReader<bool, bool>); impl GPIO24_LEVEL_HIGH_R { pub(crate) fn new(bits: bool) -> Self { GPIO24_LEVEL_HIGH_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO24_LEVEL_HIGH_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `GPIO24_LEVEL_LOW` reader - "] pub struct GPIO24_LEVEL_LOW_R(crate::FieldReader<bool, bool>); impl GPIO24_LEVEL_LOW_R { pub(crate) fn new(bits: bool) -> Self { GPIO24_LEVEL_LOW_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for GPIO24_LEVEL_LOW_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl R { #[doc = "Bit 23"] #[inline(always)] pub fn gpio29_edge_high(&self) -> GPIO29_EDGE_HIGH_R { GPIO29_EDGE_HIGH_R::new(((self.bits >> 23) & 0x01) != 0) } #[doc = "Bit 22"] #[inline(always)] pub fn gpio29_edge_low(&self) -> GPIO29_EDGE_LOW_R { GPIO29_EDGE_LOW_R::new(((self.bits >> 22) & 0x01) 
!= 0) } #[doc = "Bit 21"] #[inline(always)] pub fn gpio29_level_high(&self) -> GPIO29_LEVEL_HIGH_R { GPIO29_LEVEL_HIGH_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bit 20"] #[inline(always)] pub fn gpio29_level_low(&self) -> GPIO29_LEVEL_LOW_R { GPIO29_LEVEL_LOW_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 19"] #[inline(always)] pub fn gpio28_edge_high(&self) -> GPIO28_EDGE_HIGH_R { GPIO28_EDGE_HIGH_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 18"] #[inline(always)] pub fn gpio28_edge_low(&self) -> GPIO28_EDGE_LOW_R { GPIO28_EDGE_LOW_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 17"] #[inline(always)] pub fn gpio28_level_high(&self) -> GPIO28_LEVEL_HIGH_R { GPIO28_LEVEL_HIGH_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 16"] #[inline(always)] pub fn gpio28_level_low(&self) -> GPIO28_LEVEL_LOW_R { GPIO28_LEVEL_LOW_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 15"] #[inline(always)] pub fn gpio27_edge_high(&self) -> GPIO27_EDGE_HIGH_R { GPIO27_EDGE_HIGH_R::new(((self.bits >> 15) & 0x01) != 0) } #[doc = "Bit 14"] #[inline(always)] pub fn gpio27_edge_low(&self) -> GPIO27_EDGE_LOW_R { GPIO27_EDGE_LOW_R::new(((self.bits >> 14) & 0x01) != 0) } #[doc = "Bit 13"] #[inline(always)] pub fn gpio27_level_high(&self) -> GPIO27_LEVEL_HIGH_R { GPIO27_LEVEL_HIGH_R::new(((self.bits >> 13) & 0x01) != 0) } #[doc = "Bit 12"] #[inline(always)] pub fn gpio27_level_low(&self) -> GPIO27_LEVEL_LOW_R { GPIO27_LEVEL_LOW_R::new(((self.bits >> 12) & 0x01) != 0) } #[doc = "Bit 11"] #[inline(always)] pub fn gpio26_edge_high(&self) -> GPIO26_EDGE_HIGH_R { GPIO26_EDGE_HIGH_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 10"] #[inline(always)] pub fn gpio26_edge_low(&self) -> GPIO26_EDGE_LOW_R { GPIO26_EDGE_LOW_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 9"] #[inline(always)] pub fn gpio26_level_high(&self) -> GPIO26_LEVEL_HIGH_R { GPIO26_LEVEL_HIGH_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 8"] #[inline(always)] pub 
fn gpio26_level_low(&self) -> GPIO26_LEVEL_LOW_R { GPIO26_LEVEL_LOW_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 7"] #[inline(always)] pub fn gpio25_edge_high(&self) -> GPIO25_EDGE_HIGH_R { GPIO25_EDGE_HIGH_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 6"] #[inline(always)] pub fn gpio25_edge_low(&self) -> GPIO25_EDGE_LOW_R { GPIO25_EDGE_LOW_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 5"] #[inline(always)] pub fn gpio25_level_high(&self) -> GPIO25_LEVEL_HIGH_R { GPIO25_LEVEL_HIGH_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 4"] #[inline(always)] pub fn gpio25_level_low(&self) -> GPIO25_LEVEL_LOW_R { GPIO25_LEVEL_LOW_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 3"] #[inline(always)] pub fn gpio24_edge_high(&self) -> GPIO24_EDGE_HIGH_R { GPIO24_EDGE_HIGH_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 2"] #[inline(always)] pub fn gpio24_edge_low(&self) -> GPIO24_EDGE_LOW_R { GPIO24_EDGE_LOW_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 1"] #[inline(always)] pub fn gpio24_level_high(&self) -> GPIO24_LEVEL_HIGH_R { GPIO24_LEVEL_HIGH_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 0"] #[inline(always)] pub fn gpio24_level_low(&self) -> GPIO24_LEVEL_LOW_R { GPIO24_LEVEL_LOW_R::new((self.bits & 0x01) != 0) } } #[doc = "Interrupt status after masking & forcing for dormant_wake This register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api). For information about available fields see [dormant_wake_ints3](index.html) module"] pub struct DORMANT_WAKE_INTS3_SPEC; impl crate::RegisterSpec for DORMANT_WAKE_INTS3_SPEC { type Ux = u32; } #[doc = "`read()` method returns [dormant_wake_ints3::R](R) reader structure"] impl crate::Readable for DORMANT_WAKE_INTS3_SPEC { type Reader = R; } #[doc = "`reset()` method sets DORMANT_WAKE_INTS3 to value 0"] impl crate::Resettable for DORMANT_WAKE_INTS3_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
31.526316
122
0.631309
d5df975eae3ef19e205808846d824d227dc84771
19,884
use vulkano::buffer::{BufferUsage, CpuAccessibleBuffer}; use vulkano::command_buffer::{AutoCommandBufferBuilder, DynamicState}; use vulkano::device::{Device, DeviceExtensions}; use vulkano::framebuffer::{Framebuffer, FramebufferAbstract, RenderPassAbstract, Subpass}; use vulkano::image::{ImageUsage, SwapchainImage}; use vulkano::instance::{Instance, PhysicalDevice}; use vulkano::pipeline::viewport::Viewport; use vulkano::pipeline::GraphicsPipeline; use vulkano::swapchain; use vulkano::swapchain::{ AcquireError, ColorSpace, FullscreenExclusive, PresentMode, SurfaceTransform, Swapchain, SwapchainCreationError, }; use vulkano::sync; use vulkano::sync::{FlushError, GpuFuture}; use vulkano_win::VkSurfaceBuild; use winit::event::{Event, WindowEvent}; use winit::event_loop::{ControlFlow, EventLoop}; use winit::window::{Window, WindowBuilder}; use std::sync::Arc; use rand::Rng; use std::io; mod game_data; fn main() { // Here I am just trying out the game data structures game_data::initialize(); let mut player_ship = game_data::Ship::create("PlayerShip",20,25,35); let mut rng = rand::thread_rng(); while player_ship.is_alive(){ player_ship.regen(); player_ship.print_current(); player_ship.take_damage(rng.gen_range(5,16)); player_ship.print_current(); } // Get list of required extensions let required_extensions = vulkano_win::required_extensions(); // Now creating the instance. 
let instance = Instance::new(None, &required_extensions, None).unwrap(); // Chose the first instance as the device with .next() TODO: make selection up to user let device_nr = 0; for device in PhysicalDevice::enumerate(&instance){ let device_nr = device_nr + 1; println!( "Device {0}: {1} (type: {2:?}", device_nr, device.name(), device.ty() ); } println!("Enter device number to use"); let mut inputline = String::new(); io::stdin().read_line(&mut inputline).expect("Failed to read line."); let device_nr_input: u32 = match inputline.trim().parse(){ Ok(num) => num, Err(_) => panic!{"non number"} }; println!("{}",device_nr_input); let physical = PhysicalDevice::enumerate(&instance).next().unwrap(); // Some little debug infos. println!( "Using device: {} (type: {:?})", physical.name(), physical.ty() ); // Create a `vulkano::swapchain::Surface` by building a window for it let event_loop = EventLoop::new(); let surface = WindowBuilder::new() .build_vk_surface(&event_loop, instance.clone()) .unwrap(); // Attain GPU queue, TODO: use more queues, separately for graphics and transfers let queue_family = physical .queue_families() .find(|&q| { // We take the first queue that supports drawing to our window. q.supports_graphics() && surface.is_supported(q).unwrap_or(false) }) .unwrap(); // Now initializing the device. TODO: add more extensions/layers if needed let device_ext = DeviceExtensions { khr_swapchain: true, ..DeviceExtensions::none() }; let (device, mut queues) = Device::new( physical, physical.supported_features(), &device_ext, [(queue_family, 0.5)].iter().cloned(), ) .unwrap(); // Only one queue now, make it one instance by removing it from the iterator let queue = queues.next().unwrap(); // Create swapchain let (mut swapchain, images) = { // Querying the capabilities of the surface. When we create the swapchain we can only // pass values that are allowed by the capabilities. 
let caps = surface.capabilities(physical).unwrap(); // The alpha mode indicates how the alpha value of the final image will behave. For example // you can choose whether the window will be opaque or transparent. let alpha = caps.supported_composite_alpha.iter().next().unwrap(); // Choosing the internal format that the images will have. let format = caps.supported_formats[0].0; // set swapchange buffer dimensions let dimensions: [u32; 2] = surface.window().inner_size().into(); // TODO: check meaning of other parameters Swapchain::new( device.clone(), surface.clone(), caps.min_image_count, format, dimensions, 1, ImageUsage::color_attachment(), &queue, SurfaceTransform::Identity, alpha, PresentMode::Fifo, FullscreenExclusive::Default, true, ColorSpace::SrgbNonLinear, ) .unwrap() }; // We now create a buffer that will store the shape of our triangle. let vertex_buffer = { #[derive(Default, Debug, Clone)] struct Vertex { position: [f32; 3], } vulkano::impl_vertex!(Vertex, position); CpuAccessibleBuffer::from_iter( device.clone(), BufferUsage::all(), false, [ Vertex { position: [-0.5, -0.25, 0.0], }, Vertex { position: [0.0, 0.5, 0.0], }, Vertex { position: [0.25, -0.1, 0.0], }, Vertex { position: [-0.5, -0.25, 0.0], }, Vertex { position: [0.3, -0.5, 0.0], }, Vertex { position: [0.0, 0.5, 0.0], }, ] .iter() .cloned(), ) .unwrap() }; // TODO: read the vulkano-shaders::shader! crate docs https://docs.rs/vulkano-shaders/ mod vs { vulkano_shaders::shader! { ty: "vertex", src: " #version 450 layout(location = 0) in vec3 position; void main() { gl_Position = vec4(position, 1.0); } " } } mod fs { vulkano_shaders::shader! 
{ ty: "fragment", src: " #version 450 layout(location = 0) out vec4 f_color; void main() { f_color = vec4(1.0, 0.0, 0.0, 1.0); } " } } let vs = vs::Shader::load(device.clone()).unwrap(); let fs = fs::Shader::load(device.clone()).unwrap(); // Create renderpass let render_pass = Arc::new( vulkano::single_pass_renderpass!( device.clone(), attachments: { // `color` is a custom name we give to the first and only attachment. color: { // `load: Clear` means that we ask the GPU to clear the content of this // attachment at the start of the drawing. load: Clear, // `store: Store` means that we ask the GPU to store the output of the draw // in the actual image. We could also ask it to discard the result. store: Store, // `format: <ty>` indicates the type of the format of the image. This has to // be one of the types of the `vulkano::format` module (or alternatively one // of your structs that implements the `FormatDesc` trait). Here we use the // same format as the swapchain. format: swapchain.format(), // TODO: change the renderpass to do more stuff samples: 1, } }, pass: { // We use the attachment named `color` as the one and only color attachment. color: [color], // No depth-stencil attachment is indicated with empty brackets. depth_stencil: {} } ) .unwrap(), ); // Before we draw we have to create what is called a pipeline. This is similar to an OpenGL // program, but much more specific. let pipeline = Arc::new( GraphicsPipeline::start() // We need to indicate the layout of the vertices. // The type `SingleBufferDefinition` actually contains a template parameter corresponding // to the type of each vertex. But in this code it is automatically inferred. .vertex_input_single_buffer() // A Vulkan shader can in theory contain multiple entry points, so we have to specify // which one. The `main` word of `main_entry_point` actually corresponds to the name of // the entry point. 
.vertex_shader(vs.main_entry_point(), ()) // The content of the vertex buffer describes a list of triangles. .triangle_list() // Use a resizable viewport set to draw over the entire window .viewports_dynamic_scissors_irrelevant(1) // See `vertex_shader`. .fragment_shader(fs.main_entry_point(), ()) // We have to indicate which subpass of which render pass this pipeline is going to be used // in. The pipeline will only be usable from this particular subpass. .render_pass(Subpass::from(render_pass.clone(), 0).unwrap()) // Now that our builder is filled, we call `build()` to obtain an actual pipeline. .build(device.clone()) .unwrap(), ); // Dynamic viewports allow us to recreate just the viewport when the window is resized // Otherwise we would have to recreate the whole pipeline. let mut dynamic_state = DynamicState { line_width: None, viewports: None, scissors: None, compare_mask: None, write_mask: None, reference: None, }; // The render pass we created above only describes the layout of our framebuffers. Before we // can draw we also need to create the actual framebuffers. // // Since we need to draw to multiple images, we are going to create a different framebuffer for // each image. let mut framebuffers = window_size_dependent_setup(&images, render_pass.clone(), &mut dynamic_state); // Initialization is finally finished! // In some situations, the swapchain will become invalid by itself. This includes for example // when the window is resized (as the images of the swapchain will no longer match the // window's) or, on Android, when the application went to the background and goes back to the // foreground. // // In this situation, acquiring a swapchain image or presenting it will return an error. // Rendering to an image of that swapchain will not produce any error, but may or may not work. // To continue rendering, we need to recreate the swapchain by creating a new swapchain. // Here, we remember that we need to do this for the next loop iteration. 
// Tracks whether the swapchain must be rebuilt before the next frame.
// Set on window resize, or when acquire/present reports it is out of date.
let mut recreate_swapchain = false;

// In the loop below we are going to submit commands to the GPU. Submitting a command produces
// an object that implements the `GpuFuture` trait, which holds the resources for as long as
// they are in use by the GPU.
//
// Destroying the `GpuFuture` blocks until the GPU is finished executing it. In order to avoid
// that, we store the submission of the previous frame here.
let mut previous_frame_end = Some(sync::now(device.clone()).boxed());

event_loop.run(move |event, _, control_flow| {
    //TODO: learn about event loops
    match event {
        // The user asked to close the window: stop the event loop.
        Event::WindowEvent {
            event: WindowEvent::CloseRequested,
            ..
        } => {
            *control_flow = ControlFlow::Exit;
        }
        // The window was resized: the swapchain images no longer match the
        // surface size. Only flag the rebuild here; the actual recreation
        // happens at the start of the next redraw below.
        Event::WindowEvent {
            event: WindowEvent::Resized(_),
            ..
        } => {
            recreate_swapchain = true;
        }
        // All queued redraw events have been processed: render one frame.
        Event::RedrawEventsCleared => {
            // It is important to call this function from time to time, otherwise resources will keep
            // accumulating and you will eventually reach an out of memory error.
            // Calling this function polls various fences in order to determine what the GPU has
            // already processed, and frees the resources that are no longer needed.
            previous_frame_end.as_mut().unwrap().cleanup_finished();

            // Whenever the window resizes we need to recreate everything dependent on the window size.
            // In this example that includes the swapchain, the framebuffers and the dynamic state viewport.
            if recreate_swapchain {
                // Get the new dimensions of the window.
                let dimensions: [u32; 2] = surface.window().inner_size().into();
                let (new_swapchain, new_images) =
                    match swapchain.recreate_with_dimensions(dimensions) {
                        Ok(r) => r,
                        // This error tends to happen when the user is manually resizing the window.
                        // Simply restarting the loop is the easiest way to fix this issue.
                        Err(SwapchainCreationError::UnsupportedDimensions) => return,
                        Err(e) => panic!("Failed to recreate swapchain: {:?}", e),
                    };

                swapchain = new_swapchain;
                // Because framebuffers contains an Arc on the old swapchain, we need to
                // recreate framebuffers as well.
                framebuffers = window_size_dependent_setup(
                    &new_images,
                    render_pass.clone(),
                    &mut dynamic_state,
                );
                recreate_swapchain = false;
            }

            // Before we can draw on the output, we have to *acquire* an image from the swapchain. If
            // no image is available (which happens if you submit draw commands too quickly), then the
            // function will block.
            // This operation returns the index of the image that we are allowed to draw upon.
            //
            // This function can block if no image is available. The parameter is an optional timeout
            // after which the function call will return an error.
            let (image_num, suboptimal, acquire_future) =
                match swapchain::acquire_next_image(swapchain.clone(), None) {
                    Ok(r) => r,
                    // The swapchain no longer matches the surface; rebuild it
                    // on the next iteration and skip this frame.
                    Err(AcquireError::OutOfDate) => {
                        recreate_swapchain = true;
                        return;
                    }
                    Err(e) => panic!("Failed to acquire next image: {:?}", e),
                };

            // acquire_next_image can be successful, but suboptimal. This means that the swapchain image
            // will still work, but it may not display correctly. With some drivers this can be when
            // the window resizes, but it may not cause the swapchain to become out of date.
            if suboptimal {
                recreate_swapchain = true;
            }

            // Specify the color to clear the framebuffer with — here a dark gray
            // (RGBA 0.1, 0.1, 0.1, 1.0), one clear value per attachment.
            let clear_values = vec![[0.1, 0.1, 0.1, 1.0].into()];

            // In order to draw, we have to build a *command buffer*. The command buffer object holds
            // the list of commands that are going to be executed.
            //
            // Building a command buffer is an expensive operation (usually a few hundred
            // microseconds), but it is known to be a hot path in the driver and is expected to be
            // optimized.
            //
            // Note that we have to pass a queue family when we create the command buffer. The command
            // buffer will only be executable on that given queue family.
            let mut builder = AutoCommandBufferBuilder::primary_one_time_submit(
                device.clone(),
                queue.family(),
            )
            .unwrap();

            builder
                //TODO: learn `draw_secondary`
                .begin_render_pass(framebuffers[image_num].clone(), false, clear_values)
                .unwrap()
                // The last two parameters contain the list of resources to pass to the shaders.
                // Since we used an `EmptyPipeline` object, the objects have to be `()`.
                //TODO: are textures added here??
                .draw(
                    pipeline.clone(),
                    &dynamic_state,
                    vertex_buffer.clone(),
                    (),
                    (),
                )
                .unwrap()
                .end_render_pass()
                .unwrap();

            // Finish building the command buffer by calling `build`.
            let command_buffer = builder.build().unwrap();

            // Chain the GPU work for this frame: wait for the previous frame's
            // submission, wait for the acquired image, then execute our commands.
            let future = previous_frame_end
                .take()
                .unwrap()
                .join(acquire_future)
                .then_execute(queue.clone(), command_buffer)
                .unwrap()
                // The color output is now expected to contain our triangle. But in order to show it on
                // the screen, we have to *present* the image by calling `present`.
                //
                // This function does not actually present the image immediately. Instead it submits a
                // present command at the end of the queue. This means that it will only be presented once
                // the GPU has finished executing the command buffer that draws the triangle.
                .then_swapchain_present(queue.clone(), swapchain.clone(), image_num)
                .then_signal_fence_and_flush();

            // Store this frame's submission so the next iteration can wait on it;
            // on failure fall back to an already-signalled "now" future so the
            // loop keeps running.
            match future {
                Ok(future) => {
                    previous_frame_end = Some(future.boxed());
                }
                // The swapchain became invalid between acquire and present:
                // rebuild it next frame.
                Err(FlushError::OutOfDate) => {
                    recreate_swapchain = true;
                    previous_frame_end = Some(sync::now(device.clone()).boxed());
                }
                // Any other flush error is logged but not fatal.
                Err(e) => {
                    println!("Failed to flush future: {:?}", e);
                    previous_frame_end = Some(sync::now(device.clone()).boxed());
                }
            }
        }
        //TODO: Add events handled at frame timings
        _ => (),
    }
    //TODO: test to add things here, that really needs to be done every loop
    //println!("test");
});
}

/// Called once during initialization, then again whenever the window is resized.
///
/// Rebuilds everything that depends on the window size: updates the dynamic
/// viewport in `dynamic_state` to cover the full surface, and creates one
/// framebuffer per swapchain image attached to `render_pass`.
///
/// * `images` - the current swapchain images (all share the same dimensions).
/// * `render_pass` - the render pass the framebuffers are built for.
/// * `dynamic_state` - receives the new full-window viewport.
///
/// Returns the new framebuffers, in the same order as `images`.
fn window_size_dependent_setup(
    images: &[Arc<SwapchainImage<Window>>],
    render_pass: Arc<dyn RenderPassAbstract + Send + Sync>,
    dynamic_state: &mut DynamicState,
) -> Vec<Arc<dyn FramebufferAbstract + Send + Sync>> {
    // All swapchain images have the same size, so the first one is representative.
    let dimensions = images[0].dimensions();

    // Viewport spanning the entire window, with the standard 0..1 depth range.
    let viewport = Viewport {
        origin: [0.0, 0.0],
        dimensions: [dimensions[0] as f32, dimensions[1] as f32],
        depth_range: 0.0..1.0,
    };
    dynamic_state.viewports = Some(vec![viewport]);

    // One framebuffer per swapchain image, each with that image as its sole attachment.
    images
        .iter()
        .map(|image| {
            Arc::new(
                Framebuffer::start(render_pass.clone())
                    .add(image.clone())
                    .unwrap()
                    .build()
                    .unwrap(),
            ) as Arc<dyn FramebufferAbstract + Send + Sync>
        })
        .collect::<Vec<_>>()
}
40.414634
112
0.559747