hexsha
stringlengths
40
40
size
int64
2
1.05M
content
stringlengths
2
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
c168d5c8a2eac9e58752c2cb6d4ddedd08bc2ec6
7,549
//! # The Rust Core Library //! //! The Rust Core Library is the dependency-free[^free] foundation of [The //! Rust Standard Library](../std/index.html). It is the portable glue //! between the language and its libraries, defining the intrinsic and //! primitive building blocks of all Rust code. It links to no //! upstream libraries, no system libraries, and no libc. //! //! [^free]: Strictly speaking, there are some symbols which are needed but //! they aren't always necessary. //! //! The core library is *minimal*: it isn't even aware of heap allocation, //! nor does it provide concurrency or I/O. These things require //! platform integration, and this library is platform-agnostic. //! //! # How to use the core library //! //! Please note that all of these details are currently not considered stable. //! // FIXME: Fill me in with more detail when the interface settles //! This library is built on the assumption of a few existing symbols: //! //! * `memcpy`, `memcmp`, `memset` - These are core memory routines which are //! often generated by LLVM. Additionally, this library can make explicit //! calls to these functions. Their signatures are the same as found in C. //! These functions are often provided by the system libc, but can also be //! provided by the [compiler-builtins crate](https://crates.io/crates/compiler_builtins). //! //! * `rust_begin_panic` - This function takes four arguments, a //! `fmt::Arguments`, a `&'static str`, and two `u32`'s. These four arguments //! dictate the panic message, the file at which panic was invoked, and the //! line and column inside the file. It is up to consumers of this core //! library to define this panic function; it is only required to never //! return. This requires a `lang` attribute named `panic_impl`. //! //! * `rust_eh_personality` - is used by the failure mechanisms of the //! compiler. This is often mapped to GCC's personality function, but crates //! 
which do not trigger a panic can be assured that this function is never //! called. The `lang` attribute is called `eh_personality`. // Since libcore defines many fundamental lang items, all tests live in a // separate crate, libcoretest, to avoid bizarre issues. // // Here we explicitly #[cfg]-out this whole crate when testing. If we don't do // this, both the generated test artifact and the linked libtest (which // transitively includes libcore) will both define the same set of lang items, // and this will cause the E0152 "duplicate lang item found" error. See // discussion in #50466 for details. // // This cfg won't affect doc tests. #![cfg(not(test))] #![stable(feature = "core", since = "1.6.0")] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))] #![no_core] #![warn(deprecated_in_future)] #![warn(missing_docs)] #![warn(missing_debug_implementations)] #![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings #![allow(explicit_outlives_requirements)] #![allow(incomplete_features)] #![feature(allow_internal_unstable)] #![feature(arbitrary_self_types)] #![feature(asm)] #![feature(bound_cloned)] #![feature(cfg_target_has_atomic)] #![feature(concat_idents)] #![feature(const_fn)] #![feature(const_fn_union)] #![feature(const_generics)] #![feature(custom_inner_attributes)] #![feature(decl_macro)] #![feature(doc_cfg)] #![feature(doc_spotlight)] #![feature(extern_types)] #![feature(fundamental)] #![feature(intrinsics)] #![feature(is_sorted)] #![feature(iter_once_with)] #![feature(lang_items)] #![feature(link_llvm_intrinsics)] #![feature(never_type)] #![feature(nll)] #![feature(bind_by_move_pattern_guards)] #![feature(exhaustive_patterns)] #![feature(no_core)] #![feature(on_unimplemented)] 
#![feature(optin_builtin_traits)] #![feature(prelude_import)] #![feature(repr_simd, platform_intrinsics)] #![feature(rustc_attrs)] #![feature(rustc_const_unstable)] #![feature(simd_ffi)] #![feature(specialization)] #![feature(staged_api)] #![feature(std_internals)] #![feature(stmt_expr_attributes)] #![feature(transparent_unions)] #![feature(unboxed_closures)] #![feature(unsized_locals)] #![feature(untagged_unions)] #![feature(unwind_attributes)] #![feature(doc_alias)] #![feature(mmx_target_feature)] #![feature(tbm_target_feature)] #![feature(sse4a_target_feature)] #![feature(arm_target_feature)] #![feature(powerpc_target_feature)] #![feature(mips_target_feature)] #![feature(aarch64_target_feature)] #![feature(wasm_target_feature)] #![feature(avx512_target_feature)] #![feature(cmpxchg16b_target_feature)] #![feature(rtm_target_feature)] #![feature(f16c_target_feature)] #![feature(hexagon_target_feature)] #![feature(const_slice_len)] #![feature(const_str_as_bytes)] #![feature(const_str_len)] #![feature(const_int_conversion)] #![feature(const_transmute)] #![feature(non_exhaustive)] #![feature(structural_match)] #![feature(abi_unadjusted)] #![feature(adx_target_feature)] #![feature(maybe_uninit_slice)] #![feature(external_doc)] #![feature(mem_take)] #![feature(associated_type_bounds)] #[prelude_import] #[allow(unused)] use prelude::v1::*; #[macro_use] mod macros; #[macro_use] mod internal_macros; #[path = "num/int_macros.rs"] #[macro_use] mod int_macros; #[path = "num/uint_macros.rs"] #[macro_use] mod uint_macros; #[path = "num/isize.rs"] pub mod isize; #[path = "num/i8.rs"] pub mod i8; #[path = "num/i16.rs"] pub mod i16; #[path = "num/i32.rs"] pub mod i32; #[path = "num/i64.rs"] pub mod i64; #[path = "num/i128.rs"] pub mod i128; #[path = "num/usize.rs"] pub mod usize; #[path = "num/u8.rs"] pub mod u8; #[path = "num/u16.rs"] pub mod u16; #[path = "num/u32.rs"] pub mod u32; #[path = "num/u64.rs"] pub mod u64; #[path = "num/u128.rs"] pub mod u128; #[path = "num/f32.rs"] 
pub mod f32; #[path = "num/f64.rs"] pub mod f64; #[macro_use] pub mod num; /* The libcore prelude, not as all-encompassing as the libstd prelude */ pub mod prelude; /* Core modules for ownership management */ pub mod intrinsics; pub mod mem; pub mod ptr; pub mod hint; /* Core language traits */ pub mod marker; pub mod ops; pub mod cmp; pub mod clone; pub mod default; pub mod convert; pub mod borrow; /* Core types and methods on primitives */ pub mod any; pub mod array; pub mod ascii; pub mod sync; pub mod cell; pub mod char; pub mod panic; pub mod panicking; pub mod pin; pub mod iter; pub mod option; pub mod raw; pub mod result; pub mod ffi; pub mod slice; pub mod str; pub mod hash; pub mod fmt; pub mod time; pub mod unicode; /* Async */ pub mod future; pub mod task; /* Heap memory allocator trait */ #[allow(missing_docs)] pub mod alloc; // note: does not need to be public mod tuple; mod unit; // Pull in the `core_arch` crate directly into libcore. The contents of // `core_arch` are in a different repository: rust-lang/stdarch. // // `core_arch` depends on libcore, but the contents of this module are // set up in such a way that directly pulling it here works such that the // crate uses the this crate as its libcore. #[path = "../stdarch/crates/core_arch/src/mod.rs"] #[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)] #[unstable(feature = "stdsimd", issue = "48556")] mod core_arch; #[stable(feature = "simd_arch", since = "1.27.0")] pub use core_arch::arch;
30.686992
92
0.718638
1daf7f0456ca2de8990d542b9662590b566b2a47
12,403
use bevy_ecs::ResMut; use std::marker::PhantomData; #[derive(Debug)] struct EventInstance<T> { pub event_count: usize, pub event: T, } #[derive(Debug)] enum State { A, B, } /// An event collection that represents the events that occurred within the last two [Events::update] calls. Events can be cheaply read using /// an [EventReader]. This collection is meant to be paired with a system that calls [Events::update] exactly once per update/frame. [Events::update_system] /// is a system that does this. [EventReader]s are expected to read events from this collection at least once per update/frame. If events are not handled /// within one frame/update, they will be dropped. /// /// # Example /// ``` /// use bevy_app::Events; /// /// struct MyEvent { /// value: usize /// } /// /// // setup /// let mut events = Events::<MyEvent>::default(); /// let mut reader = events.get_reader(); /// /// // run this once per update/frame /// events.update(); /// /// // somewhere else: send an event /// events.send(MyEvent { value: 1 }); /// /// // somewhere else: read the events /// for event in reader.iter(&events) { /// assert_eq!(event.value, 1) /// } /// /// // events are only processed once per reader /// assert_eq!(reader.iter(&events).count(), 0); /// ``` /// /// # Details /// /// [Events] is implemented using a double buffer. Each call to [Events::update] swaps buffers and clears out the oldest buffer. /// [EventReader]s that read at least once per update will never drop events. [EventReader]s that read once within two updates might /// still receive some events. [EventReader]s that read after two updates are guaranteed to drop all events that occurred before those updates. /// /// The buffers in [Events] will grow indefinitely if [Events::update] is never called. /// /// An alternative call pattern would be to call [Events::update] manually across frames to control when events are cleared. 
However /// this complicates consumption #[derive(Debug)] pub struct Events<T> { events_a: Vec<EventInstance<T>>, events_b: Vec<EventInstance<T>>, a_start_event_count: usize, b_start_event_count: usize, event_count: usize, state: State, } impl<T> Default for Events<T> { fn default() -> Self { Events { a_start_event_count: 0, b_start_event_count: 0, event_count: 0, events_a: Vec::new(), events_b: Vec::new(), state: State::A, } } } fn map_instance_event<T>(event_instance: &EventInstance<T>) -> &T { &event_instance.event } /// Reads events of type `T` in order and tracks which events have already been read. #[derive(Debug)] pub struct EventReader<T> { last_event_count: usize, _marker: PhantomData<T>, } impl<T> Default for EventReader<T> { fn default() -> Self { Self { last_event_count: 0, _marker: PhantomData::default(), } } } impl<T> EventReader<T> { /// Iterates over the events this EventReader has not seen yet. This updates the EventReader's /// event counter, which means subsequent event reads will not include events that happened before now. pub fn iter<'a>(&mut self, events: &'a Events<T>) -> impl DoubleEndedIterator<Item = &'a T> { // if the reader has seen some of the events in a buffer, find the proper index offset. // otherwise read all events in the buffer let a_index = if self.last_event_count > events.a_start_event_count { self.last_event_count - events.a_start_event_count } else { 0 }; let b_index = if self.last_event_count > events.b_start_event_count { self.last_event_count - events.b_start_event_count } else { 0 }; self.last_event_count = events.event_count; match events.state { State::A => events .events_b .get(b_index..) .unwrap_or_else(|| &[]) .iter() .map(map_instance_event) .chain( events .events_a .get(a_index..) .unwrap_or_else(|| &[]) .iter() .map(map_instance_event), ), State::B => events .events_a .get(a_index..) .unwrap_or_else(|| &[]) .iter() .map(map_instance_event) .chain( events .events_b .get(b_index..) 
.unwrap_or_else(|| &[]) .iter() .map(map_instance_event), ), } } /// Retrieves the latest event that this EventReader hasn't seen yet. This updates the EventReader's /// event counter, which means subsequent event reads will not include events that happened before now. pub fn latest<'a>(&mut self, events: &'a Events<T>) -> Option<&'a T> { self.iter(events).rev().next() } /// Retrieves the latest event that matches the given `predicate` that this reader hasn't seen yet. This updates the EventReader's /// event counter, which means subsequent event reads will not include events that happened before now. pub fn find_latest<'a>( &mut self, events: &'a Events<T>, predicate: impl FnMut(&&T) -> bool, ) -> Option<&'a T> { self.iter(events).rev().find(predicate) } /// Retrieves the earliest event in `events` that this reader hasn't seen yet. This updates the EventReader's /// event counter, which means subsequent event reads will not include events that happened before now. pub fn earliest<'a>(&mut self, events: &'a Events<T>) -> Option<&'a T> { self.iter(events).next() } } impl<T: bevy_ecs::Resource> Events<T> { /// "Sends" an `event` by writing it to the current event buffer. [EventReader]s can then read the event. pub fn send(&mut self, event: T) { let event_instance = EventInstance { event, event_count: self.event_count, }; match self.state { State::A => self.events_a.push(event_instance), State::B => self.events_b.push(event_instance), } self.event_count += 1; } /// Gets a new [EventReader]. This will include all events already in the event buffers. pub fn get_reader(&self) -> EventReader<T> { EventReader { last_event_count: 0, _marker: PhantomData, } } /// Gets a new [EventReader]. This will ignore all events already in the event buffers. It will read all future events. pub fn get_reader_current(&self) -> EventReader<T> { EventReader { last_event_count: self.event_count, _marker: PhantomData, } } /// Swaps the event buffers and clears the oldest event buffer. 
In general, this should be called once per frame/update. pub fn update(&mut self) { match self.state { State::A => { self.events_b = Vec::new(); self.state = State::B; self.b_start_event_count = self.event_count; } State::B => { self.events_a = Vec::new(); self.state = State::A; self.a_start_event_count = self.event_count; } } } /// A system that calls [Events::update] once per frame. pub fn update_system(mut events: ResMut<Self>) { events.update(); } /// Removes all events. pub fn clear(&mut self) { self.events_a.clear(); self.events_b.clear(); } /// Creates a draining iterator that removes all events. pub fn drain<'a>(&'a mut self) -> impl Iterator<Item = T> + 'a { let map = |i: EventInstance<T>| i.event; match self.state { State::A => self .events_b .drain(..) .map(map) .chain(self.events_a.drain(..).map(map)), State::B => self .events_a .drain(..) .map(map) .chain(self.events_b.drain(..).map(map)), } } pub fn extend<I>(&mut self, events: I) where I: Iterator<Item = T>, { for event in events { self.send(event); } } /// Iterates over events that happened since the last "update" call. /// WARNING: You probably don't want to use this call. In most cases you should use an `EventReader`. You should only use /// this if you know you only need to consume events between the last `update()` call and your call to `iter_current_update_events`. /// If events happen outside that window, they will not be handled. For example, any events that happen after this call and before /// the next `update()` call will be dropped. 
pub fn iter_current_update_events(&self) -> impl DoubleEndedIterator<Item = &T> { match self.state { State::A => self.events_a.iter().map(map_instance_event), State::B => self.events_b.iter().map(map_instance_event), } } } #[cfg(test)] mod tests { use super::*; #[derive(Copy, Clone, PartialEq, Eq, Debug)] struct TestEvent { i: usize, } #[test] fn test_events() { let mut events = Events::<TestEvent>::default(); let event_0 = TestEvent { i: 0 }; let event_1 = TestEvent { i: 1 }; let event_2 = TestEvent { i: 2 }; // this reader will miss event_0 and event_1 because it wont read them over the course of two updates let mut reader_missed = events.get_reader(); let mut reader_a = events.get_reader(); events.send(event_0); assert_eq!( get_events(&events, &mut reader_a), vec![event_0], "reader_a created before event receives event" ); assert_eq!( get_events(&events, &mut reader_a), vec![], "second iteration of reader_a created before event results in zero events" ); let mut reader_b = events.get_reader(); assert_eq!( get_events(&events, &mut reader_b), vec![event_0], "reader_b created after event receives event" ); assert_eq!( get_events(&events, &mut reader_b), vec![], "second iteration of reader_b created after event results in zero events" ); events.send(event_1); let mut reader_c = events.get_reader(); assert_eq!( get_events(&events, &mut reader_c), vec![event_0, event_1], "reader_c created after two events receives both events" ); assert_eq!( get_events(&events, &mut reader_c), vec![], "second iteration of reader_c created after two event results in zero events" ); assert_eq!( get_events(&events, &mut reader_a), vec![event_1], "reader_a receives next unread event" ); events.update(); let mut reader_d = events.get_reader(); events.send(event_2); assert_eq!( get_events(&events, &mut reader_a), vec![event_2], "reader_a receives event created after update" ); assert_eq!( get_events(&events, &mut reader_b), vec![event_1, event_2], "reader_b receives events created before 
and after update" ); assert_eq!( get_events(&events, &mut reader_d), vec![event_0, event_1, event_2], "reader_d receives all events created before and after update" ); events.update(); assert_eq!( get_events(&events, &mut reader_missed), vec![event_2], "reader_missed missed events unread after to update() calls" ); } fn get_events( events: &Events<TestEvent>, reader: &mut EventReader<TestEvent>, ) -> Vec<TestEvent> { reader.iter(events).cloned().collect::<Vec<TestEvent>>() } }
32.899204
156
0.574538
7a9b40323f4610974ce0913147f41caa7facb8dc
366
// SPDX-License-Identifier: MIT use rtnetlink::new_connection; #[tokio::main] async fn main() -> Result<(), String> { let (connection, handle, _) = new_connection().unwrap(); tokio::spawn(connection); handle .link() .add() .bridge("my-bridge-1".into()) .execute() .await .map_err(|e| format!("{}", e)) }
21.529412
60
0.54918
5d4bf8e46d5369f86c42ff7db0ecf1a3c6b15261
193,266
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files.git) // DO NOT EDIT use crate::Justification; use crate::TextDirection; use crate::WrapMode; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use glib::StaticType; use glib::ToValue; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct TextTag(Object<ffi::GtkTextTag, ffi::GtkTextTagClass>); match fn { type_ => || ffi::gtk_text_tag_get_type(), } } impl TextTag { #[doc(alias = "gtk_text_tag_new")] pub fn new(name: Option<&str>) -> TextTag { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gtk_text_tag_new(name.to_glib_none().0)) } } // rustdoc-stripper-ignore-next /// Creates a new builder-style object to construct a [`TextTag`] /// This method returns an instance of [`TextTagBuilder`] which can be used to create a [`TextTag`]. pub fn builder() -> TextTagBuilder { TextTagBuilder::default() } } #[derive(Clone, Default)] // rustdoc-stripper-ignore-next /// A builder for generating a [`TextTag`]. 
pub struct TextTagBuilder { accumulative_margin: Option<bool>, allow_breaks: Option<bool>, allow_breaks_set: Option<bool>, background: Option<String>, background_full_height: Option<bool>, background_full_height_set: Option<bool>, background_rgba: Option<gdk::RGBA>, background_set: Option<bool>, direction: Option<TextDirection>, editable: Option<bool>, editable_set: Option<bool>, fallback: Option<bool>, fallback_set: Option<bool>, family: Option<String>, family_set: Option<bool>, font: Option<String>, font_desc: Option<pango::FontDescription>, font_features: Option<String>, font_features_set: Option<bool>, foreground: Option<String>, foreground_rgba: Option<gdk::RGBA>, foreground_set: Option<bool>, indent: Option<i32>, indent_set: Option<bool>, insert_hyphens: Option<bool>, insert_hyphens_set: Option<bool>, invisible: Option<bool>, invisible_set: Option<bool>, justification: Option<Justification>, justification_set: Option<bool>, language: Option<String>, language_set: Option<bool>, left_margin: Option<i32>, left_margin_set: Option<bool>, letter_spacing: Option<i32>, letter_spacing_set: Option<bool>, name: Option<String>, overline: Option<pango::Overline>, overline_rgba: Option<gdk::RGBA>, overline_rgba_set: Option<bool>, overline_set: Option<bool>, paragraph_background: Option<String>, paragraph_background_rgba: Option<gdk::RGBA>, paragraph_background_set: Option<bool>, pixels_above_lines: Option<i32>, pixels_above_lines_set: Option<bool>, pixels_below_lines: Option<i32>, pixels_below_lines_set: Option<bool>, pixels_inside_wrap: Option<i32>, pixels_inside_wrap_set: Option<bool>, right_margin: Option<i32>, right_margin_set: Option<bool>, rise: Option<i32>, rise_set: Option<bool>, scale: Option<f64>, scale_set: Option<bool>, show_spaces: Option<pango::ShowFlags>, show_spaces_set: Option<bool>, size: Option<i32>, size_points: Option<f64>, size_set: Option<bool>, stretch: Option<pango::Stretch>, stretch_set: Option<bool>, strikethrough: Option<bool>, 
strikethrough_rgba: Option<gdk::RGBA>, strikethrough_rgba_set: Option<bool>, strikethrough_set: Option<bool>, style: Option<pango::Style>, style_set: Option<bool>, tabs: Option<pango::TabArray>, tabs_set: Option<bool>, underline: Option<pango::Underline>, underline_rgba: Option<gdk::RGBA>, underline_rgba_set: Option<bool>, underline_set: Option<bool>, variant: Option<pango::Variant>, variant_set: Option<bool>, weight: Option<i32>, weight_set: Option<bool>, wrap_mode: Option<WrapMode>, wrap_mode_set: Option<bool>, } impl TextTagBuilder { // rustdoc-stripper-ignore-next /// Create a new [`TextTagBuilder`]. pub fn new() -> Self { Self::default() } // rustdoc-stripper-ignore-next /// Build the [`TextTag`]. pub fn build(self) -> TextTag { let mut properties: Vec<(&str, &dyn ToValue)> = vec![]; if let Some(ref accumulative_margin) = self.accumulative_margin { properties.push(("accumulative-margin", accumulative_margin)); } if let Some(ref allow_breaks) = self.allow_breaks { properties.push(("allow-breaks", allow_breaks)); } if let Some(ref allow_breaks_set) = self.allow_breaks_set { properties.push(("allow-breaks-set", allow_breaks_set)); } if let Some(ref background) = self.background { properties.push(("background", background)); } if let Some(ref background_full_height) = self.background_full_height { properties.push(("background-full-height", background_full_height)); } if let Some(ref background_full_height_set) = self.background_full_height_set { properties.push(("background-full-height-set", background_full_height_set)); } if let Some(ref background_rgba) = self.background_rgba { properties.push(("background-rgba", background_rgba)); } if let Some(ref background_set) = self.background_set { properties.push(("background-set", background_set)); } if let Some(ref direction) = self.direction { properties.push(("direction", direction)); } if let Some(ref editable) = self.editable { properties.push(("editable", editable)); } if let Some(ref editable_set) = 
self.editable_set { properties.push(("editable-set", editable_set)); } if let Some(ref fallback) = self.fallback { properties.push(("fallback", fallback)); } if let Some(ref fallback_set) = self.fallback_set { properties.push(("fallback-set", fallback_set)); } if let Some(ref family) = self.family { properties.push(("family", family)); } if let Some(ref family_set) = self.family_set { properties.push(("family-set", family_set)); } if let Some(ref font) = self.font { properties.push(("font", font)); } if let Some(ref font_desc) = self.font_desc { properties.push(("font-desc", font_desc)); } if let Some(ref font_features) = self.font_features { properties.push(("font-features", font_features)); } if let Some(ref font_features_set) = self.font_features_set { properties.push(("font-features-set", font_features_set)); } if let Some(ref foreground) = self.foreground { properties.push(("foreground", foreground)); } if let Some(ref foreground_rgba) = self.foreground_rgba { properties.push(("foreground-rgba", foreground_rgba)); } if let Some(ref foreground_set) = self.foreground_set { properties.push(("foreground-set", foreground_set)); } if let Some(ref indent) = self.indent { properties.push(("indent", indent)); } if let Some(ref indent_set) = self.indent_set { properties.push(("indent-set", indent_set)); } if let Some(ref insert_hyphens) = self.insert_hyphens { properties.push(("insert-hyphens", insert_hyphens)); } if let Some(ref insert_hyphens_set) = self.insert_hyphens_set { properties.push(("insert-hyphens-set", insert_hyphens_set)); } if let Some(ref invisible) = self.invisible { properties.push(("invisible", invisible)); } if let Some(ref invisible_set) = self.invisible_set { properties.push(("invisible-set", invisible_set)); } if let Some(ref justification) = self.justification { properties.push(("justification", justification)); } if let Some(ref justification_set) = self.justification_set { properties.push(("justification-set", justification_set)); } if let 
Some(ref language) = self.language { properties.push(("language", language)); } if let Some(ref language_set) = self.language_set { properties.push(("language-set", language_set)); } if let Some(ref left_margin) = self.left_margin { properties.push(("left-margin", left_margin)); } if let Some(ref left_margin_set) = self.left_margin_set { properties.push(("left-margin-set", left_margin_set)); } if let Some(ref letter_spacing) = self.letter_spacing { properties.push(("letter-spacing", letter_spacing)); } if let Some(ref letter_spacing_set) = self.letter_spacing_set { properties.push(("letter-spacing-set", letter_spacing_set)); } if let Some(ref name) = self.name { properties.push(("name", name)); } if let Some(ref overline) = self.overline { properties.push(("overline", overline)); } if let Some(ref overline_rgba) = self.overline_rgba { properties.push(("overline-rgba", overline_rgba)); } if let Some(ref overline_rgba_set) = self.overline_rgba_set { properties.push(("overline-rgba-set", overline_rgba_set)); } if let Some(ref overline_set) = self.overline_set { properties.push(("overline-set", overline_set)); } if let Some(ref paragraph_background) = self.paragraph_background { properties.push(("paragraph-background", paragraph_background)); } if let Some(ref paragraph_background_rgba) = self.paragraph_background_rgba { properties.push(("paragraph-background-rgba", paragraph_background_rgba)); } if let Some(ref paragraph_background_set) = self.paragraph_background_set { properties.push(("paragraph-background-set", paragraph_background_set)); } if let Some(ref pixels_above_lines) = self.pixels_above_lines { properties.push(("pixels-above-lines", pixels_above_lines)); } if let Some(ref pixels_above_lines_set) = self.pixels_above_lines_set { properties.push(("pixels-above-lines-set", pixels_above_lines_set)); } if let Some(ref pixels_below_lines) = self.pixels_below_lines { properties.push(("pixels-below-lines", pixels_below_lines)); } if let Some(ref 
pixels_below_lines_set) = self.pixels_below_lines_set { properties.push(("pixels-below-lines-set", pixels_below_lines_set)); } if let Some(ref pixels_inside_wrap) = self.pixels_inside_wrap { properties.push(("pixels-inside-wrap", pixels_inside_wrap)); } if let Some(ref pixels_inside_wrap_set) = self.pixels_inside_wrap_set { properties.push(("pixels-inside-wrap-set", pixels_inside_wrap_set)); } if let Some(ref right_margin) = self.right_margin { properties.push(("right-margin", right_margin)); } if let Some(ref right_margin_set) = self.right_margin_set { properties.push(("right-margin-set", right_margin_set)); } if let Some(ref rise) = self.rise { properties.push(("rise", rise)); } if let Some(ref rise_set) = self.rise_set { properties.push(("rise-set", rise_set)); } if let Some(ref scale) = self.scale { properties.push(("scale", scale)); } if let Some(ref scale_set) = self.scale_set { properties.push(("scale-set", scale_set)); } if let Some(ref show_spaces) = self.show_spaces { properties.push(("show-spaces", show_spaces)); } if let Some(ref show_spaces_set) = self.show_spaces_set { properties.push(("show-spaces-set", show_spaces_set)); } if let Some(ref size) = self.size { properties.push(("size", size)); } if let Some(ref size_points) = self.size_points { properties.push(("size-points", size_points)); } if let Some(ref size_set) = self.size_set { properties.push(("size-set", size_set)); } if let Some(ref stretch) = self.stretch { properties.push(("stretch", stretch)); } if let Some(ref stretch_set) = self.stretch_set { properties.push(("stretch-set", stretch_set)); } if let Some(ref strikethrough) = self.strikethrough { properties.push(("strikethrough", strikethrough)); } if let Some(ref strikethrough_rgba) = self.strikethrough_rgba { properties.push(("strikethrough-rgba", strikethrough_rgba)); } if let Some(ref strikethrough_rgba_set) = self.strikethrough_rgba_set { properties.push(("strikethrough-rgba-set", strikethrough_rgba_set)); } if let Some(ref 
// NOTE(review): gir-generated TextTagBuilder code — kept token-identical, comments only.
// Continuation of the builder's construction method (its header is above this chunk,
// presumably `build()` — confirm against the full file): every `Some(...)` field is
// pushed as a ("property-name", value) pair, then the TextTag is constructed from the
// collected properties via glib::Object::new.
strikethrough_set) = self.strikethrough_set { properties.push(("strikethrough-set", strikethrough_set)); } if let Some(ref style) = self.style { properties.push(("style", style)); } if let Some(ref style_set) = self.style_set { properties.push(("style-set", style_set)); } if let Some(ref tabs) = self.tabs { properties.push(("tabs", tabs)); } if let Some(ref tabs_set) = self.tabs_set { properties.push(("tabs-set", tabs_set)); } if let Some(ref underline) = self.underline { properties.push(("underline", underline)); } if let Some(ref underline_rgba) = self.underline_rgba { properties.push(("underline-rgba", underline_rgba)); } if let Some(ref underline_rgba_set) = self.underline_rgba_set { properties.push(("underline-rgba-set", underline_rgba_set)); } if let Some(ref underline_set) = self.underline_set { properties.push(("underline-set", underline_set)); } if let Some(ref variant) = self.variant { properties.push(("variant", variant)); } if let Some(ref variant_set) = self.variant_set { properties.push(("variant-set", variant_set)); } if let Some(ref weight) = self.weight { properties.push(("weight", weight)); } if let Some(ref weight_set) = self.weight_set { properties.push(("weight-set", weight_set)); } if let Some(ref wrap_mode) = self.wrap_mode { properties.push(("wrap-mode", wrap_mode)); } if let Some(ref wrap_mode_set) = self.wrap_mode_set { properties.push(("wrap-mode-set", wrap_mode_set)); } glib::Object::new::<TextTag>(&properties).expect("Failed to create an instance of TextTag") }
// Chainable builder setters: each stores Some(value) in the corresponding field and
// returns `self` by value so calls can be chained. Borrowed inputs are converted to
// owned values (`to_string()` for &str, `clone()` for &gdk::RGBA etc.).
pub fn accumulative_margin(mut self, accumulative_margin: bool) -> Self { self.accumulative_margin = Some(accumulative_margin); self } pub fn allow_breaks(mut self, allow_breaks: bool) -> Self { self.allow_breaks = Some(allow_breaks); self } pub fn allow_breaks_set(mut self, allow_breaks_set: bool) -> Self { self.allow_breaks_set = Some(allow_breaks_set); self } pub fn background(mut self, background: &str) -> Self { self.background = Some(background.to_string()); self } pub fn
background_full_height(mut self, background_full_height: bool) -> Self { self.background_full_height = Some(background_full_height); self } pub fn background_full_height_set(mut self, background_full_height_set: bool) -> Self { self.background_full_height_set = Some(background_full_height_set); self } pub fn background_rgba(mut self, background_rgba: &gdk::RGBA) -> Self { self.background_rgba = Some(background_rgba.clone()); self } pub fn background_set(mut self, background_set: bool) -> Self { self.background_set = Some(background_set); self } pub fn direction(mut self, direction: TextDirection) -> Self { self.direction = Some(direction); self } pub fn editable(mut self, editable: bool) -> Self { self.editable = Some(editable); self } pub fn editable_set(mut self, editable_set: bool) -> Self { self.editable_set = Some(editable_set); self } pub fn fallback(mut self, fallback: bool) -> Self { self.fallback = Some(fallback); self } pub fn fallback_set(mut self, fallback_set: bool) -> Self { self.fallback_set = Some(fallback_set); self } pub fn family(mut self, family: &str) -> Self { self.family = Some(family.to_string()); self } pub fn family_set(mut self, family_set: bool) -> Self { self.family_set = Some(family_set); self } pub fn font(mut self, font: &str) -> Self { self.font = Some(font.to_string()); self } pub fn font_desc(mut self, font_desc: &pango::FontDescription) -> Self { self.font_desc = Some(font_desc.clone()); self } pub fn font_features(mut self, font_features: &str) -> Self { self.font_features = Some(font_features.to_string()); self } pub fn font_features_set(mut self, font_features_set: bool) -> Self { self.font_features_set = Some(font_features_set); self } pub fn foreground(mut self, foreground: &str) -> Self { self.foreground = Some(foreground.to_string()); self } pub fn foreground_rgba(mut self, foreground_rgba: &gdk::RGBA) -> Self { self.foreground_rgba = Some(foreground_rgba.clone()); self } pub fn foreground_set(mut self, foreground_set:
bool) -> Self { self.foreground_set = Some(foreground_set); self } pub fn indent(mut self, indent: i32) -> Self { self.indent = Some(indent); self } pub fn indent_set(mut self, indent_set: bool) -> Self { self.indent_set = Some(indent_set); self } pub fn insert_hyphens(mut self, insert_hyphens: bool) -> Self { self.insert_hyphens = Some(insert_hyphens); self } pub fn insert_hyphens_set(mut self, insert_hyphens_set: bool) -> Self { self.insert_hyphens_set = Some(insert_hyphens_set); self } pub fn invisible(mut self, invisible: bool) -> Self { self.invisible = Some(invisible); self } pub fn invisible_set(mut self, invisible_set: bool) -> Self { self.invisible_set = Some(invisible_set); self } pub fn justification(mut self, justification: Justification) -> Self { self.justification = Some(justification); self } pub fn justification_set(mut self, justification_set: bool) -> Self { self.justification_set = Some(justification_set); self } pub fn language(mut self, language: &str) -> Self { self.language = Some(language.to_string()); self } pub fn language_set(mut self, language_set: bool) -> Self { self.language_set = Some(language_set); self } pub fn left_margin(mut self, left_margin: i32) -> Self { self.left_margin = Some(left_margin); self } pub fn left_margin_set(mut self, left_margin_set: bool) -> Self { self.left_margin_set = Some(left_margin_set); self } pub fn letter_spacing(mut self, letter_spacing: i32) -> Self { self.letter_spacing = Some(letter_spacing); self } pub fn letter_spacing_set(mut self, letter_spacing_set: bool) -> Self { self.letter_spacing_set = Some(letter_spacing_set); self } pub fn name(mut self, name: &str) -> Self { self.name = Some(name.to_string()); self } pub fn overline(mut self, overline: pango::Overline) -> Self { self.overline = Some(overline); self } pub fn overline_rgba(mut self, overline_rgba: &gdk::RGBA) -> Self { self.overline_rgba = Some(overline_rgba.clone()); self } pub fn overline_rgba_set(mut self, overline_rgba_set: bool) ->
Self { self.overline_rgba_set = Some(overline_rgba_set); self } pub fn overline_set(mut self, overline_set: bool) -> Self { self.overline_set = Some(overline_set); self } pub fn paragraph_background(mut self, paragraph_background: &str) -> Self { self.paragraph_background = Some(paragraph_background.to_string()); self } pub fn paragraph_background_rgba(mut self, paragraph_background_rgba: &gdk::RGBA) -> Self { self.paragraph_background_rgba = Some(paragraph_background_rgba.clone()); self } pub fn paragraph_background_set(mut self, paragraph_background_set: bool) -> Self { self.paragraph_background_set = Some(paragraph_background_set); self } pub fn pixels_above_lines(mut self, pixels_above_lines: i32) -> Self { self.pixels_above_lines = Some(pixels_above_lines); self } pub fn pixels_above_lines_set(mut self, pixels_above_lines_set: bool) -> Self { self.pixels_above_lines_set = Some(pixels_above_lines_set); self } pub fn pixels_below_lines(mut self, pixels_below_lines: i32) -> Self { self.pixels_below_lines = Some(pixels_below_lines); self } pub fn pixels_below_lines_set(mut self, pixels_below_lines_set: bool) -> Self { self.pixels_below_lines_set = Some(pixels_below_lines_set); self } pub fn pixels_inside_wrap(mut self, pixels_inside_wrap: i32) -> Self { self.pixels_inside_wrap = Some(pixels_inside_wrap); self } pub fn pixels_inside_wrap_set(mut self, pixels_inside_wrap_set: bool) -> Self { self.pixels_inside_wrap_set = Some(pixels_inside_wrap_set); self } pub fn right_margin(mut self, right_margin: i32) -> Self { self.right_margin = Some(right_margin); self } pub fn right_margin_set(mut self, right_margin_set: bool) -> Self { self.right_margin_set = Some(right_margin_set); self } pub fn rise(mut self, rise: i32) -> Self { self.rise = Some(rise); self } pub fn rise_set(mut self, rise_set: bool) -> Self { self.rise_set = Some(rise_set); self } pub fn scale(mut self, scale: f64) -> Self { self.scale = Some(scale); self } pub fn scale_set(mut self, scale_set: bool) ->
Self { self.scale_set = Some(scale_set); self } pub fn show_spaces(mut self, show_spaces: pango::ShowFlags) -> Self { self.show_spaces = Some(show_spaces); self } pub fn show_spaces_set(mut self, show_spaces_set: bool) -> Self { self.show_spaces_set = Some(show_spaces_set); self } pub fn size(mut self, size: i32) -> Self { self.size = Some(size); self } pub fn size_points(mut self, size_points: f64) -> Self { self.size_points = Some(size_points); self } pub fn size_set(mut self, size_set: bool) -> Self { self.size_set = Some(size_set); self } pub fn stretch(mut self, stretch: pango::Stretch) -> Self { self.stretch = Some(stretch); self } pub fn stretch_set(mut self, stretch_set: bool) -> Self { self.stretch_set = Some(stretch_set); self } pub fn strikethrough(mut self, strikethrough: bool) -> Self { self.strikethrough = Some(strikethrough); self } pub fn strikethrough_rgba(mut self, strikethrough_rgba: &gdk::RGBA) -> Self { self.strikethrough_rgba = Some(strikethrough_rgba.clone()); self } pub fn strikethrough_rgba_set(mut self, strikethrough_rgba_set: bool) -> Self { self.strikethrough_rgba_set = Some(strikethrough_rgba_set); self } pub fn strikethrough_set(mut self, strikethrough_set: bool) -> Self { self.strikethrough_set = Some(strikethrough_set); self } pub fn style(mut self, style: pango::Style) -> Self { self.style = Some(style); self } pub fn style_set(mut self, style_set: bool) -> Self { self.style_set = Some(style_set); self } pub fn tabs(mut self, tabs: &pango::TabArray) -> Self { self.tabs = Some(tabs.clone()); self } pub fn tabs_set(mut self, tabs_set: bool) -> Self { self.tabs_set = Some(tabs_set); self } pub fn underline(mut self, underline: pango::Underline) -> Self { self.underline = Some(underline); self } pub fn underline_rgba(mut self, underline_rgba: &gdk::RGBA) -> Self { self.underline_rgba = Some(underline_rgba.clone()); self } pub fn underline_rgba_set(mut self, underline_rgba_set: bool) -> Self { self.underline_rgba_set =
// NOTE(review): gir-generated code — tokens kept byte-identical, comments only.
// (Continuation of the last builder setters; the builder impl closes just below.)
Some(underline_rgba_set); self } pub fn underline_set(mut self, underline_set: bool) -> Self { self.underline_set = Some(underline_set); self } pub fn variant(mut self, variant: pango::Variant) -> Self { self.variant = Some(variant); self } pub fn variant_set(mut self, variant_set: bool) -> Self { self.variant_set = Some(variant_set); self } pub fn weight(mut self, weight: i32) -> Self { self.weight = Some(weight); self } pub fn weight_set(mut self, weight_set: bool) -> Self { self.weight_set = Some(weight_set); self } pub fn wrap_mode(mut self, wrap_mode: WrapMode) -> Self { self.wrap_mode = Some(wrap_mode); self } pub fn wrap_mode_set(mut self, wrap_mode_set: bool) -> Self { self.wrap_mode_set = Some(wrap_mode_set); self } }
// Sentinel constant for passing "no tag" to APIs taking `Option<&TextTag>`.
pub const NONE_TEXT_TAG: Option<&TextTag> = None;
// Generated extension trait: GTK method wrappers (`changed`, `priority`,
// `set_priority`), a getter/setter pair for every TextTag GObject property, and a
// `connect_*_notify` hook per property. `#[doc(alias = "...")]` maps each item back
// to the underlying C symbol or property name for rustdoc search.
pub trait TextTagExt: 'static { #[doc(alias = "gtk_text_tag_changed")] fn changed(&self, size_changed: bool); #[doc(alias = "gtk_text_tag_get_priority")] #[doc(alias = "get_priority")] fn priority(&self) -> i32; #[doc(alias = "gtk_text_tag_set_priority")] fn set_priority(&self, priority: i32); #[doc(alias = "accumulative-margin")] fn is_accumulative_margin(&self) -> bool; #[doc(alias = "accumulative-margin")] fn set_accumulative_margin(&self, accumulative_margin: bool); #[doc(alias = "allow-breaks")] fn allows_breaks(&self) -> bool; #[doc(alias = "allow-breaks")] fn set_allow_breaks(&self, allow_breaks: bool); #[doc(alias = "allow-breaks-set")] fn allows_breaks_set(&self) -> bool; #[doc(alias = "allow-breaks-set")] fn set_allow_breaks_set(&self, allow_breaks_set: bool); fn set_background(&self, background: Option<&str>); #[doc(alias = "background-full-height")] fn is_background_full_height(&self) -> bool; #[doc(alias = "background-full-height")] fn set_background_full_height(&self, background_full_height: bool); #[doc(alias = "background-full-height-set")] fn is_background_full_height_set(&self) -> bool; #[doc(alias = "background-full-height-set")] fn set_background_full_height_set(&self,
background_full_height_set: bool); #[doc(alias = "background-rgba")] fn background_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias = "background-rgba")] fn set_background_rgba(&self, background_rgba: Option<&gdk::RGBA>); #[doc(alias = "background-set")] fn is_background_set(&self) -> bool; #[doc(alias = "background-set")] fn set_background_set(&self, background_set: bool); fn direction(&self) -> TextDirection; fn set_direction(&self, direction: TextDirection); fn is_editable(&self) -> bool; fn set_editable(&self, editable: bool); #[doc(alias = "editable-set")] fn is_editable_set(&self) -> bool; #[doc(alias = "editable-set")] fn set_editable_set(&self, editable_set: bool); fn is_fallback(&self) -> bool; fn set_fallback(&self, fallback: bool); #[doc(alias = "fallback-set")] fn is_fallback_set(&self) -> bool; #[doc(alias = "fallback-set")] fn set_fallback_set(&self, fallback_set: bool); fn family(&self) -> Option<glib::GString>; fn set_family(&self, family: Option<&str>); #[doc(alias = "family-set")] fn is_family_set(&self) -> bool; #[doc(alias = "family-set")] fn set_family_set(&self, family_set: bool); fn font(&self) -> Option<glib::GString>; fn set_font(&self, font: Option<&str>); #[doc(alias = "font-desc")] fn font_desc(&self) -> Option<pango::FontDescription>; #[doc(alias = "font-desc")] fn set_font_desc(&self, font_desc: Option<&pango::FontDescription>); #[doc(alias = "font-features")] fn font_features(&self) -> Option<glib::GString>; #[doc(alias = "font-features")] fn set_font_features(&self, font_features: Option<&str>); #[doc(alias = "font-features-set")] fn is_font_features_set(&self) -> bool; #[doc(alias = "font-features-set")] fn set_font_features_set(&self, font_features_set: bool); fn set_foreground(&self, foreground: Option<&str>); #[doc(alias = "foreground-rgba")] fn foreground_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias = "foreground-rgba")] fn set_foreground_rgba(&self, foreground_rgba: Option<&gdk::RGBA>); #[doc(alias = "foreground-set")] fn
is_foreground_set(&self) -> bool; #[doc(alias = "foreground-set")] fn set_foreground_set(&self, foreground_set: bool); fn indent(&self) -> i32; fn set_indent(&self, indent: i32); #[doc(alias = "indent-set")] fn is_indent_set(&self) -> bool; #[doc(alias = "indent-set")] fn set_indent_set(&self, indent_set: bool); #[doc(alias = "insert-hyphens")] fn is_insert_hyphens(&self) -> bool; #[doc(alias = "insert-hyphens")] fn set_insert_hyphens(&self, insert_hyphens: bool); #[doc(alias = "insert-hyphens-set")] fn is_insert_hyphens_set(&self) -> bool; #[doc(alias = "insert-hyphens-set")] fn set_insert_hyphens_set(&self, insert_hyphens_set: bool); fn is_invisible(&self) -> bool; fn set_invisible(&self, invisible: bool); #[doc(alias = "invisible-set")] fn is_invisible_set(&self) -> bool; #[doc(alias = "invisible-set")] fn set_invisible_set(&self, invisible_set: bool); fn justification(&self) -> Justification; fn set_justification(&self, justification: Justification); #[doc(alias = "justification-set")] fn is_justification_set(&self) -> bool; #[doc(alias = "justification-set")] fn set_justification_set(&self, justification_set: bool); fn language(&self) -> Option<glib::GString>; fn set_language(&self, language: Option<&str>); #[doc(alias = "language-set")] fn is_language_set(&self) -> bool; #[doc(alias = "language-set")] fn set_language_set(&self, language_set: bool); #[doc(alias = "left-margin")] fn left_margin(&self) -> i32; #[doc(alias = "left-margin")] fn set_left_margin(&self, left_margin: i32); #[doc(alias = "left-margin-set")] fn is_left_margin_set(&self) -> bool; #[doc(alias = "left-margin-set")] fn set_left_margin_set(&self, left_margin_set: bool); #[doc(alias = "letter-spacing")] fn letter_spacing(&self) -> i32; #[doc(alias = "letter-spacing")] fn set_letter_spacing(&self, letter_spacing: i32); #[doc(alias = "letter-spacing-set")] fn is_letter_spacing_set(&self) -> bool; #[doc(alias = "letter-spacing-set")] fn set_letter_spacing_set(&self, letter_spacing_set: bool); fn
name(&self) -> Option<glib::GString>; fn overline(&self) -> pango::Overline; fn set_overline(&self, overline: pango::Overline); #[doc(alias = "overline-rgba")] fn overline_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias = "overline-rgba")] fn set_overline_rgba(&self, overline_rgba: Option<&gdk::RGBA>); #[doc(alias = "overline-rgba-set")] fn is_overline_rgba_set(&self) -> bool; #[doc(alias = "overline-rgba-set")] fn set_overline_rgba_set(&self, overline_rgba_set: bool); #[doc(alias = "overline-set")] fn is_overline_set(&self) -> bool; #[doc(alias = "overline-set")] fn set_overline_set(&self, overline_set: bool); #[doc(alias = "paragraph-background")] fn set_paragraph_background(&self, paragraph_background: Option<&str>); #[doc(alias = "paragraph-background-rgba")] fn paragraph_background_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias = "paragraph-background-rgba")] fn set_paragraph_background_rgba(&self, paragraph_background_rgba: Option<&gdk::RGBA>); #[doc(alias = "paragraph-background-set")] fn is_paragraph_background_set(&self) -> bool; #[doc(alias = "paragraph-background-set")] fn set_paragraph_background_set(&self, paragraph_background_set: bool); #[doc(alias = "pixels-above-lines")] fn pixels_above_lines(&self) -> i32; #[doc(alias = "pixels-above-lines")] fn set_pixels_above_lines(&self, pixels_above_lines: i32); #[doc(alias = "pixels-above-lines-set")] fn is_pixels_above_lines_set(&self) -> bool; #[doc(alias = "pixels-above-lines-set")] fn set_pixels_above_lines_set(&self, pixels_above_lines_set: bool); #[doc(alias = "pixels-below-lines")] fn pixels_below_lines(&self) -> i32; #[doc(alias = "pixels-below-lines")] fn set_pixels_below_lines(&self, pixels_below_lines: i32); #[doc(alias = "pixels-below-lines-set")] fn is_pixels_below_lines_set(&self) -> bool; #[doc(alias = "pixels-below-lines-set")] fn set_pixels_below_lines_set(&self, pixels_below_lines_set: bool); #[doc(alias = "pixels-inside-wrap")] fn pixels_inside_wrap(&self) -> i32; #[doc(alias =
"pixels-inside-wrap")] fn set_pixels_inside_wrap(&self, pixels_inside_wrap: i32); #[doc(alias = "pixels-inside-wrap-set")] fn is_pixels_inside_wrap_set(&self) -> bool; #[doc(alias = "pixels-inside-wrap-set")] fn set_pixels_inside_wrap_set(&self, pixels_inside_wrap_set: bool); #[doc(alias = "right-margin")] fn right_margin(&self) -> i32; #[doc(alias = "right-margin")] fn set_right_margin(&self, right_margin: i32); #[doc(alias = "right-margin-set")] fn is_right_margin_set(&self) -> bool; #[doc(alias = "right-margin-set")] fn set_right_margin_set(&self, right_margin_set: bool); fn rise(&self) -> i32; fn set_rise(&self, rise: i32); #[doc(alias = "rise-set")] fn is_rise_set(&self) -> bool; #[doc(alias = "rise-set")] fn set_rise_set(&self, rise_set: bool); fn scale(&self) -> f64; fn set_scale(&self, scale: f64); #[doc(alias = "scale-set")] fn is_scale_set(&self) -> bool; #[doc(alias = "scale-set")] fn set_scale_set(&self, scale_set: bool); #[doc(alias = "show-spaces")] fn show_spaces(&self) -> pango::ShowFlags; #[doc(alias = "show-spaces")] fn set_show_spaces(&self, show_spaces: pango::ShowFlags); #[doc(alias = "show-spaces-set")] fn shows_spaces_set(&self) -> bool; #[doc(alias = "show-spaces-set")] fn set_show_spaces_set(&self, show_spaces_set: bool); fn size(&self) -> i32; fn set_size(&self, size: i32); #[doc(alias = "size-points")] fn size_points(&self) -> f64; #[doc(alias = "size-points")] fn set_size_points(&self, size_points: f64); #[doc(alias = "size-set")] fn is_size_set(&self) -> bool; #[doc(alias = "size-set")] fn set_size_set(&self, size_set: bool); fn stretch(&self) -> pango::Stretch; fn set_stretch(&self, stretch: pango::Stretch); #[doc(alias = "stretch-set")] fn is_stretch_set(&self) -> bool; #[doc(alias = "stretch-set")] fn set_stretch_set(&self, stretch_set: bool); fn is_strikethrough(&self) -> bool; fn set_strikethrough(&self, strikethrough: bool); #[doc(alias = "strikethrough-rgba")] fn strikethrough_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias =
"strikethrough-rgba")] fn set_strikethrough_rgba(&self, strikethrough_rgba: Option<&gdk::RGBA>); #[doc(alias = "strikethrough-rgba-set")] fn is_strikethrough_rgba_set(&self) -> bool; #[doc(alias = "strikethrough-rgba-set")] fn set_strikethrough_rgba_set(&self, strikethrough_rgba_set: bool); #[doc(alias = "strikethrough-set")] fn is_strikethrough_set(&self) -> bool; #[doc(alias = "strikethrough-set")] fn set_strikethrough_set(&self, strikethrough_set: bool); fn style(&self) -> pango::Style; fn set_style(&self, style: pango::Style); #[doc(alias = "style-set")] fn is_style_set(&self) -> bool; #[doc(alias = "style-set")] fn set_style_set(&self, style_set: bool); fn tabs(&self) -> Option<pango::TabArray>; fn set_tabs(&self, tabs: Option<&pango::TabArray>); #[doc(alias = "tabs-set")] fn is_tabs_set(&self) -> bool; #[doc(alias = "tabs-set")] fn set_tabs_set(&self, tabs_set: bool); fn underline(&self) -> pango::Underline; fn set_underline(&self, underline: pango::Underline); #[doc(alias = "underline-rgba")] fn underline_rgba(&self) -> Option<gdk::RGBA>; #[doc(alias = "underline-rgba")] fn set_underline_rgba(&self, underline_rgba: Option<&gdk::RGBA>); #[doc(alias = "underline-rgba-set")] fn is_underline_rgba_set(&self) -> bool; #[doc(alias = "underline-rgba-set")] fn set_underline_rgba_set(&self, underline_rgba_set: bool); #[doc(alias = "underline-set")] fn is_underline_set(&self) -> bool; #[doc(alias = "underline-set")] fn set_underline_set(&self, underline_set: bool); fn variant(&self) -> pango::Variant; fn set_variant(&self, variant: pango::Variant); #[doc(alias = "variant-set")] fn is_variant_set(&self) -> bool; #[doc(alias = "variant-set")] fn set_variant_set(&self, variant_set: bool); fn weight(&self) -> i32; fn set_weight(&self, weight: i32); #[doc(alias = "weight-set")] fn is_weight_set(&self) -> bool; #[doc(alias = "weight-set")] fn set_weight_set(&self, weight_set: bool); #[doc(alias = "wrap-mode")] fn wrap_mode(&self) -> WrapMode; #[doc(alias = "wrap-mode")] fn
set_wrap_mode(&self, wrap_mode: WrapMode); #[doc(alias = "wrap-mode-set")] fn wraps_mode_set(&self) -> bool; #[doc(alias = "wrap-mode-set")] fn set_wrap_mode_set(&self, wrap_mode_set: bool); #[doc(alias = "accumulative-margin")] fn connect_accumulative_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "allow-breaks")] fn connect_allow_breaks_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "allow-breaks-set")] fn connect_allow_breaks_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "background")] fn connect_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "background-full-height")] fn connect_background_full_height_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "background-full-height-set")] fn connect_background_full_height_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "background-rgba")] fn connect_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "background-set")] fn connect_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "direction")] fn connect_direction_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "editable")] fn connect_editable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "editable-set")] fn connect_editable_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "fallback")] fn connect_fallback_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "fallback-set")] fn connect_fallback_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "family")] fn connect_family_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "family-set")] fn connect_family_set_notify<F: Fn(&Self) + 'static>(&self, f: F)
-> SignalHandlerId; #[doc(alias = "font")] fn connect_font_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "font-desc")] fn connect_font_desc_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "font-features")] fn connect_font_features_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "font-features-set")] fn connect_font_features_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "foreground")] fn connect_foreground_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "foreground-rgba")] fn connect_foreground_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "foreground-set")] fn connect_foreground_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "indent")] fn connect_indent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "indent-set")] fn connect_indent_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "insert-hyphens")] fn connect_insert_hyphens_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "insert-hyphens-set")] fn connect_insert_hyphens_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "invisible")] fn connect_invisible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "invisible-set")] fn connect_invisible_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "justification")] fn connect_justification_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "justification-set")] fn connect_justification_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "language")] fn connect_language_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "language-set")] fn connect_language_set_notify<F: Fn(&Self) +
'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "left-margin")] fn connect_left_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "left-margin-set")] fn connect_left_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "letter-spacing")] fn connect_letter_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "letter-spacing-set")] fn connect_letter_spacing_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "overline")] fn connect_overline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "overline-rgba")] fn connect_overline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "overline-rgba-set")] fn connect_overline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "overline-set")] fn connect_overline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "paragraph-background")] fn connect_paragraph_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "paragraph-background-rgba")] fn connect_paragraph_background_rgba_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "paragraph-background-set")] fn connect_paragraph_background_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "pixels-above-lines")] fn connect_pixels_above_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "pixels-above-lines-set")] fn connect_pixels_above_lines_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "pixels-below-lines")] fn connect_pixels_below_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "pixels-below-lines-set")] fn connect_pixels_below_lines_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId;
#[doc(alias = "pixels-inside-wrap")] fn connect_pixels_inside_wrap_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "pixels-inside-wrap-set")] fn connect_pixels_inside_wrap_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "right-margin")] fn connect_right_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "right-margin-set")] fn connect_right_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "rise")] fn connect_rise_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "rise-set")] fn connect_rise_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "scale")] fn connect_scale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "scale-set")] fn connect_scale_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "show-spaces")] fn connect_show_spaces_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "show-spaces-set")] fn connect_show_spaces_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "size")] fn connect_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "size-points")] fn connect_size_points_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "size-set")] fn connect_size_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "stretch")] fn connect_stretch_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "stretch-set")] fn connect_stretch_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "strikethrough")] fn connect_strikethrough_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "strikethrough-rgba")] fn connect_strikethrough_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) ->
SignalHandlerId; #[doc(alias = "strikethrough-rgba-set")] fn connect_strikethrough_rgba_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId; #[doc(alias = "strikethrough-set")] fn connect_strikethrough_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "style")] fn connect_style_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "style-set")] fn connect_style_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "tabs")] fn connect_tabs_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "tabs-set")] fn connect_tabs_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "underline")] fn connect_underline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "underline-rgba")] fn connect_underline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "underline-rgba-set")] fn connect_underline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "underline-set")] fn connect_underline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "variant")] fn connect_variant_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "variant-set")] fn connect_variant_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "weight")] fn connect_weight_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "weight-set")] fn connect_weight_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "wrap-mode")] fn connect_wrap_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; #[doc(alias = "wrap-mode-set")] fn connect_wrap_mode_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; }
// Blanket implementation of TextTagExt for every IsA<TextTag> type; its body
// continues past this chunk.
impl<O: IsA<TextTag>> TextTagExt for O { fn changed(&self, size_changed: bool) { unsafe {
// NOTE(review): generated blanket-impl bodies — tokens kept byte-identical, comments
// only. Pattern throughout: property getters build a glib::Value of the property's
// static type, fill it via g_object_get_property, and `.expect()` the typed
// extraction; property setters forward through g_object_set_property. The direct
// method wrappers call straight into the C ffi functions. (impl continues past
// this chunk.)
ffi::gtk_text_tag_changed(self.as_ref().to_glib_none().0, size_changed.into_glib()); } } fn priority(&self) -> i32 { unsafe { ffi::gtk_text_tag_get_priority(self.as_ref().to_glib_none().0) } } fn set_priority(&self, priority: i32) { unsafe { ffi::gtk_text_tag_set_priority(self.as_ref().to_glib_none().0, priority); } } fn is_accumulative_margin(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"accumulative-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `accumulative-margin` getter") } } fn set_accumulative_margin(&self, accumulative_margin: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"accumulative-margin\0".as_ptr() as *const _, accumulative_margin.to_value().to_glib_none().0, ); } } fn allows_breaks(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"allow-breaks\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `allow-breaks` getter") } } fn set_allow_breaks(&self, allow_breaks: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"allow-breaks\0".as_ptr() as *const _, allow_breaks.to_value().to_glib_none().0, ); } } fn allows_breaks_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"allow-breaks-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `allow-breaks-set` getter") } } fn
set_allow_breaks_set(&self, allow_breaks_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"allow-breaks-set\0".as_ptr() as *const _, allow_breaks_set.to_value().to_glib_none().0, ); } } fn set_background(&self, background: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background\0".as_ptr() as *const _, background.to_value().to_glib_none().0, ); } } fn is_background_full_height(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-full-height\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `background-full-height` getter") } } fn set_background_full_height(&self, background_full_height: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-full-height\0".as_ptr() as *const _, background_full_height.to_value().to_glib_none().0, ); } } fn is_background_full_height_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-full-height-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `background-full-height-set` getter") } } fn set_background_full_height_set(&self, background_full_height_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-full-height-set\0".as_ptr() as *const _, background_full_height_set.to_value().to_glib_none().0, ); } } fn background_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value =
glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `background-rgba` getter") } } fn set_background_rgba(&self, background_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-rgba\0".as_ptr() as *const _, background_rgba.to_value().to_glib_none().0, ); } } fn is_background_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `background-set` getter") } } fn set_background_set(&self, background_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"background-set\0".as_ptr() as *const _, background_set.to_value().to_glib_none().0, ); } } fn direction(&self) -> TextDirection { unsafe { let mut value = glib::Value::from_type(<TextDirection as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"direction\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `direction` getter") } } fn set_direction(&self, direction: TextDirection) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"direction\0".as_ptr() as *const _, direction.to_value().to_glib_none().0, ); } } fn is_editable(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"editable\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `editable` getter") } } fn set_editable(&self, editable: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"editable\0".as_ptr() as *const _, editable.to_value().to_glib_none().0, ); } } fn is_editable_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"editable-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `editable-set` getter") } } fn set_editable_set(&self, editable_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"editable-set\0".as_ptr() as *const _, editable_set.to_value().to_glib_none().0, ); } } fn is_fallback(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"fallback\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `fallback` getter") } } fn set_fallback(&self, fallback: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"fallback\0".as_ptr() as *const _, fallback.to_value().to_glib_none().0, ); } } fn is_fallback_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"fallback-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return
Value for property `fallback-set` getter") } } fn set_fallback_set(&self, fallback_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"fallback-set\0".as_ptr() as *const _, fallback_set.to_value().to_glib_none().0, ); } } fn family(&self) -> Option<glib::GString> { unsafe { let mut value = glib::Value::from_type(<glib::GString as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"family\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `family` getter") } } fn set_family(&self, family: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"family\0".as_ptr() as *const _, family.to_value().to_glib_none().0, ); } } fn is_family_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"family-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `family-set` getter") } } fn set_family_set(&self, family_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"family-set\0".as_ptr() as *const _, family_set.to_value().to_glib_none().0, ); } } fn font(&self) -> Option<glib::GString> { unsafe { let mut value = glib::Value::from_type(<glib::GString as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `font` getter") } } fn set_font(&self, font: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut
glib::gobject_ffi::GObject, b"font\0".as_ptr() as *const _, font.to_value().to_glib_none().0, ); } } fn font_desc(&self) -> Option<pango::FontDescription> { unsafe { let mut value = glib::Value::from_type(<pango::FontDescription as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font-desc\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `font-desc` getter") } } fn set_font_desc(&self, font_desc: Option<&pango::FontDescription>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font-desc\0".as_ptr() as *const _, font_desc.to_value().to_glib_none().0, ); } } fn font_features(&self) -> Option<glib::GString> { unsafe { let mut value = glib::Value::from_type(<glib::GString as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font-features\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `font-features` getter") } } fn set_font_features(&self, font_features: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font-features\0".as_ptr() as *const _, font_features.to_value().to_glib_none().0, ); } } fn is_font_features_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"font-features-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `font-features-set` getter") } } fn set_font_features_set(&self, font_features_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, 
b"font-features-set\0".as_ptr() as *const _, font_features_set.to_value().to_glib_none().0, ); } } fn set_foreground(&self, foreground: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"foreground\0".as_ptr() as *const _, foreground.to_value().to_glib_none().0, ); } } fn foreground_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value = glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"foreground-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `foreground-rgba` getter") } } fn set_foreground_rgba(&self, foreground_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"foreground-rgba\0".as_ptr() as *const _, foreground_rgba.to_value().to_glib_none().0, ); } } fn is_foreground_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"foreground-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `foreground-set` getter") } } fn set_foreground_set(&self, foreground_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"foreground-set\0".as_ptr() as *const _, foreground_set.to_value().to_glib_none().0, ); } } fn indent(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"indent\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `indent` getter") } } fn 
set_indent(&self, indent: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"indent\0".as_ptr() as *const _, indent.to_value().to_glib_none().0, ); } } fn is_indent_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"indent-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `indent-set` getter") } } fn set_indent_set(&self, indent_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"indent-set\0".as_ptr() as *const _, indent_set.to_value().to_glib_none().0, ); } } fn is_insert_hyphens(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"insert-hyphens\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `insert-hyphens` getter") } } fn set_insert_hyphens(&self, insert_hyphens: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"insert-hyphens\0".as_ptr() as *const _, insert_hyphens.to_value().to_glib_none().0, ); } } fn is_insert_hyphens_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"insert-hyphens-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `insert-hyphens-set` getter") } } fn set_insert_hyphens_set(&self, insert_hyphens_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut 
glib::gobject_ffi::GObject, b"insert-hyphens-set\0".as_ptr() as *const _, insert_hyphens_set.to_value().to_glib_none().0, ); } } fn is_invisible(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"invisible\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `invisible` getter") } } fn set_invisible(&self, invisible: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"invisible\0".as_ptr() as *const _, invisible.to_value().to_glib_none().0, ); } } fn is_invisible_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"invisible-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `invisible-set` getter") } } fn set_invisible_set(&self, invisible_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"invisible-set\0".as_ptr() as *const _, invisible_set.to_value().to_glib_none().0, ); } } fn justification(&self) -> Justification { unsafe { let mut value = glib::Value::from_type(<Justification as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"justification\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `justification` getter") } } fn set_justification(&self, justification: Justification) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"justification\0".as_ptr() as *const _, justification.to_value().to_glib_none().0, ); } } fn 
is_justification_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"justification-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `justification-set` getter") } } fn set_justification_set(&self, justification_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"justification-set\0".as_ptr() as *const _, justification_set.to_value().to_glib_none().0, ); } } fn language(&self) -> Option<glib::GString> { unsafe { let mut value = glib::Value::from_type(<glib::GString as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"language\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `language` getter") } } fn set_language(&self, language: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"language\0".as_ptr() as *const _, language.to_value().to_glib_none().0, ); } } fn is_language_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"language-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `language-set` getter") } } fn set_language_set(&self, language_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"language-set\0".as_ptr() as *const _, language_set.to_value().to_glib_none().0, ); } } fn left_margin(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); 
glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"left-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `left-margin` getter") } } fn set_left_margin(&self, left_margin: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"left-margin\0".as_ptr() as *const _, left_margin.to_value().to_glib_none().0, ); } } fn is_left_margin_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"left-margin-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `left-margin-set` getter") } } fn set_left_margin_set(&self, left_margin_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"left-margin-set\0".as_ptr() as *const _, left_margin_set.to_value().to_glib_none().0, ); } } fn letter_spacing(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"letter-spacing\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `letter-spacing` getter") } } fn set_letter_spacing(&self, letter_spacing: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"letter-spacing\0".as_ptr() as *const _, letter_spacing.to_value().to_glib_none().0, ); } } fn is_letter_spacing_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, 
b"letter-spacing-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `letter-spacing-set` getter") } } fn set_letter_spacing_set(&self, letter_spacing_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"letter-spacing-set\0".as_ptr() as *const _, letter_spacing_set.to_value().to_glib_none().0, ); } } fn name(&self) -> Option<glib::GString> { unsafe { let mut value = glib::Value::from_type(<glib::GString as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"name\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `name` getter") } } fn overline(&self) -> pango::Overline { unsafe { let mut value = glib::Value::from_type(<pango::Overline as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `overline` getter") } } fn set_overline(&self, overline: pango::Overline) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline\0".as_ptr() as *const _, overline.to_value().to_glib_none().0, ); } } fn overline_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value = glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `overline-rgba` getter") } } fn set_overline_rgba(&self, overline_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, 
b"overline-rgba\0".as_ptr() as *const _, overline_rgba.to_value().to_glib_none().0, ); } } fn is_overline_rgba_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline-rgba-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `overline-rgba-set` getter") } } fn set_overline_rgba_set(&self, overline_rgba_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline-rgba-set\0".as_ptr() as *const _, overline_rgba_set.to_value().to_glib_none().0, ); } } fn is_overline_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `overline-set` getter") } } fn set_overline_set(&self, overline_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"overline-set\0".as_ptr() as *const _, overline_set.to_value().to_glib_none().0, ); } } fn set_paragraph_background(&self, paragraph_background: Option<&str>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"paragraph-background\0".as_ptr() as *const _, paragraph_background.to_value().to_glib_none().0, ); } } fn paragraph_background_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value = glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"paragraph-background-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() 
.expect("Return Value for property `paragraph-background-rgba` getter") } } fn set_paragraph_background_rgba(&self, paragraph_background_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"paragraph-background-rgba\0".as_ptr() as *const _, paragraph_background_rgba.to_value().to_glib_none().0, ); } } fn is_paragraph_background_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"paragraph-background-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `paragraph-background-set` getter") } } fn set_paragraph_background_set(&self, paragraph_background_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"paragraph-background-set\0".as_ptr() as *const _, paragraph_background_set.to_value().to_glib_none().0, ); } } fn pixels_above_lines(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-above-lines\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-above-lines` getter") } } fn set_pixels_above_lines(&self, pixels_above_lines: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-above-lines\0".as_ptr() as *const _, pixels_above_lines.to_value().to_glib_none().0, ); } } fn is_pixels_above_lines_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, 
b"pixels-above-lines-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-above-lines-set` getter") } } fn set_pixels_above_lines_set(&self, pixels_above_lines_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-above-lines-set\0".as_ptr() as *const _, pixels_above_lines_set.to_value().to_glib_none().0, ); } } fn pixels_below_lines(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-below-lines\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-below-lines` getter") } } fn set_pixels_below_lines(&self, pixels_below_lines: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-below-lines\0".as_ptr() as *const _, pixels_below_lines.to_value().to_glib_none().0, ); } } fn is_pixels_below_lines_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-below-lines-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-below-lines-set` getter") } } fn set_pixels_below_lines_set(&self, pixels_below_lines_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-below-lines-set\0".as_ptr() as *const _, pixels_below_lines_set.to_value().to_glib_none().0, ); } } fn pixels_inside_wrap(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut 
glib::gobject_ffi::GObject, b"pixels-inside-wrap\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-inside-wrap` getter") } } fn set_pixels_inside_wrap(&self, pixels_inside_wrap: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-inside-wrap\0".as_ptr() as *const _, pixels_inside_wrap.to_value().to_glib_none().0, ); } } fn is_pixels_inside_wrap_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-inside-wrap-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `pixels-inside-wrap-set` getter") } } fn set_pixels_inside_wrap_set(&self, pixels_inside_wrap_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"pixels-inside-wrap-set\0".as_ptr() as *const _, pixels_inside_wrap_set.to_value().to_glib_none().0, ); } } fn right_margin(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"right-margin\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `right-margin` getter") } } fn set_right_margin(&self, right_margin: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"right-margin\0".as_ptr() as *const _, right_margin.to_value().to_glib_none().0, ); } } fn is_right_margin_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, 
b"right-margin-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `right-margin-set` getter") } } fn set_right_margin_set(&self, right_margin_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"right-margin-set\0".as_ptr() as *const _, right_margin_set.to_value().to_glib_none().0, ); } } fn rise(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"rise\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `rise` getter") } } fn set_rise(&self, rise: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"rise\0".as_ptr() as *const _, rise.to_value().to_glib_none().0, ); } } fn is_rise_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"rise-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `rise-set` getter") } } fn set_rise_set(&self, rise_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"rise-set\0".as_ptr() as *const _, rise_set.to_value().to_glib_none().0, ); } } fn scale(&self) -> f64 { unsafe { let mut value = glib::Value::from_type(<f64 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"scale\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `scale` getter") } } fn set_scale(&self, scale: f64) { unsafe { glib::gobject_ffi::g_object_set_property( 
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"scale\0".as_ptr() as *const _, scale.to_value().to_glib_none().0, ); } } fn is_scale_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"scale-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `scale-set` getter") } } fn set_scale_set(&self, scale_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"scale-set\0".as_ptr() as *const _, scale_set.to_value().to_glib_none().0, ); } } fn show_spaces(&self) -> pango::ShowFlags { unsafe { let mut value = glib::Value::from_type(<pango::ShowFlags as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"show-spaces\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `show-spaces` getter") } } fn set_show_spaces(&self, show_spaces: pango::ShowFlags) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"show-spaces\0".as_ptr() as *const _, show_spaces.to_value().to_glib_none().0, ); } } fn shows_spaces_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"show-spaces-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `show-spaces-set` getter") } } fn set_show_spaces_set(&self, show_spaces_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"show-spaces-set\0".as_ptr() as *const _, show_spaces_set.to_value().to_glib_none().0, 
/* Auto-generated GObject property accessors (gir-generated bindings; do not hand-edit — regenerate instead). Pattern per property: the getter builds a glib::Value of the property's static type, fills it through g_object_get_property, and unwraps with .expect(); the setter converts its argument with .to_value() and forwards it through g_object_set_property. The boolean `*-set` companions presumably report whether the matching style property was explicitly assigned on the tag — confirm against the GtkTextTag docs. */
); } } fn size(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `size` getter") } } fn set_size(&self, size: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size\0".as_ptr() as *const _, size.to_value().to_glib_none().0, ); } } fn size_points(&self) -> f64 { unsafe { let mut value = glib::Value::from_type(<f64 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size-points\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `size-points` getter") } } fn set_size_points(&self, size_points: f64) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size-points\0".as_ptr() as *const _, size_points.to_value().to_glib_none().0, ); } } fn is_size_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `size-set` getter") } } fn set_size_set(&self, size_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"size-set\0".as_ptr() as *const _, size_set.to_value().to_glib_none().0, ); } } fn stretch(&self) -> pango::Stretch { unsafe { let mut value = glib::Value::from_type(<pango::Stretch as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"stretch\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `stretch` getter") } } fn set_stretch(&self, stretch: pango::Stretch) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"stretch\0".as_ptr() as *const _, stretch.to_value().to_glib_none().0, ); } } fn is_stretch_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"stretch-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `stretch-set` getter") } } fn set_stretch_set(&self, stretch_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"stretch-set\0".as_ptr() as *const _, stretch_set.to_value().to_glib_none().0, ); } } fn is_strikethrough(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `strikethrough` getter") } } fn set_strikethrough(&self, strikethrough: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough\0".as_ptr() as *const _, strikethrough.to_value().to_glib_none().0, ); } } fn strikethrough_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value = glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `strikethrough-rgba`
getter") } } fn set_strikethrough_rgba(&self, strikethrough_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-rgba\0".as_ptr() as *const _, strikethrough_rgba.to_value().to_glib_none().0, ); } } fn is_strikethrough_rgba_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-rgba-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `strikethrough-rgba-set` getter") } } fn set_strikethrough_rgba_set(&self, strikethrough_rgba_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-rgba-set\0".as_ptr() as *const _, strikethrough_rgba_set.to_value().to_glib_none().0, ); } } fn is_strikethrough_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `strikethrough-set` getter") } } fn set_strikethrough_set(&self, strikethrough_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"strikethrough-set\0".as_ptr() as *const _, strikethrough_set.to_value().to_glib_none().0, ); } } fn style(&self) -> pango::Style { unsafe { let mut value = glib::Value::from_type(<pango::Style as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"style\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `style` getter") } } fn
set_style(&self, style: pango::Style) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"style\0".as_ptr() as *const _, style.to_value().to_glib_none().0, ); } } fn is_style_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"style-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `style-set` getter") } } fn set_style_set(&self, style_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"style-set\0".as_ptr() as *const _, style_set.to_value().to_glib_none().0, ); } } fn tabs(&self) -> Option<pango::TabArray> { unsafe { let mut value = glib::Value::from_type(<pango::TabArray as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"tabs\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `tabs` getter") } } fn set_tabs(&self, tabs: Option<&pango::TabArray>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"tabs\0".as_ptr() as *const _, tabs.to_value().to_glib_none().0, ); } } fn is_tabs_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"tabs-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `tabs-set` getter") } } fn set_tabs_set(&self, tabs_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"tabs-set\0".as_ptr() as *const _,
tabs_set.to_value().to_glib_none().0, ); } } fn underline(&self) -> pango::Underline { unsafe { let mut value = glib::Value::from_type(<pango::Underline as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `underline` getter") } } fn set_underline(&self, underline: pango::Underline) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline\0".as_ptr() as *const _, underline.to_value().to_glib_none().0, ); } } fn underline_rgba(&self) -> Option<gdk::RGBA> { unsafe { let mut value = glib::Value::from_type(<gdk::RGBA as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-rgba\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `underline-rgba` getter") } } fn set_underline_rgba(&self, underline_rgba: Option<&gdk::RGBA>) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-rgba\0".as_ptr() as *const _, underline_rgba.to_value().to_glib_none().0, ); } } fn is_underline_rgba_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-rgba-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `underline-rgba-set` getter") } } fn set_underline_rgba_set(&self, underline_rgba_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-rgba-set\0".as_ptr() as *const _, underline_rgba_set.to_value().to_glib_none().0, ); } } fn
is_underline_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `underline-set` getter") } } fn set_underline_set(&self, underline_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"underline-set\0".as_ptr() as *const _, underline_set.to_value().to_glib_none().0, ); } } fn variant(&self) -> pango::Variant { unsafe { let mut value = glib::Value::from_type(<pango::Variant as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"variant\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `variant` getter") } } fn set_variant(&self, variant: pango::Variant) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"variant\0".as_ptr() as *const _, variant.to_value().to_glib_none().0, ); } } fn is_variant_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"variant-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `variant-set` getter") } } fn set_variant_set(&self, variant_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"variant-set\0".as_ptr() as *const _, variant_set.to_value().to_glib_none().0, ); } } fn weight(&self) -> i32 { unsafe { let mut value = glib::Value::from_type(<i32 as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"weight\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `weight` getter") } } fn set_weight(&self, weight: i32) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"weight\0".as_ptr() as *const _, weight.to_value().to_glib_none().0, ); } } fn is_weight_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"weight-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `weight-set` getter") } } fn set_weight_set(&self, weight_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"weight-set\0".as_ptr() as *const _, weight_set.to_value().to_glib_none().0, ); } } fn wrap_mode(&self) -> WrapMode { unsafe { let mut value = glib::Value::from_type(<WrapMode as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"wrap-mode\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `wrap-mode` getter") } } fn set_wrap_mode(&self, wrap_mode: WrapMode) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"wrap-mode\0".as_ptr() as *const _, wrap_mode.to_value().to_glib_none().0, ); } } /* NOTE(review): generator artifact — named `wraps_mode_set`, not `is_wrap_mode_set` like the sibling `is_*_set` getters; kept as-is for API compatibility. */ fn wraps_mode_set(&self) -> bool { unsafe { let mut value = glib::Value::from_type(<bool as StaticType>::static_type()); glib::gobject_ffi::g_object_get_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"wrap-mode-set\0".as_ptr() as *const _, value.to_glib_none_mut().0, ); value .get() .expect("Return Value for property `wrap-mode-set` getter") } } fn
set_wrap_mode_set(&self, wrap_mode_set: bool) { unsafe { glib::gobject_ffi::g_object_set_property( self.to_glib_none().0 as *mut glib::gobject_ffi::GObject, b"wrap-mode-set\0".as_ptr() as *const _, wrap_mode_set.to_value().to_glib_none().0, ); } } /* Auto-generated `notify::<property>` signal connectors (gir-generated; do not hand-edit). Pattern per property: an unsafe extern "C" trampoline casts the glib user-data pointer back to the boxed Rust closure and invokes it with the emitting TextTag (borrowed via from_glib_borrow, downcast with unsafe_cast_ref); the closure is heap-allocated and handed to connect_raw through Box_::into_raw as user data — presumably released by the signal's destroy notify inside connect_raw; confirm there. */ #[doc(alias = "accumulative-margin")] fn connect_accumulative_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_accumulative_margin_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::accumulative-margin\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_accumulative_margin_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "allow-breaks")] fn connect_allow_breaks_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_allow_breaks_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::allow-breaks\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_allow_breaks_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "allow-breaks-set")] fn connect_allow_breaks_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_allow_breaks_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f:
Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::allow-breaks-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_allow_breaks_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "background")] fn connect_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_background_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::background\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_background_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "background-full-height")] fn connect_background_full_height_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_background_full_height_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::background-full-height\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_background_full_height_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "background-full-height-set")] fn connect_background_full_height_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_background_full_height_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F);
f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::background-full-height-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_background_full_height_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "background-rgba")] fn connect_background_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_background_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::background-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_background_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "background-set")] fn connect_background_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_background_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::background-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_background_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "direction")] fn connect_direction_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_direction_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F);
f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::direction\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_direction_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "editable")] fn connect_editable_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_editable_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::editable\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_editable_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "editable-set")] fn connect_editable_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_editable_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::editable-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_editable_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "fallback")] fn connect_fallback_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_fallback_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f);
connect_raw( self.as_ptr() as *mut _, b"notify::fallback\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_fallback_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "fallback-set")] fn connect_fallback_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_fallback_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::fallback-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_fallback_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "family")] fn connect_family_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_family_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::family\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_family_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "family-set")] fn connect_family_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_family_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::family-set\0".as_ptr() as *const _, Some(transmute::<_,
unsafe extern "C" fn()>( notify_family_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "font")] fn connect_font_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_font_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::font\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_font_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "font-desc")] fn connect_font_desc_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_font_desc_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::font-desc\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_font_desc_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "font-features")] fn connect_font_features_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_font_features_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::font-features\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_font_features_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } }
#[doc(alias = "font-features-set")] fn connect_font_features_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_font_features_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::font-features-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_font_features_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "foreground")] fn connect_foreground_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_foreground_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::foreground\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_foreground_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "foreground-rgba")] fn connect_foreground_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_foreground_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::foreground-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_foreground_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias =
"foreground-set")] fn connect_foreground_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_foreground_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::foreground-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_foreground_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "indent")] fn connect_indent_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_indent_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::indent\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_indent_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "indent-set")] fn connect_indent_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_indent_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::indent-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_indent_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "insert-hyphens")] fn connect_insert_hyphens_notify<F: Fn(&Self) + 'static>(&self, f: F) ->
SignalHandlerId { unsafe extern "C" fn notify_insert_hyphens_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::insert-hyphens\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_insert_hyphens_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "insert-hyphens-set")] fn connect_insert_hyphens_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_insert_hyphens_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::insert-hyphens-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_insert_hyphens_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "invisible")] fn connect_invisible_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_invisible_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::invisible\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_invisible_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "invisible-set")] fn connect_invisible_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn
notify_invisible_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::invisible-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_invisible_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "justification")] fn connect_justification_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_justification_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::justification\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_justification_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "justification-set")] fn connect_justification_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_justification_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::justification-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_justification_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "language")] fn connect_language_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_language_trampoline<P:
IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::language\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_language_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "language-set")] fn connect_language_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_language_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::language-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_language_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "left-margin")] fn connect_left_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_left_margin_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::left-margin\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_left_margin_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "left-margin-set")] fn connect_left_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_left_margin_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag,
_param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::left-margin-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_left_margin_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "letter-spacing")] fn connect_letter_spacing_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_letter_spacing_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::letter-spacing\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_letter_spacing_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "letter-spacing-set")] fn connect_letter_spacing_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_letter_spacing_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::letter-spacing-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_letter_spacing_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "overline")] fn connect_overline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_overline_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec:
glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::overline\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_overline_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "overline-rgba")] fn connect_overline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_overline_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::overline-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_overline_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "overline-rgba-set")] fn connect_overline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_overline_rgba_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::overline-rgba-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_overline_rgba_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "overline-set")] fn connect_overline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_overline_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f:
glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::overline-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_overline_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "paragraph-background")] fn connect_paragraph_background_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_paragraph_background_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::paragraph-background\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_paragraph_background_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "paragraph-background-rgba")] fn connect_paragraph_background_rgba_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_paragraph_background_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::paragraph-background-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_paragraph_background_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "paragraph-background-set")] fn connect_paragraph_background_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_paragraph_background_set_trampoline< P: IsA<TextTag>,
F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::paragraph-background-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_paragraph_background_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-above-lines")] fn connect_pixels_above_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_pixels_above_lines_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-above-lines\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_above_lines_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-above-lines-set")] fn connect_pixels_above_lines_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_pixels_above_lines_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-above-lines-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_above_lines_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-below-lines")] fn connect_pixels_below_lines_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe 
extern "C" fn notify_pixels_below_lines_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-below-lines\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_below_lines_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-below-lines-set")] fn connect_pixels_below_lines_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_pixels_below_lines_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-below-lines-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_below_lines_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-inside-wrap")] fn connect_pixels_inside_wrap_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_pixels_inside_wrap_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-inside-wrap\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_inside_wrap_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "pixels-inside-wrap-set")] fn connect_pixels_inside_wrap_set_notify<F: 
Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_pixels_inside_wrap_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::pixels-inside-wrap-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_pixels_inside_wrap_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "right-margin")] fn connect_right_margin_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_right_margin_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::right-margin\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_right_margin_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "right-margin-set")] fn connect_right_margin_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_right_margin_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::right-margin-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_right_margin_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "rise")] fn connect_rise_notify<F: Fn(&Self) + 
'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_rise_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::rise\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_rise_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "rise-set")] fn connect_rise_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_rise_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::rise-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_rise_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "scale")] fn connect_scale_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_scale_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::scale\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_scale_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "scale-set")] fn connect_scale_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_scale_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut 
ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::scale-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_scale_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "show-spaces")] fn connect_show_spaces_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_show_spaces_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::show-spaces\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_show_spaces_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "show-spaces-set")] fn connect_show_spaces_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_show_spaces_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::show-spaces-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_show_spaces_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "size")] fn connect_size_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_size_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let 
f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::size\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_size_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "size-points")] fn connect_size_points_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_size_points_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::size-points\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_size_points_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "size-set")] fn connect_size_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_size_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::size-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_size_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "stretch")] fn connect_stretch_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_stretch_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); 
connect_raw( self.as_ptr() as *mut _, b"notify::stretch\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_stretch_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "stretch-set")] fn connect_stretch_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_stretch_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::stretch-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_stretch_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "strikethrough")] fn connect_strikethrough_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_strikethrough_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::strikethrough\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_strikethrough_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "strikethrough-rgba")] fn connect_strikethrough_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_strikethrough_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, 
b"notify::strikethrough-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_strikethrough_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "strikethrough-rgba-set")] fn connect_strikethrough_rgba_set_notify<F: Fn(&Self) + 'static>( &self, f: F, ) -> SignalHandlerId { unsafe extern "C" fn notify_strikethrough_rgba_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::strikethrough-rgba-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_strikethrough_rgba_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "strikethrough-set")] fn connect_strikethrough_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_strikethrough_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::strikethrough-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_strikethrough_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "style")] fn connect_style_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_style_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as 
*mut _, b"notify::style\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_style_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "style-set")] fn connect_style_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_style_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::style-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_style_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "tabs")] fn connect_tabs_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_tabs_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::tabs\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_tabs_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "tabs-set")] fn connect_tabs_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_tabs_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::tabs-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_tabs_set_trampoline::<Self, F> as *const (), 
)), Box_::into_raw(f), ) } } #[doc(alias = "underline")] fn connect_underline_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_underline_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::underline\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_underline_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "underline-rgba")] fn connect_underline_rgba_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_underline_rgba_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::underline-rgba\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_underline_rgba_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "underline-rgba-set")] fn connect_underline_rgba_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_underline_rgba_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::underline-rgba-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_underline_rgba_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } 
#[doc(alias = "underline-set")] fn connect_underline_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_underline_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::underline-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_underline_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "variant")] fn connect_variant_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_variant_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::variant\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_variant_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "variant-set")] fn connect_variant_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_variant_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::variant-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_variant_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "weight")] fn connect_weight_notify<F: Fn(&Self) + 'static>(&self, f: F) -> 
SignalHandlerId { unsafe extern "C" fn notify_weight_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::weight\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_weight_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "weight-set")] fn connect_weight_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_weight_set_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::weight-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_weight_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "wrap-mode")] fn connect_wrap_mode_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_wrap_mode_trampoline<P: IsA<TextTag>, F: Fn(&P) + 'static>( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::wrap-mode\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_wrap_mode_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } #[doc(alias = "wrap-mode-set")] fn connect_wrap_mode_set_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_wrap_mode_set_trampoline< P: IsA<TextTag>, F: Fn(&P) + 
'static, >( this: *mut ffi::GtkTextTag, _param_spec: glib::ffi::gpointer, f: glib::ffi::gpointer, ) { let f: &F = &*(f as *const F); f(&TextTag::from_glib_borrow(this).unsafe_cast_ref()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"notify::wrap-mode-set\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( notify_wrap_mode_set_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for TextTag { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("TextTag") } }
35.684269
104
0.534419
28842791be4f80195a968c06b15ff51bc1b721c0
17,272
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{ account_config::LBR_NAME, account_state_blob::AccountStateBlob, block_info::BlockInfo, ledger_info::LedgerInfo, proof::{ definition::MAX_ACCUMULATOR_PROOF_DEPTH, AccountStateProof, EventAccumulatorInternalNode, EventAccumulatorProof, EventProof, SparseMerkleInternalNode, SparseMerkleLeafNode, SparseMerkleProof, TestAccumulatorInternalNode, TestAccumulatorProof, TransactionAccumulatorInternalNode, TransactionAccumulatorProof, TransactionInfoWithProof, }, transaction::{RawTransaction, Script, Transaction, TransactionInfo}, vm_error::StatusCode, }; use libra_crypto::{ ed25519::Ed25519PrivateKey, hash::{ CryptoHash, TestOnlyHash, ACCUMULATOR_PLACEHOLDER_HASH, GENESIS_BLOCK_ID, SPARSE_MERKLE_PLACEHOLDER_HASH, }, HashValue, PrivateKey, Uniform, }; #[test] fn test_verify_empty_accumulator() { let element_hash = b"hello".test_only_hash(); let root_hash = *ACCUMULATOR_PLACEHOLDER_HASH; let proof = TestAccumulatorProof::new(vec![]); assert!(proof.verify(root_hash, element_hash, 0).is_err()); } #[test] fn test_verify_single_element_accumulator() { let element_hash = b"hello".test_only_hash(); let root_hash = element_hash; let proof = TestAccumulatorProof::new(vec![]); assert!(proof.verify(root_hash, element_hash, 0).is_ok()); } #[test] fn test_verify_two_element_accumulator() { let element0_hash = b"hello".test_only_hash(); let element1_hash = b"world".test_only_hash(); let root_hash = TestAccumulatorInternalNode::new(element0_hash, element1_hash).hash(); assert!(TestAccumulatorProof::new(vec![element1_hash]) .verify(root_hash, element0_hash, 0) .is_ok()); assert!(TestAccumulatorProof::new(vec![element0_hash]) .verify(root_hash, element1_hash, 1) .is_ok()); } #[test] fn test_verify_three_element_accumulator() { let element0_hash = b"hello".test_only_hash(); let element1_hash = b"world".test_only_hash(); let element2_hash = b"!".test_only_hash(); let internal0_hash = 
TestAccumulatorInternalNode::new(element0_hash, element1_hash).hash(); let internal1_hash = TestAccumulatorInternalNode::new(element2_hash, *ACCUMULATOR_PLACEHOLDER_HASH).hash(); let root_hash = TestAccumulatorInternalNode::new(internal0_hash, internal1_hash).hash(); assert!( TestAccumulatorProof::new(vec![element1_hash, internal1_hash]) .verify(root_hash, element0_hash, 0) .is_ok() ); assert!( TestAccumulatorProof::new(vec![element0_hash, internal1_hash]) .verify(root_hash, element1_hash, 1) .is_ok() ); assert!( TestAccumulatorProof::new(vec![*ACCUMULATOR_PLACEHOLDER_HASH, internal0_hash]) .verify(root_hash, element2_hash, 2) .is_ok() ); } #[test] fn test_accumulator_proof_max_siblings_leftmost() { let element_hash = b"hello".test_only_hash(); let mut siblings = vec![]; for i in 0..MAX_ACCUMULATOR_PROOF_DEPTH as u8 { siblings.push(HashValue::new([i; 32])); } let root_hash = siblings.iter().fold(element_hash, |hash, sibling_hash| { TestAccumulatorInternalNode::new(hash, *sibling_hash).hash() }); let proof = TestAccumulatorProof::new(siblings); assert!(proof.verify(root_hash, element_hash, 0).is_ok()); } #[test] fn test_accumulator_proof_max_siblings_rightmost() { let element_hash = b"hello".test_only_hash(); let mut siblings = vec![]; for i in 0..MAX_ACCUMULATOR_PROOF_DEPTH as u8 { siblings.push(HashValue::new([i; 32])); } let root_hash = siblings.iter().fold(element_hash, |hash, sibling_hash| { TestAccumulatorInternalNode::new(*sibling_hash, hash).hash() }); let leaf_index = (std::u64::MAX - 1) / 2; let proof = TestAccumulatorProof::new(siblings); assert!(proof.verify(root_hash, element_hash, leaf_index).is_ok()); } #[test] #[allow(clippy::range_plus_one)] fn test_accumulator_proof_sibling_overflow() { let element_hash = b"hello".test_only_hash(); let mut siblings = vec![]; for i in 0..MAX_ACCUMULATOR_PROOF_DEPTH as u8 + 1 { siblings.push(HashValue::new([i; 32])); } let root_hash = siblings .iter() .rev() .fold(element_hash, |hash, sibling_hash| { 
TestAccumulatorInternalNode::new(hash, *sibling_hash).hash() }); let proof = TestAccumulatorProof::new(siblings); assert!(proof.verify(root_hash, element_hash, 0).is_err()); } #[test] fn test_verify_empty_sparse_merkle() { let key = b"hello".test_only_hash(); let blob = b"world".to_vec().into(); let root_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH; let proof = SparseMerkleProof::new(None, vec![]); // Trying to show that this key doesn't exist. assert!(proof.verify(root_hash, key, None).is_ok()); // Trying to show that this key exists. assert!(proof.verify(root_hash, key, Some(&blob)).is_err()); } #[test] fn test_verify_single_element_sparse_merkle() { let key = b"hello".test_only_hash(); let blob: AccountStateBlob = b"world".to_vec().into(); let blob_hash = blob.hash(); let non_existing_blob = b"world?".to_vec().into(); let root_node = SparseMerkleLeafNode::new(key, blob_hash); let root_hash = root_node.hash(); let proof = SparseMerkleProof::new(Some(root_node), vec![]); // Trying to show this exact key exists with its value. assert!(proof.verify(root_hash, key, Some(&blob)).is_ok()); // Trying to show this exact key exists with another value. assert!(proof .verify(root_hash, key, Some(&non_existing_blob)) .is_err()); // Trying to show this key doesn't exist. assert!(proof.verify(root_hash, key, None).is_err()); let non_existing_key = b"HELLO".test_only_hash(); // The proof can be used to show non_existing_key doesn't exist. assert!(proof.verify(root_hash, non_existing_key, None).is_ok()); // The proof can't be used to non_existing_key exists. 
assert!(proof .verify(root_hash, non_existing_key, Some(&blob)) .is_err()); } #[test] fn test_verify_three_element_sparse_merkle() { // root // / \ // a default // / \ // key1 b // / \ // key2 key3 let key1 = b"hello".test_only_hash(); let key2 = b"world".test_only_hash(); let key3 = b"!".test_only_hash(); assert_eq!(key1[0], 0b0011_0011); assert_eq!(key2[0], 0b0100_0010); assert_eq!(key3[0], 0b0110_1001); let blob1 = AccountStateBlob::from(b"1".to_vec()); let blob2 = AccountStateBlob::from(b"2".to_vec()); let blob3 = AccountStateBlob::from(b"3".to_vec()); let leaf1 = SparseMerkleLeafNode::new(key1, blob1.hash()); let leaf1_hash = leaf1.hash(); let leaf2_hash = SparseMerkleLeafNode::new(key2, blob2.hash()).hash(); let leaf3_hash = SparseMerkleLeafNode::new(key3, blob3.hash()).hash(); let internal_b_hash = SparseMerkleInternalNode::new(leaf2_hash, leaf3_hash).hash(); let internal_a_hash = SparseMerkleInternalNode::new(leaf1_hash, internal_b_hash).hash(); let root_hash = SparseMerkleInternalNode::new(internal_a_hash, *SPARSE_MERKLE_PLACEHOLDER_HASH).hash(); let non_existing_key1 = b"abc".test_only_hash(); let non_existing_key2 = b"def".test_only_hash(); assert_eq!(non_existing_key1[0], 0b0011_1010); assert_eq!(non_existing_key2[0], 0b1000_1110); { // Construct a proof of key1. let proof = SparseMerkleProof::new( Some(leaf1), vec![internal_b_hash, *SPARSE_MERKLE_PLACEHOLDER_HASH], ); // The exact key value exists. assert!(proof.verify(root_hash, key1, Some(&blob1)).is_ok()); // Trying to show that this key has another value. assert!(proof.verify(root_hash, key1, Some(&blob2)).is_err()); // Trying to show that this key doesn't exist. assert!(proof.verify(root_hash, key1, None).is_err()); // This proof can't be used to show anything about key2. 
assert!(proof.verify(root_hash, key2, None).is_err()); assert!(proof.verify(root_hash, key2, Some(&blob1)).is_err()); assert!(proof.verify(root_hash, key2, Some(&blob2)).is_err()); // This proof can be used to show that non_existing_key1 indeed doesn't exist. assert!(proof.verify(root_hash, non_existing_key1, None).is_ok()); // This proof can't be used to show that non_existing_key2 doesn't exist because it lives // in a different subtree. assert!(proof.verify(root_hash, non_existing_key2, None).is_err()); } { // Construct a proof of the default node. let proof = SparseMerkleProof::new(None, vec![internal_a_hash]); // This proof can't be used to show that a key starting with 0 doesn't exist. assert!(proof.verify(root_hash, non_existing_key1, None).is_err()); // This proof can be used to show that a key starting with 1 doesn't exist. assert!(proof.verify(root_hash, non_existing_key2, None).is_ok()); } } #[test] fn test_verify_transaction() { // root // / \ // / \ // a b // / \ / \ // txn0 txn1 txn2 default let txn_info0_hash = b"hello".test_only_hash(); let txn_info2_hash = b"!".test_only_hash(); let txn1_hash = HashValue::random(); let state_root1_hash = b"a".test_only_hash(); let event_root1_hash = b"b".test_only_hash(); let txn_info1 = TransactionInfo::new( txn1_hash, state_root1_hash, event_root1_hash, /* gas_used = */ 0, /* major_status = */ StatusCode::EXECUTED, ); let txn_info1_hash = txn_info1.hash(); let internal_a_hash = TransactionAccumulatorInternalNode::new(txn_info0_hash, txn_info1_hash).hash(); let internal_b_hash = TransactionAccumulatorInternalNode::new(txn_info2_hash, *ACCUMULATOR_PLACEHOLDER_HASH) .hash(); let root_hash = TransactionAccumulatorInternalNode::new(internal_a_hash, internal_b_hash).hash(); let consensus_data_hash = b"c".test_only_hash(); let ledger_info = LedgerInfo::new( BlockInfo::new(0, 0, *GENESIS_BLOCK_ID, root_hash, 2, 10000, None), consensus_data_hash, ); let ledger_info_to_transaction_info_proof = 
TransactionAccumulatorProof::new(vec![txn_info0_hash, internal_b_hash]); let proof = TransactionInfoWithProof::new(ledger_info_to_transaction_info_proof.clone(), txn_info1); // The proof can be used to verify txn1. assert!(proof.verify(&ledger_info, 1).is_ok()); // Trying to show that txn1 is at version 2. assert!(proof.verify(&ledger_info, 2).is_err()); // Replacing txn1 with some other txn should cause the verification to fail. let fake_txn_info = TransactionInfo::new( HashValue::random(), state_root1_hash, event_root1_hash, /* gas_used = */ 0, /* major_status = */ StatusCode::EXECUTED, ); let proof = TransactionInfoWithProof::new(ledger_info_to_transaction_info_proof, fake_txn_info); assert!(proof.verify(&ledger_info, 1).is_err()); } #[test] fn test_verify_account_state_and_event() { // root // / \ // / \ // a b // / \ / \ // txn0 txn1 txn2 default // ^ // | // transaction_info2 // / / \ // / / \ // txn state_root event_root // / \ / \ // c default event0 event1 // / \ // key1 d // / \ // key2 key3 let key1 = b"hello".test_only_hash(); let key2 = b"world".test_only_hash(); let key3 = b"!".test_only_hash(); let non_existing_key = b"#".test_only_hash(); assert_eq!(key1[0], 0b0011_0011); assert_eq!(key2[0], 0b0100_0010); assert_eq!(key3[0], 0b0110_1001); assert_eq!(non_existing_key[0], 0b0100_0001); let blob1 = AccountStateBlob::from(b"value1".to_vec()); let blob2 = AccountStateBlob::from(b"value2".to_vec()); let blob3 = AccountStateBlob::from(b"value3".to_vec()); let leaf1_hash = SparseMerkleLeafNode::new(key1, blob1.hash()).hash(); let leaf2 = SparseMerkleLeafNode::new(key2, blob2.hash()); let leaf2_hash = leaf2.hash(); let leaf3_hash = SparseMerkleLeafNode::new(key3, blob3.hash()).hash(); let internal_d_hash = SparseMerkleInternalNode::new(leaf2_hash, leaf3_hash).hash(); let internal_c_hash = SparseMerkleInternalNode::new(leaf1_hash, internal_d_hash).hash(); let state_root_hash = SparseMerkleInternalNode::new(internal_c_hash, 
*SPARSE_MERKLE_PLACEHOLDER_HASH).hash(); let txn_info0_hash = b"hellohello".test_only_hash(); let txn_info1_hash = b"worldworld".test_only_hash(); let privkey = Ed25519PrivateKey::generate_for_testing(); let pubkey = privkey.public_key(); let txn2_hash = Transaction::UserTransaction( RawTransaction::new_script( crate::account_address::from_public_key(&pubkey), /* sequence_number = */ 0, Script::new(vec![], vec![], vec![]), /* max_gas_amount = */ 0, /* gas_unit_price = */ 0, /* gas_currency_code = */ LBR_NAME.to_owned(), /* expiration_time = */ std::time::Duration::new(0, 0), ) .sign(&privkey, pubkey) .expect("Signing failed.") .into_inner(), ) .hash(); let event0_hash = b"event0".test_only_hash(); let event1_hash = b"event1".test_only_hash(); let event_root_hash = EventAccumulatorInternalNode::new(event0_hash, event1_hash).hash(); let txn_info2 = TransactionInfo::new( txn2_hash, state_root_hash, event_root_hash, /* gas_used = */ 0, /* major_status = */ StatusCode::EXECUTED, ); let txn_info2_hash = txn_info2.hash(); let internal_a_hash = TransactionAccumulatorInternalNode::new(txn_info0_hash, txn_info1_hash).hash(); let internal_b_hash = TransactionAccumulatorInternalNode::new(txn_info2_hash, *ACCUMULATOR_PLACEHOLDER_HASH) .hash(); let root_hash = TransactionAccumulatorInternalNode::new(internal_a_hash, internal_b_hash).hash(); // consensus_data_hash isn't used in proofs, but we need it to construct LedgerInfo. 
let consensus_data_hash = b"consensus_data".test_only_hash(); let ledger_info = LedgerInfo::new( BlockInfo::new(0, 0, *GENESIS_BLOCK_ID, root_hash, 2, 10000, None), consensus_data_hash, ); let ledger_info_to_transaction_info_proof = TransactionAccumulatorProof::new(vec![*ACCUMULATOR_PLACEHOLDER_HASH, internal_a_hash]); let transaction_info_to_account_proof = SparseMerkleProof::new( Some(leaf2), vec![leaf3_hash, leaf1_hash, *SPARSE_MERKLE_PLACEHOLDER_HASH], ); let account_state_proof = AccountStateProof::new( TransactionInfoWithProof::new( ledger_info_to_transaction_info_proof.clone(), txn_info2.clone(), ), transaction_info_to_account_proof, ); // Prove that account at `key2` has value `value2`. assert!(account_state_proof .verify( &ledger_info, /* state_version = */ 2, key2, Some(&blob2), ) .is_ok()); // Use the same proof to prove that `non_existing_key` doesn't exist. assert!(account_state_proof .verify( &ledger_info, /* state_version = */ 2, non_existing_key, None, ) .is_ok()); let bad_blob2 = b"3".to_vec().into(); assert!(account_state_proof .verify( &ledger_info, /* state_version = */ 2, key2, Some(&bad_blob2), ) .is_err()); let transaction_info_to_event_proof = EventAccumulatorProof::new(vec![event1_hash]); let event_proof = EventProof::new( TransactionInfoWithProof::new(ledger_info_to_transaction_info_proof, txn_info2), transaction_info_to_event_proof, ); // Prove that the first event within transaction 2 is `event0`. assert!(event_proof .verify( &ledger_info, event0_hash, /* transaction_version = */ 2, /* event_version_within_transaction = */ 0, ) .is_ok()); let bad_event0_hash = b"event1".test_only_hash(); assert!(event_proof .verify( &ledger_info, bad_event0_hash, /* transaction_version = */ 2, /* event_version_within_transaction = */ 0, ) .is_err()); }
37.304536
100
0.63629
f49a8f83550e95e5cf061c3e06ede1d3416451c6
92,377
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// Operation shape for `AddFacetToObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`add_facet_to_object`](crate::client::Client::add_facet_to_object). /// /// See [`crate::client::fluent_builders::AddFacetToObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AddFacetToObject { _private: (), } impl AddFacetToObject { /// Creates a new builder-style object to manufacture [`AddFacetToObjectInput`](crate::input::AddFacetToObjectInput) pub fn builder() -> crate::input::add_facet_to_object_input::Builder { crate::input::add_facet_to_object_input::Builder::default() } /// Creates a new `AddFacetToObject` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AddFacetToObject { type Output = std::result::Result< crate::output::AddFacetToObjectOutput, crate::error::AddFacetToObjectError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_add_facet_to_object_error(response) } else { crate::operation_deser::parse_add_facet_to_object_response(response) } } } /// Operation shape for `ApplySchema`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`apply_schema`](crate::client::Client::apply_schema). /// /// See [`crate::client::fluent_builders::ApplySchema`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ApplySchema { _private: (), } impl ApplySchema { /// Creates a new builder-style object to manufacture [`ApplySchemaInput`](crate::input::ApplySchemaInput) pub fn builder() -> crate::input::apply_schema_input::Builder { crate::input::apply_schema_input::Builder::default() } /// Creates a new `ApplySchema` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ApplySchema { type Output = std::result::Result<crate::output::ApplySchemaOutput, crate::error::ApplySchemaError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_apply_schema_error(response) } else { crate::operation_deser::parse_apply_schema_response(response) } } } /// Operation shape for `AttachObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`attach_object`](crate::client::Client::attach_object). /// /// See [`crate::client::fluent_builders::AttachObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AttachObject { _private: (), } impl AttachObject { /// Creates a new builder-style object to manufacture [`AttachObjectInput`](crate::input::AttachObjectInput) pub fn builder() -> crate::input::attach_object_input::Builder { crate::input::attach_object_input::Builder::default() } /// Creates a new `AttachObject` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AttachObject { type Output = std::result::Result<crate::output::AttachObjectOutput, crate::error::AttachObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_attach_object_error(response) } else { crate::operation_deser::parse_attach_object_response(response) } } } /// Operation shape for `AttachPolicy`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`attach_policy`](crate::client::Client::attach_policy). /// /// See [`crate::client::fluent_builders::AttachPolicy`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AttachPolicy { _private: (), } impl AttachPolicy { /// Creates a new builder-style object to manufacture [`AttachPolicyInput`](crate::input::AttachPolicyInput) pub fn builder() -> crate::input::attach_policy_input::Builder { crate::input::attach_policy_input::Builder::default() } /// Creates a new `AttachPolicy` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AttachPolicy { type Output = std::result::Result<crate::output::AttachPolicyOutput, crate::error::AttachPolicyError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_attach_policy_error(response) } else { crate::operation_deser::parse_attach_policy_response(response) } } } /// Operation shape for `AttachToIndex`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`attach_to_index`](crate::client::Client::attach_to_index). 
/// /// See [`crate::client::fluent_builders::AttachToIndex`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AttachToIndex { _private: (), } impl AttachToIndex { /// Creates a new builder-style object to manufacture [`AttachToIndexInput`](crate::input::AttachToIndexInput) pub fn builder() -> crate::input::attach_to_index_input::Builder { crate::input::attach_to_index_input::Builder::default() } /// Creates a new `AttachToIndex` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AttachToIndex { type Output = std::result::Result<crate::output::AttachToIndexOutput, crate::error::AttachToIndexError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_attach_to_index_error(response) } else { crate::operation_deser::parse_attach_to_index_response(response) } } } /// Operation shape for `AttachTypedLink`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`attach_typed_link`](crate::client::Client::attach_typed_link). /// /// See [`crate::client::fluent_builders::AttachTypedLink`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct AttachTypedLink { _private: (), } impl AttachTypedLink { /// Creates a new builder-style object to manufacture [`AttachTypedLinkInput`](crate::input::AttachTypedLinkInput) pub fn builder() -> crate::input::attach_typed_link_input::Builder { crate::input::attach_typed_link_input::Builder::default() } /// Creates a new `AttachTypedLink` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for AttachTypedLink { type Output = std::result::Result< crate::output::AttachTypedLinkOutput, crate::error::AttachTypedLinkError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_attach_typed_link_error(response) } else { crate::operation_deser::parse_attach_typed_link_response(response) } } } /// Operation shape for `BatchRead`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`batch_read`](crate::client::Client::batch_read). /// /// See [`crate::client::fluent_builders::BatchRead`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct BatchRead { _private: (), } impl BatchRead { /// Creates a new builder-style object to manufacture [`BatchReadInput`](crate::input::BatchReadInput) pub fn builder() -> crate::input::batch_read_input::Builder { crate::input::batch_read_input::Builder::default() } /// Creates a new `BatchRead` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for BatchRead { type Output = std::result::Result<crate::output::BatchReadOutput, crate::error::BatchReadError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_batch_read_error(response) } else { crate::operation_deser::parse_batch_read_response(response) } } } /// Operation shape for `BatchWrite`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`batch_write`](crate::client::Client::batch_write). /// /// See [`crate::client::fluent_builders::BatchWrite`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct BatchWrite { _private: (), } impl BatchWrite { /// Creates a new builder-style object to manufacture [`BatchWriteInput`](crate::input::BatchWriteInput) pub fn builder() -> crate::input::batch_write_input::Builder { crate::input::batch_write_input::Builder::default() } /// Creates a new `BatchWrite` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for BatchWrite { type Output = std::result::Result<crate::output::BatchWriteOutput, crate::error::BatchWriteError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_batch_write_error(response) } else { crate::operation_deser::parse_batch_write_response(response) } } } /// Operation shape for `CreateDirectory`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_directory`](crate::client::Client::create_directory). /// /// See [`crate::client::fluent_builders::CreateDirectory`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateDirectory { _private: (), } impl CreateDirectory { /// Creates a new builder-style object to manufacture [`CreateDirectoryInput`](crate::input::CreateDirectoryInput) pub fn builder() -> crate::input::create_directory_input::Builder { crate::input::create_directory_input::Builder::default() } /// Creates a new `CreateDirectory` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateDirectory { type Output = std::result::Result< crate::output::CreateDirectoryOutput, crate::error::CreateDirectoryError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_directory_error(response) } else { crate::operation_deser::parse_create_directory_response(response) } } } /// Operation shape for `CreateFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_facet`](crate::client::Client::create_facet). /// /// See [`crate::client::fluent_builders::CreateFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateFacet { _private: (), } impl CreateFacet { /// Creates a new builder-style object to manufacture [`CreateFacetInput`](crate::input::CreateFacetInput) pub fn builder() -> crate::input::create_facet_input::Builder { crate::input::create_facet_input::Builder::default() } /// Creates a new `CreateFacet` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateFacet { type Output = std::result::Result<crate::output::CreateFacetOutput, crate::error::CreateFacetError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_facet_error(response) } else { crate::operation_deser::parse_create_facet_response(response) } } } /// Operation shape for `CreateIndex`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_index`](crate::client::Client::create_index). /// /// See [`crate::client::fluent_builders::CreateIndex`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateIndex { _private: (), } impl CreateIndex { /// Creates a new builder-style object to manufacture [`CreateIndexInput`](crate::input::CreateIndexInput) pub fn builder() -> crate::input::create_index_input::Builder { crate::input::create_index_input::Builder::default() } /// Creates a new `CreateIndex` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateIndex { type Output = std::result::Result<crate::output::CreateIndexOutput, crate::error::CreateIndexError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_index_error(response) } else { crate::operation_deser::parse_create_index_response(response) } } } /// Operation shape for `CreateObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_object`](crate::client::Client::create_object). /// /// See [`crate::client::fluent_builders::CreateObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateObject { _private: (), } impl CreateObject { /// Creates a new builder-style object to manufacture [`CreateObjectInput`](crate::input::CreateObjectInput) pub fn builder() -> crate::input::create_object_input::Builder { crate::input::create_object_input::Builder::default() } /// Creates a new `CreateObject` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateObject { type Output = std::result::Result<crate::output::CreateObjectOutput, crate::error::CreateObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_object_error(response) } else { crate::operation_deser::parse_create_object_response(response) } } } /// Operation shape for `CreateSchema`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_schema`](crate::client::Client::create_schema). /// /// See [`crate::client::fluent_builders::CreateSchema`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateSchema { _private: (), } impl CreateSchema { /// Creates a new builder-style object to manufacture [`CreateSchemaInput`](crate::input::CreateSchemaInput) pub fn builder() -> crate::input::create_schema_input::Builder { crate::input::create_schema_input::Builder::default() } /// Creates a new `CreateSchema` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateSchema { type Output = std::result::Result<crate::output::CreateSchemaOutput, crate::error::CreateSchemaError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_schema_error(response) } else { crate::operation_deser::parse_create_schema_response(response) } } } /// Operation shape for `CreateTypedLinkFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`create_typed_link_facet`](crate::client::Client::create_typed_link_facet). 
/// /// See [`crate::client::fluent_builders::CreateTypedLinkFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct CreateTypedLinkFacet { _private: (), } impl CreateTypedLinkFacet { /// Creates a new builder-style object to manufacture [`CreateTypedLinkFacetInput`](crate::input::CreateTypedLinkFacetInput) pub fn builder() -> crate::input::create_typed_link_facet_input::Builder { crate::input::create_typed_link_facet_input::Builder::default() } /// Creates a new `CreateTypedLinkFacet` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for CreateTypedLinkFacet { type Output = std::result::Result< crate::output::CreateTypedLinkFacetOutput, crate::error::CreateTypedLinkFacetError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_create_typed_link_facet_error(response) } else { crate::operation_deser::parse_create_typed_link_facet_response(response) } } } /// Operation shape for `DeleteDirectory`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_directory`](crate::client::Client::delete_directory). /// /// See [`crate::client::fluent_builders::DeleteDirectory`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteDirectory { _private: (), } impl DeleteDirectory { /// Creates a new builder-style object to manufacture [`DeleteDirectoryInput`](crate::input::DeleteDirectoryInput) pub fn builder() -> crate::input::delete_directory_input::Builder { crate::input::delete_directory_input::Builder::default() } /// Creates a new `DeleteDirectory` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteDirectory { type Output = std::result::Result< crate::output::DeleteDirectoryOutput, crate::error::DeleteDirectoryError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_directory_error(response) } else { crate::operation_deser::parse_delete_directory_response(response) } } } /// Operation shape for `DeleteFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_facet`](crate::client::Client::delete_facet). /// /// See [`crate::client::fluent_builders::DeleteFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteFacet { _private: (), } impl DeleteFacet { /// Creates a new builder-style object to manufacture [`DeleteFacetInput`](crate::input::DeleteFacetInput) pub fn builder() -> crate::input::delete_facet_input::Builder { crate::input::delete_facet_input::Builder::default() } /// Creates a new `DeleteFacet` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteFacet { type Output = std::result::Result<crate::output::DeleteFacetOutput, crate::error::DeleteFacetError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_facet_error(response) } else { crate::operation_deser::parse_delete_facet_response(response) } } } /// Operation shape for `DeleteObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_object`](crate::client::Client::delete_object). /// /// See [`crate::client::fluent_builders::DeleteObject`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteObject { _private: (), } impl DeleteObject { /// Creates a new builder-style object to manufacture [`DeleteObjectInput`](crate::input::DeleteObjectInput) pub fn builder() -> crate::input::delete_object_input::Builder { crate::input::delete_object_input::Builder::default() } /// Creates a new `DeleteObject` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteObject { type Output = std::result::Result<crate::output::DeleteObjectOutput, crate::error::DeleteObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_object_error(response) } else { crate::operation_deser::parse_delete_object_response(response) } } } /// Operation shape for `DeleteSchema`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_schema`](crate::client::Client::delete_schema). /// /// See [`crate::client::fluent_builders::DeleteSchema`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteSchema { _private: (), } impl DeleteSchema { /// Creates a new builder-style object to manufacture [`DeleteSchemaInput`](crate::input::DeleteSchemaInput) pub fn builder() -> crate::input::delete_schema_input::Builder { crate::input::delete_schema_input::Builder::default() } /// Creates a new `DeleteSchema` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteSchema { type Output = std::result::Result<crate::output::DeleteSchemaOutput, crate::error::DeleteSchemaError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_schema_error(response) } else { crate::operation_deser::parse_delete_schema_response(response) } } } /// Operation shape for `DeleteTypedLinkFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`delete_typed_link_facet`](crate::client::Client::delete_typed_link_facet). /// /// See [`crate::client::fluent_builders::DeleteTypedLinkFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DeleteTypedLinkFacet { _private: (), } impl DeleteTypedLinkFacet { /// Creates a new builder-style object to manufacture [`DeleteTypedLinkFacetInput`](crate::input::DeleteTypedLinkFacetInput) pub fn builder() -> crate::input::delete_typed_link_facet_input::Builder { crate::input::delete_typed_link_facet_input::Builder::default() } /// Creates a new `DeleteTypedLinkFacet` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DeleteTypedLinkFacet { type Output = std::result::Result< crate::output::DeleteTypedLinkFacetOutput, crate::error::DeleteTypedLinkFacetError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_delete_typed_link_facet_error(response) } else { crate::operation_deser::parse_delete_typed_link_facet_response(response) } } } /// Operation shape for `DetachFromIndex`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`detach_from_index`](crate::client::Client::detach_from_index). /// /// See [`crate::client::fluent_builders::DetachFromIndex`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DetachFromIndex { _private: (), } impl DetachFromIndex { /// Creates a new builder-style object to manufacture [`DetachFromIndexInput`](crate::input::DetachFromIndexInput) pub fn builder() -> crate::input::detach_from_index_input::Builder { crate::input::detach_from_index_input::Builder::default() } /// Creates a new `DetachFromIndex` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DetachFromIndex { type Output = std::result::Result< crate::output::DetachFromIndexOutput, crate::error::DetachFromIndexError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_detach_from_index_error(response) } else { crate::operation_deser::parse_detach_from_index_response(response) } } } /// Operation shape for `DetachObject`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`detach_object`](crate::client::Client::detach_object). /// /// See [`crate::client::fluent_builders::DetachObject`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DetachObject { _private: (), } impl DetachObject { /// Creates a new builder-style object to manufacture [`DetachObjectInput`](crate::input::DetachObjectInput) pub fn builder() -> crate::input::detach_object_input::Builder { crate::input::detach_object_input::Builder::default() } /// Creates a new `DetachObject` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DetachObject { type Output = std::result::Result<crate::output::DetachObjectOutput, crate::error::DetachObjectError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_detach_object_error(response) } else { crate::operation_deser::parse_detach_object_response(response) } } } /// Operation shape for `DetachPolicy`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`detach_policy`](crate::client::Client::detach_policy). /// /// See [`crate::client::fluent_builders::DetachPolicy`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DetachPolicy { _private: (), } impl DetachPolicy { /// Creates a new builder-style object to manufacture [`DetachPolicyInput`](crate::input::DetachPolicyInput) pub fn builder() -> crate::input::detach_policy_input::Builder { crate::input::detach_policy_input::Builder::default() } /// Creates a new `DetachPolicy` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DetachPolicy { type Output = std::result::Result<crate::output::DetachPolicyOutput, crate::error::DetachPolicyError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_detach_policy_error(response) } else { crate::operation_deser::parse_detach_policy_response(response) } } } /// Operation shape for `DetachTypedLink`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`detach_typed_link`](crate::client::Client::detach_typed_link). 
/// /// See [`crate::client::fluent_builders::DetachTypedLink`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DetachTypedLink { _private: (), } impl DetachTypedLink { /// Creates a new builder-style object to manufacture [`DetachTypedLinkInput`](crate::input::DetachTypedLinkInput) pub fn builder() -> crate::input::detach_typed_link_input::Builder { crate::input::detach_typed_link_input::Builder::default() } /// Creates a new `DetachTypedLink` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DetachTypedLink { type Output = std::result::Result< crate::output::DetachTypedLinkOutput, crate::error::DetachTypedLinkError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_detach_typed_link_error(response) } else { crate::operation_deser::parse_detach_typed_link_response(response) } } } /// Operation shape for `DisableDirectory`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`disable_directory`](crate::client::Client::disable_directory). /// /// See [`crate::client::fluent_builders::DisableDirectory`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct DisableDirectory { _private: (), } impl DisableDirectory { /// Creates a new builder-style object to manufacture [`DisableDirectoryInput`](crate::input::DisableDirectoryInput) pub fn builder() -> crate::input::disable_directory_input::Builder { crate::input::disable_directory_input::Builder::default() } /// Creates a new `DisableDirectory` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for DisableDirectory { type Output = std::result::Result< crate::output::DisableDirectoryOutput, crate::error::DisableDirectoryError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_disable_directory_error(response) } else { crate::operation_deser::parse_disable_directory_response(response) } } } /// Operation shape for `EnableDirectory`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`enable_directory`](crate::client::Client::enable_directory). /// /// See [`crate::client::fluent_builders::EnableDirectory`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct EnableDirectory { _private: (), } impl EnableDirectory { /// Creates a new builder-style object to manufacture [`EnableDirectoryInput`](crate::input::EnableDirectoryInput) pub fn builder() -> crate::input::enable_directory_input::Builder { crate::input::enable_directory_input::Builder::default() } /// Creates a new `EnableDirectory` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for EnableDirectory { type Output = std::result::Result< crate::output::EnableDirectoryOutput, crate::error::EnableDirectoryError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_enable_directory_error(response) } else { crate::operation_deser::parse_enable_directory_response(response) } } } /// Operation shape for `GetAppliedSchemaVersion`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`get_applied_schema_version`](crate::client::Client::get_applied_schema_version). /// /// See [`crate::client::fluent_builders::GetAppliedSchemaVersion`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetAppliedSchemaVersion { _private: (), } impl GetAppliedSchemaVersion { /// Creates a new builder-style object to manufacture [`GetAppliedSchemaVersionInput`](crate::input::GetAppliedSchemaVersionInput) pub fn builder() -> crate::input::get_applied_schema_version_input::Builder { crate::input::get_applied_schema_version_input::Builder::default() } /// Creates a new `GetAppliedSchemaVersion` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetAppliedSchemaVersion { type Output = std::result::Result< crate::output::GetAppliedSchemaVersionOutput, crate::error::GetAppliedSchemaVersionError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_applied_schema_version_error(response) } else { crate::operation_deser::parse_get_applied_schema_version_response(response) } } } /// Operation shape for `GetDirectory`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_directory`](crate::client::Client::get_directory). /// /// See [`crate::client::fluent_builders::GetDirectory`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetDirectory { _private: (), } impl GetDirectory { /// Creates a new builder-style object to manufacture [`GetDirectoryInput`](crate::input::GetDirectoryInput) pub fn builder() -> crate::input::get_directory_input::Builder { crate::input::get_directory_input::Builder::default() } /// Creates a new `GetDirectory` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetDirectory { type Output = std::result::Result<crate::output::GetDirectoryOutput, crate::error::GetDirectoryError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_directory_error(response) } else { crate::operation_deser::parse_get_directory_response(response) } } } /// Operation shape for `GetFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_facet`](crate::client::Client::get_facet). /// /// See [`crate::client::fluent_builders::GetFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetFacet { _private: (), } impl GetFacet { /// Creates a new builder-style object to manufacture [`GetFacetInput`](crate::input::GetFacetInput) pub fn builder() -> crate::input::get_facet_input::Builder { crate::input::get_facet_input::Builder::default() } /// Creates a new `GetFacet` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetFacet { type Output = std::result::Result<crate::output::GetFacetOutput, crate::error::GetFacetError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_facet_error(response) } else { crate::operation_deser::parse_get_facet_response(response) } } } /// Operation shape for `GetLinkAttributes`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_link_attributes`](crate::client::Client::get_link_attributes). /// /// See [`crate::client::fluent_builders::GetLinkAttributes`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetLinkAttributes { _private: (), } impl GetLinkAttributes { /// Creates a new builder-style object to manufacture [`GetLinkAttributesInput`](crate::input::GetLinkAttributesInput) pub fn builder() -> crate::input::get_link_attributes_input::Builder { crate::input::get_link_attributes_input::Builder::default() } /// Creates a new `GetLinkAttributes` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetLinkAttributes { type Output = std::result::Result< crate::output::GetLinkAttributesOutput, crate::error::GetLinkAttributesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_link_attributes_error(response) } else { crate::operation_deser::parse_get_link_attributes_response(response) } } } /// Operation shape for `GetObjectAttributes`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_object_attributes`](crate::client::Client::get_object_attributes). 
/// /// See [`crate::client::fluent_builders::GetObjectAttributes`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetObjectAttributes { _private: (), } impl GetObjectAttributes { /// Creates a new builder-style object to manufacture [`GetObjectAttributesInput`](crate::input::GetObjectAttributesInput) pub fn builder() -> crate::input::get_object_attributes_input::Builder { crate::input::get_object_attributes_input::Builder::default() } /// Creates a new `GetObjectAttributes` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetObjectAttributes { type Output = std::result::Result< crate::output::GetObjectAttributesOutput, crate::error::GetObjectAttributesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_object_attributes_error(response) } else { crate::operation_deser::parse_get_object_attributes_response(response) } } } /// Operation shape for `GetObjectInformation`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_object_information`](crate::client::Client::get_object_information). /// /// See [`crate::client::fluent_builders::GetObjectInformation`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetObjectInformation { _private: (), } impl GetObjectInformation { /// Creates a new builder-style object to manufacture [`GetObjectInformationInput`](crate::input::GetObjectInformationInput) pub fn builder() -> crate::input::get_object_information_input::Builder { crate::input::get_object_information_input::Builder::default() } /// Creates a new `GetObjectInformation` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetObjectInformation { type Output = std::result::Result< crate::output::GetObjectInformationOutput, crate::error::GetObjectInformationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_object_information_error(response) } else { crate::operation_deser::parse_get_object_information_response(response) } } } /// Operation shape for `GetSchemaAsJson`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`get_schema_as_json`](crate::client::Client::get_schema_as_json). /// /// See [`crate::client::fluent_builders::GetSchemaAsJson`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetSchemaAsJson { _private: (), } impl GetSchemaAsJson { /// Creates a new builder-style object to manufacture [`GetSchemaAsJsonInput`](crate::input::GetSchemaAsJsonInput) pub fn builder() -> crate::input::get_schema_as_json_input::Builder { crate::input::get_schema_as_json_input::Builder::default() } /// Creates a new `GetSchemaAsJson` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetSchemaAsJson { type Output = std::result::Result< crate::output::GetSchemaAsJsonOutput, crate::error::GetSchemaAsJsonError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_schema_as_json_error(response) } else { crate::operation_deser::parse_get_schema_as_json_response(response) } } } /// Operation shape for `GetTypedLinkFacetInformation`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`get_typed_link_facet_information`](crate::client::Client::get_typed_link_facet_information). /// /// See [`crate::client::fluent_builders::GetTypedLinkFacetInformation`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct GetTypedLinkFacetInformation { _private: (), } impl GetTypedLinkFacetInformation { /// Creates a new builder-style object to manufacture [`GetTypedLinkFacetInformationInput`](crate::input::GetTypedLinkFacetInformationInput) pub fn builder() -> crate::input::get_typed_link_facet_information_input::Builder { crate::input::get_typed_link_facet_information_input::Builder::default() } /// Creates a new `GetTypedLinkFacetInformation` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for GetTypedLinkFacetInformation { type Output = std::result::Result< crate::output::GetTypedLinkFacetInformationOutput, crate::error::GetTypedLinkFacetInformationError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_get_typed_link_facet_information_error(response) } else { crate::operation_deser::parse_get_typed_link_facet_information_response(response) } } } /// Operation shape for `ListAppliedSchemaArns`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_applied_schema_arns`](crate::client::Client::list_applied_schema_arns). /// /// See [`crate::client::fluent_builders::ListAppliedSchemaArns`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListAppliedSchemaArns { _private: (), } impl ListAppliedSchemaArns { /// Creates a new builder-style object to manufacture [`ListAppliedSchemaArnsInput`](crate::input::ListAppliedSchemaArnsInput) pub fn builder() -> crate::input::list_applied_schema_arns_input::Builder { crate::input::list_applied_schema_arns_input::Builder::default() } /// Creates a new `ListAppliedSchemaArns` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListAppliedSchemaArns { type Output = std::result::Result< crate::output::ListAppliedSchemaArnsOutput, crate::error::ListAppliedSchemaArnsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_applied_schema_arns_error(response) } else { crate::operation_deser::parse_list_applied_schema_arns_response(response) } } } /// Operation shape for `ListAttachedIndices`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_attached_indices`](crate::client::Client::list_attached_indices). /// /// See [`crate::client::fluent_builders::ListAttachedIndices`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListAttachedIndices { _private: (), } impl ListAttachedIndices { /// Creates a new builder-style object to manufacture [`ListAttachedIndicesInput`](crate::input::ListAttachedIndicesInput) pub fn builder() -> crate::input::list_attached_indices_input::Builder { crate::input::list_attached_indices_input::Builder::default() } /// Creates a new `ListAttachedIndices` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListAttachedIndices { type Output = std::result::Result< crate::output::ListAttachedIndicesOutput, crate::error::ListAttachedIndicesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_attached_indices_error(response) } else { crate::operation_deser::parse_list_attached_indices_response(response) } } } /// Operation shape for `ListDevelopmentSchemaArns`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_development_schema_arns`](crate::client::Client::list_development_schema_arns). /// /// See [`crate::client::fluent_builders::ListDevelopmentSchemaArns`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListDevelopmentSchemaArns { _private: (), } impl ListDevelopmentSchemaArns { /// Creates a new builder-style object to manufacture [`ListDevelopmentSchemaArnsInput`](crate::input::ListDevelopmentSchemaArnsInput) pub fn builder() -> crate::input::list_development_schema_arns_input::Builder { crate::input::list_development_schema_arns_input::Builder::default() } /// Creates a new `ListDevelopmentSchemaArns` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListDevelopmentSchemaArns { type Output = std::result::Result< crate::output::ListDevelopmentSchemaArnsOutput, crate::error::ListDevelopmentSchemaArnsError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_development_schema_arns_error(response) } else { crate::operation_deser::parse_list_development_schema_arns_response(response) } } } /// Operation shape for `ListDirectories`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`list_directories`](crate::client::Client::list_directories). /// /// See [`crate::client::fluent_builders::ListDirectories`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct ListDirectories { _private: (), } impl ListDirectories { /// Creates a new builder-style object to manufacture [`ListDirectoriesInput`](crate::input::ListDirectoriesInput) pub fn builder() -> crate::input::list_directories_input::Builder { crate::input::list_directories_input::Builder::default() } /// Creates a new `ListDirectories` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for ListDirectories { type Output = std::result::Result< crate::output::ListDirectoriesOutput, crate::error::ListDirectoriesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_list_directories_error(response) } else { crate::operation_deser::parse_list_directories_response(response) } } } /// Operation shape for `ListFacetAttributes`. 
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_facet_attributes`](crate::client::Client::list_facet_attributes).
///
/// See [`crate::client::fluent_builders::ListFacetAttributes`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListFacetAttributes {
    _private: (),
}
impl ListFacetAttributes {
    /// Creates a new builder-style object to manufacture [`ListFacetAttributesInput`](crate::input::ListFacetAttributesInput)
    pub fn builder() -> crate::input::list_facet_attributes_input::Builder {
        crate::input::list_facet_attributes_input::Builder::default()
    }
    /// Creates a new `ListFacetAttributes` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListFacetAttributes {
    type Output = std::result::Result<
        crate::output::ListFacetAttributesOutput,
        crate::error::ListFacetAttributesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_facet_attributes_error(response)
        } else {
            crate::operation_deser::parse_list_facet_attributes_response(response)
        }
    }
}

/// Operation shape for `ListFacetNames`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_facet_names`](crate::client::Client::list_facet_names).
///
/// See [`crate::client::fluent_builders::ListFacetNames`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListFacetNames {
    _private: (),
}
impl ListFacetNames {
    /// Creates a new builder-style object to manufacture [`ListFacetNamesInput`](crate::input::ListFacetNamesInput)
    pub fn builder() -> crate::input::list_facet_names_input::Builder {
        crate::input::list_facet_names_input::Builder::default()
    }
    /// Creates a new `ListFacetNames` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListFacetNames {
    type Output =
        std::result::Result<crate::output::ListFacetNamesOutput, crate::error::ListFacetNamesError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_facet_names_error(response)
        } else {
            crate::operation_deser::parse_list_facet_names_response(response)
        }
    }
}

/// Operation shape for `ListIncomingTypedLinks`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_incoming_typed_links`](crate::client::Client::list_incoming_typed_links).
///
/// See [`crate::client::fluent_builders::ListIncomingTypedLinks`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListIncomingTypedLinks {
    _private: (),
}
impl ListIncomingTypedLinks {
    /// Creates a new builder-style object to manufacture [`ListIncomingTypedLinksInput`](crate::input::ListIncomingTypedLinksInput)
    pub fn builder() -> crate::input::list_incoming_typed_links_input::Builder {
        crate::input::list_incoming_typed_links_input::Builder::default()
    }
    /// Creates a new `ListIncomingTypedLinks` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListIncomingTypedLinks {
    type Output = std::result::Result<
        crate::output::ListIncomingTypedLinksOutput,
        crate::error::ListIncomingTypedLinksError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_incoming_typed_links_error(response)
        } else {
            crate::operation_deser::parse_list_incoming_typed_links_response(response)
        }
    }
}

/// Operation shape for `ListIndex`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_index`](crate::client::Client::list_index).
///
/// See [`crate::client::fluent_builders::ListIndex`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListIndex {
    _private: (),
}
impl ListIndex {
    /// Creates a new builder-style object to manufacture [`ListIndexInput`](crate::input::ListIndexInput)
    pub fn builder() -> crate::input::list_index_input::Builder {
        crate::input::list_index_input::Builder::default()
    }
    /// Creates a new `ListIndex` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListIndex {
    type Output = std::result::Result<crate::output::ListIndexOutput, crate::error::ListIndexError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_index_error(response)
        } else {
            crate::operation_deser::parse_list_index_response(response)
        }
    }
}

/// Operation shape for `ListManagedSchemaArns`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_managed_schema_arns`](crate::client::Client::list_managed_schema_arns).
///
/// See [`crate::client::fluent_builders::ListManagedSchemaArns`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListManagedSchemaArns {
    _private: (),
}
impl ListManagedSchemaArns {
    /// Creates a new builder-style object to manufacture [`ListManagedSchemaArnsInput`](crate::input::ListManagedSchemaArnsInput)
    pub fn builder() -> crate::input::list_managed_schema_arns_input::Builder {
        crate::input::list_managed_schema_arns_input::Builder::default()
    }
    /// Creates a new `ListManagedSchemaArns` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListManagedSchemaArns {
    type Output = std::result::Result<
        crate::output::ListManagedSchemaArnsOutput,
        crate::error::ListManagedSchemaArnsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_managed_schema_arns_error(response)
        } else {
            crate::operation_deser::parse_list_managed_schema_arns_response(response)
        }
    }
}

/// Operation shape for `ListObjectAttributes`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_attributes`](crate::client::Client::list_object_attributes).
///
/// See [`crate::client::fluent_builders::ListObjectAttributes`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectAttributes {
    _private: (),
}
impl ListObjectAttributes {
    /// Creates a new builder-style object to manufacture [`ListObjectAttributesInput`](crate::input::ListObjectAttributesInput)
    pub fn builder() -> crate::input::list_object_attributes_input::Builder {
        crate::input::list_object_attributes_input::Builder::default()
    }
    /// Creates a new `ListObjectAttributes` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectAttributes {
    type Output = std::result::Result<
        crate::output::ListObjectAttributesOutput,
        crate::error::ListObjectAttributesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_attributes_error(response)
        } else {
            crate::operation_deser::parse_list_object_attributes_response(response)
        }
    }
}

/// Operation shape for `ListObjectChildren`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_children`](crate::client::Client::list_object_children).
///
/// See [`crate::client::fluent_builders::ListObjectChildren`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectChildren {
    _private: (),
}
impl ListObjectChildren {
    /// Creates a new builder-style object to manufacture [`ListObjectChildrenInput`](crate::input::ListObjectChildrenInput)
    pub fn builder() -> crate::input::list_object_children_input::Builder {
        crate::input::list_object_children_input::Builder::default()
    }
    /// Creates a new `ListObjectChildren` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectChildren {
    type Output = std::result::Result<
        crate::output::ListObjectChildrenOutput,
        crate::error::ListObjectChildrenError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_children_error(response)
        } else {
            crate::operation_deser::parse_list_object_children_response(response)
        }
    }
}

/// Operation shape for `ListObjectParentPaths`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_parent_paths`](crate::client::Client::list_object_parent_paths).
///
/// See [`crate::client::fluent_builders::ListObjectParentPaths`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectParentPaths {
    _private: (),
}
impl ListObjectParentPaths {
    /// Creates a new builder-style object to manufacture [`ListObjectParentPathsInput`](crate::input::ListObjectParentPathsInput)
    pub fn builder() -> crate::input::list_object_parent_paths_input::Builder {
        crate::input::list_object_parent_paths_input::Builder::default()
    }
    /// Creates a new `ListObjectParentPaths` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectParentPaths {
    type Output = std::result::Result<
        crate::output::ListObjectParentPathsOutput,
        crate::error::ListObjectParentPathsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_parent_paths_error(response)
        } else {
            crate::operation_deser::parse_list_object_parent_paths_response(response)
        }
    }
}

/// Operation shape for `ListObjectParents`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_parents`](crate::client::Client::list_object_parents).
///
/// See [`crate::client::fluent_builders::ListObjectParents`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectParents {
    _private: (),
}
impl ListObjectParents {
    /// Creates a new builder-style object to manufacture [`ListObjectParentsInput`](crate::input::ListObjectParentsInput)
    pub fn builder() -> crate::input::list_object_parents_input::Builder {
        crate::input::list_object_parents_input::Builder::default()
    }
    /// Creates a new `ListObjectParents` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectParents {
    type Output = std::result::Result<
        crate::output::ListObjectParentsOutput,
        crate::error::ListObjectParentsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_parents_error(response)
        } else {
            crate::operation_deser::parse_list_object_parents_response(response)
        }
    }
}

/// Operation shape for `ListObjectPolicies`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_object_policies`](crate::client::Client::list_object_policies).
///
/// See [`crate::client::fluent_builders::ListObjectPolicies`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListObjectPolicies {
    _private: (),
}
impl ListObjectPolicies {
    /// Creates a new builder-style object to manufacture [`ListObjectPoliciesInput`](crate::input::ListObjectPoliciesInput)
    pub fn builder() -> crate::input::list_object_policies_input::Builder {
        crate::input::list_object_policies_input::Builder::default()
    }
    /// Creates a new `ListObjectPolicies` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListObjectPolicies {
    type Output = std::result::Result<
        crate::output::ListObjectPoliciesOutput,
        crate::error::ListObjectPoliciesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_object_policies_error(response)
        } else {
            crate::operation_deser::parse_list_object_policies_response(response)
        }
    }
}

/// Operation shape for `ListOutgoingTypedLinks`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_outgoing_typed_links`](crate::client::Client::list_outgoing_typed_links).
///
/// See [`crate::client::fluent_builders::ListOutgoingTypedLinks`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListOutgoingTypedLinks {
    _private: (),
}
impl ListOutgoingTypedLinks {
    /// Creates a new builder-style object to manufacture [`ListOutgoingTypedLinksInput`](crate::input::ListOutgoingTypedLinksInput)
    pub fn builder() -> crate::input::list_outgoing_typed_links_input::Builder {
        crate::input::list_outgoing_typed_links_input::Builder::default()
    }
    /// Creates a new `ListOutgoingTypedLinks` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListOutgoingTypedLinks {
    type Output = std::result::Result<
        crate::output::ListOutgoingTypedLinksOutput,
        crate::error::ListOutgoingTypedLinksError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_outgoing_typed_links_error(response)
        } else {
            crate::operation_deser::parse_list_outgoing_typed_links_response(response)
        }
    }
}

/// Operation shape for `ListPolicyAttachments`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_policy_attachments`](crate::client::Client::list_policy_attachments).
///
/// See [`crate::client::fluent_builders::ListPolicyAttachments`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListPolicyAttachments {
    _private: (),
}
impl ListPolicyAttachments {
    /// Creates a new builder-style object to manufacture [`ListPolicyAttachmentsInput`](crate::input::ListPolicyAttachmentsInput)
    pub fn builder() -> crate::input::list_policy_attachments_input::Builder {
        crate::input::list_policy_attachments_input::Builder::default()
    }
    /// Creates a new `ListPolicyAttachments` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListPolicyAttachments {
    type Output = std::result::Result<
        crate::output::ListPolicyAttachmentsOutput,
        crate::error::ListPolicyAttachmentsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_policy_attachments_error(response)
        } else {
            crate::operation_deser::parse_list_policy_attachments_response(response)
        }
    }
}

/// Operation shape for `ListPublishedSchemaArns`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_published_schema_arns`](crate::client::Client::list_published_schema_arns).
///
/// See [`crate::client::fluent_builders::ListPublishedSchemaArns`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListPublishedSchemaArns {
    _private: (),
}
impl ListPublishedSchemaArns {
    /// Creates a new builder-style object to manufacture [`ListPublishedSchemaArnsInput`](crate::input::ListPublishedSchemaArnsInput)
    pub fn builder() -> crate::input::list_published_schema_arns_input::Builder {
        crate::input::list_published_schema_arns_input::Builder::default()
    }
    /// Creates a new `ListPublishedSchemaArns` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListPublishedSchemaArns {
    type Output = std::result::Result<
        crate::output::ListPublishedSchemaArnsOutput,
        crate::error::ListPublishedSchemaArnsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_published_schema_arns_error(response)
        } else {
            crate::operation_deser::parse_list_published_schema_arns_response(response)
        }
    }
}

/// Operation shape for `ListTagsForResource`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_tags_for_resource`](crate::client::Client::list_tags_for_resource).
///
/// See [`crate::client::fluent_builders::ListTagsForResource`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListTagsForResource {
    _private: (),
}
impl ListTagsForResource {
    /// Creates a new builder-style object to manufacture [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
    pub fn builder() -> crate::input::list_tags_for_resource_input::Builder {
        crate::input::list_tags_for_resource_input::Builder::default()
    }
    /// Creates a new `ListTagsForResource` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListTagsForResource {
    type Output = std::result::Result<
        crate::output::ListTagsForResourceOutput,
        crate::error::ListTagsForResourceError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_tags_for_resource_error(response)
        } else {
            crate::operation_deser::parse_list_tags_for_resource_response(response)
        }
    }
}

/// Operation shape for `ListTypedLinkFacetAttributes`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_typed_link_facet_attributes`](crate::client::Client::list_typed_link_facet_attributes).
///
/// See [`crate::client::fluent_builders::ListTypedLinkFacetAttributes`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListTypedLinkFacetAttributes {
    _private: (),
}
impl ListTypedLinkFacetAttributes {
    /// Creates a new builder-style object to manufacture [`ListTypedLinkFacetAttributesInput`](crate::input::ListTypedLinkFacetAttributesInput)
    pub fn builder() -> crate::input::list_typed_link_facet_attributes_input::Builder {
        crate::input::list_typed_link_facet_attributes_input::Builder::default()
    }
    /// Creates a new `ListTypedLinkFacetAttributes` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListTypedLinkFacetAttributes {
    type Output = std::result::Result<
        crate::output::ListTypedLinkFacetAttributesOutput,
        crate::error::ListTypedLinkFacetAttributesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_typed_link_facet_attributes_error(response)
        } else {
            crate::operation_deser::parse_list_typed_link_facet_attributes_response(response)
        }
    }
}

/// Operation shape for `ListTypedLinkFacetNames`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`list_typed_link_facet_names`](crate::client::Client::list_typed_link_facet_names).
///
/// See [`crate::client::fluent_builders::ListTypedLinkFacetNames`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListTypedLinkFacetNames {
    _private: (),
}
impl ListTypedLinkFacetNames {
    /// Creates a new builder-style object to manufacture [`ListTypedLinkFacetNamesInput`](crate::input::ListTypedLinkFacetNamesInput)
    pub fn builder() -> crate::input::list_typed_link_facet_names_input::Builder {
        crate::input::list_typed_link_facet_names_input::Builder::default()
    }
    /// Creates a new `ListTypedLinkFacetNames` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for ListTypedLinkFacetNames {
    type Output = std::result::Result<
        crate::output::ListTypedLinkFacetNamesOutput,
        crate::error::ListTypedLinkFacetNamesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_list_typed_link_facet_names_error(response)
        } else {
            crate::operation_deser::parse_list_typed_link_facet_names_response(response)
        }
    }
}

/// Operation shape for `LookupPolicy`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`lookup_policy`](crate::client::Client::lookup_policy).
///
/// See [`crate::client::fluent_builders::LookupPolicy`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct LookupPolicy {
    _private: (),
}
impl LookupPolicy {
    /// Creates a new builder-style object to manufacture [`LookupPolicyInput`](crate::input::LookupPolicyInput)
    pub fn builder() -> crate::input::lookup_policy_input::Builder {
        crate::input::lookup_policy_input::Builder::default()
    }
    /// Creates a new `LookupPolicy` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for LookupPolicy {
    type Output =
        std::result::Result<crate::output::LookupPolicyOutput, crate::error::LookupPolicyError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_lookup_policy_error(response)
        } else {
            crate::operation_deser::parse_lookup_policy_response(response)
        }
    }
}

/// Operation shape for `PublishSchema`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`publish_schema`](crate::client::Client::publish_schema).
///
/// See [`crate::client::fluent_builders::PublishSchema`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct PublishSchema {
    _private: (),
}
impl PublishSchema {
    /// Creates a new builder-style object to manufacture [`PublishSchemaInput`](crate::input::PublishSchemaInput)
    pub fn builder() -> crate::input::publish_schema_input::Builder {
        crate::input::publish_schema_input::Builder::default()
    }
    /// Creates a new `PublishSchema` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for PublishSchema {
    type Output =
        std::result::Result<crate::output::PublishSchemaOutput, crate::error::PublishSchemaError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_publish_schema_error(response)
        } else {
            crate::operation_deser::parse_publish_schema_response(response)
        }
    }
}

/// Operation shape for `PutSchemaFromJson`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`put_schema_from_json`](crate::client::Client::put_schema_from_json).
///
/// See [`crate::client::fluent_builders::PutSchemaFromJson`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct PutSchemaFromJson {
    _private: (),
}
impl PutSchemaFromJson {
    /// Creates a new builder-style object to manufacture [`PutSchemaFromJsonInput`](crate::input::PutSchemaFromJsonInput)
    pub fn builder() -> crate::input::put_schema_from_json_input::Builder {
        crate::input::put_schema_from_json_input::Builder::default()
    }
    /// Creates a new `PutSchemaFromJson` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for PutSchemaFromJson {
    type Output = std::result::Result<
        crate::output::PutSchemaFromJsonOutput,
        crate::error::PutSchemaFromJsonError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_put_schema_from_json_error(response)
        } else {
            crate::operation_deser::parse_put_schema_from_json_response(response)
        }
    }
}

/// Operation shape for `RemoveFacetFromObject`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`remove_facet_from_object`](crate::client::Client::remove_facet_from_object).
///
/// See [`crate::client::fluent_builders::RemoveFacetFromObject`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RemoveFacetFromObject {
    _private: (),
}
impl RemoveFacetFromObject {
    /// Creates a new builder-style object to manufacture [`RemoveFacetFromObjectInput`](crate::input::RemoveFacetFromObjectInput)
    pub fn builder() -> crate::input::remove_facet_from_object_input::Builder {
        crate::input::remove_facet_from_object_input::Builder::default()
    }
    /// Creates a new `RemoveFacetFromObject` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for RemoveFacetFromObject {
    type Output = std::result::Result<
        crate::output::RemoveFacetFromObjectOutput,
        crate::error::RemoveFacetFromObjectError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_remove_facet_from_object_error(response)
        } else {
            crate::operation_deser::parse_remove_facet_from_object_response(response)
        }
    }
}

/// Operation shape for `TagResource`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`tag_resource`](crate::client::Client::tag_resource).
///
/// See [`crate::client::fluent_builders::TagResource`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct TagResource {
    _private: (),
}
impl TagResource {
    /// Creates a new builder-style object to manufacture [`TagResourceInput`](crate::input::TagResourceInput)
    pub fn builder() -> crate::input::tag_resource_input::Builder {
        crate::input::tag_resource_input::Builder::default()
    }
    /// Creates a new `TagResource` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for TagResource {
    type Output =
        std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_tag_resource_error(response)
        } else {
            crate::operation_deser::parse_tag_resource_response(response)
        }
    }
}

/// Operation shape for `UntagResource`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`untag_resource`](crate::client::Client::untag_resource).
///
/// See [`crate::client::fluent_builders::UntagResource`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UntagResource {
    _private: (),
}
impl UntagResource {
    /// Creates a new builder-style object to manufacture [`UntagResourceInput`](crate::input::UntagResourceInput)
    pub fn builder() -> crate::input::untag_resource_input::Builder {
        crate::input::untag_resource_input::Builder::default()
    }
    /// Creates a new `UntagResource` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for UntagResource {
    type Output =
        std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_untag_resource_error(response)
        } else {
            crate::operation_deser::parse_untag_resource_response(response)
        }
    }
}

/// Operation shape for `UpdateFacet`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`update_facet`](crate::client::Client::update_facet).
///
/// See [`crate::client::fluent_builders::UpdateFacet`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateFacet {
    _private: (),
}
impl UpdateFacet {
    /// Creates a new builder-style object to manufacture [`UpdateFacetInput`](crate::input::UpdateFacetInput)
    pub fn builder() -> crate::input::update_facet_input::Builder {
        crate::input::update_facet_input::Builder::default()
    }
    /// Creates a new `UpdateFacet` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for UpdateFacet {
    type Output =
        std::result::Result<crate::output::UpdateFacetOutput, crate::error::UpdateFacetError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_update_facet_error(response)
        } else {
            crate::operation_deser::parse_update_facet_response(response)
        }
    }
}

/// Operation shape for `UpdateLinkAttributes`.
///
/// This is usually constructed for you using the fluent builder returned by
/// [`update_link_attributes`](crate::client::Client::update_link_attributes).
///
/// See [`crate::client::fluent_builders::UpdateLinkAttributes`] for more details about the operation.
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateLinkAttributes {
    _private: (),
}
impl UpdateLinkAttributes {
    /// Creates a new builder-style object to manufacture [`UpdateLinkAttributesInput`](crate::input::UpdateLinkAttributesInput)
    pub fn builder() -> crate::input::update_link_attributes_input::Builder {
        crate::input::update_link_attributes_input::Builder::default()
    }
    /// Creates a new `UpdateLinkAttributes` operation.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl aws_smithy_http::response::ParseStrictResponse for UpdateLinkAttributes {
    type Output = std::result::Result<
        crate::output::UpdateLinkAttributesOutput,
        crate::error::UpdateLinkAttributesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        if !response.status().is_success() && response.status().as_u16() != 200 {
            crate::operation_deser::parse_update_link_attributes_error(response)
        } else {
            crate::operation_deser::parse_update_link_attributes_response(response)
        }
    }
}

/// Operation shape for `UpdateObjectAttributes`.
/// /// This is usually constructed for you using the the fluent builder returned by /// [`update_object_attributes`](crate::client::Client::update_object_attributes). /// /// See [`crate::client::fluent_builders::UpdateObjectAttributes`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateObjectAttributes { _private: (), } impl UpdateObjectAttributes { /// Creates a new builder-style object to manufacture [`UpdateObjectAttributesInput`](crate::input::UpdateObjectAttributesInput) pub fn builder() -> crate::input::update_object_attributes_input::Builder { crate::input::update_object_attributes_input::Builder::default() } /// Creates a new `UpdateObjectAttributes` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateObjectAttributes { type Output = std::result::Result< crate::output::UpdateObjectAttributesOutput, crate::error::UpdateObjectAttributesError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_object_attributes_error(response) } else { crate::operation_deser::parse_update_object_attributes_response(response) } } } /// Operation shape for `UpdateSchema`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_schema`](crate::client::Client::update_schema). /// /// See [`crate::client::fluent_builders::UpdateSchema`] for more details about the operation. 
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateSchema { _private: (), } impl UpdateSchema { /// Creates a new builder-style object to manufacture [`UpdateSchemaInput`](crate::input::UpdateSchemaInput) pub fn builder() -> crate::input::update_schema_input::Builder { crate::input::update_schema_input::Builder::default() } /// Creates a new `UpdateSchema` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateSchema { type Output = std::result::Result<crate::output::UpdateSchemaOutput, crate::error::UpdateSchemaError>; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_schema_error(response) } else { crate::operation_deser::parse_update_schema_response(response) } } } /// Operation shape for `UpdateTypedLinkFacet`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`update_typed_link_facet`](crate::client::Client::update_typed_link_facet). /// /// See [`crate::client::fluent_builders::UpdateTypedLinkFacet`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpdateTypedLinkFacet { _private: (), } impl UpdateTypedLinkFacet { /// Creates a new builder-style object to manufacture [`UpdateTypedLinkFacetInput`](crate::input::UpdateTypedLinkFacetInput) pub fn builder() -> crate::input::update_typed_link_facet_input::Builder { crate::input::update_typed_link_facet_input::Builder::default() } /// Creates a new `UpdateTypedLinkFacet` operation. 
pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpdateTypedLinkFacet { type Output = std::result::Result< crate::output::UpdateTypedLinkFacetOutput, crate::error::UpdateTypedLinkFacetError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_update_typed_link_facet_error(response) } else { crate::operation_deser::parse_update_typed_link_facet_response(response) } } } /// Operation shape for `UpgradeAppliedSchema`. /// /// This is usually constructed for you using the the fluent builder returned by /// [`upgrade_applied_schema`](crate::client::Client::upgrade_applied_schema). /// /// See [`crate::client::fluent_builders::UpgradeAppliedSchema`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpgradeAppliedSchema { _private: (), } impl UpgradeAppliedSchema { /// Creates a new builder-style object to manufacture [`UpgradeAppliedSchemaInput`](crate::input::UpgradeAppliedSchemaInput) pub fn builder() -> crate::input::upgrade_applied_schema_input::Builder { crate::input::upgrade_applied_schema_input::Builder::default() } /// Creates a new `UpgradeAppliedSchema` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpgradeAppliedSchema { type Output = std::result::Result< crate::output::UpgradeAppliedSchemaOutput, crate::error::UpgradeAppliedSchemaError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_upgrade_applied_schema_error(response) } else { crate::operation_deser::parse_upgrade_applied_schema_response(response) } } } /// Operation shape for `UpgradePublishedSchema`. 
/// /// This is usually constructed for you using the the fluent builder returned by /// [`upgrade_published_schema`](crate::client::Client::upgrade_published_schema). /// /// See [`crate::client::fluent_builders::UpgradePublishedSchema`] for more details about the operation. #[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)] pub struct UpgradePublishedSchema { _private: (), } impl UpgradePublishedSchema { /// Creates a new builder-style object to manufacture [`UpgradePublishedSchemaInput`](crate::input::UpgradePublishedSchemaInput) pub fn builder() -> crate::input::upgrade_published_schema_input::Builder { crate::input::upgrade_published_schema_input::Builder::default() } /// Creates a new `UpgradePublishedSchema` operation. pub fn new() -> Self { Self { _private: () } } } impl aws_smithy_http::response::ParseStrictResponse for UpgradePublishedSchema { type Output = std::result::Result< crate::output::UpgradePublishedSchemaOutput, crate::error::UpgradePublishedSchemaError, >; fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output { if !response.status().is_success() && response.status().as_u16() != 200 { crate::operation_deser::parse_upgrade_published_schema_error(response) } else { crate::operation_deser::parse_upgrade_published_schema_response(response) } } }
42.142792
144
0.689771
f83d22e5c397b74668a99d4de38afde71d59d35b
1,840
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use aws_config::meta::region::RegionProviderChain; use qldb::model::PermissionsMode; use qldb::{Client, Error, Region, PKG_VERSION}; use structopt::StructOpt; #[derive(Debug, StructOpt)] struct Opt { /// The default AWS Region. #[structopt(short, long)] region: Option<String>, /// The name of the ledger. #[structopt(short, long)] ledger: String, /// Whether to display additional runtime information. #[structopt(short, long)] verbose: bool, } /// Creates an Amazon QLDB ledger. /// # Arguments /// /// * `-l LEDGER` - The name of the ledger. /// * `[-r REGION]` - The Region in which the client is created. /// If not supplied, uses the value of the **AWS_REGION** environment variable. /// If the environment variable is not set, defaults to **us-west-2**. /// * `[-v]` - Whether to display additional information. #[tokio::main] async fn main() -> Result<(), Error> { tracing_subscriber::fmt::init(); let Opt { region, ledger, verbose, } = Opt::from_args(); let region_provider = RegionProviderChain::first_try(region.map(Region::new)) .or_default_provider() .or_else(Region::new("us-west-2")); let shared_config = aws_config::from_env().region(region_provider).load().await; let client = Client::new(&shared_config); if verbose { println!("QLDB version: {}", PKG_VERSION); println!("Region: {:?}", shared_config.region().unwrap()); println!(); } let result = client .create_ledger() .name(ledger) .permissions_mode(PermissionsMode::AllowAll) .send() .await?; println!("ARN: {}", result.arn.unwrap()); Ok(()) }
27.462687
84
0.623913
2fc4532de41c32ca5f07617368fb478f59e9eb30
25,643
//! Oracles. //! //! Oracles take a test case and determine whether we have a bug. For example, //! one of the simplest oracles is to take a Wasm binary as our input test case, //! validate and instantiate it, and (implicitly) check that no assertions //! failed or segfaults happened. A more complicated oracle might compare the //! result of executing a Wasm file with and without optimizations enabled, and //! make sure that the two executions are observably identical. //! //! When an oracle finds a bug, it should report it to the fuzzing engine by //! panicking. pub mod dummy; use arbitrary::Arbitrary; use dummy::dummy_imports; use log::debug; use std::cell::Cell; use std::rc::Rc; use std::sync::atomic::{AtomicUsize, Ordering::SeqCst}; use std::sync::{Arc, Condvar, Mutex}; use std::time::{Duration, Instant}; use wasmtime::*; use wasmtime_wast::WastContext; static CNT: AtomicUsize = AtomicUsize::new(0); fn log_wasm(wasm: &[u8]) { if !log::log_enabled!(log::Level::Debug) { return; } let i = CNT.fetch_add(1, SeqCst); let name = format!("testcase{}.wasm", i); std::fs::write(&name, wasm).expect("failed to write wasm file"); log::debug!("wrote wasm file to `{}`", name); if let Ok(s) = wasmprinter::print_bytes(wasm) { let name = format!("testcase{}.wat", i); std::fs::write(&name, s).expect("failed to write wat file"); } } /// Methods of timing out execution of a WebAssembly module #[derive(Debug)] pub enum Timeout { /// No timeout is used, it should be guaranteed via some other means that /// the input does not infinite loop. None, /// A time-based timeout is used with a sleeping thread sending a signal /// after the specified duration. Time(Duration), /// Fuel-based timeouts are used where the specified fuel is all that the /// provided wasm module is allowed to consume. Fuel(u64), } /// Instantiate the Wasm buffer, and implicitly fail if we have an unexpected /// panic or segfault or anything else that can be detected "passively". 
/// /// Performs initial validation, and returns early if the Wasm is invalid. /// /// You can control which compiler is used via passing a `Strategy`. pub fn instantiate(wasm: &[u8], known_valid: bool, strategy: Strategy) { // Explicitly disable module linking for now since it's a breaking change to // pre-module-linking modules due to imports let mut cfg = crate::fuzz_default_config(strategy).unwrap(); cfg.wasm_module_linking(false); instantiate_with_config(wasm, known_valid, cfg, Timeout::None); } /// Instantiate the Wasm buffer, and implicitly fail if we have an unexpected /// panic or segfault or anything else that can be detected "passively". /// /// The engine will be configured using provided config. /// /// See also `instantiate` functions. pub fn instantiate_with_config( wasm: &[u8], known_valid: bool, mut config: Config, timeout: Timeout, ) { crate::init_fuzzing(); config.interruptable(match &timeout { Timeout::Time(_) => true, _ => false, }); config.consume_fuel(match &timeout { Timeout::Fuel(_) => true, _ => false, }); let engine = Engine::new(&config); let store = Store::new(&engine); let mut timeout_state = SignalOnDrop::default(); match timeout { Timeout::Fuel(fuel) => store.add_fuel(fuel), // If a timeout is requested then we spawn a helper thread to wait for // the requested time and then send us a signal to get interrupted. We // also arrange for the thread's sleep to get interrupted if we return // early (or the wasm returns within the time limit), which allows the // thread to get torn down. // // This prevents us from creating a huge number of sleeping threads if // this function is executed in a loop, like it does on nightly fuzzing // infrastructure. 
Timeout::Time(timeout) => { let handle = store.interrupt_handle().unwrap(); timeout_state.spawn_timeout(timeout, move || handle.interrupt()); } Timeout::None => {} } log_wasm(wasm); let module = match Module::new(&engine, wasm) { Ok(module) => module, Err(_) if !known_valid => return, Err(e) => panic!("failed to compile module: {:?}", e), }; let imports = dummy_imports(&store, module.imports()); match Instance::new(&store, &module, &imports) { Ok(_) => {} // Allow traps which can happen normally with `unreachable` or a timeout Err(e) if e.downcast_ref::<Trap>().is_some() => {} // Allow resource exhaustion since this is something that our wasm-smith // generator doesn't guarantee is forbidden. Err(e) if e.to_string().contains("resource limit exceeded") => {} // Also allow errors related to fuel consumption Err(e) if e.to_string().contains("all fuel consumed") => {} // Everything else should be a bug in the fuzzer Err(e) => panic!("failed to instantiate {}", e), } } /// Compile the Wasm buffer, and implicitly fail if we have an unexpected /// panic or segfault or anything else that can be detected "passively". /// /// Performs initial validation, and returns early if the Wasm is invalid. /// /// You can control which compiler is used via passing a `Strategy`. pub fn compile(wasm: &[u8], strategy: Strategy) { crate::init_fuzzing(); let engine = Engine::new(&crate::fuzz_default_config(strategy).unwrap()); log_wasm(wasm); let _ = Module::new(&engine, wasm); } /// Instantiate the given Wasm module with each `Config` and call all of its /// exports. Modulo OOM, non-canonical NaNs, and usage of Wasm features that are /// or aren't enabled for different configs, we should get the same results when /// we call the exported functions for all of our different configs. pub fn differential_execution( module: &wasm_smith::Module, configs: &[crate::generators::DifferentialConfig], ) { use std::collections::{HashMap, HashSet}; crate::init_fuzzing(); // We need at least two configs. 
if configs.len() < 2 // And all the configs should be unique. || configs.iter().collect::<HashSet<_>>().len() != configs.len() { return; } let configs: Vec<_> = match configs.iter().map(|c| c.to_wasmtime_config()).collect() { Ok(cs) => cs, // If the config is trying to use something that was turned off at // compile time, eg lightbeam, just continue to the next fuzz input. Err(_) => return, }; let mut export_func_results: HashMap<String, Result<Box<[Val]>, Trap>> = Default::default(); let wasm = module.to_bytes(); log_wasm(&wasm); for config in &configs { let engine = Engine::new(config); let store = Store::new(&engine); let module = Module::new(&engine, &wasm).unwrap(); // TODO: we should implement tracing versions of these dummy imports // that record a trace of the order that imported functions were called // in and with what values. Like the results of exported functions, // calls to imports should also yield the same values for each // configuration, and we should assert that. let imports = dummy_imports(&store, module.imports()); // Don't unwrap this: there can be instantiation-/link-time errors that // aren't caught during validation or compilation. For example, an imported // table might not have room for an element segment that we want to // initialize into it. let instance = match Instance::new(&store, &module, &imports) { Ok(instance) => instance, Err(e) => { eprintln!( "Warning: failed to instantiate `wasm-opt -ttf` module: {}", e ); continue; } }; for (name, f) in instance.exports().filter_map(|e| { let name = e.name(); e.into_func().map(|f| (name, f)) }) { // Always call the hang limit initializer first, so that we don't // infinite loop when calling another export. 
init_hang_limit(&instance); let ty = f.ty(); let params = dummy::dummy_values(ty.params()); let this_result = f.call(&params).map_err(|e| e.downcast::<Trap>().unwrap()); let existing_result = export_func_results .entry(name.to_string()) .or_insert_with(|| this_result.clone()); assert_same_export_func_result(&existing_result, &this_result, name); } } fn init_hang_limit(instance: &Instance) { match instance.get_export("hangLimitInitializer") { None => return, Some(Extern::Func(f)) => { f.call(&[]) .expect("initializing the hang limit should not fail"); } Some(_) => panic!("unexpected hangLimitInitializer export"), } } fn assert_same_export_func_result( lhs: &Result<Box<[Val]>, Trap>, rhs: &Result<Box<[Val]>, Trap>, func_name: &str, ) { let fail = || { panic!( "differential fuzzing failed: exported func {} returned two \ different results: {:?} != {:?}", func_name, lhs, rhs ) }; match (lhs, rhs) { (Err(_), Err(_)) => {} (Ok(lhs), Ok(rhs)) => { if lhs.len() != rhs.len() { fail(); } for (lhs, rhs) in lhs.iter().zip(rhs.iter()) { match (lhs, rhs) { (Val::I32(lhs), Val::I32(rhs)) if lhs == rhs => continue, (Val::I64(lhs), Val::I64(rhs)) if lhs == rhs => continue, (Val::V128(lhs), Val::V128(rhs)) if lhs == rhs => continue, (Val::F32(lhs), Val::F32(rhs)) if f32_equal(*lhs, *rhs) => continue, (Val::F64(lhs), Val::F64(rhs)) if f64_equal(*lhs, *rhs) => continue, (Val::ExternRef(_), Val::ExternRef(_)) | (Val::FuncRef(_), Val::FuncRef(_)) => continue, _ => fail(), } } } _ => fail(), } } } fn f32_equal(a: u32, b: u32) -> bool { let a = f32::from_bits(a); let b = f32::from_bits(b); a == b || (a.is_nan() && b.is_nan()) } fn f64_equal(a: u64, b: u64) -> bool { let a = f64::from_bits(a); let b = f64::from_bits(b); a == b || (a.is_nan() && b.is_nan()) } /// Invoke the given API calls. 
pub fn make_api_calls(api: crate::generators::api::ApiCalls) { use crate::generators::api::ApiCall; use std::collections::HashMap; crate::init_fuzzing(); let mut config: Option<Config> = None; let mut engine: Option<Engine> = None; let mut store: Option<Store> = None; let mut modules: HashMap<usize, Module> = Default::default(); let mut instances: HashMap<usize, Instance> = Default::default(); for call in api.calls { match call { ApiCall::ConfigNew => { log::trace!("creating config"); assert!(config.is_none()); config = Some(crate::fuzz_default_config(wasmtime::Strategy::Cranelift).unwrap()); } ApiCall::ConfigDebugInfo(b) => { log::trace!("enabling debuginfo"); config.as_mut().unwrap().debug_info(b); } ApiCall::ConfigInterruptable(b) => { log::trace!("enabling interruption"); config.as_mut().unwrap().interruptable(b); } ApiCall::EngineNew => { log::trace!("creating engine"); assert!(engine.is_none()); engine = Some(Engine::new(config.as_ref().unwrap())); } ApiCall::StoreNew => { log::trace!("creating store"); assert!(store.is_none()); store = Some(Store::new(engine.as_ref().unwrap())); } ApiCall::ModuleNew { id, wasm } => { log::debug!("creating module: {}", id); let wasm = wasm.to_bytes(); log_wasm(&wasm); let module = match Module::new(engine.as_ref().unwrap(), &wasm) { Ok(m) => m, Err(_) => continue, }; let old = modules.insert(id, module); assert!(old.is_none()); } ApiCall::ModuleDrop { id } => { log::trace!("dropping module: {}", id); drop(modules.remove(&id)); } ApiCall::InstanceNew { id, module } => { log::trace!("instantiating module {} as {}", module, id); let module = match modules.get(&module) { Some(m) => m, None => continue, }; let store = store.as_ref().unwrap(); let imports = dummy_imports(store, module.imports()); // Don't unwrap this: there can be instantiation-/link-time errors that // aren't caught during validation or compilation. For example, an imported // table might not have room for an element segment that we want to // initialize into it. 
if let Ok(instance) = Instance::new(store, &module, &imports) { instances.insert(id, instance); } } ApiCall::InstanceDrop { id } => { log::trace!("dropping instance {}", id); drop(instances.remove(&id)); } ApiCall::CallExportedFunc { instance, nth } => { log::trace!("calling instance export {} / {}", instance, nth); let instance = match instances.get(&instance) { Some(i) => i, None => { // Note that we aren't guaranteed to instantiate valid // modules, see comments in `InstanceNew` for details on // that. But the API call generator can't know if // instantiation failed, so we might not actually have // this instance. When that's the case, just skip the // API call and keep going. continue; } }; let funcs = instance .exports() .filter_map(|e| match e.into_extern() { Extern::Func(f) => Some(f.clone()), _ => None, }) .collect::<Vec<_>>(); if funcs.is_empty() { continue; } let nth = nth % funcs.len(); let f = &funcs[nth]; let ty = f.ty(); let params = dummy::dummy_values(ty.params()); let _ = f.call(&params); } } } } /// Executes the wast `test` spectest with the `config` specified. /// /// Ensures that spec tests pass regardless of the `Config`. pub fn spectest(fuzz_config: crate::generators::Config, test: crate::generators::SpecTest) { crate::init_fuzzing(); log::debug!("running {:?} with {:?}", test.file, fuzz_config); let mut config = fuzz_config.to_wasmtime(); config.wasm_reference_types(false); config.wasm_bulk_memory(false); let store = Store::new(&Engine::new(&config)); if fuzz_config.consume_fuel { store.add_fuel(u64::max_value()); } let mut wast_context = WastContext::new(store); wast_context.register_spectest().unwrap(); wast_context .run_buffer(test.file, test.contents.as_bytes()) .unwrap(); } /// Execute a series of `table.get` and `table.set` operations. 
pub fn table_ops( fuzz_config: crate::generators::Config, ops: crate::generators::table_ops::TableOps, ) { let _ = env_logger::try_init(); let num_dropped = Rc::new(Cell::new(0)); { let mut config = fuzz_config.to_wasmtime(); config.wasm_reference_types(true); let engine = Engine::new(&config); let store = Store::new(&engine); if fuzz_config.consume_fuel { store.add_fuel(u64::max_value()); } let wasm = ops.to_wasm_binary(); log_wasm(&wasm); let module = match Module::new(&engine, &wasm) { Ok(m) => m, Err(_) => return, }; // To avoid timeouts, limit the number of explicit GCs we perform per // test case. const MAX_GCS: usize = 5; let num_gcs = Cell::new(0); let gc = Func::wrap(&store, move |caller: Caller| { if num_gcs.get() < MAX_GCS { caller.store().gc(); num_gcs.set(num_gcs.get() + 1); } }); let instance = Instance::new(&store, &module, &[gc.into()]).unwrap(); let run = instance.get_func("run").unwrap(); let args: Vec<_> = (0..ops.num_params()) .map(|_| Val::ExternRef(Some(ExternRef::new(CountDrops(num_dropped.clone()))))) .collect(); let _ = run.call(&args); } assert_eq!(num_dropped.get(), ops.num_params()); return; struct CountDrops(Rc<Cell<u8>>); impl Drop for CountDrops { fn drop(&mut self) { self.0.set(self.0.get().checked_add(1).unwrap()); } } } /// Configuration options for wasm-smith such that generated modules always /// conform to certain specifications. 
#[derive(Default, Debug, Arbitrary, Clone)] pub struct DifferentialWasmiModuleConfig; impl wasm_smith::Config for DifferentialWasmiModuleConfig { fn allow_start_export(&self) -> bool { false } fn min_funcs(&self) -> usize { 1 } fn max_funcs(&self) -> usize { 1 } fn min_memories(&self) -> u32 { 1 } fn max_memories(&self) -> usize { 1 } fn max_imports(&self) -> usize { 0 } fn min_exports(&self) -> usize { 2 } fn max_memory_pages(&self) -> u32 { 1 } fn memory_max_size_required(&self) -> bool { true } } /// Perform differential execution between Cranelift and wasmi, diffing the /// resulting memory image when execution terminates. This relies on the /// module-under-test to be instrumented to bound the execution time. Invoke /// with a module generated by `wasm-smith` using the /// `DiferentialWasmiModuleConfig` configuration type for best results. /// /// May return `None` if we early-out due to a rejected fuzz config; these /// should be rare if modules are generated appropriately. pub fn differential_wasmi_execution(wasm: &[u8], config: &crate::generators::Config) -> Option<()> { crate::init_fuzzing(); // Instantiate wasmi module and instance. let wasmi_module = wasmi::Module::from_buffer(&wasm[..]).ok()?; let wasmi_instance = wasmi::ModuleInstance::new(&wasmi_module, &wasmi::ImportsBuilder::default()).ok()?; let wasmi_instance = wasmi_instance.assert_no_start(); // TODO(paritytech/wasmi#19): wasmi does not currently canonicalize NaNs. To avoid spurious // fuzz failures, for now let's fuzz only integer Wasm programs. if wasmi_module.deny_floating_point().is_err() { return None; } // Instantiate wasmtime module and instance. 
let mut wasmtime_config = config.to_wasmtime(); wasmtime_config.cranelift_nan_canonicalization(true); let wasmtime_engine = Engine::new(&wasmtime_config); let wasmtime_store = Store::new(&wasmtime_engine); if config.consume_fuel { wasmtime_store.add_fuel(u64::max_value()); } let wasmtime_module = Module::new(&wasmtime_engine, &wasm).expect("Wasmtime can compile module"); let wasmtime_instance = Instance::new(&wasmtime_store, &wasmtime_module, &[]) .expect("Wasmtime can instantiate module"); // Introspect wasmtime module to find name of an exported function and of an // exported memory. Stop when we have one of each. (According to the config // above, there should be at most one of each.) let (func_name, memory_name) = { let mut func_name = None; let mut memory_name = None; for e in wasmtime_module.exports() { match e.ty() { wasmtime::ExternType::Func(..) => func_name = Some(e.name().to_string()), wasmtime::ExternType::Memory(..) => memory_name = Some(e.name().to_string()), _ => {} } if func_name.is_some() && memory_name.is_some() { break; } } (func_name?, memory_name?) 
}; let wasmi_mem_export = wasmi_instance.export_by_name(&memory_name[..]).unwrap(); let wasmi_mem = wasmi_mem_export.as_memory().unwrap(); let wasmi_main_export = wasmi_instance.export_by_name(&func_name[..]).unwrap(); let wasmi_main = wasmi_main_export.as_func().unwrap(); let wasmi_val = wasmi::FuncInstance::invoke(&wasmi_main, &[], &mut wasmi::NopExternals); let wasmtime_mem = wasmtime_instance .get_memory(&memory_name[..]) .expect("memory export is present"); let wasmtime_main = wasmtime_instance .get_func(&func_name[..]) .expect("function export is present"); let wasmtime_vals = wasmtime_main.call(&[]); let wasmtime_val = wasmtime_vals.map(|v| v.iter().next().cloned()); debug!( "Successful execution: wasmi returned {:?}, wasmtime returned {:?}", wasmi_val, wasmtime_val ); let show_wat = || { if let Ok(s) = wasmprinter::print_bytes(&wasm[..]) { eprintln!("wat:\n{}\n", s); } }; match (&wasmi_val, &wasmtime_val) { (&Ok(Some(wasmi::RuntimeValue::I32(a))), &Ok(Some(Val::I32(b)))) if a == b => {} (&Ok(Some(wasmi::RuntimeValue::F32(a))), &Ok(Some(Val::F32(b)))) if f32_equal(a.to_bits(), b) => {} (&Ok(Some(wasmi::RuntimeValue::I64(a))), &Ok(Some(Val::I64(b)))) if a == b => {} (&Ok(Some(wasmi::RuntimeValue::F64(a))), &Ok(Some(Val::F64(b)))) if f64_equal(a.to_bits(), b) => {} (&Ok(None), &Ok(None)) => {} (&Err(_), &Err(_)) => {} _ => { show_wat(); panic!( "Values do not match: wasmi returned {:?}; wasmtime returned {:?}", wasmi_val, wasmtime_val ); } } if wasmi_mem.current_size().0 != wasmtime_mem.size() as usize { show_wat(); panic!("resulting memories are not the same size"); } // Wasmi memory may be stored non-contiguously; copy it out to a contiguous chunk. 
let mut wasmi_buf: Vec<u8> = vec![0; wasmtime_mem.data_size()]; wasmi_mem .get_into(0, &mut wasmi_buf[..]) .expect("can access wasmi memory"); let wasmtime_slice = unsafe { wasmtime_mem.data_unchecked() }; if wasmi_buf.len() >= 64 { debug!("-> First 64 bytes of wasmi heap: {:?}", &wasmi_buf[0..64]); debug!( "-> First 64 bytes of Wasmtime heap: {:?}", &wasmtime_slice[0..64] ); } if &wasmi_buf[..] != &wasmtime_slice[..] { show_wat(); panic!("memory contents are not equal"); } Some(()) } #[derive(Default)] struct SignalOnDrop { state: Arc<(Mutex<bool>, Condvar)>, thread: Option<std::thread::JoinHandle<()>>, } impl SignalOnDrop { fn spawn_timeout(&mut self, dur: Duration, closure: impl FnOnce() + Send + 'static) { let state = self.state.clone(); let start = Instant::now(); self.thread = Some(std::thread::spawn(move || { // Using our mutex/condvar we wait here for the first of `dur` to // pass or the `SignalOnDrop` instance to get dropped. let (lock, cvar) = &*state; let mut signaled = lock.lock().unwrap(); while !*signaled { // Adjust our requested `dur` based on how much time has passed. let dur = match dur.checked_sub(start.elapsed()) { Some(dur) => dur, None => break, }; let (lock, result) = cvar.wait_timeout(signaled, dur).unwrap(); signaled = lock; // If we timed out for sure then there's no need to continue // since we'll just abort on the next `checked_sub` anyway. if result.timed_out() { break; } } drop(signaled); closure(); })); } } impl Drop for SignalOnDrop { fn drop(&mut self) { if let Some(thread) = self.thread.take() { let (lock, cvar) = &*self.state; // Signal our thread that we've been dropped and wake it up if it's // blocked. let mut g = lock.lock().unwrap(); *g = true; cvar.notify_one(); drop(g); // ... and then wait for the thread to exit to ensure we clean up // after ourselves. thread.join().unwrap(); } } }
36.015449
100
0.565768
0e883d6adfcbd701acc4936494cc829cc582139b
7,652
use std::ffi::CStr; use crate::extn::core::numeric::{self, Coercion, Outcome}; use crate::extn::prelude::*; const FLOAT_CSTR: &CStr = cstr::cstr!("Float"); pub fn init(interp: &mut Artichoke) -> InitializeResult<()> { if interp.is_class_defined::<Float>() { return Ok(()); } let spec = class::Spec::new("Float", FLOAT_CSTR, None, None)?; interp.def_class::<Float>(spec)?; let _ = interp.eval(&include_bytes!("float.rb")[..])?; let dig = interp.convert(Float::DIG); interp.define_class_constant::<Float>("DIG", dig)?; let epsilon = interp.convert_mut(Float::EPSILON); interp.define_class_constant::<Float>("EPSILON", epsilon)?; let infinity = interp.convert_mut(Float::INFINITY); interp.define_class_constant::<Float>("INFINITY", infinity)?; let mant_dig = interp.convert(Float::MANT_DIG); interp.define_class_constant::<Float>("MANT_DIG", mant_dig)?; let max = interp.convert_mut(Float::MAX); interp.define_class_constant::<Float>("MAX", max)?; let max_10_exp = interp.convert(Float::MAX_10_EXP); interp.define_class_constant::<Float>("MAX_10_EXP", max_10_exp)?; let max_exp = interp.convert(Float::MAX_EXP); interp.define_class_constant::<Float>("MAX_EXP", max_exp)?; let min = interp.convert_mut(Float::MIN); interp.define_class_constant::<Float>("MIN", min)?; let min_10_exp = interp.convert(Float::MIN_10_EXP); interp.define_class_constant::<Float>("MIN_10_EXP", min_10_exp)?; let min_exp = interp.convert(Float::MIN_EXP); interp.define_class_constant::<Float>("MIN_EXP", min_exp)?; let nan = interp.convert_mut(Float::NAN); interp.define_class_constant::<Float>("NAN", nan)?; let radix = interp.convert(Float::RADIX); interp.define_class_constant::<Float>("RADIX", radix)?; let rounds = interp.convert(Float::ROUNDS); interp.define_class_constant::<Float>("ROUNDS", rounds)?; trace!("Patched Float onto interpreter"); Ok(()) } #[derive(Default, Debug, Clone, Copy, PartialEq, PartialOrd)] pub struct Float(Fp); impl ConvertMut<Float, Value> for Artichoke { #[inline] fn convert_mut(&mut self, 
from: Float) -> Value { self.convert_mut(from.0) } } impl TryConvert<Value, Float> for Artichoke { type Error = Error; #[inline] fn try_convert(&self, value: Value) -> Result<Float, Self::Error> { let num = self.try_convert(value)?; Ok(Float(num)) } } impl From<Fp> for Float { #[inline] fn from(flt: Fp) -> Self { Self(flt) } } impl From<Float> for Fp { #[inline] fn from(flt: Float) -> Self { flt.as_f64() } } impl From<Float> for Outcome { #[inline] fn from(flt: Float) -> Self { Self::Float(flt.into()) } } impl From<Fp> for Outcome { #[inline] fn from(flt: Fp) -> Self { Self::Float(flt) } } impl Float { /// The minimum number of significant decimal digits in a double-precision /// floating point. /// /// Usually defaults to 15. pub const DIG: Int = Fp::DIGITS as Int; /// The difference between 1 and the smallest double-precision floating /// point number greater than 1. /// /// Usually defaults to 2.2204460492503131e-16. pub const EPSILON: Fp = Fp::EPSILON; /// An expression representing positive infinity. pub const INFINITY: Fp = Fp::INFINITY; /// The minimum number of significant decimal digits in a double-precision /// floating point. /// /// Usually defaults to 15. pub const MANT_DIG: Int = Fp::MANTISSA_DIGITS as Int; /// The largest possible integer in a double-precision floating point /// number. /// /// Usually defaults to 1.7976931348623157e+308. pub const MAX: Fp = Fp::MAX; /// The largest positive exponent in a double-precision floating point where /// 10 raised to this power minus 1. /// /// Usually defaults to 308. pub const MAX_10_EXP: Int = Fp::MAX_10_EXP as Int; /// The largest possible exponent value in a double-precision floating /// point. /// /// Usually defaults to 1024. pub const MAX_EXP: Int = Fp::MAX_EXP as Int; /// The smallest positive normalized number in a double-precision floating /// point. /// /// Usually defaults to 2.2250738585072014e-308. 
/// /// If the platform supports denormalized numbers, there are numbers between /// zero and [`Float::MIN`]. `0.0.next_float` returns the smallest positive /// floating point number including denormalized numbers. pub const MIN: Fp = Fp::MIN; /// The smallest negative exponent in a double-precision floating point /// where 10 raised to this power minus 1. /// /// Usually defaults to -307. pub const MIN_10_EXP: Int = Fp::MIN_10_EXP as Int; /// The smallest possible exponent value in a double-precision floating /// point. /// /// Usually defaults to -1021. pub const MIN_EXP: Int = Fp::MIN_EXP as Int; /// An expression representing a value which is "not a number". pub const NAN: Fp = Fp::NAN; pub const NEG_INFINITY: Fp = Fp::NEG_INFINITY; /// The base of the floating point, or number of unique digits used to /// represent the number. /// /// Usually defaults to 2 on most systems, which would represent a base-10 /// decimal. pub const RADIX: Int = Fp::RADIX as Int; /// Represents the rounding mode for floating point addition. /// /// Usually defaults to 1, rounding to the nearest number. /// /// Other modes include: /// /// | mode | value | /// |------------------------------------|-------| /// | Indeterminable | -1 | /// | Rounding towards zero | 0 | /// | Rounding to the nearest number | 1 | /// | Rounding towards positive infinity | 2 | /// | Rounding towards negative infinity | 3 | /// /// # Rust Caveats /// /// Rust does not support setting the rounding mode and the behavior from /// LLVM is not documented. Because of this uncertainty, Artichoke sets its /// rounding mode to `-1`, Indeterminable. /// /// The Rust docs say [`f64::round`][round] rounds "half-way cases away from /// 0.0." Stack Overflow has a /// [discussion around float rounding semantics][stackoverflow] in Rust and /// LLVM. 
/// /// [stackoverflow]: https://stackoverflow.com/a/28122536 /// [round]: https://doc.rust-lang.org/1.42.0/std/primitive.f64.html#method.round pub const ROUNDS: Int = -1; /// Construct a new, zero, float. #[inline] #[must_use] pub const fn new() -> Self { Self(0.0) } /// Return the inner [`f64`]. #[inline] #[must_use] pub const fn as_f64(self) -> f64 { self.0 } /// Compute the remainder of self and other. /// /// Equivalent to `self.as_f64() % other.as_f64()`. #[inline] #[must_use] pub fn modulo(self, other: Self) -> Self { Self(self.0 % other.0) } #[inline] pub fn coerced_modulo(self, interp: &mut Artichoke, other: Value) -> Result<Outcome, Error> { if let Ruby::Float = other.ruby_type() { let other = other.try_into::<Float>(interp)?; return Ok(self.modulo(other).into()); } let x = interp.convert_mut(self); let coerced = numeric::coerce(interp, x, other)?; match coerced { Coercion::Float(x, y) => Ok((x % y).into()), Coercion::Integer(x, y) => Ok((x % y).into()), } } }
32.561702
97
0.614219
e927c05e0c9db9453d1af4ee44a279ae6d65b823
59,471
// This file is part of Substrate. // Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! # Balances Pallet //! //! The Balances pallet provides functionality for handling accounts and balances. //! //! - [`Config`] //! - [`Call`] //! - [`Pallet`] //! //! ## Overview //! //! The Balances pallet provides functions for: //! //! - Getting and setting free balances. //! - Retrieving total, reserved and unreserved balances. //! - Repatriating a reserved balance to a beneficiary account that exists. //! - Transferring a balance between accounts (when not reserved). //! - Slashing an account balance. //! - Account creation and removal. //! - Managing total issuance. //! - Setting and managing locks. //! //! ### Terminology //! //! - **Existential Deposit:** The minimum balance required to create or keep an account open. This prevents //! "dust accounts" from filling storage. When the free plus the reserved balance (i.e. the total balance) //! fall below this, then the account is said to be dead; and it loses its functionality as well as any //! prior history and all information on it is removed from the chain's state. //! No account should ever have a total balance that is strictly between 0 and the existential //! deposit (exclusive). If this ever happens, it indicates either a bug in this pallet or an //! erroneous raw mutation of storage. //! //! 
- **Total Issuance:** The total number of units in existence in a system. //! //! - **Reaping an account:** The act of removing an account by resetting its nonce. Happens after its //! total balance has become zero (or, strictly speaking, less than the Existential Deposit). //! //! - **Free Balance:** The portion of a balance that is not reserved. The free balance is the only //! balance that matters for most operations. //! //! - **Reserved Balance:** Reserved balance still belongs to the account holder, but is suspended. //! Reserved balance can still be slashed, but only after all the free balance has been slashed. //! //! - **Imbalance:** A condition when some funds were credited or debited without equal and opposite accounting //! (i.e. a difference between total issuance and account balances). Functions that result in an imbalance will //! return an object of the `Imbalance` trait that can be managed within your runtime logic. (If an imbalance is //! simply dropped, it should automatically maintain any book-keeping such as total issuance.) //! //! - **Lock:** A freeze on a specified amount of an account's free balance until a specified block number. Multiple //! locks always operate over the same funds, so they "overlay" rather than "stack". //! //! ### Implementations //! //! The Balances pallet provides implementations for the following traits. If these traits provide the functionality //! that you need, then you can avoid coupling with the Balances pallet. //! //! - [`Currency`](frame_support::traits::Currency): Functions for dealing with a //! fungible assets system. //! - [`ReservableCurrency`](frame_support::traits::ReservableCurrency): //! Functions for dealing with assets that can be reserved from an account. //! - [`LockableCurrency`](frame_support::traits::LockableCurrency): Functions for //! dealing with accounts that allow liquidity restrictions. //! - [`Imbalance`](frame_support::traits::Imbalance): Functions for handling //! 
imbalances between total issuance in the system and account balances. Must be used when a function //! creates new funds (e.g. a reward) or destroys some funds (e.g. a system fee). //! //! ## Interface //! //! ### Dispatchable Functions //! //! - `transfer` - Transfer some liquid free balance to another account. //! - `set_balance` - Set the balances of a given account. The origin of this call must be root. //! //! ## Usage //! //! The following examples show how to use the Balances pallet in your custom pallet. //! //! ### Examples from the FRAME //! //! The Contract pallet uses the `Currency` trait to handle gas payment, and its types inherit from `Currency`: //! //! ``` //! use frame_support::traits::Currency; //! # pub trait Config: frame_system::Config { //! # type Currency: Currency<Self::AccountId>; //! # } //! //! pub type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance; //! pub type NegativeImbalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::NegativeImbalance; //! //! # fn main() {} //! ``` //! //! The Staking pallet uses the `LockableCurrency` trait to lock a stash account's funds: //! //! ``` //! use frame_support::traits::{WithdrawReasons, LockableCurrency}; //! use sp_runtime::traits::Bounded; //! pub trait Config: frame_system::Config { //! type Currency: LockableCurrency<Self::AccountId, Moment=Self::BlockNumber>; //! } //! # struct StakingLedger<T: Config> { //! # stash: <T as frame_system::Config>::AccountId, //! # total: <<T as Config>::Currency as frame_support::traits::Currency<<T as frame_system::Config>::AccountId>>::Balance, //! # phantom: std::marker::PhantomData<T>, //! # } //! # const STAKING_ID: [u8; 8] = *b"staking "; //! //! fn update_ledger<T: Config>( //! controller: &T::AccountId, //! ledger: &StakingLedger<T> //! ) { //! T::Currency::set_lock( //! STAKING_ID, //! &ledger.stash, //! ledger.total, //! WithdrawReasons::all() //! ); //! 
// <Ledger<T>>::insert(controller, ledger); // Commented out as we don't have access to Staking's storage here. //! } //! # fn main() {} //! ``` //! //! ## Genesis config //! //! The Balances pallet depends on the [`GenesisConfig`]. //! //! ## Assumptions //! //! * Total issued balanced of all accounts should be less than `Config::Balance::max_value()`. #![cfg_attr(not(feature = "std"), no_std)] #[macro_use] mod tests; mod benchmarking; mod tests_composite; mod tests_local; pub mod weights; pub use self::imbalances::{NegativeImbalance, PositiveImbalance}; use codec::{Codec, Decode, Encode}; #[cfg(feature = "std")] use frame_support::traits::GenesisBuild; use frame_support::{ ensure, traits::{ BalanceStatus as Status, Currency, ExistenceRequirement, ExistenceRequirement::AllowDeath, ExistenceRequirement::KeepAlive, Get, Imbalance, LockIdentifier, LockableCurrency, OnUnbalanced, ReservableCurrency, SignedImbalance, StoredMap, TryDrop, WithdrawReasons, }, }; use frame_system as system; use sp_runtime::{ traits::{ AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, Saturating, StaticLookup, StoredMapError, Zero, }, DispatchError, DispatchResult, RuntimeDebug, }; use sp_std::prelude::*; use sp_std::{cmp, fmt::Debug, mem, ops::BitOr, result}; pub use weights::WeightInfo; pub use pallet::*; pub trait MutableCurrency<AccountId>: Currency<AccountId> { fn mutate_account_balance<R>( who: &AccountId, f: impl FnOnce(&mut AccountData<Self::Balance>) -> R, ) -> Result<R, StoredMapError>; } impl<T: Config<I>, I: 'static> MutableCurrency<T::AccountId> for Pallet<T, I> where T::Balance: MaybeSerializeDeserialize + Debug, { fn mutate_account_balance<R>( who: &T::AccountId, f: impl FnOnce(&mut AccountData<T::Balance>) -> R, ) -> Result<R, StoredMapError> { Self::mutate_account(who, f) } } #[frame_support::pallet] pub mod pallet { use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config<I: 
'static = ()>: frame_system::Config { /// The balance of an account. type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + MaybeSerializeDeserialize + Debug; /// Handler for the unbalanced reduction when removing a dust account. type DustRemoval: OnUnbalanced<NegativeImbalance<Self, I>>; /// The overarching event type. type Event: From<Event<Self, I>> + IsType<<Self as frame_system::Config>::Event>; /// The minimum amount required to keep an account open. #[pallet::constant] type ExistentialDeposit: Get<Self::Balance>; /// The means of storing the balances of an account. type AccountStore: StoredMap<Self::AccountId, AccountData<Self::Balance>>; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; /// The maximum number of locks that should exist on an account. /// Not strictly enforced, but used for weight estimation. type MaxLocks: Get<u32>; } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet<T, I = ()>(PhantomData<(T, I)>); #[pallet::hooks] impl<T: Config<I>, I: 'static> Hooks<BlockNumberFor<T>> for Pallet<T, I> {} #[pallet::call] impl<T: Config<I>, I: 'static> Pallet<T, I> { /// Transfer some liquid free balance to another account. /// /// `transfer` will set the `FreeBalance` of the sender and receiver. /// It will decrease the total issuance of the system by the `TransferFee`. /// If the sender's account is below the existential deposit as a result /// of the transfer, the account will be reaped. /// /// The dispatch origin for this call must be `Signed` by the transactor. /// /// # <weight> /// - Dependent on arguments but not critical, given proper implementations for /// input config types. See related functions below. /// - It contains a limited number of reads and writes internally and no complex computation. /// /// Related functions: /// /// - `ensure_can_withdraw` is always called internally but has a bounded complexity. 
/// - Transferring balances to accounts that did not exist before will cause /// `T::OnNewAccount::on_new_account` to be called. /// - Removing enough funds from an account will trigger `T::DustRemoval::on_unbalanced`. /// - `transfer_keep_alive` works the same way as `transfer`, but has an additional /// check that the transfer will not kill the origin account. /// --------------------------------- /// - Base Weight: 73.64 µs, worst case scenario (account created, account removed) /// - DB Weight: 1 Read and 1 Write to destination account /// - Origin account is already in memory, so no DB operations for them. /// # </weight> #[pallet::weight(T::WeightInfo::transfer())] pub fn transfer( origin: OriginFor<T>, dest: <T::Lookup as StaticLookup>::Source, #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { let transactor = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; <Self as Currency<_>>::transfer( &transactor, &dest, value, ExistenceRequirement::AllowDeath, )?; Ok(().into()) } /// Set the balances of a given account. /// /// This will alter `FreeBalance` and `ReservedBalance` in storage. it will /// also decrease the total issuance of the system (`TotalIssuance`). /// If the new free or reserved balance is below the existential deposit, /// it will reset the account nonce (`frame_system::AccountNonce`). /// /// The dispatch origin for this call is `root`. /// /// # <weight> /// - Independent of the arguments. /// - Contains a limited number of reads and writes. /// --------------------- /// - Base Weight: /// - Creating: 27.56 µs /// - Killing: 35.11 µs /// - DB Weight: 1 Read, 1 Write to `who` /// # </weight> #[pallet::weight( T::WeightInfo::set_balance_creating() // Creates a new account. .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. 
)] pub(super) fn set_balance( origin: OriginFor<T>, who: <T::Lookup as StaticLookup>::Source, #[pallet::compact] new_free: T::Balance, #[pallet::compact] new_reserved: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; let who = T::Lookup::lookup(who)?; let existential_deposit = T::ExistentialDeposit::get(); let wipeout = new_free + new_reserved < existential_deposit; let new_free = if wipeout { Zero::zero() } else { new_free }; let new_reserved = if wipeout { Zero::zero() } else { new_reserved }; let (free, reserved) = Self::mutate_account(&who, |account| { if new_free > account.free { mem::drop(PositiveImbalance::<T, I>::new(new_free - account.free)); } else if new_free < account.free { mem::drop(NegativeImbalance::<T, I>::new(account.free - new_free)); } if new_reserved > account.reserved { mem::drop(PositiveImbalance::<T, I>::new( new_reserved - account.reserved, )); } else if new_reserved < account.reserved { mem::drop(NegativeImbalance::<T, I>::new( account.reserved - new_reserved, )); } account.free = new_free; account.reserved = new_reserved; (account.free, account.reserved) })?; Self::deposit_event(Event::BalanceSet(who, free, reserved)); Ok(().into()) } /// Exactly as `transfer`, except the origin must be root and the source account may be /// specified. /// # <weight> /// - Same as transfer, but additional read and write because the source account is /// not assumed to be in the overlay. 
/// # </weight> #[pallet::weight(T::WeightInfo::force_transfer())] pub fn force_transfer( origin: OriginFor<T>, source: <T::Lookup as StaticLookup>::Source, dest: <T::Lookup as StaticLookup>::Source, #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; let source = T::Lookup::lookup(source)?; let dest = T::Lookup::lookup(dest)?; <Self as Currency<_>>::transfer( &source, &dest, value, ExistenceRequirement::AllowDeath, )?; Ok(().into()) } /// Same as the [`transfer`] call, but with a check that the transfer will not kill the /// origin account. /// /// 99% of the time you want [`transfer`] instead. /// /// [`transfer`]: struct.Pallet.html#method.transfer /// # <weight> /// - Cheaper than transfer because account cannot be killed. /// - Base Weight: 51.4 µs /// - DB Weight: 1 Read and 1 Write to dest (sender is in overlay already) /// #</weight> #[pallet::weight(T::WeightInfo::transfer_keep_alive())] pub fn transfer_keep_alive( origin: OriginFor<T>, dest: <T::Lookup as StaticLookup>::Source, #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { let transactor = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; <Self as Currency<_>>::transfer(&transactor, &dest, value, KeepAlive)?; Ok(().into()) } } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId", T::Balance = "Balance")] pub enum Event<T: Config<I>, I: 'static = ()> { /// An account was created with some free balance. \[account, free_balance\] Endowed(T::AccountId, T::Balance), /// An account was removed whose balance was non-zero but below ExistentialDeposit, /// resulting in an outright loss. \[account, balance\] DustLost(T::AccountId, T::Balance), /// Transfer succeeded. \[from, to, value\] Transfer(T::AccountId, T::AccountId, T::Balance), /// A balance was set by root. 
\[who, free, reserved\] BalanceSet(T::AccountId, T::Balance, T::Balance), /// Some amount was deposited (e.g. for transaction fees). \[who, deposit\] Deposit(T::AccountId, T::Balance), /// Some balance was reserved (moved from free to reserved). \[who, value\] Reserved(T::AccountId, T::Balance), /// Some balance was unreserved (moved from reserved to free). \[who, value\] Unreserved(T::AccountId, T::Balance), /// Some balance was moved from the reserve of the first account to the second account. /// Final argument indicates the destination balance type. /// \[from, to, balance, destination_status\] ReserveRepatriated(T::AccountId, T::AccountId, T::Balance, Status), } /// Old name generated by `decl_event`. #[deprecated(note = "use `Event` instead")] pub type RawEvent<T, I = ()> = Event<T, I>; #[pallet::error] pub enum Error<T, I = ()> { /// Vesting balance too high to send value VestingBalance, /// Account liquidity restrictions prevent withdrawal LiquidityRestrictions, /// Got an overflow after adding Overflow, /// Balance too low to send value InsufficientBalance, /// Value too low to create account due to existential deposit ExistentialDeposit, /// Transfer/payment would kill account KeepAlive, /// A vesting schedule already exists for this account ExistingVestingSchedule, /// Beneficiary account must pre-exist DeadAccount, } /// The total units issued in the system. #[pallet::storage] #[pallet::getter(fn total_issuance)] pub type TotalIssuance<T: Config<I>, I: 'static = ()> = StorageValue<_, T::Balance, ValueQuery>; /// The balance of an account. /// /// NOTE: This is only used in the case that this pallet is used to store balances. #[pallet::storage] pub type Account<T: Config<I>, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, AccountData<T::Balance>, ValueQuery>; /// Any liquidity locks on some account balances. /// NOTE: Should only be accessed when setting, changing and freeing a lock. 
#[pallet::storage] #[pallet::getter(fn locks)] pub type Locks<T: Config<I>, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, Vec<BalanceLock<T::Balance>>, ValueQuery>; /// Storage version of the pallet. /// /// This is set to v2.0.0 for new networks. #[pallet::storage] pub(super) type StorageVersion<T: Config<I>, I: 'static = ()> = StorageValue<_, Releases, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig<T: Config<I>, I: 'static = ()> { pub balances: Vec<(T::AccountId, T::Balance)>, } #[cfg(feature = "std")] impl<T: Config<I>, I: 'static> Default for GenesisConfig<T, I> { fn default() -> Self { Self { balances: Default::default(), } } } #[pallet::genesis_build] impl<T: Config<I>, I: 'static> GenesisBuild<T, I> for GenesisConfig<T, I> { fn build(&self) { let total = self .balances .iter() .fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); <TotalIssuance<T, I>>::put(total); <StorageVersion<T, I>>::put(Releases::V2_0_0); for (_, balance) in &self.balances { assert!( *balance >= <T as Config<I>>::ExistentialDeposit::get(), "the balance of any account should always be at least the existential deposit.", ) } // ensure no duplicates exist. let endowed_accounts = self .balances .iter() .map(|(x, _)| x) .cloned() .collect::<std::collections::BTreeSet<_>>(); assert!( endowed_accounts.len() == self.balances.len(), "duplicate balances in genesis." ); for &(ref who, free) in self.balances.iter() { assert!(T::AccountStore::insert( who, AccountData { free, ..Default::default() } ) .is_ok()); } } } } #[cfg(feature = "std")] impl<T: Config<I>, I: 'static> GenesisConfig<T, I> { /// Direct implementation of `GenesisBuild::build_storage`. /// /// Kept in order not to break dependency. pub fn build_storage(&self) -> Result<sp_runtime::Storage, String> { <Self as GenesisBuild<T, I>>::build_storage(self) } /// Direct implementation of `GenesisBuild::assimilate_storage`. /// /// Kept in order not to break dependency. 
pub fn assimilate_storage(&self, storage: &mut sp_runtime::Storage) -> Result<(), String> { <Self as GenesisBuild<T, I>>::assimilate_storage(self, storage) } } /// Simplified reasons for withdrawing balance. #[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] pub enum Reasons { /// Paying system transaction fees. Fee = 0, /// Any reason other than paying system transaction fees. Misc = 1, /// Any reason at all. All = 2, } impl From<WithdrawReasons> for Reasons { fn from(r: WithdrawReasons) -> Reasons { if r == WithdrawReasons::from(WithdrawReasons::TRANSACTION_PAYMENT) { Reasons::Fee } else if r.contains(WithdrawReasons::TRANSACTION_PAYMENT) { Reasons::All } else { Reasons::Misc } } } impl BitOr for Reasons { type Output = Reasons; fn bitor(self, other: Reasons) -> Reasons { if self == other { return self; } Reasons::All } } /// A single lock on a balance. There can be many of these on an account and they "overlap", so the /// same balance is frozen by multiple locks. #[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] pub struct BalanceLock<Balance> { /// An identifier for this lock. Only one lock may be in existence for each identifier. pub id: LockIdentifier, /// The amount which the free balance may not drop below when this lock is in effect. pub amount: Balance, /// If true, then the lock remains in effect even for payment of transaction fees. pub reasons: Reasons, } /// All balance information for an account. #[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug)] pub struct AccountData<Balance> { /// Non-reserved part of the balance. There may still be restrictions on this, but it is the /// total pool what may in principle be transferred, reserved and used for tipping. /// /// This is the only balance that matters in terms of most operations on tokens. It /// alone is used to determine the balance when in the contract execution environment. pub free: Balance, /// Balance which is reserved and may not be used at all. 
/// /// This can still get slashed, but gets slashed last of all. /// /// This balance is a 'reserve' balance that other subsystems use in order to set aside tokens /// that are still 'owned' by the account holder, but which are suspendable. pub reserved: Balance, /// The amount that `free` may not drop below when withdrawing for *anything except transaction /// fee payment*. pub misc_frozen: Balance, /// The amount that `free` may not drop below when withdrawing specifically for transaction /// fee payment. pub fee_frozen: Balance, } impl<Balance: Saturating + Copy + Ord> AccountData<Balance> { /// How much this account's balance can be reduced for the given `reasons`. fn usable(&self, reasons: Reasons) -> Balance { self.free.saturating_sub(self.frozen(reasons)) } /// The amount that this account's free balance may not be reduced beyond for the given /// `reasons`. fn frozen(&self, reasons: Reasons) -> Balance { match reasons { Reasons::All => self.misc_frozen.max(self.fee_frozen), Reasons::Misc => self.misc_frozen, Reasons::Fee => self.fee_frozen, } } /// The total balance in this account including any that is reserved and ignoring any frozen. fn total(&self) -> Balance { self.free.saturating_add(self.reserved) } } // A value placed in storage that represents the current version of the Balances storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. This should match directly with the semantic versions of the Rust crate. #[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] enum Releases { V1_0_0, V2_0_0, } impl Default for Releases { fn default() -> Self { Releases::V1_0_0 } } impl<T: Config<I>, I: 'static> Pallet<T, I> { /// Get the free balance of an account. 
pub fn free_balance(who: impl sp_std::borrow::Borrow<T::AccountId>) -> T::Balance { Self::account(who.borrow()).free } /// Get the balance of an account that can be used for transfers, reservations, or any other /// non-locking, non-transaction-fee activity. Will be at most `free_balance`. pub fn usable_balance(who: impl sp_std::borrow::Borrow<T::AccountId>) -> T::Balance { Self::account(who.borrow()).usable(Reasons::Misc) } /// Get the balance of an account that can be used for paying transaction fees (not tipping, /// or any other kind of fees, though). Will be at most `free_balance`. pub fn usable_balance_for_fees(who: impl sp_std::borrow::Borrow<T::AccountId>) -> T::Balance { Self::account(who.borrow()).usable(Reasons::Fee) } /// Get the reserved balance of an account. pub fn reserved_balance(who: impl sp_std::borrow::Borrow<T::AccountId>) -> T::Balance { Self::account(who.borrow()).reserved } /// Get both the free and reserved balances of an account. fn account(who: &T::AccountId) -> AccountData<T::Balance> { T::AccountStore::get(&who) } /// Places the `free` and `reserved` parts of `new` into `account`. Also does any steps needed /// after mutating an account. This includes DustRemoval unbalancing, in the case than the `new` /// account's total balance is non-zero but below ED. /// /// Returns the final free balance, iff the account was previously of total balance zero, known /// as its "endowment". fn post_mutation( who: &T::AccountId, new: AccountData<T::Balance>, ) -> Option<AccountData<T::Balance>> { let total = new.total(); if total < T::ExistentialDeposit::get() { if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); Self::deposit_event(Event::DustLost(who.clone(), total)); } None } else { Some(new) } } /// Mutate an account to some new value, or delete it entirely with `None`. Will enforce /// `ExistentialDeposit` law, annulling the account as needed. 
/// /// NOTE: Doesn't do any preparatory work for creating a new account, so should only be used /// when it is known that the account already exists. /// /// NOTE: LOW-LEVEL: This will not attempt to maintain total issuance. It is expected that /// the caller will do this. pub fn mutate_account<R>( who: &T::AccountId, f: impl FnOnce(&mut AccountData<T::Balance>) -> R, ) -> Result<R, StoredMapError> { Self::try_mutate_account(who, |a, _| -> Result<R, StoredMapError> { Ok(f(a)) }) } /// Mutate an account to some new value, or delete it entirely with `None`. Will enforce /// `ExistentialDeposit` law, annulling the account as needed. This will do nothing if the /// result of `f` is an `Err`. /// /// NOTE: Doesn't do any preparatory work for creating a new account, so should only be used /// when it is known that the account already exists. /// /// NOTE: LOW-LEVEL: This will not attempt to maintain total issuance. It is expected that /// the caller will do this. fn try_mutate_account<R, E: From<StoredMapError>>( who: &T::AccountId, f: impl FnOnce(&mut AccountData<T::Balance>, bool) -> Result<R, E>, ) -> Result<R, E> { T::AccountStore::try_mutate_exists(who, |maybe_account| { let is_new = maybe_account.is_none(); let mut account = maybe_account.take().unwrap_or_default(); f(&mut account, is_new).map(move |result| { let maybe_endowed = if is_new { Some(account.free) } else { None }; *maybe_account = Self::post_mutation(who, account); (maybe_endowed, result) }) }) .map(|(maybe_endowed, result)| { if let Some(endowed) = maybe_endowed { Self::deposit_event(Event::Endowed(who.clone(), endowed)); } result }) } /// Update the account entry for `who`, given the locks. fn update_locks(who: &T::AccountId, locks: &[BalanceLock<T::Balance>]) { if locks.len() as u32 > T::MaxLocks::get() { frame_support::debug::warn!( "Warning: A user has more currency locks than expected. \ A runtime configuration adjustment may be needed." 
); } // No way this can fail since we do not alter the existential balances. let _ = Self::mutate_account(who, |b| { b.misc_frozen = Zero::zero(); b.fee_frozen = Zero::zero(); for l in locks.iter() { if l.reasons == Reasons::All || l.reasons == Reasons::Misc { b.misc_frozen = b.misc_frozen.max(l.amount); } if l.reasons == Reasons::All || l.reasons == Reasons::Fee { b.fee_frozen = b.fee_frozen.max(l.amount); } } }); let existed = Locks::<T, I>::contains_key(who); if locks.is_empty() { Locks::<T, I>::remove(who); if existed { // TODO: use Locks::<T, I>::hashed_key // https://github.com/paritytech/substrate/issues/4969 system::Pallet::<T>::dec_consumers(who); } } else { Locks::<T, I>::insert(who, locks); if !existed { if system::Pallet::<T>::inc_consumers(who).is_err() { // No providers for the locks. This is impossible under normal circumstances // since the funds that are under the lock will themselves be stored in the // account and therefore will need a reference. frame_support::debug::warn!( "Warning: Attempt to introduce lock consumer reference, yet no providers. \ This is unexpected but should be safe." ); } } } } } // wrapping these imbalances in a private module is necessary to ensure absolute privacy // of the inner member. mod imbalances { use super::{result, Config, Imbalance, RuntimeDebug, Saturating, TryDrop, Zero}; use sp_std::mem; /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been created without any equal and opposite accounting. #[must_use] #[derive(RuntimeDebug, PartialEq, Eq)] pub struct PositiveImbalance<T: Config<I>, I: 'static>(T::Balance); impl<T: Config<I>, I: 'static> PositiveImbalance<T, I> { /// Create a new positive imbalance from a balance. pub fn new(amount: T::Balance) -> Self { PositiveImbalance(amount) } } /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been destroyed without any equal and opposite accounting. 
#[must_use] #[derive(RuntimeDebug, PartialEq, Eq)] pub struct NegativeImbalance<T: Config<I>, I: 'static>(T::Balance); impl<T: Config<I>, I: 'static> NegativeImbalance<T, I> { /// Create a new negative imbalance from a balance. pub fn new(amount: T::Balance) -> Self { NegativeImbalance(amount) } } impl<T: Config<I>, I: 'static> TryDrop for PositiveImbalance<T, I> { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } impl<T: Config<I>, I: 'static> Imbalance<T::Balance> for PositiveImbalance<T, I> { type Opposite = NegativeImbalance<T, I>; fn zero() -> Self { Self(Zero::zero()) } fn drop_zero(self) -> result::Result<(), Self> { if self.0.is_zero() { Ok(()) } else { Err(self) } } fn split(self, amount: T::Balance) -> (Self, Self) { let first = self.0.min(amount); let second = self.0 - first; mem::forget(self); (Self(first), Self(second)) } fn merge(mut self, other: Self) -> Self { self.0 = self.0.saturating_add(other.0); mem::forget(other); self } fn subsume(&mut self, other: Self) { self.0 = self.0.saturating_add(other.0); mem::forget(other); } fn offset(self, other: Self::Opposite) -> result::Result<Self, Self::Opposite> { let (a, b) = (self.0, other.0); mem::forget((self, other)); if a >= b { Ok(Self(a - b)) } else { Err(NegativeImbalance::new(b - a)) } } fn peek(&self) -> T::Balance { self.0.clone() } } impl<T: Config<I>, I: 'static> TryDrop for NegativeImbalance<T, I> { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } impl<T: Config<I>, I: 'static> Imbalance<T::Balance> for NegativeImbalance<T, I> { type Opposite = PositiveImbalance<T, I>; fn zero() -> Self { Self(Zero::zero()) } fn drop_zero(self) -> result::Result<(), Self> { if self.0.is_zero() { Ok(()) } else { Err(self) } } fn split(self, amount: T::Balance) -> (Self, Self) { let first = self.0.min(amount); let second = self.0 - first; mem::forget(self); (Self(first), Self(second)) } fn merge(mut self, other: Self) -> Self { self.0 = self.0.saturating_add(other.0); 
mem::forget(other); self } fn subsume(&mut self, other: Self) { self.0 = self.0.saturating_add(other.0); mem::forget(other); } fn offset(self, other: Self::Opposite) -> result::Result<Self, Self::Opposite> { let (a, b) = (self.0, other.0); mem::forget((self, other)); if a >= b { Ok(Self(a - b)) } else { Err(PositiveImbalance::new(b - a)) } } fn peek(&self) -> T::Balance { self.0.clone() } } impl<T: Config<I>, I: 'static> Drop for PositiveImbalance<T, I> { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { <super::TotalIssuance<T, I>>::mutate(|v| *v = v.saturating_add(self.0)); } } impl<T: Config<I>, I: 'static> Drop for NegativeImbalance<T, I> { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { <super::TotalIssuance<T, I>>::mutate(|v| *v = v.saturating_sub(self.0)); } } } impl<T: Config<I>, I: 'static> Currency<T::AccountId> for Pallet<T, I> where T::Balance: MaybeSerializeDeserialize + Debug, { type Balance = T::Balance; type PositiveImbalance = PositiveImbalance<T, I>; type NegativeImbalance = NegativeImbalance<T, I>; fn total_balance(who: &T::AccountId) -> Self::Balance { Self::account(who).total() } // Check if `value` amount of free balance can be slashed from `who`. fn can_slash(who: &T::AccountId, value: Self::Balance) -> bool { if value.is_zero() { return true; } Self::free_balance(who) >= value } fn total_issuance() -> Self::Balance { <TotalIssuance<T, I>>::get() } fn minimum_balance() -> Self::Balance { T::ExistentialDeposit::get() } // Burn funds from the total issuance, returning a positive imbalance for the amount burned. // Is a no-op if amount to be burned is zero. 
fn burn(mut amount: Self::Balance) -> Self::PositiveImbalance { if amount.is_zero() { return PositiveImbalance::zero(); } <TotalIssuance<T, I>>::mutate(|issued| { *issued = issued.checked_sub(&amount).unwrap_or_else(|| { amount = *issued; Zero::zero() }); }); PositiveImbalance::new(amount) } // Create new funds into the total issuance, returning a negative imbalance // for the amount issued. // Is a no-op if amount to be issued it zero. fn issue(mut amount: Self::Balance) -> Self::NegativeImbalance { if amount.is_zero() { return NegativeImbalance::zero(); } <TotalIssuance<T, I>>::mutate(|issued| { *issued = issued.checked_add(&amount).unwrap_or_else(|| { amount = Self::Balance::max_value() - *issued; Self::Balance::max_value() }) }); NegativeImbalance::new(amount) } fn free_balance(who: &T::AccountId) -> Self::Balance { Self::account(who).free } // Ensure that an account can withdraw from their free balance given any existing withdrawal // restrictions like locks and vesting balance. // Is a no-op if amount to be withdrawn is zero. // // # <weight> // Despite iterating over a list of locks, they are limited by the number of // lock IDs, which means the number of runtime pallets that intend to use and create locks. // # </weight> fn ensure_can_withdraw( who: &T::AccountId, amount: T::Balance, reasons: WithdrawReasons, new_balance: T::Balance, ) -> DispatchResult { if amount.is_zero() { return Ok(()); } let min_balance = Self::account(who).frozen(reasons.into()); ensure!( new_balance >= min_balance, Error::<T, I>::LiquidityRestrictions ); Ok(()) } // Transfer some free balance from `transactor` to `dest`, respecting existence requirements. // Is a no-op if value to be transferred is zero or the `transactor` is the same as `dest`. 
fn transfer( transactor: &T::AccountId, dest: &T::AccountId, value: Self::Balance, existence_requirement: ExistenceRequirement, ) -> DispatchResult { if value.is_zero() || transactor == dest { return Ok(()); } Self::try_mutate_account(dest, |to_account, _| -> DispatchResult { Self::try_mutate_account(transactor, |from_account, _| -> DispatchResult { from_account.free = from_account .free .checked_sub(&value) .ok_or(Error::<T, I>::InsufficientBalance)?; // NOTE: total stake being stored in the same type means that this could never overflow // but better to be safe than sorry. to_account.free = to_account .free .checked_add(&value) .ok_or(Error::<T, I>::Overflow)?; let ed = T::ExistentialDeposit::get(); ensure!(to_account.total() >= ed, Error::<T, I>::ExistentialDeposit); Self::ensure_can_withdraw( transactor, value, WithdrawReasons::TRANSFER, from_account.free, ) .map_err(|_| Error::<T, I>::LiquidityRestrictions)?; // TODO: This is over-conservative. There may now be other providers, and this pallet // may not even be a provider. let allow_death = existence_requirement == ExistenceRequirement::AllowDeath; let allow_death = allow_death && !system::Pallet::<T>::is_provider_required(transactor); ensure!( allow_death || from_account.free >= ed, Error::<T, I>::KeepAlive ); Ok(()) }) })?; // Emit transfer event. Self::deposit_event(Event::Transfer(transactor.clone(), dest.clone(), value)); Ok(()) } /// Slash a target account `who`, returning the negative imbalance created and any left over /// amount that could not be slashed. /// /// Is a no-op if `value` to be slashed is zero or the account does not exist. /// /// NOTE: `slash()` prefers free balance, but assumes that reserve balance can be drawn /// from in extreme circumstances. `can_slash()` should be used prior to `slash()` to avoid having /// to draw from reserved funds, however we err on the side of punishment if things are inconsistent /// or `can_slash` wasn't used appropriately. 
fn slash(who: &T::AccountId, value: Self::Balance) -> (Self::NegativeImbalance, Self::Balance) { if value.is_zero() { return (NegativeImbalance::zero(), Zero::zero()); } if Self::total_balance(&who).is_zero() { return (NegativeImbalance::zero(), value); } for attempt in 0..2 { match Self::try_mutate_account(who, |account, _is_new| -> Result<(Self::NegativeImbalance, Self::Balance), StoredMapError> { // Best value is the most amount we can slash following liveness rules. let best_value = match attempt { // First attempt we try to slash the full amount, and see if liveness issues happen. 0 => value, // If acting as a critical provider (i.e. first attempt failed), then slash // as much as possible while leaving at least at ED. _ => value.min((account.free + account.reserved).saturating_sub(T::ExistentialDeposit::get())), }; let free_slash = cmp::min(account.free, best_value); account.free -= free_slash; // Safe because of above check let remaining_slash = best_value - free_slash; // Safe because of above check if !remaining_slash.is_zero() { // If we have remaining slash, take it from reserved balance. let reserved_slash = cmp::min(account.reserved, remaining_slash); account.reserved -= reserved_slash; // Safe because of above check Ok(( NegativeImbalance::new(free_slash + reserved_slash), value - free_slash - reserved_slash, // Safe because value is gt or eq total slashed )) } else { // Else we are done! Ok(( NegativeImbalance::new(free_slash), value - free_slash, // Safe because value is gt or eq to total slashed )) } } ) { Ok(r) => return r, Err(_) => (), } } // Should never get here. But we'll be defensive anyway. (Self::NegativeImbalance::zero(), value) } /// Deposit some `value` into the free balance of an existing target account `who`. /// /// Is a no-op if the `value` to be deposited is zero. 
fn deposit_into_existing( who: &T::AccountId, value: Self::Balance, ) -> Result<Self::PositiveImbalance, DispatchError> { if value.is_zero() { return Ok(PositiveImbalance::zero()); } Self::try_mutate_account( who, |account, is_new| -> Result<Self::PositiveImbalance, DispatchError> { ensure!(!is_new, Error::<T, I>::DeadAccount); account.free = account .free .checked_add(&value) .ok_or(Error::<T, I>::Overflow)?; Ok(PositiveImbalance::new(value)) }, ) } /// Deposit some `value` into the free balance of `who`, possibly creating a new account. /// /// This function is a no-op if: /// - the `value` to be deposited is zero; or /// - the `value` to be deposited is less than the required ED and the account does not yet exist; or /// - the deposit would necessitate the account to exist and there are no provider references; or /// - `value` is so large it would cause the balance of `who` to overflow. fn deposit_creating(who: &T::AccountId, value: Self::Balance) -> Self::PositiveImbalance { if value.is_zero() { return Self::PositiveImbalance::zero(); } let r = Self::try_mutate_account( who, |account, is_new| -> Result<Self::PositiveImbalance, DispatchError> { let ed = T::ExistentialDeposit::get(); ensure!(value >= ed || !is_new, Error::<T, I>::ExistentialDeposit); // defensive only: overflow should never happen, however in case it does, then this // operation is a no-op. account.free = match account.free.checked_add(&value) { Some(x) => x, None => return Ok(Self::PositiveImbalance::zero()), }; Ok(PositiveImbalance::new(value)) }, ) .unwrap_or_else(|_| Self::PositiveImbalance::zero()); r } /// Withdraw some free balance from an account, respecting existence requirements. /// /// Is a no-op if value to be withdrawn is zero. 
fn withdraw( who: &T::AccountId, value: Self::Balance, reasons: WithdrawReasons, liveness: ExistenceRequirement, ) -> result::Result<Self::NegativeImbalance, DispatchError> { if value.is_zero() { return Ok(NegativeImbalance::zero()); } Self::try_mutate_account( who, |account, _| -> Result<Self::NegativeImbalance, DispatchError> { let new_free_account = account .free .checked_sub(&value) .ok_or(Error::<T, I>::InsufficientBalance)?; // bail if we need to keep the account alive and this would kill it. let ed = T::ExistentialDeposit::get(); let would_be_dead = new_free_account + account.reserved < ed; let would_kill = would_be_dead && account.free + account.reserved >= ed; ensure!( liveness == AllowDeath || !would_kill, Error::<T, I>::KeepAlive ); Self::ensure_can_withdraw(who, value, reasons, new_free_account)?; account.free = new_free_account; Ok(NegativeImbalance::new(value)) }, ) } /// Force the new free balance of a target account `who` to some new value `balance`. fn make_free_balance_be( who: &T::AccountId, value: Self::Balance, ) -> SignedImbalance<Self::Balance, Self::PositiveImbalance> { Self::try_mutate_account(who, |account, is_new| -> Result<SignedImbalance<Self::Balance, Self::PositiveImbalance>, DispatchError> { let ed = T::ExistentialDeposit::get(); let total = value.saturating_add(account.reserved); // If we're attempting to set an existing account to less than ED, then // bypass the entire operation. It's a no-op if you follow it through, but // since this is an instance where we might account for a negative imbalance // (in the dust cleaner of set_account) before we account for its actual // equal and opposite cause (returned as an Imbalance), then in the // instance that there's no other accounts on the system at all, we might // underflow the issuance and our arithmetic will be off. 
ensure!(total >= ed || !is_new, Error::<T, I>::ExistentialDeposit); let imbalance = if account.free <= value { SignedImbalance::Positive(PositiveImbalance::new(value - account.free)) } else { SignedImbalance::Negative(NegativeImbalance::new(account.free - value)) }; account.free = value; Ok(imbalance) }).unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero())) } } impl<T: Config<I>, I: 'static> ReservableCurrency<T::AccountId> for Pallet<T, I> where T::Balance: MaybeSerializeDeserialize + Debug, { /// Check if `who` can reserve `value` from their free balance. /// /// Always `true` if value to be reserved is zero. fn can_reserve(who: &T::AccountId, value: Self::Balance) -> bool { if value.is_zero() { return true; } Self::account(who) .free .checked_sub(&value) .map_or(false, |new_balance| { Self::ensure_can_withdraw(who, value, WithdrawReasons::RESERVE, new_balance).is_ok() }) } fn reserved_balance(who: &T::AccountId) -> Self::Balance { Self::account(who).reserved } /// Move `value` from the free balance from `who` to their reserved balance. /// /// Is a no-op if value to be reserved is zero. fn reserve(who: &T::AccountId, value: Self::Balance) -> DispatchResult { if value.is_zero() { return Ok(()); } Self::try_mutate_account(who, |account, _| -> DispatchResult { account.free = account .free .checked_sub(&value) .ok_or(Error::<T, I>::InsufficientBalance)?; account.reserved = account .reserved .checked_add(&value) .ok_or(Error::<T, I>::Overflow)?; Self::ensure_can_withdraw(&who, value.clone(), WithdrawReasons::RESERVE, account.free) })?; Self::deposit_event(Event::Reserved(who.clone(), value)); Ok(()) } /// Unreserve some funds, returning any amount that was unable to be unreserved. /// /// Is a no-op if the value to be unreserved is zero or the account does not exist. 
fn unreserve(who: &T::AccountId, value: Self::Balance) -> Self::Balance { if value.is_zero() { return Zero::zero(); } if Self::total_balance(&who).is_zero() { return value; } let actual = match Self::mutate_account(who, |account| { let actual = cmp::min(account.reserved, value); account.reserved -= actual; // defensive only: this can never fail since total issuance which is at least free+reserved // fits into the same data type. account.free = account.free.saturating_add(actual); actual }) { Ok(x) => x, Err(_) => { // This should never happen since we don't alter the total amount in the account. // If it ever does, then we should fail gracefully though, indicating that nothing // could be done. return value; } }; Self::deposit_event(Event::Unreserved(who.clone(), actual.clone())); value - actual } /// Slash from reserved balance, returning the negative imbalance created, /// and any amount that was unable to be slashed. /// /// Is a no-op if the value to be slashed is zero or the account does not exist. fn slash_reserved( who: &T::AccountId, value: Self::Balance, ) -> (Self::NegativeImbalance, Self::Balance) { if value.is_zero() { return (NegativeImbalance::zero(), Zero::zero()); } if Self::total_balance(&who).is_zero() { return (NegativeImbalance::zero(), value); } // NOTE: `mutate_account` may fail if it attempts to reduce the balance to the point that an // account is attempted to be illegally destroyed. for attempt in 0..2 { match Self::mutate_account(who, |account| { let best_value = match attempt { 0 => value, // If acting as a critical provider (i.e. first attempt failed), then ensure // slash leaves at least the ED. _ => value.min( (account.free + account.reserved) .saturating_sub(T::ExistentialDeposit::get()), ), }; let actual = cmp::min(account.reserved, best_value); account.reserved -= actual; // underflow should never happen, but it if does, there's nothing to be done here. 
(NegativeImbalance::new(actual), value - actual) }) { Ok(r) => return r, Err(_) => (), } } // Should never get here as we ensure that ED is left in the second attempt. // In case we do, though, then we fail gracefully. (Self::NegativeImbalance::zero(), value) } /// Move the reserved balance of one account into the balance of another, according to `status`. /// /// Is a no-op if: /// - the value to be moved is zero; or /// - the `slashed` id equal to `beneficiary` and the `status` is `Reserved`. fn repatriate_reserved( slashed: &T::AccountId, beneficiary: &T::AccountId, value: Self::Balance, status: Status, ) -> Result<Self::Balance, DispatchError> { if value.is_zero() { return Ok(Zero::zero()); } if slashed == beneficiary { return match status { Status::Free => Ok(Self::unreserve(slashed, value)), Status::Reserved => Ok(value.saturating_sub(Self::reserved_balance(slashed))), }; } let actual = Self::try_mutate_account( beneficiary, |to_account, is_new| -> Result<Self::Balance, DispatchError> { ensure!(!is_new, Error::<T, I>::DeadAccount); Self::try_mutate_account( slashed, |from_account, _| -> Result<Self::Balance, DispatchError> { let actual = cmp::min(from_account.reserved, value); match status { Status::Free => { to_account.free = to_account .free .checked_add(&actual) .ok_or(Error::<T, I>::Overflow)? } Status::Reserved => { to_account.reserved = to_account .reserved .checked_add(&actual) .ok_or(Error::<T, I>::Overflow)? } } from_account.reserved -= actual; Ok(actual) }, ) }, )?; Self::deposit_event(Event::ReserveRepatriated( slashed.clone(), beneficiary.clone(), actual, status, )); Ok(value - actual) } } impl<T: Config<I>, I: 'static> LockableCurrency<T::AccountId> for Pallet<T, I> where T::Balance: MaybeSerializeDeserialize + Debug, { type Moment = T::BlockNumber; type MaxLocks = T::MaxLocks; // Set a lock on the balance of `who`. // Is a no-op if lock amount is zero or `reasons` `is_none()`. 
fn set_lock( id: LockIdentifier, who: &T::AccountId, amount: T::Balance, reasons: WithdrawReasons, ) { if amount.is_zero() || reasons.is_empty() { return; } let mut new_lock = Some(BalanceLock { id, amount, reasons: reasons.into(), }); let mut locks = Self::locks(who) .into_iter() .filter_map(|l| if l.id == id { new_lock.take() } else { Some(l) }) .collect::<Vec<_>>(); if let Some(lock) = new_lock { locks.push(lock) } Self::update_locks(who, &locks[..]); } // Extend a lock on the balance of `who`. // Is a no-op if lock amount is zero or `reasons` `is_none()`. fn extend_lock( id: LockIdentifier, who: &T::AccountId, amount: T::Balance, reasons: WithdrawReasons, ) { if amount.is_zero() || reasons.is_empty() { return; } let mut new_lock = Some(BalanceLock { id, amount, reasons: reasons.into(), }); let mut locks = Self::locks(who) .into_iter() .filter_map(|l| { if l.id == id { new_lock.take().map(|nl| BalanceLock { id: l.id, amount: l.amount.max(nl.amount), reasons: l.reasons | nl.reasons, }) } else { Some(l) } }) .collect::<Vec<_>>(); if let Some(lock) = new_lock { locks.push(lock) } Self::update_locks(who, &locks[..]); } fn remove_lock(id: LockIdentifier, who: &T::AccountId) { let mut locks = Self::locks(who); locks.retain(|l| l.id != id); Self::update_locks(who, &locks[..]); } }
38.567445
135
0.579257
11798f7d6c54b5b289e3eec205e2d9d0f6e3476b
6,832
//! Contains the `VehicleType` enum and its associated traits. It specifies the //! mode of transportation for transit directions. use crate::directions::error::Error; use serde::{Deserialize, Serialize}; /// Indicates the [vehicle /// type](https://developers.google.com/maps/documentation/directions/intro#VehicleType) #[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize, Deserialize)] pub enum VehicleType { /// Bus. #[serde(alias = "BUS")] Bus, /// A vehicle that operates on a cable, usually on the ground. Aerial cable /// cars may be of the type VehicleType::Gondola. #[serde(alias = "CABLE_CAR")] CableCar, /// Commuter rail. #[serde(alias = "COMMUTER_TRAIN")] CommuterTrain, /// Ferry. #[serde(alias = "FERRY")] Ferry, /// A vehicle that is pulled up a steep incline by a cable. A Funicular /// typically consists of two cars, with each car acting as a counterweight /// for the other. #[serde(alias = "FUNICULAR")] Funicular, /// An aerial cable car. #[serde(alias = "GONDOLA_LIFT")] GondolaLift, /// Heavy rail. #[serde(alias = "HEAVY_RAIL")] HeavyRail, /// High speed train. #[serde(alias = "HIGH_SPEED_TRAIN")] HighSpeedTrain, /// Intercity bus. #[serde(alias = "INTERCITY_BUS")] IntercityBus, /// Long distance train. #[serde(alias = "LONG_DISTANCE_TRAIN")] LongDistanceTrain, /// Light rail transit. #[serde(alias = "METRO_RAIL")] MetroRail, /// Monorail. #[serde(alias = "MONORAIL")] Monorail, /// All other vehicles will return this type. #[serde(alias = "OTHER")] Other, /// Rail. #[serde(alias = "RAIL")] Rail, /// Share taxi is a kind of bus with the ability to drop off and pick up /// passengers anywhere on its route. #[serde(alias = "SHARE_TAXI")] ShareTaxi, /// Underground light rail. #[serde(alias = "SUBWAY")] Subway, /// Above ground light rail. #[serde(alias = "TRAM")] Tram, /// Trolleybus. 
#[serde(alias = "TROLLEYBUS")] Trolleybus, } // enum impl std::convert::From<&VehicleType> for String { /// Converts a `VehicleType` enum to a `String` that contains a [vehicle /// type](https://developers.google.com/maps/documentation/directions/intro#VehicleType) /// code. fn from(vehicle_type: &VehicleType) -> String { match vehicle_type { VehicleType::Bus => String::from("BUS"), VehicleType::CableCar => String::from("CABLE_CAR"), VehicleType::CommuterTrain => String::from("COMMUTER_TRAIN"), VehicleType::Ferry => String::from("FERRY"), VehicleType::Funicular => String::from("FUNICULAR"), VehicleType::GondolaLift => String::from("GONDOLA_LIFT"), VehicleType::HeavyRail => String::from("HEAVY_RAIL"), VehicleType::HighSpeedTrain => String::from("HIGH_SPEED_TRAIN"), VehicleType::IntercityBus => String::from("INTERCITY_BUS"), VehicleType::LongDistanceTrain => String::from("LONG_DISTANCE_TRAIN"), VehicleType::MetroRail => String::from("METRO_RAIL"), VehicleType::Monorail => String::from("MONORAIL"), VehicleType::Other => String::from("OTHER"), VehicleType::Rail => String::from("RAIL"), VehicleType::ShareTaxi => String::from("SHARE_TAXI"), VehicleType::Subway => String::from("SUBWAY"), VehicleType::Tram => String::from("TRAM"), VehicleType::Trolleybus => String::from("TROLLEYBUS"), } // match } // fn } // impl impl std::convert::TryFrom<&str> for VehicleType { // Error definitions are contained in the // `google_maps\src\directions\error.rs` module. type Error = crate::directions::error::Error; /// Gets a `VehicleType` enum from a `String` that contains a valid [vehicle /// type](https://developers.google.com/maps/documentation/directions/intro#VehicleType) /// code. 
fn try_from(vehicle_type: &str) -> Result<VehicleType, Error> { match vehicle_type { "BUS" => Ok(VehicleType::Bus), "CABLE_CAR" => Ok(VehicleType::CableCar), "COMMUTER_TRAIN" => Ok(VehicleType::CommuterTrain), "FERRY" => Ok(VehicleType::Ferry), "FUNICULAR" => Ok(VehicleType::Funicular), "GONDOLA_LIFT" => Ok(VehicleType::GondolaLift), "HEAVY_RAIL" => Ok(VehicleType::HeavyRail), "HIGH_SPEED_TRAIN" => Ok(VehicleType::HighSpeedTrain), "INTERCITY_BUS" => Ok(VehicleType::IntercityBus), "LONG_DISTANCE_TRAIN" => Ok(VehicleType::LongDistanceTrain), "METRO_RAIL" => Ok(VehicleType::MetroRail), "MONORAIL" => Ok(VehicleType::Monorail), "OTHER" => Ok(VehicleType::Other), "RAIL" => Ok(VehicleType::Rail), "SHARE_TAXI" => Ok(VehicleType::ShareTaxi), "SUBWAY" => Ok(VehicleType::Subway), "TRAM" => Ok(VehicleType::Tram), "TROLLEYBUS" => Ok(VehicleType::Trolleybus), _ => Err(Error::InvalidVehicleTypeCode(vehicle_type.to_string())), } // match } // fn } // impl impl std::default::Default for VehicleType { /// Returns a reasonable default variant for the `VehicleType` enum type. fn default() -> Self { VehicleType::Bus } // fn } // impl impl std::fmt::Display for VehicleType { /// Formats a `VehicleType` enum into a string that is presentable to the /// end user. 
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { VehicleType::Bus => write!(f, "Bus"), VehicleType::CableCar => write!(f, "Cable Car"), VehicleType::CommuterTrain => write!(f, "Commuter Train"), VehicleType::Ferry => write!(f, "Ferry"), VehicleType::Funicular => write!(f, "Funicular"), VehicleType::GondolaLift => write!(f, "Gondola Lift"), VehicleType::HeavyRail => write!(f, "Heavy Rail"), VehicleType::HighSpeedTrain => write!(f, "High Speed Train"), VehicleType::IntercityBus => write!(f, "Intercity Bus"), VehicleType::LongDistanceTrain => write!(f, "Long Distance Train"), VehicleType::MetroRail => write!(f, "Metro Rail"), VehicleType::Monorail => write!(f, "Monorail"), VehicleType::Other => write!(f, "Other"), VehicleType::Rail => write!(f, "Rail"), VehicleType::ShareTaxi => write!(f, "Share Taxi"), VehicleType::Subway => write!(f, "Subway"), VehicleType::Tram => write!(f, "Tram"), VehicleType::Trolleybus => write!(f, "Trolleybus"), } // match } // fn } // impl
41.658537
92
0.602752
67b0f788cf2b0b22945b158ab9576e56cd6f2173
2,318
/// /// Interface for adding grafana annotations about deploys /// use reqwest; use chrono::Utc; use std::env; use serde_json::json; use super::{Result, ErrorKind, ResultExt}; /// At what time the annotation should be made #[derive(Debug)] pub enum TimeSpec { Now, Time(u64), } /// The type of annotation event #[derive(Debug)] pub enum Event { Upgrade, Rollback, } /// A representation of a particular deployment event #[derive(Debug)] pub struct Annotation { pub event: Event, pub service: String, pub version: String, pub region: String, pub time: TimeSpec, } /// Extracts grafana URL + HTTP scheme from environment pub fn env_hook_url() -> Result<String> { env::var("GRAFANA_SHIPCAT_HOOK_URL") .map_err(|_| ErrorKind::MissingGrafanaUrl.into()) } /// Extracts grafana API key from environment pub fn env_token() -> Result<String> { env::var("GRAFANA_SHIPCAT_TOKEN") .map_err(|_| ErrorKind::MissingGrafanaToken.into()) } /// Convert timespec to UNIX time, in milliseconds fn unix_timestamp(spec: &TimeSpec) -> Result<u64> { let timestamp = match spec { TimeSpec::Now => Utc::now().timestamp_millis() as u64, TimeSpec::Time(timestamp) => *timestamp }; Ok(timestamp) } /// Create an annotation for a deployment using grafana's REST API pub fn create(annotation: Annotation) -> Result<()> { let hook_url = env_hook_url()?; let hook_token = env_token()?; let timestamp = unix_timestamp(&annotation.time)?; let data = json!({ "time": timestamp, "text": format!("{} {}={} in {}", match annotation.event { Event::Upgrade => "Upgrade", Event::Rollback => "Rollback" }, &annotation.service, &annotation.version, &annotation.region ), "tags": [ "all-deploys", format!("{}-deploys", annotation.region), format!("{}-deploys", annotation.service) ] }); let url = reqwest::Url::parse(&hook_url)?.join("api/annotations")?; let mkerr = || ErrorKind::Url(url.clone()); let client = reqwest::Client::new(); client.post(url.clone()) .bearer_auth(hook_token) .json(&data) .send() .chain_err(&mkerr)?; Ok(()) }
25.195652
71
0.610009
89156717f4cd0b4147f0011be06b81fa1c653693
64
aima.test.core.unit.probability.bayes.approx.ParticleFilterTest
32
63
0.875
1aae08f3cca1dd4787584ca2f9f60f40dfad66d4
25,727
use std::env; use support::{project, execs, basic_bin_manifest}; use support::{RUNNING, COMPILING, DOCTEST}; use hamcrest::{assert_that, existing_file}; use cargo::util::process; fn setup() { } fn disabled() -> bool { // First, disable if ./configure requested so match env::var("CFG_DISABLE_CROSS_TESTS") { Ok(ref s) if *s == "1" => return true, _ => {} } // Right now the windows bots cannot cross compile due to the mingw setup, // so we disable ourselves on all but macos/linux setups where the rustc // install script ensures we have both architectures !(cfg!(target_os = "macos") || cfg!(target_os = "linux") || cfg!(target_env = "msvc")) } fn alternate() -> String { let platform = match env::consts::OS { "linux" => "unknown-linux-gnu", "macos" => "apple-darwin", "windows" => "pc-windows-msvc", _ => unreachable!(), }; let arch = match env::consts::ARCH { "x86" => "x86_64", "x86_64" => "i686", _ => unreachable!(), }; format!("{}-{}", arch, platform) } fn alternate_arch() -> &'static str { match env::consts::ARCH { "x86" => "x86_64", "x86_64" => "x86", _ => unreachable!(), } } test!(simple_cross { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" "#) .file("build.rs", &format!(r#" fn main() {{ assert_eq!(std::env::var("TARGET").unwrap(), "{}"); }} "#, alternate())) .file("src/main.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); assert_that(&p.target_bin(&target, "foo"), existing_file()); assert_that(process(&p.target_bin(&target, "foo")).unwrap(), execs().with_status(0)); }); test!(simple_deps { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" extern 
crate bar; fn main() { bar::bar(); } "#); let p2 = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn bar() {}"); p2.build(); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); assert_that(&p.target_bin(&target, "foo"), existing_file()); assert_that(process(&p.target_bin(&target, "foo")).unwrap(), execs().with_status(0)); }); test!(plugin_deps { if disabled() { return } if !::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate baz; fn main() { assert_eq!(bar!(), baz::baz()); } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, quote, rustc_private)] extern crate rustc; extern crate syntax; use rustc::plugin::Registry; use syntax::ast::TokenTree; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; #[plugin_registrar] pub fn foo(reg: &mut Registry) { reg.register_macro("bar", expand_bar); } fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'static> { MacEager::expr(quote_expr!(cx, 1)) } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); assert_that(foo.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); assert_that(&foo.target_bin(&target, "foo"), existing_file()); assert_that(process(&foo.target_bin(&target, "foo")).unwrap(), execs().with_status(0)); }); test!(plugin_to_the_max { if disabled() { return } if 
!::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" [dependencies.baz] path = "../baz" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] extern crate baz; fn main() { assert_eq!(bar!(), baz::baz()); } "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, quote, rustc_private)] extern crate rustc; extern crate syntax; extern crate baz; use rustc::plugin::Registry; use syntax::ast::TokenTree; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, MacEager, MacResult}; #[plugin_registrar] pub fn foo(reg: &mut Registry) { reg.register_macro("bar", expand_bar); } fn expand_bar(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'static> { MacEager::expr(quote_expr!(cx, baz::baz())) } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); assert_that(foo.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); println!("second"); assert_that(foo.cargo("build").arg("-v") .arg("--target").arg(&target), execs().with_status(0)); assert_that(&foo.target_bin(&target, "foo"), existing_file()); assert_that(process(&foo.target_bin(&target, "foo")).unwrap(), execs().with_status(0)); }); test!(linker_and_ar { if disabled() { return } let target = alternate(); let p = project("foo") .file(".cargo/config", &format!(r#" [target.{}] ar = "my-ar-tool" linker = "my-linker-tool" "#, target)) .file("Cargo.toml", &basic_bin_manifest("foo")) .file("src/foo.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); 
assert_that(p.cargo_process("build").arg("--target").arg(&target) .arg("-v"), execs().with_status(101) .with_stdout(&format!("\ {compiling} foo v0.5.0 ({url}) {running} `rustc src[..]foo.rs --crate-name foo --crate-type bin -g \ --out-dir {dir}[..]target[..]{target}[..]debug \ --emit=dep-info,link \ --target {target} \ -C ar=my-ar-tool -C linker=my-linker-tool \ -L dependency={dir}[..]target[..]{target}[..]debug \ -L dependency={dir}[..]target[..]{target}[..]debug[..]deps` ", running = RUNNING, compiling = COMPILING, dir = p.root().display(), url = p.url(), target = target, ))); }); test!(plugin_with_extra_dylib_dep { if disabled() { return } if !::is_nightly() { return } let foo = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] [dependencies.bar] path = "../bar" "#) .file("src/main.rs", r#" #![feature(plugin)] #![plugin(bar)] fn main() {} "#); let bar = project("bar") .file("Cargo.toml", r#" [package] name = "bar" version = "0.0.1" authors = [] [lib] name = "bar" plugin = true [dependencies.baz] path = "../baz" "#) .file("src/lib.rs", r#" #![feature(plugin_registrar, rustc_private)] extern crate rustc; extern crate baz; use rustc::plugin::Registry; #[plugin_registrar] pub fn foo(reg: &mut Registry) { println!("{}", baz::baz()); } "#); let baz = project("baz") .file("Cargo.toml", r#" [package] name = "baz" version = "0.0.1" authors = [] [lib] name = "baz" crate_type = ["dylib"] "#) .file("src/lib.rs", "pub fn baz() -> i32 { 1 }"); bar.build(); baz.build(); let target = alternate(); assert_that(foo.cargo_process("build").arg("--target").arg(&target), execs().with_status(0)); }); test!(cross_tests { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [project] name = "foo" authors = [] version = "0.0.0" [[bin]] name = "bar" "#) .file("src/main.rs", &format!(r#" extern crate foo; use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test() {{ main() }} "#, alternate_arch())) 
.file("src/lib.rs", &format!(r#" use std::env; pub fn foo() {{ assert_eq!(env::consts::ARCH, "{}"); }} #[test] fn test_foo() {{ foo() }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("test").arg("--target").arg(&target), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 ({foo}) {running} target[..]{triple}[..]bar-[..] running 1 test test test ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {running} target[..]{triple}[..]foo-[..] running 1 test test test_foo ... ok test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured {doctest} foo running 0 tests test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING, foo = p.url(), triple = target, doctest = DOCTEST))); }); test!(simple_cargo_run { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] "#) .file("src/main.rs", &format!(r#" use std::env; fn main() {{ assert_eq!(env::consts::ARCH, "{}"); }} "#, alternate_arch())); let target = alternate(); assert_that(p.cargo_process("run").arg("--target").arg(&target), execs().with_status(0)); }); test!(cross_with_a_build_script { if disabled() { return } let target = alternate(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' "#) .file("build.rs", &format!(r#" use std::env; use std::path::PathBuf; fn main() {{ assert_eq!(env::var("TARGET").unwrap(), "{0}"); let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap()); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "out"); path.pop(); assert!(path.file_name().unwrap().to_str().unwrap() .starts_with("foo-")); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "build"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "debug"); path.pop(); assert_eq!(path.file_name().unwrap().to_str().unwrap(), "{0}"); path.pop(); 
assert_eq!(path.file_name().unwrap().to_str().unwrap(), "target"); }} "#, target)) .file("src/main.rs", "fn main() {}"); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.0 (file://[..]) {running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..]` {running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` {running} `rustc src[..]main.rs [..] --target {target} [..]` ", compiling = COMPILING, running = RUNNING, target = target, dir = p.root().display()))); }); test!(build_script_needed_for_host_and_target { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = 'build.rs' [dependencies.d1] path = "d1" [build-dependencies.d2] path = "d2" "#) .file("build.rs", r#" extern crate d2; fn main() { d2::d2(); } "#) .file("src/main.rs", " extern crate d1; fn main() { d1::d1(); } ") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = 'build.rs' "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d1/build.rs", r#" use std::env; fn main() { let target = env::var("TARGET").unwrap(); println!("cargo:rustc-flags=-L /path/to/{}", target); } "#) .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] [dependencies.d1] path = "../d1" "#) .file("d2/src/lib.rs", " extern crate d1; pub fn d2() { d1::d1(); } "); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0) .with_stdout_contains(&format!("\ {compiling} d1 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d1[..]build.rs [..] 
--out-dir {dir}[..]target[..]build[..]d1-[..]`", running = RUNNING, dir = p.root().display())) .with_stdout_contains(&format!("\ {running} `{dir}[..]target[..]build[..]d1-[..]build-script-build`", running = RUNNING, dir = p.root().display())) .with_stdout_contains(&format!("\ {running} `rustc d1[..]src[..]lib.rs [..]`", running = RUNNING)) .with_stdout_contains(&format!("\ {compiling} d2 v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc d2[..]src[..]lib.rs [..] \ -L /path/to/{host}`", running = RUNNING, host = host)) .with_stdout_contains(&format!("\ {compiling} foo v0.0.0 ({url})", compiling = COMPILING, url = p.url())) .with_stdout_contains(&format!("\ {running} `rustc build.rs [..] --out-dir {dir}[..]target[..]build[..]foo-[..] \ -L /path/to/{host}`", running = RUNNING, dir = p.root().display(), host = host)) .with_stdout_contains(&format!("\ {running} `rustc src[..]main.rs [..] --target {target} [..] \ -L /path/to/{target}`", running = RUNNING, target = target))); }); test!(build_deps_for_the_right_arch { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] [dependencies.d2] path = "d2" "#) .file("src/main.rs", "extern crate d2; fn main() {}") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "../d1" "#) .file("d2/build.rs", "extern crate d1; fn main() {}") .file("d2/src/lib.rs", ""); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); }); test!(build_script_only_host { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.0" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#) 
.file("src/main.rs", "fn main() {}") .file("build.rs", "extern crate d1; fn main() {}") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] build = "build.rs" "#) .file("d1/src/lib.rs", " pub fn d1() {} ") .file("d1/build.rs", r#" use std::env; fn main() { assert!(env::var("OUT_DIR").unwrap().replace("\\", "/") .contains("target/debug/build/d1-"), "bad: {:?}", env::var("OUT_DIR")); } "#); let target = alternate(); assert_that(p.cargo_process("build").arg("--target").arg(&target).arg("-v"), execs().with_status(0)); }); test!(plugin_build_script_right_arch { if disabled() { return } let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [lib] name = "foo" plugin = true "#) .file("build.rs", "fn main() {}") .file("src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(alternate()), execs().with_status(0) .with_stdout(&format!("\ {compiling} foo v0.0.1 ([..]) {running} `rustc build.rs [..]` {running} `[..]build-script-build[..]` {running} `rustc src[..]lib.rs [..]` ", compiling = COMPILING, running = RUNNING))); }); test!(build_script_with_platform_specific_dependencies { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [build-dependencies.d1] path = "d1" "#) .file("build.rs", "extern crate d1; fn main() {}") .file("src/lib.rs", "") .file("d1/Cargo.toml", &format!(r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host)) .file("d1/src/lib.rs", "extern crate d2;") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(0) .with_stdout(&format!("\ {compiling} d2 v0.0.0 ([..]) {running} `rustc 
d2[..]src[..]lib.rs [..]` {compiling} d1 v0.0.0 ([..]) {running} `rustc d1[..]src[..]lib.rs [..]` {compiling} foo v0.0.1 ([..]) {running} `rustc build.rs [..]` {running} `{dir}[..]target[..]build[..]foo-[..]build-script-build` {running} `rustc src[..]lib.rs [..] --target {target} [..]` ", compiling = COMPILING, running = RUNNING, dir = p.root().display(), target = target))); }); test!(platform_specific_dependencies_do_not_leak { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [dependencies.d1] path = "d1" [build-dependencies.d1] path = "d1" "#) .file("build.rs", "extern crate d1; fn main() {}") .file("src/lib.rs", "") .file("d1/Cargo.toml", &format!(r#" [package] name = "d1" version = "0.0.0" authors = [] [target.{}.dependencies] d2 = {{ path = "../d2" }} "#, host)) .file("d1/src/lib.rs", "extern crate d2;") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(101) .with_stderr("\ [..] error: can't find crate for `d2`[..] [..] extern crate d2; [..] error: aborting due to previous error Could not compile `d1`. Caused by: [..] ")); }); test!(platform_specific_variables_reflected_in_build_scripts { if disabled() { return } let target = alternate(); let host = ::rustc_host(); let p = project("foo") .file("Cargo.toml", &format!(r#" [package] name = "foo" version = "0.0.1" authors = [] build = "build.rs" [target.{host}.dependencies] d1 = {{ path = "d1" }} [target.{target}.dependencies] d2 = {{ path = "d2" }} "#, host = host, target = target)) .file("build.rs", &format!(r#" use std::env; fn main() {{ let platform = env::var("TARGET").unwrap(); let (expected, not_expected) = match &platform[..] 
{{ "{host}" => ("DEP_D1_VAL", "DEP_D2_VAL"), "{target}" => ("DEP_D2_VAL", "DEP_D1_VAL"), _ => panic!("unknown platform") }}; env::var(expected).ok() .expect(&format!("missing {{}}", expected)); env::var(not_expected).err() .expect(&format!("found {{}}", not_expected)); }} "#, host = host, target = target)) .file("src/lib.rs", "") .file("d1/Cargo.toml", r#" [package] name = "d1" version = "0.0.0" authors = [] links = "d1" build = "build.rs" "#) .file("d1/build.rs", r#" fn main() { println!("cargo:val=1") } "#) .file("d1/src/lib.rs", "") .file("d2/Cargo.toml", r#" [package] name = "d2" version = "0.0.0" authors = [] links = "d2" build = "build.rs" "#) .file("d2/build.rs", r#" fn main() { println!("cargo:val=1") } "#) .file("d2/src/lib.rs", ""); assert_that(p.cargo_process("build").arg("-v"), execs().with_status(0)); assert_that(p.cargo_process("build").arg("-v").arg("--target").arg(&target), execs().with_status(0)); });
29.537313
91
0.449644
f811eb7f2951d6d0be3eb7fe0745fce578b35456
709
// test1.rs // This is a test for the following sections: // - Variables // - Functions // Mary is buying apples. One apple usually costs 2 Rustbucks, but if you buy // more than 40 at once, each apple only costs 1! Write a function that calculates // the price of an order of apples given the order amount. No hints this time! // Put your function here! // fn ..... { fn calculate_apple_price(quantity: i32) -> i32 { if quantity > 40 { quantity } else { quantity * 2 } } // Don't modify this function! #[test] fn verify_test() { let price1 = calculate_apple_price(35); let price2 = calculate_apple_price(65); assert_eq!(70, price1); assert_eq!(65, price2); }
23.633333
82
0.660085
fc6481450c9e85ce6f53ea8b600425569ca5621b
20,139
//! Here we handle incoming connections and communication on the RPC socket. //! Actual JSONRPC2 commands are handled in the `api` mod. use crate::jsonrpc::api::{JsonRpcMetaData, RpcApi, RpcImpl}; use crate::DaemonControl; use std::{ collections::{HashMap, VecDeque}, io::{self, Write}, path::PathBuf, sync::{Arc, RwLock}, thread, }; pub use mio::net::UnixListener; use mio::{net::UnixStream, Events, Interest, Poll, Token}; use jsonrpc_core::{futures::Future, Call, MethodCall, Response}; // Maximum number of concurrent handlers for incoming RPC commands const MAX_HANDLER_THREADS: usize = 4; // Remove trailing newlines from utf-8 byte stream fn trimmed(mut vec: Vec<u8>, bytes_read: usize) -> Vec<u8> { vec.truncate(bytes_read); // Until there is some whatever-newline character, pop. while let Some(byte) = vec.last() { // Of course, we assume utf-8 if !(&0x0a..=&0x0d).contains(&byte) { break; } vec.pop(); } vec } // Returns an error only on a fatal one, and None on recoverable ones. fn read_bytes_from_stream(stream: &mut dyn io::Read) -> Result<Option<Vec<u8>>, io::Error> { let mut buf = vec![0; 512]; let mut total_read = 0; loop { match stream.read(&mut buf[total_read..]) { Ok(0) => { if total_read == 0 { return Ok(None); } return Ok(Some(trimmed(buf, total_read))); } Ok(n) => { total_read += n; // Note that we don't return if it appears that we read till the end // here: we always wait for a WouldBlock so that we are sure they are // done writing. if total_read == buf.len() { buf.resize(total_read * 2, 0); } } Err(err) => { match err.kind() { io::ErrorKind::WouldBlock => { if total_read == 0 { // We can't read it just yet, but it's fine. return Ok(None); } if total_read == 0 { // We can't read it just yet, but it's fine. return Ok(None); } return Ok(Some(trimmed(buf, total_read))); } io::ErrorKind::Interrupted => { // Interrupted? 
Let's try again continue; } io::ErrorKind::ConnectionReset | io::ErrorKind::ConnectionAborted | io::ErrorKind::BrokenPipe => { // They're not there anymore, but it's fine. return Ok(None); } // Now that's actually bad _ => return Err(err), } } } } } // Returns Ok(None) on entirely written data and Ok(Some(remaining_data)) on partially-written // data. fn write_byte_stream(stream: &mut UnixStream, resp: Vec<u8>) -> Result<Option<Vec<u8>>, io::Error> { let mut written = 0; loop { match stream.write(&resp[written..]) { Ok(n) => { written += n; log::trace!("Wrote '{}', total '{}'", n, written); if written == resp.len() { return Ok(None); } } Err(e) => match e.kind() { io::ErrorKind::WouldBlock | io::ErrorKind::Interrupted => { log::debug!( "Got error '{}' when writing. Wrote '{}' bytes, defering \ the rest of the buffer to next write.", e, written ); return Ok(Some(resp[written..].to_vec())); } _ => return Err(e), }, } } } // Used to check if, when receiving an event for a token, we have an ongoing connection and stream // for it. type ConnectionMap = HashMap<Token, (UnixStream, Arc<RwLock<VecDeque<Vec<u8>>>>)>; fn handle_single_request( jsonrpc_io: Arc<RwLock<jsonrpc_core::MetaIoHandler<JsonRpcMetaData>>>, metadata: JsonRpcMetaData, resp_queue: Arc<RwLock<VecDeque<Vec<u8>>>>, message: MethodCall, ) { let res = jsonrpc_io .read() .unwrap() .handle_call(Call::MethodCall(message), metadata) .wait() .expect("jsonrpc_core says: Handler calls can never fail.") .expect("This is a method call, there is always a response."); let resp = Response::Single(res); let resp_bytes = serde_json::to_vec(&resp).expect("jsonrpc_core says: This should never fail."); resp_queue.write().unwrap().push_back(resp_bytes); } // Read request from the stream, parse it as JSON and handle the JSONRPC command. // Returns true if parsed correctly, false otherwise. // Extend the cache with data read from the stream, and parse it as a set of JSONRPC requests (no // notification). 
If there are remaining bytes not interpretable as a valid JSONRPC request, leave // it in the cache. // Will return true if we read at least one valid JSONRPC request. fn read_handle_request( cache: &mut Vec<u8>, stream: &mut UnixStream, resp_queue: &mut Arc<RwLock<VecDeque<Vec<u8>>>>, jsonrpc_io: &Arc<RwLock<jsonrpc_core::MetaIoHandler<JsonRpcMetaData>>>, metadata: &JsonRpcMetaData, handler_threads: &mut VecDeque<thread::JoinHandle<()>>, ) -> Result<(), io::Error> { // We use an optional index if there is some left unparsed bytes, because borrow checker :) let mut leftover = None; if let Some(new) = read_bytes_from_stream(stream)? { cache.extend(new); } else { // Nothing new? We can short-circuit. return Ok(()); } let mut de = serde_json::Deserializer::from_slice(cache).into_iter::<MethodCall>(); while let Some(method_call) = de.next() { log::trace!("Got JSONRPC request '{:#?}", method_call); match method_call { // Get a response and append it to the response queue Ok(m) => { let t_io_handler = jsonrpc_io.clone(); let t_meta = metadata.clone(); let t_queue = resp_queue.clone(); // We special case the 'stop' command to treat it synchronously, as we could miss // the "read closed" event in the main loop and hang up forever otherwise. // FIXME: We could not have a handler for it, and just write the raw response by // hand. if m.method.as_str() == "stop" { handle_single_request(t_io_handler, t_meta, t_queue, m); } else { // If there are too many threads spawned, wait for the oldest one to complete. // FIXME: we can be smarter than that.. if handler_threads.len() >= MAX_HANDLER_THREADS { handler_threads .pop_front() .expect("Just checked the length") .join() .unwrap(); } handler_threads.push_back(thread::spawn(move || { handle_single_request(t_io_handler, t_meta, t_queue, m) })); } } // Parsing error? Assume it's a message we'll be able to read later. Err(e) => { if e.is_eof() { leftover = Some(de.byte_offset()); } log::trace!( "Non fatal error reading JSON: '{}'. 
Probably partial read.", e ); break; } } } if let Some(leftover) = leftover { let s = &cache[leftover..]; *cache = s.to_vec(); } else { cache.clear(); } Ok(()) } // Our main polling loop fn mio_loop( mut listener: UnixListener, jsonrpc_io: jsonrpc_core::MetaIoHandler<JsonRpcMetaData>, metadata: JsonRpcMetaData, ) -> Result<(), io::Error> { const JSONRPC_SERVER: Token = Token(0); let mut poller = Poll::new()?; let mut events = Events::with_capacity(16); // UID per connection let mut unique_token = Token(JSONRPC_SERVER.0 + 1); let mut connections_map: ConnectionMap = HashMap::with_capacity(8); // Cache what we read from the socket, in case we read only half a message. let mut read_cache_map: HashMap<Token, Vec<u8>> = HashMap::with_capacity(8); let jsonrpc_io = Arc::from(RwLock::from(jsonrpc_io)); // Handle to thread currently handling commands we were sent. let mut handler_threads: VecDeque<std::thread::JoinHandle<_>> = VecDeque::with_capacity(MAX_HANDLER_THREADS); poller .registry() .register(&mut listener, JSONRPC_SERVER, Interest::READABLE)?; loop { poller.poll(&mut events, None)?; for event in &events { // A connection was established; loop to process all the messages if event.token() == JSONRPC_SERVER && event.is_readable() { while !metadata.is_shutdown() { match listener.accept() { Ok((mut stream, _)) => { let curr_token = Token(unique_token.0); unique_token.0 += 1; // So we actually know they want to discuss :) poller.registry().register( &mut stream, curr_token, Interest::READABLE, )?; // So we can retrieve it when they start the discussion connections_map.insert( curr_token, ( stream, Arc::new(RwLock::new(VecDeque::<Vec<u8>>::with_capacity(32))), ), ); read_cache_map.insert(curr_token, Vec::with_capacity(1024)); } Err(e) => { // Ok; next time then! if e.kind() == io::ErrorKind::WouldBlock { break; } // This one is not expected! 
return Err(e); } } } } else if connections_map.contains_key(&event.token()) { // Under normal circumstances we are always interested in both // Writable (do we got something for them from the resp_queue?) // and Readable (do they have something for us?) events let (stream, resp_queue) = connections_map .get_mut(&event.token()) .expect("We checked it existed just above."); poller.registry().reregister( stream, event.token(), Interest::READABLE.add(Interest::WRITABLE), )?; if event.is_readable() { log::trace!("Readable event for {:?}", event.token()); let read_cache = read_cache_map .get_mut(&event.token()) .expect("Entry is always set when connection_map's entry is"); read_handle_request( read_cache, stream, resp_queue, &jsonrpc_io, &metadata, &mut handler_threads, )?; } if event.is_writable() { // FIFO loop { // We can't use while let Some(resp) because deadlock let resp = match resp_queue.write().unwrap().pop_front() { Some(resp) => resp, None => break, }; log::trace!( "Writing response for {:?} ({} bytes)", event.token(), resp.len() ); // If we could not write the data, don't lose track of it! This would only // reasonably happen on `WouldBlock`. match write_byte_stream(stream, resp) { Ok(Some(resp)) => resp_queue.write().unwrap().push_front(resp), Ok(None) => {} Err(e) => { log::error!("Error writing resp for {:?}: '{}'", event.token(), e) } } } } if event.is_read_closed() || event.is_error() { log::trace!("Dropping connection for {:?}", event.token()); connections_map.remove(&event.token()); // If this was the last connection alive and we are shutting down, // actually shut down. if metadata.is_shutdown() && connections_map.is_empty() { while let Some(t) = handler_threads.pop_front() { t.join().unwrap(); } return Ok(()); } } } } } } // Tries to bind to the socket, if we are told it's already in use try to connect // to check there is actually someone listening and it's not a leftover from a // crash. 
/// Bind a `UnixListener` at `socket_path`, recovering from a stale socket file.
///
/// If the bind fails with `AddrInUse` we probe the socket with a `connect()`:
/// - if the connection succeeds, another process is actually listening, so the
///   original `AddrInUse` error is propagated;
/// - if it fails, the file is a leftover from a previous (crashed) run, so it
///   is unlinked and the bind retried once.
fn bind(socket_path: PathBuf) -> Result<UnixListener, io::Error> {
    match UnixListener::bind(&socket_path) {
        Ok(l) => Ok(l),
        Err(e) => {
            if e.kind() == io::ErrorKind::AddrInUse {
                return match UnixStream::connect(&socket_path) {
                    // Someone is live on the socket: surface the AddrInUse error.
                    Ok(_) => Err(e),
                    Err(_) => {
                        // Ok, no one's here. Just delete the socket and bind.
                        log::debug!("Removing leftover rpc socket.");
                        std::fs::remove_file(&socket_path)?;
                        UnixListener::bind(&socket_path)
                    }
                };
            }
            // Any other bind error is fatal as-is.
            Err(e)
        }
    }
}

/// Bind to the UDS at `socket_path`
pub fn rpcserver_setup(socket_path: PathBuf) -> Result<UnixListener, io::Error> {
    // Create the socket with RW permissions only for the user
    // (umask 0o177 masks group/other and the exec bits, i.e. the socket file
    // ends up 0600). umask is process-wide state, hence the save/restore dance.
    let old_umask = unsafe { libc::umask(0o177) };
    let listener = bind(socket_path);

    // Restore the previous umask whether or not the bind succeeded, so we
    // don't leak the restrictive mask to unrelated file creations.
    unsafe {
        libc::umask(old_umask);
    }

    listener
}

/// The main event loop for the JSONRPC interface, polling the UDS listener
pub fn rpcserver_loop(
    listener: UnixListener,
    daemon_control: DaemonControl,
) -> Result<(), io::Error> {
    // Register all RPC methods on the handler, then hand the listener and the
    // handler over to the mio-based event loop. Only returns when the loop
    // terminates (e.g. after a `stop` command — see the test below).
    let mut jsonrpc_io = jsonrpc_core::MetaIoHandler::<JsonRpcMetaData, _>::default();
    jsonrpc_io.extend_with(RpcImpl.to_delegate());
    let metadata = JsonRpcMetaData::new(daemon_control);

    log::info!("JSONRPC server started.");
    return mio_loop(listener, jsonrpc_io, metadata);
}

#[cfg(test)]
mod tests {
    use super::{read_bytes_from_stream, rpcserver_loop, rpcserver_setup, trimmed};
    use crate::utils::test_utils::{dummy_rpcutil, test_datadir, UserRole};

    use std::{
        fs,
        io::{Cursor, Read, Write},
        thread,
        time::Duration,
    };

    use std::os::unix::net::UnixStream;

    // Redundant with functional tests but useful for testing the Windows loop
    // until the functional tests suite can run on it.
    #[test]
    fn simple_write_recv() {
        let datadir = test_datadir();
        let rpcutils = dummy_rpcutil(datadir.clone(), UserRole::ManagerStakeholder);
        let revaultd_datadir = rpcutils.revaultd.read().unwrap().data_dir.clone();
        let mut rpc_socket_path = revaultd_datadir.clone();
        rpc_socket_path.push("revaultd_rpc");
        let socket = rpcserver_setup(rpc_socket_path.clone()).unwrap();

        // Run the server loop on its own thread; the test drives it through
        // the client socket and joins after sending `stop`.
        let server_loop_thread = thread::spawn(move || {
            rpcserver_loop(socket, rpcutils).unwrap_or_else(|e| {
                panic!("Error in JSONRPC server event loop: {}", e.to_string());
            })
        });

        // Retry connecting until the server thread is up, with a 5s deadline.
        // NOTE(review): this busy-recurses without sleeping — fine for a test,
        // but a short sleep between attempts would be gentler on CI.
        fn bind_or_die(path: &std::path::PathBuf, starting_time: std::time::Instant) -> UnixStream {
            match UnixStream::connect(path) {
                Ok(s) => s,
                Err(e) => {
                    if starting_time.elapsed() > Duration::from_secs(5) {
                        panic!("Could not connect to the socket: '{:?}'", e);
                    }
                    bind_or_die(path, starting_time)
                }
            }
        }

        let now = std::time::Instant::now();
        let mut sock = bind_or_die(&rpc_socket_path, now);

        // Write a valid JSONRPC message (but invalid command)
        // For some reasons it takes '{}' as non-empty parameters ON UNIX BUT NOT WINDOWS WTF..
        let invalid_msg = String::from(r#"{"jsonrpc": "2.0", "id": 0, "method": "stop", "params": {"a": "b"}}"#);
        let mut response = vec![0; 256];
        // NOTE(review): write() may be partial in theory; messages are tiny so
        // this works, but write_all() would state the intent.
        sock.write(invalid_msg.as_bytes()).unwrap();
        let read = sock.read(&mut response).unwrap();
        assert_eq!(
            String::from_utf8(trimmed(response, read)).unwrap(),
            String::from(
                r#"{"jsonrpc":"2.0","error":{"code":-32602,"message":"Invalid parameters: No parameters were expected","data":"Map({\"a\": String(\"b\")})"},"id":0}"#
            )
        );

        {
            // Write valid JSONRPC message with a half-written one afterward:
            // the server must answer the complete message and buffer the rest.
            let msg = String::from(
                r#"{"jsonrpc": "2.0", "id": 1, "method": "aaa", "params": []} {"jsonrpc": "2.0", "id": 2, "#,
            );
            let mut response = vec![0; 256];
            sock.write(msg.as_bytes()).unwrap();
            let read = sock.read(&mut response).unwrap();
            assert_eq!(
                response[..read],
                String::from(
                    r#"{"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":1}"#
                )
                .as_bytes()[..read]
            );

            // Write the other half of the message
            let msg = String::from(r#" "method": "bbbb", "params": []}"#);
            let mut response = vec![0; 256];
            sock.write(msg.as_bytes()).unwrap();
            let read = sock.read(&mut response).unwrap();
            assert_eq!(
                response[..read],
                String::from(
                    r#"{"jsonrpc":"2.0","error":{"code":-32601,"message":"Method not found"},"id":2}"#
                )
                .as_bytes()[..read]
            );
        }

        // Tell it to stop, should send us a Shutdown message
        let msg = String::from(r#"{"jsonrpc": "2.0", "id": 0, "method": "stop", "params": []}"#);
        sock.write(msg.as_bytes()).unwrap();
        sock.flush().unwrap();

        drop(sock);
        // The loop must terminate cleanly after `stop`.
        server_loop_thread.join().unwrap();

        fs::remove_dir_all(&datadir).unwrap();
    }

    #[test]
    fn test_bytes_reader() {
        // Round-trip various payload sizes through the stream reader.
        let samples = [vec![22; 22], vec![1; 522], vec![189; 28903]];

        // TODO: read_bytes_from_stream() would make a great fuzz target..
        for data in samples.iter() {
            let mut stream = Cursor::new(data.clone());
            let res = read_bytes_from_stream(&mut stream);
            assert_eq!(&res.unwrap().unwrap(), data);
        }
    }
}
38.214421
166
0.497294
16e07cb5f47b720c75a9893e0955cb491f7cf262
2,250
use std::ffi::OsStr; use std::fs; use std::io::Write; use std::path::PathBuf; use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt( name = "gpak2c", about = "GPAK output converter from *.hex or *.csv into *.c files" )] struct InputArgs { #[structopt(short = "i", long = "input")] input_path: PathBuf, #[structopt(short = "o", long = "output")] output_path: PathBuf, } fn main() { let args = InputArgs::from_args(); assert_ne!(args.input_path, args.output_path); let ext = args.input_path.extension().and_then(OsStr::to_str); match ext { Some("csv") => extract_from_csv(args.input_path, args.output_path), Some("hex") => extract_from_hex(args.input_path, args.output_path), Some(_) => println!("Unsupported file type!"), None => eprintln!("File has no extension!"), } } fn extract_from_csv(input_file: PathBuf, output_file: PathBuf) { let input_data = fs::read_to_string(input_file).unwrap(); let mut output_data = fs::File::create(&output_file).unwrap(); writeln!( output_data, "uint8_t {}[256] = {{", output_file.file_stem().and_then(OsStr::to_str).unwrap() ) .unwrap(); for (index, line) in input_data.lines().enumerate() { if (index + 1) % 16 != 0 { write!(output_data, "0x{}, ", line).unwrap(); } else { writeln!(output_data, "0x{}, ", line).unwrap(); } } writeln!(output_data, "}};").unwrap(); } fn extract_from_hex(input_file: PathBuf, output_file: PathBuf) { let input_data = fs::read_to_string(input_file).unwrap(); let mut output_data = fs::File::create(&output_file).unwrap(); writeln!( output_data, "uint8_t {}[256] = {{", output_file.file_stem().and_then(OsStr::to_str).unwrap() ) .unwrap(); for line in input_data.lines() { if line.len() == 43 { for i in 9..=39 { if i % 2 == 0 { continue; } else { write!(output_data, "0x{}, ", &line[i..=(i + 1)]).unwrap(); } } writeln!(output_data).unwrap(); } } writeln!(output_data, "}};").unwrap(); }
29.220779
79
0.563556
29aba7c121873bbd6c57aa3d24d2758942040ba3
19,332
use crate::utils::{constants, snippet_opt, span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then}; use rustc_ast::ast::{ BindingMode, Expr, ExprKind, GenericParamKind, Generics, Lit, LitFloatType, LitIntType, LitKind, Mutability, NodeId, Pat, PatKind, UnOp, }; use rustc_ast::visit::FnKind; use rustc_data_structures::fx::FxHashMap; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::source_map::Span; declare_clippy_lint! { /// **What it does:** Checks for structure field patterns bound to wildcards. /// /// **Why is this bad?** Using `..` instead is shorter and leaves the focus on /// the fields that are actually bound. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// # struct Foo { /// # a: i32, /// # b: i32, /// # c: i32, /// # } /// let f = Foo { a: 0, b: 0, c: 0 }; /// /// // Bad /// match f { /// Foo { a: _, b: 0, .. } => {}, /// Foo { a: _, b: _, c: _ } => {}, /// } /// /// // Good /// match f { /// Foo { b: 0, .. } => {}, /// Foo { .. } => {}, /// } /// ``` pub UNNEEDED_FIELD_PATTERN, restriction, "struct fields bound to a wildcard instead of using `..`" } declare_clippy_lint! { /// **What it does:** Checks for function arguments having the similar names /// differing by an underscore. /// /// **Why is this bad?** It affects code readability. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// // Bad /// fn foo(a: i32, _a: i32) {} /// /// // Good /// fn bar(a: i32, _b: i32) {} /// ``` pub DUPLICATE_UNDERSCORE_ARGUMENT, style, "function arguments having names which only differ by an underscore" } declare_clippy_lint! { /// **What it does:** Detects expressions of the form `--x`. /// /// **Why is this bad?** It can mislead C/C++ programmers to think `x` was /// decremented. /// /// **Known problems:** None. 
/// /// **Example:** /// ```rust /// let mut x = 3; /// --x; /// ``` pub DOUBLE_NEG, style, "`--x`, which is a double negation of `x` and not a pre-decrement as in C/C++" } declare_clippy_lint! { /// **What it does:** Warns on hexadecimal literals with mixed-case letter /// digits. /// /// **Why is this bad?** It looks confusing. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// // Bad /// let y = 0x1a9BAcD; /// /// // Good /// let y = 0x1A9BACD; /// ``` pub MIXED_CASE_HEX_LITERALS, style, "hex literals whose letter digits are not consistently upper- or lowercased" } declare_clippy_lint! { /// **What it does:** Warns if literal suffixes are not separated by an /// underscore. /// /// **Why is this bad?** It is much less readable. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// // Bad /// let y = 123832i32; /// /// // Good /// let y = 123832_i32; /// ``` pub UNSEPARATED_LITERAL_SUFFIX, pedantic, "literals whose suffix is not separated by an underscore" } declare_clippy_lint! { /// **What it does:** Warns if an integral constant literal starts with `0`. /// /// **Why is this bad?** In some languages (including the infamous C language /// and most of its /// family), this marks an octal constant. In Rust however, this is a decimal /// constant. This could /// be confusing for both the writer and a reader of the constant. /// /// **Known problems:** None. /// /// **Example:** /// /// In Rust: /// ```rust /// fn main() { /// let a = 0123; /// println!("{}", a); /// } /// ``` /// /// prints `123`, while in C: /// /// ```c /// #include <stdio.h> /// /// int main() { /// int a = 0123; /// printf("%d\n", a); /// } /// ``` /// /// prints `83` (as `83 == 0o123` while `123 == 0o173`). pub ZERO_PREFIXED_LITERAL, complexity, "integer literals starting with `0`" } declare_clippy_lint! { /// **What it does:** Warns if a generic shadows a built-in type. /// /// **Why is this bad?** This gives surprising type errors. 
/// /// **Known problems:** None. /// /// **Example:** /// /// ```ignore /// impl<u32> Foo<u32> { /// fn impl_func(&self) -> u32 { /// 42 /// } /// } /// ``` pub BUILTIN_TYPE_SHADOW, style, "shadowing a builtin type" } declare_clippy_lint! { /// **What it does:** Checks for patterns in the form `name @ _`. /// /// **Why is this bad?** It's almost always more readable to just use direct /// bindings. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// # let v = Some("abc"); /// /// // Bad /// match v { /// Some(x) => (), /// y @ _ => (), /// } /// /// // Good /// match v { /// Some(x) => (), /// y => (), /// } /// ``` pub REDUNDANT_PATTERN, style, "using `name @ _` in a pattern" } declare_clippy_lint! { /// **What it does:** Checks for tuple patterns with a wildcard /// pattern (`_`) is next to a rest pattern (`..`). /// /// _NOTE_: While `_, ..` means there is at least one element left, `..` /// means there are 0 or more elements left. This can make a difference /// when refactoring, but shouldn't result in errors in the refactored code, /// since the wildcard pattern isn't used anyway. /// **Why is this bad?** The wildcard pattern is unneeded as the rest pattern /// can match that element as well. /// /// **Known problems:** None. /// /// **Example:** /// ```rust /// # struct TupleStruct(u32, u32, u32); /// # let t = TupleStruct(1, 2, 3); /// /// // Bad /// match t { /// TupleStruct(0, .., _) => (), /// _ => (), /// } /// /// // Good /// match t { /// TupleStruct(0, ..) 
=> (), /// _ => (), /// } /// ``` pub UNNEEDED_WILDCARD_PATTERN, complexity, "tuple patterns with a wildcard pattern (`_`) is next to a rest pattern (`..`)" } declare_lint_pass!(MiscEarlyLints => [ UNNEEDED_FIELD_PATTERN, DUPLICATE_UNDERSCORE_ARGUMENT, DOUBLE_NEG, MIXED_CASE_HEX_LITERALS, UNSEPARATED_LITERAL_SUFFIX, ZERO_PREFIXED_LITERAL, BUILTIN_TYPE_SHADOW, REDUNDANT_PATTERN, UNNEEDED_WILDCARD_PATTERN, ]); impl EarlyLintPass for MiscEarlyLints { fn check_generics(&mut self, cx: &EarlyContext<'_>, gen: &Generics) { for param in &gen.params { if let GenericParamKind::Type { .. } = param.kind { let name = param.ident.as_str(); if constants::BUILTIN_TYPES.contains(&&*name) { span_lint( cx, BUILTIN_TYPE_SHADOW, param.ident.span, &format!("This generic shadows the built-in type `{}`", name), ); } } } } fn check_pat(&mut self, cx: &EarlyContext<'_>, pat: &Pat) { if let PatKind::Struct(ref npat, ref pfields, _) = pat.kind { let mut wilds = 0; let type_name = npat .segments .last() .expect("A path must have at least one segment") .ident .name; for field in pfields { if let PatKind::Wild = field.pat.kind { wilds += 1; } } if !pfields.is_empty() && wilds == pfields.len() { span_lint_and_help( cx, UNNEEDED_FIELD_PATTERN, pat.span, "All the struct fields are matched to a wildcard pattern, consider using `..`.", None, &format!("Try with `{} {{ .. }}` instead", type_name), ); return; } if wilds > 0 { for field in pfields { if let PatKind::Wild = field.pat.kind { wilds -= 1; if wilds > 0 { span_lint( cx, UNNEEDED_FIELD_PATTERN, field.span, "You matched a field with a wildcard pattern. Consider using `..` instead", ); } else { let mut normal = vec![]; for field in pfields { match field.pat.kind { PatKind::Wild => {}, _ => { if let Ok(n) = cx.sess().source_map().span_to_snippet(field.span) { normal.push(n); } }, } } span_lint_and_help( cx, UNNEEDED_FIELD_PATTERN, field.span, "You matched a field with a wildcard pattern. 
Consider using `..` \ instead", None, &format!("Try with `{} {{ {}, .. }}`", type_name, normal[..].join(", ")), ); } } } } } if let PatKind::Ident(left, ident, Some(ref right)) = pat.kind { let left_binding = match left { BindingMode::ByRef(Mutability::Mut) => "ref mut ", BindingMode::ByRef(Mutability::Not) => "ref ", BindingMode::ByValue(..) => "", }; if let PatKind::Wild = right.kind { span_lint_and_sugg( cx, REDUNDANT_PATTERN, pat.span, &format!( "the `{} @ _` pattern can be written as just `{}`", ident.name, ident.name, ), "try", format!("{}{}", left_binding, ident.name), Applicability::MachineApplicable, ); } } check_unneeded_wildcard_pattern(cx, pat); } fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, _: Span, _: NodeId) { let mut registered_names: FxHashMap<String, Span> = FxHashMap::default(); for arg in &fn_kind.decl().inputs { if let PatKind::Ident(_, ident, None) = arg.pat.kind { let arg_name = ident.to_string(); if arg_name.starts_with('_') { if let Some(correspondence) = registered_names.get(&arg_name[1..]) { span_lint( cx, DUPLICATE_UNDERSCORE_ARGUMENT, *correspondence, &format!( "`{}` already exists, having another argument having almost the same \ name makes code comprehension and documentation more difficult", arg_name[1..].to_owned() ), ); } } else { registered_names.insert(arg_name, arg.pat.span); } } } } fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) { if in_external_macro(cx.sess(), expr.span) { return; } match expr.kind { ExprKind::Unary(UnOp::Neg, ref inner) => { if let ExprKind::Unary(UnOp::Neg, _) = inner.kind { span_lint( cx, DOUBLE_NEG, expr.span, "`--x` could be misinterpreted as pre-decrement by C programmers, is usually a no-op", ); } }, ExprKind::Lit(ref lit) => Self::check_lit(cx, lit), _ => (), } } } impl MiscEarlyLints { fn check_lit(cx: &EarlyContext<'_>, lit: &Lit) { // We test if first character in snippet is a number, because the snippet could be an expansion // from a built-in macro like 
`line!()` or a proc-macro like `#[wasm_bindgen]`. // Note that this check also covers special case that `line!()` is eagerly expanded by compiler. // See <https://github.com/rust-lang/rust-clippy/issues/4507> for a regression. // FIXME: Find a better way to detect those cases. let lit_snip = match snippet_opt(cx, lit.span) { Some(snip) if snip.chars().next().map_or(false, |c| c.is_digit(10)) => snip, _ => return, }; if let LitKind::Int(value, lit_int_type) = lit.kind { let suffix = match lit_int_type { LitIntType::Signed(ty) => ty.name_str(), LitIntType::Unsigned(ty) => ty.name_str(), LitIntType::Unsuffixed => "", }; let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) { val } else { return; // It's useless so shouldn't lint. }; // Do not lint when literal is unsuffixed. if !suffix.is_empty() && lit_snip.as_bytes()[maybe_last_sep_idx] != b'_' { span_lint_and_sugg( cx, UNSEPARATED_LITERAL_SUFFIX, lit.span, "integer type suffix should be separated by an underscore", "add an underscore", format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix), Applicability::MachineApplicable, ); } if lit_snip.starts_with("0x") { if maybe_last_sep_idx <= 2 { // It's meaningless or causes range error. 
return; } let mut seen = (false, false); for ch in lit_snip.as_bytes()[2..=maybe_last_sep_idx].iter() { match ch { b'a'..=b'f' => seen.0 = true, b'A'..=b'F' => seen.1 = true, _ => {}, } if seen.0 && seen.1 { span_lint( cx, MIXED_CASE_HEX_LITERALS, lit.span, "inconsistent casing in hexadecimal literal", ); break; } } } else if lit_snip.starts_with("0b") || lit_snip.starts_with("0o") { /* nothing to do */ } else if value != 0 && lit_snip.starts_with('0') { span_lint_and_then( cx, ZERO_PREFIXED_LITERAL, lit.span, "this is a decimal constant", |diag| { diag.span_suggestion( lit.span, "if you mean to use a decimal constant, remove the `0` to avoid confusion", lit_snip.trim_start_matches(|c| c == '_' || c == '0').to_string(), Applicability::MaybeIncorrect, ); diag.span_suggestion( lit.span, "if you mean to use an octal constant, use `0o`", format!("0o{}", lit_snip.trim_start_matches(|c| c == '_' || c == '0')), Applicability::MaybeIncorrect, ); }, ); } } else if let LitKind::Float(_, LitFloatType::Suffixed(float_ty)) = lit.kind { let suffix = float_ty.name_str(); let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) { val } else { return; // It's useless so shouldn't lint. 
}; if lit_snip.as_bytes()[maybe_last_sep_idx] != b'_' { span_lint_and_sugg( cx, UNSEPARATED_LITERAL_SUFFIX, lit.span, "float type suffix should be separated by an underscore", "add an underscore", format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix), Applicability::MachineApplicable, ); } } } } fn check_unneeded_wildcard_pattern(cx: &EarlyContext<'_>, pat: &Pat) { if let PatKind::TupleStruct(_, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind { fn span_lint(cx: &EarlyContext<'_>, span: Span, only_one: bool) { span_lint_and_sugg( cx, UNNEEDED_WILDCARD_PATTERN, span, if only_one { "this pattern is unneeded as the `..` pattern can match that element" } else { "these patterns are unneeded as the `..` pattern can match those elements" }, if only_one { "remove it" } else { "remove them" }, "".to_string(), Applicability::MachineApplicable, ); } if let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) { if let Some((left_index, left_pat)) = patterns[..rest_index] .iter() .rev() .take_while(|pat| matches!(pat.kind, PatKind::Wild)) .enumerate() .last() { span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0); } if let Some((right_index, right_pat)) = patterns[rest_index + 1..] .iter() .take_while(|pat| matches!(pat.kind, PatKind::Wild)) .enumerate() .last() { span_lint( cx, patterns[rest_index].span.shrink_to_hi().to(right_pat.span), right_index == 0, ); } } } }
33.915789
114
0.450652
1656176bea027afd0bac2cc243546783b48a97b3
1,141
#![recursion_limit = "256"] #![cfg_attr(feature = "diagnostics", feature(proc_macro_diagnostic))] #[macro_use] extern crate quote; #[macro_use] extern crate syn; extern crate fnv; extern crate ident_case; #[cfg(feature = "diagnostics")] extern crate proc_macro; extern crate proc_macro2; #[cfg(feature = "suggestions")] extern crate strsim; #[macro_use] mod macros_private; #[macro_use] mod macros_public; pub mod ast; pub(crate) mod codegen; pub mod derive; pub mod error; mod from_attributes; mod from_derive_input; mod from_field; mod from_generic_param; mod from_generics; mod from_meta; mod from_type_param; mod from_variant; pub(crate) mod options; pub mod usage; pub mod util; pub use self::error::{Error, Result}; pub use self::from_attributes::FromAttributes; pub use self::from_derive_input::FromDeriveInput; pub use self::from_field::FromField; pub use self::from_generic_param::FromGenericParam; pub use self::from_generics::FromGenerics; pub use self::from_meta::FromMeta; pub use self::from_type_param::FromTypeParam; pub use self::from_variant::FromVariant; // Re-export tokenizer #[doc(hidden)] pub use quote::ToTokens;
22.82
69
0.778265
fb25814aaa79b77ac32d6a0e923dc755c30ef38e
1,820
use std::fs::File;
use std::io::Read;
use std::io::Result;

/// Reads the day-9 puzzle input from disk and prints both answers.
pub fn solve() -> Result<()> {
    let mut puzzle = String::new();
    File::open("input/09.txt")?.read_to_string(&mut puzzle)?;

    let (first, second) = solve_both(&puzzle);
    println!("day 9 first: {}", first);
    println!("day 9 second: {}", second);
    Ok(())
}

/// Walks the stream once and returns `(total group score, count of
/// non-cancelled characters inside garbage)`.
///
/// A `{` opens a group worth its nesting depth; `<`…`>` delimits garbage,
/// inside which `!` cancels the following character.
fn solve_both(input: &str) -> (i32, i32) {
    let mut depth = 0; // nesting level of the currently open group
    let mut score = 0; // accumulated score of all opened groups
    let mut garbage_len = 0; // raw (non-cancelled) garbage characters
    let mut in_garbage = false;
    let mut skip_next = false;

    for b in input.bytes() {
        if skip_next {
            // Previous byte was `!`: this one is cancelled outright.
            skip_next = false;
        } else if in_garbage {
            match b {
                b'!' => skip_next = true,
                b'>' => in_garbage = false,
                _ => garbage_len += 1,
            }
        } else {
            match b {
                b'<' => in_garbage = true,
                b'{' => {
                    // A group scores its own depth the moment it opens.
                    depth += 1;
                    score += depth;
                }
                b'}' => depth -= 1,
                _ => {}
            }
        }
    }

    (score, garbage_len)
}

#[cfg(test)]
mod tests {
    use super::solve_both;

    #[test]
    fn test_solve_both() {
        assert_eq!(solve_both("{}"), (1, 0));
        assert_eq!(solve_both("{{{}}}"), (6, 0));
        assert_eq!(solve_both("{{{},{},{{}}}}"), (16, 0));
        assert_eq!(solve_both("{{<ab>},{<ab>},{<ab>},{<ab>}}"), (9, 8));
        assert_eq!(solve_both("{{<!!>},{<!!>},{<!!>},{<!!>}}"), (9, 0));
        assert_eq!(solve_both("{{<a!>},{<a!>},{<a!>},{<ab>}}"), (3, 17));
        assert_eq!(solve_both("{<!!!>>}"), (1, 0));
        assert_eq!(solve_both("{<{o'i!a,<{i<a>}"), (1, 10));
    }
}
26.376812
73
0.397802
fc44b3ae288edc898329947f85221a5ef8333ab9
37,521
//! This module implements the JavaScript Value. //! //! Javascript values, utility methods and conversion between Javascript values and Rust values. #[cfg(test)] mod tests; use crate::{ builtins::{ number::{f64_to_int32, f64_to_uint32}, Number, }, gc::{Finalize, Trace}, object::{JsObject, ObjectData}, property::{PropertyDescriptor, PropertyKey}, symbol::{JsSymbol, WellKnownSymbols}, BoaProfiler, Context, JsBigInt, JsResult, JsString, }; use num_bigint::BigInt; use num_integer::Integer; use num_traits::Zero; use once_cell::sync::Lazy; use std::{ collections::HashSet, convert::TryFrom, fmt::{self, Display}, ops::Sub, str::FromStr, }; mod conversions; pub(crate) mod display; mod equality; mod hash; mod operations; mod r#type; pub use conversions::*; pub use display::ValueDisplay; pub use equality::*; pub use hash::*; pub use operations::*; pub use r#type::Type; static TWO_E_64: Lazy<BigInt> = Lazy::new(|| { const TWO_E_64: u128 = 2u128.pow(64); BigInt::from(TWO_E_64) }); static TWO_E_63: Lazy<BigInt> = Lazy::new(|| { const TWO_E_63: u128 = 2u128.pow(63); BigInt::from(TWO_E_63) }); /// A Javascript value #[derive(Trace, Finalize, Debug, Clone)] pub enum JsValue { /// `null` - A null value, for when a value doesn't exist. Null, /// `undefined` - An undefined value, for when a field or index doesn't exist. Undefined, /// `boolean` - A `true` / `false` value, for if a certain criteria is met. Boolean(bool), /// `String` - A UTF-8 string, such as `"Hello, world"`. String(JsString), /// `Number` - A 64-bit floating point number, such as `3.1415` Rational(f64), /// `Number` - A 32-bit integer, such as `42`. Integer(i32), /// `BigInt` - holds any arbitrary large signed integer. BigInt(JsBigInt), /// `Object` - An object, such as `Math`, represented by a binary tree of string keys to Javascript values. Object(JsObject), /// `Symbol` - A Symbol Primitive type. 
Symbol(JsSymbol), } /// Represents the result of `ToIntegerOrInfinity` operation #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum IntegerOrInfinity { Integer(i64), PositiveInfinity, NegativeInfinity, } impl JsValue { /// Create a new [`JsValue`]. #[inline] pub fn new<T>(value: T) -> Self where T: Into<Self>, { value.into() } /// Creates a new `undefined` value. #[inline] pub fn undefined() -> Self { Self::Undefined } /// Creates a new `null` value. #[inline] pub fn null() -> Self { Self::Null } /// Creates a new number with `NaN` value. #[inline] pub fn nan() -> Self { Self::Rational(f64::NAN) } /// Creates a new number with `Infinity` value. #[inline] pub fn positive_infinity() -> Self { Self::Rational(f64::INFINITY) } /// Creates a new number with `-Infinity` value. #[inline] pub fn negative_infinity() -> Self { Self::Rational(f64::NEG_INFINITY) } /// Returns true if the value is an object #[inline] pub fn is_object(&self) -> bool { matches!(self, Self::Object(_)) } #[inline] pub fn as_object(&self) -> Option<&JsObject> { match *self { Self::Object(ref o) => Some(o), _ => None, } } /// It determines if the value is a callable function with a `[[Call]]` internal method. /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-iscallable #[inline] pub fn is_callable(&self) -> bool { matches!(self, Self::Object(obj) if obj.is_callable()) } #[inline] pub fn as_callable(&self) -> Option<&JsObject> { self.as_object().filter(|obj| obj.is_callable()) } /// Returns true if the value is a constructor object #[inline] pub fn is_constructor(&self) -> bool { matches!(self, Self::Object(obj) if obj.is_constructor()) } #[inline] pub fn as_constructor(&self) -> Option<&JsObject> { self.as_object().filter(|obj| obj.is_constructor()) } /// Returns true if the value is a symbol. 
#[inline] pub fn is_symbol(&self) -> bool { matches!(self, Self::Symbol(_)) } pub fn as_symbol(&self) -> Option<JsSymbol> { match self { Self::Symbol(symbol) => Some(symbol.clone()), _ => None, } } /// Returns true if the value is undefined. #[inline] pub fn is_undefined(&self) -> bool { matches!(self, Self::Undefined) } /// Returns true if the value is null. #[inline] pub fn is_null(&self) -> bool { matches!(self, Self::Null) } /// Returns true if the value is null or undefined. #[inline] pub fn is_null_or_undefined(&self) -> bool { matches!(self, Self::Null | Self::Undefined) } /// Returns true if the value is a 64-bit floating-point number. #[inline] pub fn is_double(&self) -> bool { matches!(self, Self::Rational(_)) } /// Returns true if the value is integer. #[inline] #[allow(clippy::float_cmp)] pub fn is_integer(&self) -> bool { // If it can fit in a i32 and the trucated version is // equal to the original then it is an integer. let is_racional_intiger = |n: f64| n == f64::from(n as i32); match *self { Self::Integer(_) => true, Self::Rational(n) if is_racional_intiger(n) => true, _ => false, } } /// Returns true if the value is a number. #[inline] pub fn is_number(&self) -> bool { matches!(self, Self::Rational(_) | Self::Integer(_)) } #[inline] pub fn as_number(&self) -> Option<f64> { match *self { Self::Integer(integer) => Some(integer.into()), Self::Rational(rational) => Some(rational), _ => None, } } /// Returns true if the value is a string. #[inline] pub fn is_string(&self) -> bool { matches!(self, Self::String(_)) } /// Returns the string if the values is a string, otherwise `None`. #[inline] pub fn as_string(&self) -> Option<&JsString> { match self { Self::String(ref string) => Some(string), _ => None, } } /// Returns true if the value is a boolean. 
#[inline] pub fn is_boolean(&self) -> bool { matches!(self, Self::Boolean(_)) } #[inline] pub fn as_boolean(&self) -> Option<bool> { match self { Self::Boolean(boolean) => Some(*boolean), _ => None, } } /// Returns true if the value is a bigint. #[inline] pub fn is_bigint(&self) -> bool { matches!(self, Self::BigInt(_)) } /// Returns an optional reference to a `BigInt` if the value is a `BigInt` primitive. #[inline] pub fn as_bigint(&self) -> Option<&JsBigInt> { match self { Self::BigInt(bigint) => Some(bigint), _ => None, } } /// Converts the value to a `bool` type. /// /// More information: /// - [ECMAScript][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-toboolean pub fn to_boolean(&self) -> bool { match *self { Self::Undefined | Self::Null => false, Self::Symbol(_) | Self::Object(_) => true, Self::String(ref s) if !s.is_empty() => true, Self::Rational(n) if n != 0.0 && !n.is_nan() => true, Self::Integer(n) if n != 0 => true, Self::BigInt(ref n) if !n.is_zero() => true, Self::Boolean(v) => v, _ => false, } } /// Resolve the property in the object. /// /// A copy of the Property is returned. pub(crate) fn get_property<Key>(&self, key: Key) -> Option<PropertyDescriptor> where Key: Into<PropertyKey>, { let key = key.into(); let _timer = BoaProfiler::global().start_event("Value::get_property", "value"); match self { Self::Object(ref object) => { // TODO: had to skip `__get_own_properties__` since we don't have context here let property = object.borrow().properties().get(&key).cloned(); if property.is_some() { return property; } object .prototype() .as_ref() .map_or(Self::Null, |obj| obj.clone().into()) .get_property(key) } _ => None, } } /** Resolve the property in the object and get its value, or undefined if this is not an object or the field doesn't exist `get_field` receives a Property from get_prop(). 
It should then return the `[[Get]]` result value if that's set, otherwise fall back to `[[Value]]` */ pub(crate) fn get_field<K>(&self, key: K, context: &mut Context) -> JsResult<Self> where K: Into<PropertyKey>, { let _timer = BoaProfiler::global().start_event("Value::get_field", "value"); if let Self::Object(ref obj) = *self { obj.clone() .__get__(&key.into(), obj.clone().into(), context) } else { Ok(Self::undefined()) } } /// Set the field in the value /// /// Similar to `7.3.4 Set ( O, P, V, Throw )`, but returns the value instead of a boolean. /// /// More information: /// - [ECMAScript][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-set-o-p-v-throw #[inline] pub(crate) fn set_field<K, V>( &self, key: K, value: V, throw: bool, context: &mut Context, ) -> JsResult<Self> where K: Into<PropertyKey>, V: Into<Self>, { // 1. Assert: Type(O) is Object. // TODO: Currently the value may not be an object. // In that case this function does nothing. // 2. Assert: IsPropertyKey(P) is true. // 3. Assert: Type(Throw) is Boolean. let key = key.into(); let value = value.into(); let _timer = BoaProfiler::global().start_event("Value::set_field", "value"); if let Self::Object(ref obj) = *self { // 4. Let success be ? O.[[Set]](P, V, O). let success = obj .clone() .__set__(key, value.clone(), obj.clone().into(), context)?; // 5. If success is false and Throw is true, throw a TypeError exception. // 6. Return success. if !success && throw { return context.throw_type_error("Cannot assign value to property"); } return Ok(value); } Ok(value) } /// Set the kind of an object. #[inline] pub fn set_data(&self, data: ObjectData) { if let Self::Object(ref obj) = *self { obj.borrow_mut().data = data; } } /// Set the property in the value. 
#[inline] pub(crate) fn set_property<K, P>(&self, key: K, property: P) where K: Into<PropertyKey>, P: Into<PropertyDescriptor>, { if let Some(object) = self.as_object() { object.insert(key.into(), property.into()); } } /// The abstract operation `ToPrimitive` takes an input argument and an optional argumen`PreferredType`pe. /// /// <https://tc39.es/ecma262/#sec-toprimitive> pub fn to_primitive( &self, context: &mut Context, preferred_type: PreferredType, ) -> JsResult<Self> { // 1. Assert: input is an ECMAScript language value. (always a value not need to check) // 2. If Type(input) is Object, then if self.is_object() { // a. Let exoticToPrim be ? GetMethod(input, @@toPrimitive). let exotic_to_prim = self.get_method(WellKnownSymbols::to_primitive(), context)?; // b. If exoticToPrim is not undefined, then if let Some(exotic_to_prim) = exotic_to_prim { // i. If preferredType is not present, let hint be "default". // ii. Else if preferredType is string, let hint be "string". // iii. Else, // 1. Assert: preferredType is number. // 2. Let hint be "number". let hint = match preferred_type { PreferredType::Default => "default", PreferredType::String => "string", PreferredType::Number => "number", } .into(); // iv. Let result be ? Call(exoticToPrim, input, « hint »). let result = exotic_to_prim.call(self, &[hint], context)?; // v. If Type(result) is not Object, return result. // vi. Throw a TypeError exception. return if result.is_object() { context.throw_type_error("Symbol.toPrimitive cannot return an object") } else { Ok(result) }; } // c. If preferredType is not present, let preferredType be number. let preferred_type = match preferred_type { PreferredType::Default | PreferredType::Number => PreferredType::Number, PreferredType::String => PreferredType::String, }; // d. Return ? OrdinaryToPrimitive(input, preferredType). self.as_object() .expect("self was not an object") .ordinary_to_primitive(context, preferred_type) } else { // 3. Return input. 
Ok(self.clone()) } } /// `7.1.13 ToBigInt ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-tobigint pub fn to_bigint(&self, context: &mut Context) -> JsResult<JsBigInt> { match self { JsValue::Null => context.throw_type_error("cannot convert null to a BigInt"), JsValue::Undefined => context.throw_type_error("cannot convert undefined to a BigInt"), JsValue::String(ref string) => { if let Some(value) = JsBigInt::from_string(string) { Ok(value) } else { context.throw_syntax_error(format!( "cannot convert string '{string}' to bigint primitive", )) } } JsValue::Boolean(true) => Ok(JsBigInt::one()), JsValue::Boolean(false) => Ok(JsBigInt::zero()), JsValue::Integer(_) | JsValue::Rational(_) => { context.throw_type_error("cannot convert Number to a BigInt") } JsValue::BigInt(b) => Ok(b.clone()), JsValue::Object(_) => { let primitive = self.to_primitive(context, PreferredType::Number)?; primitive.to_bigint(context) } JsValue::Symbol(_) => context.throw_type_error("cannot convert Symbol to a BigInt"), } } /// Returns an object that implements `Display`. /// /// # Examples /// /// ``` /// use boa::JsValue; /// /// let value = JsValue::new(3); /// /// println!("{}", value.display()); /// ``` #[inline] pub fn display(&self) -> ValueDisplay<'_> { ValueDisplay { value: self } } /// Converts the value to a string. /// /// This function is equivalent to `String(value)` in JavaScript. 
pub fn to_string(&self, context: &mut Context) -> JsResult<JsString> { match self { JsValue::Null => Ok("null".into()), JsValue::Undefined => Ok("undefined".into()), JsValue::Boolean(boolean) => Ok(boolean.to_string().into()), JsValue::Rational(rational) => Ok(Number::to_native_string(*rational).into()), JsValue::Integer(integer) => Ok(integer.to_string().into()), JsValue::String(string) => Ok(string.clone()), JsValue::Symbol(_) => context.throw_type_error("can't convert symbol to string"), JsValue::BigInt(ref bigint) => Ok(bigint.to_string().into()), JsValue::Object(_) => { let primitive = self.to_primitive(context, PreferredType::String)?; primitive.to_string(context) } } } /// Converts the value to an Object. /// /// This function is equivalent to `Object(value)` in JavaScript. /// /// See: <https://tc39.es/ecma262/#sec-toobject> pub fn to_object(&self, context: &mut Context) -> JsResult<JsObject> { match self { JsValue::Undefined | JsValue::Null => { context.throw_type_error("cannot convert 'null' or 'undefined' to object") } JsValue::Boolean(boolean) => { let prototype = context.standard_objects().boolean_object().prototype(); Ok(JsObject::from_proto_and_data( prototype, ObjectData::boolean(*boolean), )) } JsValue::Integer(integer) => { let prototype = context.standard_objects().number_object().prototype(); Ok(JsObject::from_proto_and_data( prototype, ObjectData::number(f64::from(*integer)), )) } JsValue::Rational(rational) => { let prototype = context.standard_objects().number_object().prototype(); Ok(JsObject::from_proto_and_data( prototype, ObjectData::number(*rational), )) } JsValue::String(ref string) => { let prototype = context.standard_objects().string_object().prototype(); let object = JsObject::from_proto_and_data(prototype, ObjectData::string(string.clone())); // Make sure the correct length is set on our new string object object.insert_property( "length", PropertyDescriptor::builder() .value(string.encode_utf16().count()) .writable(false) 
.enumerable(false) .configurable(false), ); Ok(object) } JsValue::Symbol(ref symbol) => { let prototype = context.standard_objects().symbol_object().prototype(); Ok(JsObject::from_proto_and_data( prototype, ObjectData::symbol(symbol.clone()), )) } JsValue::BigInt(ref bigint) => { let prototype = context.standard_objects().bigint_object().prototype(); Ok(JsObject::from_proto_and_data( prototype, ObjectData::big_int(bigint.clone()), )) } JsValue::Object(jsobject) => Ok(jsobject.clone()), } } /// Converts the value to a `PropertyKey`, that can be used as a key for properties. /// /// See <https://tc39.es/ecma262/#sec-topropertykey> pub fn to_property_key(&self, context: &mut Context) -> JsResult<PropertyKey> { Ok(match self { // Fast path: JsValue::String(string) => string.clone().into(), JsValue::Symbol(symbol) => symbol.clone().into(), // Slow path: _ => match self.to_primitive(context, PreferredType::String)? { JsValue::String(ref string) => string.clone().into(), JsValue::Symbol(ref symbol) => symbol.clone().into(), primitive => primitive.to_string(context)?.into(), }, }) } /// It returns value converted to a numeric value of type `Number` or `BigInt`. /// /// See: <https://tc39.es/ecma262/#sec-tonumeric> pub fn to_numeric(&self, context: &mut Context) -> JsResult<Numeric> { let primitive = self.to_primitive(context, PreferredType::Number)?; if let Some(bigint) = primitive.as_bigint() { return Ok(bigint.clone().into()); } Ok(self.to_number(context)?.into()) } /// Converts a value to an integral 32 bit unsigned integer. /// /// This function is equivalent to `value | 0` in JavaScript /// /// See: <https://tc39.es/ecma262/#sec-touint32> pub fn to_u32(&self, context: &mut Context) -> JsResult<u32> { // This is the fast path, if the value is Integer we can just return it. if let JsValue::Integer(number) = *self { return Ok(number as u32); } let number = self.to_number(context)?; Ok(f64_to_uint32(number)) } /// Converts a value to an integral 32 bit signed integer. 
/// /// See: <https://tc39.es/ecma262/#sec-toint32> pub fn to_i32(&self, context: &mut Context) -> JsResult<i32> { // This is the fast path, if the value is Integer we can just return it. if let JsValue::Integer(number) = *self { return Ok(number); } let number = self.to_number(context)?; Ok(f64_to_int32(number)) } /// `7.1.10 ToInt8 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-toint8 pub fn to_int8(&self, context: &mut Context) -> JsResult<i8> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, +0𝔽, -0𝔽, +∞𝔽, or -∞𝔽, return +0𝔽. if number.is_nan() || number.is_zero() || number.is_infinite() { return Ok(0); } // 3. Let int be the mathematical value whose sign is the sign of number and whose magnitude is floor(abs(ℝ(number))). let int = number.floor() as i64; // 4. Let int8bit be int modulo 2^8. let int_8_bit = int % 2i64.pow(8); // 5. If int8bit ≥ 2^7, return 𝔽(int8bit - 2^8); otherwise return 𝔽(int8bit). if int_8_bit >= 2i64.pow(7) { Ok((int_8_bit - 2i64.pow(8)) as i8) } else { Ok(int_8_bit as i8) } } /// `7.1.11 ToUint8 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-touint8 pub fn to_uint8(&self, context: &mut Context) -> JsResult<u8> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, +0𝔽, -0𝔽, +∞𝔽, or -∞𝔽, return +0𝔽. if number.is_nan() || number.is_zero() || number.is_infinite() { return Ok(0); } // 3. Let int be the mathematical value whose sign is the sign of number and whose magnitude is floor(abs(ℝ(number))). let int = number.floor() as i64; // 4. Let int8bit be int modulo 2^8. let int_8_bit = int % 2i64.pow(8); // 5. Return 𝔽(int8bit). 
Ok(int_8_bit as u8) } /// `7.1.12 ToUint8Clamp ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-touint8clamp pub fn to_uint8_clamp(&self, context: &mut Context) -> JsResult<u8> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, return +0𝔽. if number.is_nan() { return Ok(0); } // 3. If ℝ(number) ≤ 0, return +0𝔽. if number <= 0.0 { return Ok(0); } // 4. If ℝ(number) ≥ 255, return 255𝔽. if number >= 255.0 { return Ok(255); } // 5. Let f be floor(ℝ(number)). let f = number.floor(); // 6. If f + 0.5 < ℝ(number), return 𝔽(f + 1). if f + 0.5 < number { return Ok(f as u8 + 1); } // 7. If ℝ(number) < f + 0.5, return 𝔽(f). if number < f + 0.5 { return Ok(f as u8); } // 8. If f is odd, return 𝔽(f + 1). if f as u8 % 2 != 0 { return Ok(f as u8 + 1); } // 9. Return 𝔽(f). Ok(f as u8) } /// `7.1.8 ToInt16 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-toint16 pub fn to_int16(&self, context: &mut Context) -> JsResult<i16> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, +0𝔽, -0𝔽, +∞𝔽, or -∞𝔽, return +0𝔽. if number.is_nan() || number.is_zero() || number.is_infinite() { return Ok(0); } // 3. Let int be the mathematical value whose sign is the sign of number and whose magnitude is floor(abs(ℝ(number))). let int = number.floor() as i64; // 4. Let int16bit be int modulo 2^16. let int_16_bit = int % 2i64.pow(16); // 5. If int16bit ≥ 2^15, return 𝔽(int16bit - 2^16); otherwise return 𝔽(int16bit). if int_16_bit >= 2i64.pow(15) { Ok((int_16_bit - 2i64.pow(16)) as i16) } else { Ok(int_16_bit as i16) } } /// `7.1.9 ToUint16 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-touint16 pub fn to_uint16(&self, context: &mut Context) -> JsResult<u16> { // 1. Let number be ? 
ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, +0𝔽, -0𝔽, +∞𝔽, or -∞𝔽, return +0𝔽. if number.is_nan() || number.is_zero() || number.is_infinite() { return Ok(0); } // 3. Let int be the mathematical value whose sign is the sign of number and whose magnitude is floor(abs(ℝ(number))). let int = number.floor() as i64; // 4. Let int16bit be int modulo 2^16. let int_16_bit = int % 2i64.pow(16); // 5. Return 𝔽(int16bit). Ok(int_16_bit as u16) } /// `7.1.15 ToBigInt64 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-tobigint64 pub fn to_big_int64(&self, context: &mut Context) -> JsResult<BigInt> { // 1. Let n be ? ToBigInt(argument). let n = self.to_bigint(context)?; // 2. Let int64bit be ℝ(n) modulo 2^64. let int64_bit = n.as_inner().mod_floor(&TWO_E_64); // 3. If int64bit ≥ 2^63, return ℤ(int64bit - 2^64); otherwise return ℤ(int64bit). if int64_bit >= *TWO_E_63 { Ok(int64_bit.sub(&*TWO_E_64)) } else { Ok(int64_bit) } } /// `7.1.16 ToBigUint64 ( argument )` /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-tobiguint64 pub fn to_big_uint64(&self, context: &mut Context) -> JsResult<BigInt> { let two_e_64: u128 = 0x1_0000_0000_0000_0000; let two_e_64 = BigInt::from(two_e_64); // 1. Let n be ? ToBigInt(argument). let n = self.to_bigint(context)?; // 2. Let int64bit be ℝ(n) modulo 2^64. // 3. Return ℤ(int64bit). Ok(n.as_inner().mod_floor(&two_e_64)) } /// Converts a value to a non-negative integer if it is a valid integer index value. 
/// /// See: <https://tc39.es/ecma262/#sec-toindex> pub fn to_index(&self, context: &mut Context) -> JsResult<usize> { if self.is_undefined() { return Ok(0); } let integer_index = self.to_integer(context)?; if integer_index < 0.0 { return context.throw_range_error("Integer index must be >= 0"); } if integer_index > Number::MAX_SAFE_INTEGER { return context.throw_range_error("Integer index must be less than 2**(53) - 1"); } Ok(integer_index as usize) } /// Converts argument to an integer suitable for use as the length of an array-like object. /// /// See: <https://tc39.es/ecma262/#sec-tolength> pub fn to_length(&self, context: &mut Context) -> JsResult<usize> { // 1. Let len be ? ToInteger(argument). let len = self.to_integer(context)?; // 2. If len ≤ +0, return +0. if len < 0.0 { return Ok(0); } // 3. Return min(len, 2^53 - 1). Ok(len.min(Number::MAX_SAFE_INTEGER) as usize) } /// Converts a value to an integral Number value. /// /// See: <https://tc39.es/ecma262/#sec-tointeger> pub fn to_integer(&self, context: &mut Context) -> JsResult<f64> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is +∞ or -∞, return number. if !number.is_finite() { // 3. If number is NaN, +0, or -0, return +0. if number.is_nan() { return Ok(0.0); } return Ok(number); } // 4. Let integer be the Number value that is the same sign as number and whose magnitude is floor(abs(number)). // 5. If integer is -0, return +0. // 6. Return integer. Ok(number.trunc() + 0.0) // We add 0.0 to convert -0.0 to +0.0 } /// Converts a value to a double precision floating point. 
/// /// This function is equivalent to the unary `+` operator (`+value`) in JavaScript /// /// See: <https://tc39.es/ecma262/#sec-tonumber> pub fn to_number(&self, context: &mut Context) -> JsResult<f64> { match *self { JsValue::Null => Ok(0.0), JsValue::Undefined => Ok(f64::NAN), JsValue::Boolean(b) => Ok(if b { 1.0 } else { 0.0 }), JsValue::String(ref string) => Ok(string.string_to_number()), JsValue::Rational(number) => Ok(number), JsValue::Integer(integer) => Ok(f64::from(integer)), JsValue::Symbol(_) => context.throw_type_error("argument must not be a symbol"), JsValue::BigInt(_) => context.throw_type_error("argument must not be a bigint"), JsValue::Object(_) => { let primitive = self.to_primitive(context, PreferredType::Number)?; primitive.to_number(context) } } } /// This is a more specialized version of `to_numeric`, including `BigInt`. /// /// This function is equivalent to `Number(value)` in JavaScript /// /// See: <https://tc39.es/ecma262/#sec-tonumeric> pub fn to_numeric_number(&self, context: &mut Context) -> JsResult<f64> { let primitive = self.to_primitive(context, PreferredType::Number)?; if let Some(bigint) = primitive.as_bigint() { return Ok(bigint.to_f64()); } primitive.to_number(context) } /// Check if the `Value` can be converted to an `Object` /// /// The abstract operation `RequireObjectCoercible` takes argument argument. /// It throws an error if argument is a value that cannot be converted to an Object using `ToObject`. 
/// It is defined by [Table 15][table] /// /// More information: /// - [ECMAScript reference][spec] /// /// [table]: https://tc39.es/ecma262/#table-14 /// [spec]: https://tc39.es/ecma262/#sec-requireobjectcoercible #[inline] pub fn require_object_coercible(&self, context: &mut Context) -> JsResult<&Self> { if self.is_null_or_undefined() { context.throw_type_error("cannot convert null or undefined to Object") } else { Ok(self) } } #[inline] pub fn to_property_descriptor(&self, context: &mut Context) -> JsResult<PropertyDescriptor> { // 1. If Type(Obj) is not Object, throw a TypeError exception. self.as_object() .ok_or_else(|| { context.construct_type_error( "Cannot construct a property descriptor from a non-object", ) }) .and_then(|obj| obj.to_property_descriptor(context)) } /// Converts argument to an integer, +∞, or -∞. /// /// See: <https://tc39.es/ecma262/#sec-tointegerorinfinity> pub fn to_integer_or_infinity(&self, context: &mut Context) -> JsResult<IntegerOrInfinity> { // 1. Let number be ? ToNumber(argument). let number = self.to_number(context)?; // 2. If number is NaN, +0𝔽, or -0𝔽, return 0. if number.is_nan() || number == 0.0 || number == -0.0 { Ok(IntegerOrInfinity::Integer(0)) } else if number.is_infinite() && number.is_sign_positive() { // 3. If number is +∞𝔽, return +∞. Ok(IntegerOrInfinity::PositiveInfinity) } else if number.is_infinite() && number.is_sign_negative() { // 4. If number is -∞𝔽, return -∞. Ok(IntegerOrInfinity::NegativeInfinity) } else { // 5. Let integer be floor(abs(ℝ(number))). let integer = number.abs().floor(); let integer = integer.min(Number::MAX_SAFE_INTEGER) as i64; // 6. If number < +0𝔽, set integer to -integer. // 7. Return integer. if number < 0.0 { Ok(IntegerOrInfinity::Integer(-integer)) } else { Ok(IntegerOrInfinity::Integer(integer)) } } } /// `typeof` operator. Returns a string representing the type of the /// given ECMA Value. 
/// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-typeof-operator pub fn type_of(&self) -> JsString { match *self { Self::Rational(_) | Self::Integer(_) => "number", Self::String(_) => "string", Self::Boolean(_) => "boolean", Self::Symbol(_) => "symbol", Self::Null => "object", Self::Undefined => "undefined", Self::BigInt(_) => "bigint", Self::Object(ref object) => { if object.is_callable() { "function" } else { "object" } } } .into() } /// Abstract operation `IsArray ( argument )` /// /// Check if a value is an array. /// /// More information: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-isarray pub(crate) fn is_array(&self, context: &mut Context) -> JsResult<bool> { // Note: The spec specifies this function for JsValue. // The main part of the function is implemented for JsObject. // 1. If Type(argument) is not Object, return false. if let Some(object) = self.as_object() { object.is_array_abstract(context) } // 4. Return false. else { Ok(false) } } } impl Default for JsValue { fn default() -> Self { Self::Undefined } } /// The preffered type to convert an object to a primitive `Value`. #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum PreferredType { String, Number, Default, } /// Numeric value which can be of two types `Number`, `BigInt`. #[derive(Debug, Clone, PartialEq, PartialOrd)] pub enum Numeric { /// Double precision floating point number. Number(f64), /// BigInt an integer of arbitrary size. 
BigInt(JsBigInt), } impl From<f64> for Numeric { #[inline] fn from(value: f64) -> Self { Self::Number(value) } } impl From<f32> for Numeric { #[inline] fn from(value: f32) -> Self { Self::Number(value.into()) } } impl From<i64> for Numeric { #[inline] fn from(value: i64) -> Self { Self::BigInt(value.into()) } } impl From<i32> for Numeric { #[inline] fn from(value: i32) -> Self { Self::Number(value.into()) } } impl From<i16> for Numeric { #[inline] fn from(value: i16) -> Self { Self::Number(value.into()) } } impl From<i8> for Numeric { #[inline] fn from(value: i8) -> Self { Self::Number(value.into()) } } impl From<u64> for Numeric { #[inline] fn from(value: u64) -> Self { Self::BigInt(value.into()) } } impl From<u32> for Numeric { #[inline] fn from(value: u32) -> Self { Self::Number(value.into()) } } impl From<u16> for Numeric { #[inline] fn from(value: u16) -> Self { Self::Number(value.into()) } } impl From<u8> for Numeric { #[inline] fn from(value: u8) -> Self { Self::Number(value.into()) } } impl From<JsBigInt> for Numeric { #[inline] fn from(value: JsBigInt) -> Self { Self::BigInt(value) } } impl From<Numeric> for JsValue { fn from(value: Numeric) -> Self { match value { Numeric::Number(number) => Self::new(number), Numeric::BigInt(bigint) => Self::new(bigint), } } }
32.34569
151
0.542976
8fc2026f7c72f92fd6fd2741e935c242ae88ce62
3,882
#[derive(Clone, PartialEq, ::prost::Message)] pub struct C2s { ///UserInfoField集合,不设置默认返回全部信息 #[prost(int32, optional, tag="2")] pub flag: ::core::option::Option<i32>, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct S2c { ///用户昵称 #[prost(string, optional, tag="1")] pub nick_name: ::core::option::Option<::prost::alloc::string::String>, ///用户头像url #[prost(string, optional, tag="2")] pub avatar_url: ::core::option::Option<::prost::alloc::string::String>, ///api用户等级描述, 已在2.10版本之后废弃 #[prost(string, optional, tag="3")] pub api_level: ::core::option::Option<::prost::alloc::string::String>, ///港股行情权限, QotRight #[prost(int32, optional, tag="4")] pub hk_qot_right: ::core::option::Option<i32>, ///美股行情权限, QotRight #[prost(int32, optional, tag="5")] pub us_qot_right: ::core::option::Option<i32>, ///A股行情权限, QotRight #[prost(int32, optional, tag="6")] pub cn_qot_right: ::core::option::Option<i32>, ///已开户用户需要同意免责声明,未开户或已同意的用户返回false #[prost(bool, optional, tag="7")] pub is_need_agree_disclaimer: ::core::option::Option<bool>, ///用户牛牛号 #[prost(int64, optional, tag="8")] pub user_id: ::core::option::Option<i64>, ///升级类型,UpdateType #[prost(int32, optional, tag="9")] pub update_type: ::core::option::Option<i32>, #[prost(string, optional, tag="10")] pub web_key: ::core::option::Option<::prost::alloc::string::String>, #[prost(string, optional, tag="18")] pub web_jump_url_head: ::core::option::Option<::prost::alloc::string::String>, ///港股期权行情权限, Qot_Common.QotRight #[prost(int32, optional, tag="11")] pub hk_option_qot_right: ::core::option::Option<i32>, ///是否有美股期权行情权限 #[prost(bool, optional, tag="12")] pub has_us_option_qot_right: ::core::option::Option<bool>, ///港股期货行情权限, Qot_Common.QotRight #[prost(int32, optional, tag="13")] pub hk_future_qot_right: ::core::option::Option<i32>, ///订阅额度 #[prost(int32, optional, tag="14")] pub sub_quota: ::core::option::Option<i32>, ///历史K线额度 #[prost(int32, optional, tag="15")] pub history_kl_quota: ::core::option::Option<i32>, ///美股期货行情权限, 
Qot_Common.QotRight #[prost(int32, optional, tag="16")] pub us_future_qot_right: ::core::option::Option<i32>, ///美股期货行情权限, Qot_Common.QotRight #[prost(int32, optional, tag="17")] pub us_option_qot_right: ::core::option::Option<i32>, ///用户注册归属地 #[prost(int32, optional, tag="19")] pub user_attribution: ::core::option::Option<i32>, ///升级提示 #[prost(string, optional, tag="20")] pub update_whats_new: ::core::option::Option<::prost::alloc::string::String>, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct Request { #[prost(message, required, tag="1")] pub c2s: C2s, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct Response { ///返回结果,参见Common.RetType的枚举定义 #[prost(int32, required, tag="1", default="-400")] pub ret_type: i32, ///返回结果描述 #[prost(string, optional, tag="2")] pub ret_msg: ::core::option::Option<::prost::alloc::string::String>, ///错误码,客户端一般通过retType和retMsg来判断结果和详情,errCode只做日志记录,仅在个别协议失败时对账用 #[prost(int32, optional, tag="3")] pub err_code: ::core::option::Option<i32>, #[prost(message, optional, tag="4")] pub s2c: ::core::option::Option<S2c>, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum UpdateType { ///无需升级 None = 0, ///建议升级 Advice = 1, ///强制升级 Force = 2, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum UserInfoField { ///昵称,用户头像,牛牛号 Basic = 1, ///API权限信息 Api = 2, ///市场的行情权限 QotRight = 4, ///免责 Disclaimer = 8, ///升级类型 Update = 16, WebKey = 2048, }
34.660714
89
0.628027
4895cd64af35aaede434bab97a19596c3786cd91
3,272
use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn vminsd_1() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 251, 93, 195], OperandSize::Dword) } #[test] fn vminsd_2() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM4)), operand3: Some(IndirectScaledIndexed(ESI, ESI, Eight, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 219, 93, 12, 246], OperandSize::Dword) } #[test] fn vminsd_3() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM3)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 227, 93, 227], OperandSize::Qword) } #[test] fn vminsd_4() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledIndexed(RBX, RCX, Two, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 203, 93, 20, 75], OperandSize::Qword) } #[test] fn vminsd_5() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: true, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 255, 159, 93, 236], OperandSize::Dword) } #[test] fn 
vminsd_6() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledIndexedDisplaced(ECX, EAX, Eight, 647280727, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 207, 143, 93, 140, 193, 87, 184, 148, 38], OperandSize::Dword) } #[test] fn vminsd_7() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM25)), operand2: Some(Direct(XMM10)), operand3: Some(Direct(XMM31)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: true, mask: Some(MaskReg::K4), broadcast: None }, &[98, 1, 175, 156, 93, 207], OperandSize::Qword) } #[test] fn vminsd_8() { run_test(&Instruction { mnemonic: Mnemonic::VMINSD, operand1: Some(Direct(XMM16)), operand2: Some(Direct(XMM8)), operand3: Some(IndirectDisplaced(RCX, 687927536, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 225, 191, 137, 93, 129, 240, 240, 0, 41], OperandSize::Qword) }
66.77551
438
0.705685
f46983118d72a7598e9ef1779971ced267abc6f3
2,303
// This file is part of HydraDX. // Copyright (C) 2020-2021 Intergalactic, Limited (GIB). // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![cfg(feature = "runtime-benchmarks")] use super::*; use codec::Decode; use frame_benchmarking::benchmarks; use frame_system::RawOrigin; use hex_literal::hex; benchmarks! { claim { let alice_id = hex!["d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d"]; let signature = hex!["bcae7d4f96f71cf974c173ae936a1a79083af7f76232efbf8a568b7f990eceed73c2465bba769de959b7f6ac5690162b61eb90949901464d0fa158a83022a0741c"]; #[cfg(test)] let alice_id = hex!["2a00000000000000"]; #[cfg(test)] let signature = hex!["5b2b46b0162f4b4431f154c4b9fc5ba923690b98b0c2063720799da54cb35a354304102ede62977ba556f0b03e67710522d4b7523547c62fcdc5acea59c99aa41b"]; let caller = T::AccountId::decode(&mut &alice_id[..]).unwrap_or_default(); let eth_address = EthereumAddress(hex!["8202c0af5962b750123ce1a9b12e1c30a4973557"]); Claims::<T>::insert(eth_address, T::CurrencyBalance::from(1_000_000_000_000_000_000_u128).into()); }: _(RawOrigin::Signed(caller.clone()), EcdsaSignature(signature)) verify { let expected_balance = T::CurrencyBalance::from(2_000_000_000_000_000_000_u128); #[cfg(test)] let expected_balance = T::CurrencyBalance::from(1_000_000_000_000_000_000_u128); assert_eq!(T::Currency::free_balance(&caller), expected_balance.into()); assert_eq!(Claims::<T>::get(eth_address), T::CurrencyBalance::from(0u128).into()); } } 
#[cfg(test)] mod tests { use super::mock::Test; use super::*; use crate::tests::new_test_ext; use frame_support::assert_ok; #[test] fn test_benchmarks() { new_test_ext().execute_with(|| { assert_ok!(test_benchmark_claim::<Test>()); }); } }
34.893939
157
0.756839
1a3d7821159c3fc5874697724a42866f31e72deb
1,141
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Test that shadowed lifetimes generate an error. #![feature(box_syntax)] struct Foo<T>; impl<T> Foo<T> { fn shadow_in_method<T>(&self) {} //~^ ERROR type parameter `T` shadows another type parameter fn not_shadow_in_item<U>(&self) { struct Bar<T, U>; // not a shadow, separate item fn foo<T, U>() {} // same } } trait Bar<T> { fn shadow_in_required<T>(&self); //~^ ERROR type parameter `T` shadows another type parameter fn shadow_in_provided<T>(&self) {} //~^ ERROR type parameter `T` shadows another type parameter fn not_shadow_in_required<U>(&self); fn not_shadow_in_provided<U>(&self) {} } fn main() {}
29.25641
68
0.680982
89461853bdfa300013e7f1aa7bb6934a6f8b3f95
19,523
use crate::{ behavior::{commit_block, verify_block}, block_proposal::BlockProposal, network::raft::{ message::{NewBlockRequest, NewBlockResponse}, rpc::get_block_proposal, }, snapshot::Snapshot, }; use async_raft::{ raft::{Entry, EntryPayload, MembershipConfig}, storage::{CurrentSnapshotData, HardState, InitialState}, RaftStorage, }; use async_trait::async_trait; use itertools::process_results; use serde::{Deserialize, Serialize}; use slimchain_chain::{ config::ChainConfig, consensus::raft::{verify_consensus, Block}, db::{DBPtr, Transaction as DBTransaction}, latest::{LatestBlockHeaderPtr, LatestTxCount, LatestTxCountPtr}, loader::BlockLoaderTrait, }; use slimchain_common::{ basic::{BlockHeight, H256}, digest::Digestible, error::{anyhow, ensure, Result}, tx::TxTrait, }; use slimchain_network::{ behavior::raft::client_network::fetch_leader_id, http::config::{NetworkConfig, NetworkRouteTable, PeerId}, }; use slimchain_tx_state::TxStateUpdate; use slimchain_utils::serde::{binary_decode, binary_encode}; use std::{collections::BTreeSet, io::Cursor, marker::PhantomData}; use tokio::sync::{Mutex, RwLock}; #[derive(Clone, Serialize, Deserialize)] struct RaftSnapshot { index: u64, term: u64, membership: MembershipConfig, snapshot: Snapshot<Block>, } #[derive(Clone, Serialize, Deserialize)] struct RaftStateMachine { last_applied_log: u64, snapshot: Snapshot<Block>, } #[derive(Debug, Copy, Clone, thiserror::Error)] pub enum ShutdownError {} pub struct ClientNodeStorage<Tx: TxTrait> { peer_id: PeerId, chain_cfg: ChainConfig, route_table: NetworkRouteTable, latest_block_header: LatestBlockHeaderPtr, latest_tx_count: LatestTxCountPtr, db: DBPtr, raft_log: RwLock<BTreeSet<u64>>, raft_snapshot: RwLock<Option<RaftSnapshot>>, raft_sm: RwLock<RaftStateMachine>, miner_snapshot: Mutex<Option<(H256, Snapshot<Block>, TxStateUpdate)>>, _marker: PhantomData<Tx>, } impl<Tx> ClientNodeStorage<Tx> where Tx: TxTrait + Serialize + for<'de> Deserialize<'de> + 'static, { pub fn new(db: DBPtr, 
chain_cfg: &ChainConfig, net_cfg: &NetworkConfig) -> Result<Self> { let snapshot = Snapshot::<Block>::load_from_db(&db, chain_cfg.state_len)?; let latest_block_header = snapshot.to_latest_block_header(); let latest_tx_count = LatestTxCount::new(0); let last_applied_log = db.get_meta_object("raft-last-applied")?.unwrap_or_default(); let log = db.get_meta_object("raft-log")?.unwrap_or_default(); let last_snapshot = db.get_meta_object("raft-snapshot")?.unwrap_or_default(); Ok(Self { peer_id: net_cfg.peer_id, chain_cfg: chain_cfg.clone(), route_table: net_cfg.to_route_table(), latest_block_header, latest_tx_count, db, raft_log: RwLock::new(log), raft_snapshot: RwLock::new(last_snapshot), raft_sm: RwLock::new(RaftStateMachine { last_applied_log, snapshot, }), miner_snapshot: Mutex::new(None), _marker: PhantomData, }) } pub async fn get_block_proposal( &self, height: BlockHeight, ) -> Result<BlockProposal<Block, Tx>> { let block = self.db.get_block(height)?; BlockProposal::from_existing_block(block, &self.db) } pub fn latest_block_header(&self) -> LatestBlockHeaderPtr { self.latest_block_header.clone() } pub fn latest_tx_count(&self) -> LatestTxCountPtr { self.latest_tx_count.clone() } pub async fn latest_snapshot(&self) -> Snapshot<Block> { let sm = self.raft_sm.read().await; sm.snapshot.clone() } pub async fn set_miner_snapshot( &self, blk_proposal: &BlockProposal<Block, Tx>, snapshot: Snapshot<Block>, state_update: TxStateUpdate, ) { let blk_hash = blk_proposal.get_block().to_digest(); let mut miner_snapshot = self.miner_snapshot.lock().await; *miner_snapshot = Some((blk_hash, snapshot, state_update)); } pub async fn reset_miner_snapshot(&self) { *self.miner_snapshot.lock().await = None; } #[tracing::instrument(level = "debug", skip(self), err)] pub async fn save_to_db(&self) -> Result<()> { let sm = self.raft_sm.read().await; let log = self.raft_log.read().await; let snapshot = self.raft_snapshot.read().await; let mut db_tx = sm.snapshot.write_db_tx()?; 
db_tx.insert_meta_object("raft-last-applied", &sm.last_applied_log)?; db_tx.insert_meta_object("raft-log", &(*log))?; db_tx.insert_meta_object("raft-snapshot", &(*snapshot))?; self.db.write_async(db_tx).await } fn read_log(&self, idx: u64) -> Result<Entry<NewBlockRequest<Tx>>> { self.db .get_log_object(idx)? .ok_or_else(|| anyhow!("Failed to read raft log. idx={}", idx)) } } #[async_trait] impl<Tx> RaftStorage<NewBlockRequest<Tx>, NewBlockResponse> for ClientNodeStorage<Tx> where Tx: TxTrait + Serialize + for<'de> Deserialize<'de> + 'static, { type Snapshot = Cursor<Vec<u8>>; type ShutdownError = ShutdownError; #[tracing::instrument(level = "debug", skip(self), err)] async fn get_membership_config(&self) -> Result<MembershipConfig> { let log = self.raft_log.read().await; let cfg_opt = process_results( log.iter().rev().map(|idx| self.read_log(*idx)), |mut iter| { iter.find_map(|entry| match &entry.payload { EntryPayload::ConfigChange(cfg) => Some(cfg.membership.clone()), EntryPayload::SnapshotPointer(snap) => Some(snap.membership.clone()), _ => None, }) }, )?; Ok(match cfg_opt { Some(cfg) => cfg, None => MembershipConfig::new_initial(self.peer_id.into()), }) } #[tracing::instrument(level = "debug", skip(self), err)] async fn get_initial_state(&self) -> Result<InitialState> { let membership = self.get_membership_config().await?; let log = self.raft_log.read().await; let sm = self.raft_sm.read().await; let hs: Option<HardState> = self.db.get_meta_object("raft-hs")?; let state = match hs { Some(hard_state) => { let (last_log_index, last_log_term) = match log.iter().rev().next() { Some(idx) => { let log_entry = self.read_log(*idx)?; (log_entry.index, log_entry.term) } None => (0, 0), }; let last_applied_log = sm.last_applied_log; InitialState { last_log_index, last_log_term, last_applied_log, hard_state, membership, } } None => { let new = InitialState::new_initial(self.peer_id.into()); self.save_hard_state(&new.hard_state).await?; new } }; Ok(state) } 
#[tracing::instrument(level = "debug", skip(self, hs), err)] async fn save_hard_state(&self, hs: &HardState) -> Result<()> { let mut db_tx = DBTransaction::new(); db_tx.insert_meta_object("raft-hs", hs)?; self.db.write_async(db_tx).await } #[tracing::instrument(level = "debug", skip(self), err)] async fn get_log_entries( &self, start: u64, stop: u64, ) -> Result<Vec<Entry<NewBlockRequest<Tx>>>> { if start > stop { error!("invalid request, start > stop"); return Ok(vec![]); } let _log = self.raft_log.read().await; Ok((start..stop) .filter_map(|idx| self.read_log(idx).ok()) .collect()) } #[tracing::instrument(level = "debug", skip(self), err)] async fn delete_logs_from(&self, start: u64, stop: Option<u64>) -> Result<()> { if let Some(stop) = stop { if start > stop { error!("invalid request, start > stop"); return Ok(()); } } let mut db_tx = DBTransaction::new(); let mut log = self.raft_log.write().await; if let Some(stop) = stop.as_ref() { for key in start..*stop { log.remove(&key); db_tx.delete_log_object(key); } } else { for key in log.split_off(&start) { db_tx.delete_log_object(key); } } self.db.write_async(db_tx).await } #[tracing::instrument(level = "debug", skip(self), err)] async fn append_entry_to_log(&self, entry: &Entry<NewBlockRequest<Tx>>) -> Result<()> { let mut db_tx = DBTransaction::new(); let mut log = self.raft_log.write().await; log.insert(entry.index); db_tx.insert_log_object(entry.index, entry)?; self.db.write_async(db_tx).await } #[tracing::instrument(level = "debug", skip(self), err)] async fn replicate_to_log(&self, entries: &[Entry<NewBlockRequest<Tx>>]) -> Result<()> { let mut db_tx = DBTransaction::new(); let mut log = self.raft_log.write().await; for entry in entries { log.insert(entry.index); db_tx.insert_log_object(entry.index, entry)?; } self.db.write_async(db_tx).await } #[tracing::instrument(level = "debug", skip(self), err)] async fn apply_entry_to_state_machine( &self, index: &u64, data: &NewBlockRequest<Tx>, ) -> 
Result<NewBlockResponse> { let mut sm = self.raft_sm.write().await; let blk_proposal = &data.0; let blk_proposal_height = blk_proposal.get_block_height(); let snapshot_height = sm.snapshot.current_height(); if blk_proposal_height <= snapshot_height { return Ok(NewBlockResponse::Ok); } else if blk_proposal_height != snapshot_height.next_height() { let err = format!( "Invalid block height. curr: {}, proposal: {}", snapshot_height, blk_proposal_height ); return Ok(NewBlockResponse::Err(err)); } let mut snapshot = sm.snapshot.clone(); let mut state_update = TxStateUpdate::default(); let mut miner = false; let mut miner_snapshot = self.miner_snapshot.lock().await; if let Some((blk_hash, m_snapshot, m_state_update)) = miner_snapshot.take() { if blk_proposal.get_block().to_digest() == blk_hash { snapshot = m_snapshot; state_update = m_state_update; miner = true; } } if !miner { match verify_block( &self.chain_cfg, &self.db, &mut snapshot, blk_proposal, verify_consensus, ) .await { Ok(update) => { state_update = update; } Err(e) => { let err = format!("Failed to import block. Error: {}", e); return Ok(NewBlockResponse::Err(err)); } } } if let Err(e) = commit_block( blk_proposal, &state_update, &self.db, &self.latest_block_header, &self.latest_tx_count, ) .await { let err = format!("Failed to commit block. 
Error: {}", e); return Ok(NewBlockResponse::Err(err)); } sm.last_applied_log = *index; sm.snapshot = snapshot; Ok(NewBlockResponse::Ok) } #[tracing::instrument(level = "debug", skip(self), err)] async fn replicate_to_state_machine( &self, entries: &[(&u64, &NewBlockRequest<Tx>)], ) -> Result<()> { for (index, data) in entries { self.apply_entry_to_state_machine(*index, *data).await?; } Ok(()) } #[tracing::instrument(level = "debug", skip(self), err)] async fn do_log_compaction(&self) -> Result<CurrentSnapshotData<Self::Snapshot>> { let sm_copy = self.raft_sm.read().await.clone(); let last_applied_log = sm_copy.last_applied_log; let membership_config = { let log = self.raft_log.read().await; process_results(log.iter().rev().map(|idx| self.read_log(*idx)), |iter| { iter.skip_while(|entry| entry.index > last_applied_log) .find_map(|entry| match &entry.payload { EntryPayload::ConfigChange(cfg) => Some(cfg.membership.clone()), _ => None, }) })? .unwrap_or_else(|| MembershipConfig::new_initial(self.peer_id.into())) }; let term; let snapshot_bytes: Vec<u8>; { let mut db_tx = DBTransaction::new(); let mut log = self.raft_log.write().await; let mut current_snapshot = self.raft_snapshot.write().await; term = log .get(&last_applied_log) .and_then(|idx| self.read_log(*idx).ok()) .map(|entry| entry.term) .ok_or_else(|| { anyhow!( "last_applied_log {} not available during log compaction", last_applied_log ) })?; let new_log = log.split_off(&last_applied_log); for &idx in log.iter() { if idx != last_applied_log { db_tx.delete_log_object(idx); } } *log = new_log; log.insert(last_applied_log); db_tx.insert_log_object( last_applied_log, &Entry::<NewBlockRequest<Tx>>::new_snapshot_pointer( last_applied_log, term, "".into(), membership_config.clone(), ), )?; self.db.write_async(db_tx).await?; let snapshot = RaftSnapshot { index: last_applied_log, term, membership: membership_config.clone(), snapshot: sm_copy.snapshot, }; snapshot_bytes = binary_encode(&snapshot)?; *current_snapshot = 
Some(snapshot); }; Ok(CurrentSnapshotData { term, index: last_applied_log, membership: membership_config.clone(), snapshot: Box::new(Cursor::new(snapshot_bytes)), }) } #[tracing::instrument(level = "debug", skip(self), err)] async fn create_snapshot(&self) -> Result<(String, Box<Self::Snapshot>)> { Ok((String::from(""), Box::new(Cursor::new(Vec::new())))) } #[tracing::instrument(level = "debug", skip(self, snapshot), err)] async fn finalize_snapshot_installation( &self, index: u64, term: u64, delete_through: Option<u64>, id: String, snapshot: Box<Self::Snapshot>, ) -> Result<()> { let new_snapshot: RaftSnapshot = binary_decode(snapshot.get_ref().as_slice())?; { let mut db_tx = DBTransaction::new(); let mut log = self.raft_log.write().await; let membership_config = process_results(log.iter().rev().map(|idx| self.read_log(*idx)), |iter| { iter.skip_while(|entry| entry.index > index) .find_map(|entry| match &entry.payload { EntryPayload::ConfigChange(cfg) => Some(cfg.membership.clone()), _ => None, }) })? 
.unwrap_or_else(|| MembershipConfig::new_initial(self.peer_id.into())); match &delete_through { Some(through) => { let new_log = log.split_off(&(through + 1)); for &idx in log.iter() { if idx != index { db_tx.delete_log_object(idx); } } *log = new_log; } None => { for &idx in log.iter() { if idx != index { db_tx.delete_log_object(idx); } } log.clear(); } } log.insert(index); db_tx.insert_log_object( index, &Entry::<NewBlockRequest<Tx>>::new_snapshot_pointer( index, term, id, membership_config, ), )?; self.db.write_async(db_tx).await?; } { let mut sm = self.raft_sm.write().await; let leader_id = fetch_leader_id(&self.route_table).await?; let leader_addr = self.route_table.peer_address(leader_id)?; let mut height = sm.snapshot.current_height(); while height < new_snapshot.snapshot.current_height() { let block_proposal = get_block_proposal::<Tx>(leader_addr, height.next_height()).await?; let state_update = verify_block( &self.chain_cfg, &self.db, &mut sm.snapshot, &block_proposal, verify_consensus, ) .await?; commit_block( &block_proposal, &state_update, &self.db, &self.latest_block_header, &self.latest_tx_count, ) .await?; height = height.next_height(); } ensure!(sm.snapshot == new_snapshot.snapshot, "inconsistent block"); sm.last_applied_log = new_snapshot.index; self.latest_block_header.set_from_block( sm.snapshot .get_latest_block() .expect("Failed to get the latest block."), ); } { let mut current_snapshot = self.raft_snapshot.write().await; *current_snapshot = Some(new_snapshot); } Ok(()) } #[tracing::instrument(level = "debug", skip(self), err)] async fn get_current_snapshot(&self) -> Result<Option<CurrentSnapshotData<Self::Snapshot>>> { match &*self.raft_snapshot.read().await { Some(snapshot) => { let reader = binary_encode(&snapshot)?; Ok(Some(CurrentSnapshotData { index: snapshot.index, term: snapshot.term, membership: snapshot.membership.clone(), snapshot: Box::new(Cursor::new(reader)), })) } None => Ok(None), } } }
34.553982
97
0.536598
e9630c0a449ced84330459805d9c6798ab1da7e2
882
// SPDX-License-Identifier: Apache-2.0 // SPDX-FileCopyrightText: 2019-2022 Second State INC mod cpp_header; use anyhow::{anyhow, Result}; pub use cpp_header::to_cpp_header; use std::path::Path; use witx::{load, WitxError}; pub fn generate<P: AsRef<Path>>(inputs: &[P]) -> Result<String> { // TODO: drop the anyhow! part once witx switches to anyhow. let doc = load(&inputs).map_err(|e| match e { WitxError::Parse(err) => anyhow!(err), WitxError::Validation(err) => anyhow!(err), e => anyhow!(e.to_string()), })?; let inputs_str = &inputs .iter() .map(|p| { p.as_ref() .file_name() .unwrap() .to_str() .unwrap() .to_string() }) .collect::<Vec<_>>() .join(", "); Ok(to_cpp_header(&doc, &inputs_str)) }
25.941176
65
0.536281
08767a0b2324df3455c7832a4304b2ad36ff9652
1,121
use cloud_pubsub::Client; use serde_derive::Deserialize; use std::sync::Arc; use serde_json::json; use std::collections::HashMap; #[derive(Deserialize)] struct Config { topic: String, google_application_credentials: String, } #[tokio::main] async fn main() { let parsed_env = envy::from_env::<Config>(); if let Err(e) = parsed_env { eprintln!("ENV is not valid: {}", e); std::process::exit(1); } let config = parsed_env.unwrap(); let pubsub = match Client::new(config.google_application_credentials).await { Err(e) => panic!("Failed to initialize pubsub: {}", e), Ok(p) => Arc::new(p), }; let mut attributes : HashMap<String,String>=HashMap::new(); attributes.insert("Test_key".to_string(),"Test_Value".to_string()); let topic = Arc::new(pubsub.topic(config.topic.clone())); match topic.clone().publish("🔥",attributes).await { Ok(response) => { println!("{:?}", response); pubsub.stop(); std::process::exit(0); } Err(e) => eprintln!("Failed sending message {}", e), } }
28.025
81
0.602141
67d79067d0e3cf076d238368f90e197a6b113178
372
// Consider copying this into a general module, or to implement it for Vec<Vec<T>> pub fn transpose_vec<T: Copy>(vec: Vec<Vec<T>>) -> Vec<Vec<T>> { let length = vec[0].len(); // Scan the 2d vec columns one by one, emit a row for every column scanned vertically (0..length) .map(|index| vec.iter().map(|row| row[index]).collect()) .collect() }
37.2
89
0.629032
5bcc1ad24afa29346ed21bbc754749d7ce522b3e
115,139
// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0. use crate::storage::kv::{Modify, ScanMode, Snapshot, Statistics, WriteData}; use crate::storage::mvcc::{metrics::*, reader::MvccReader, ErrorInner, Result}; use crate::storage::types::TxnStatus; use engine_traits::{CF_DEFAULT, CF_LOCK, CF_WRITE}; use kvproto::kvrpcpb::{ExtraOp, IsolationLevel}; use std::fmt; use txn_types::{ is_short_value, Key, Lock, LockType, Mutation, MutationType, OldValue, TimeStamp, TxnExtra, Value, Write, WriteType, }; pub const MAX_TXN_WRITE_SIZE: usize = 32 * 1024; pub struct GcInfo { pub found_versions: usize, pub deleted_versions: usize, pub is_completed: bool, } /// `ReleasedLock` contains the information of the lock released by `commit`, `rollback` and so on. /// It's used by `LockManager` to wake up transactions waiting for locks. #[derive(Debug)] pub struct ReleasedLock { /// The hash value of the lock. pub hash: u64, /// Whether it is a pessimistic lock. pub pessimistic: bool, } impl ReleasedLock { fn new(key: &Key, pessimistic: bool) -> Self { Self { hash: key.gen_hash(), pessimistic, } } } pub struct MvccTxn<S: Snapshot> { reader: MvccReader<S>, start_ts: TimeStamp, write_size: usize, writes: WriteData, // collapse continuous rollbacks. collapse_rollback: bool, pub extra_op: ExtraOp, } impl<S: Snapshot> MvccTxn<S> { pub fn new(snapshot: S, start_ts: TimeStamp, fill_cache: bool) -> MvccTxn<S> { // FIXME: use session variable to indicate fill cache or not. // ScanMode is `None`, since in prewrite and other operations, keys are not given in // order and we use prefix seek for each key. An exception is GC, which uses forward // scan only. // IsolationLevel is `Si`, actually the method we use in MvccTxn does not rely on // isolation level, so it can be any value. Self::from_reader( MvccReader::new(snapshot, None, fill_cache, IsolationLevel::Si), start_ts, ) } // Use `ScanMode::Forward` when gc or prewrite with multiple `Mutation::Insert`, // which would seek less times. 
// When `scan_mode` is `Some(ScanMode::Forward)`, all keys must be written by // in ascending order. pub fn for_scan( snapshot: S, scan_mode: Option<ScanMode>, start_ts: TimeStamp, fill_cache: bool, ) -> MvccTxn<S> { Self::from_reader( MvccReader::new(snapshot, scan_mode, fill_cache, IsolationLevel::Si), start_ts, ) } fn from_reader(reader: MvccReader<S>, start_ts: TimeStamp) -> MvccTxn<S> { MvccTxn { reader, start_ts, write_size: 0, writes: WriteData::default(), collapse_rollback: true, extra_op: ExtraOp::Noop, } } pub fn collapse_rollback(&mut self, collapse: bool) { self.collapse_rollback = collapse; } pub fn set_start_ts(&mut self, start_ts: TimeStamp) { self.start_ts = start_ts; } pub fn into_modifies(self) -> Vec<Modify> { self.writes.modifies } pub fn take_extra(&mut self) -> TxnExtra { std::mem::take(&mut self.writes.extra) } pub fn take_statistics(&mut self) -> Statistics { let mut statistics = Statistics::default(); self.reader.collect_statistics_into(&mut statistics); statistics } pub fn write_size(&self) -> usize { self.write_size } fn put_lock(&mut self, key: Key, lock: &Lock) { let write = Modify::Put(CF_LOCK, key, lock.to_bytes()); self.write_size += write.size(); self.writes.modifies.push(write); } fn unlock_key(&mut self, key: Key, pessimistic: bool) -> Option<ReleasedLock> { let released = ReleasedLock::new(&key, pessimistic); let write = Modify::Delete(CF_LOCK, key); self.write_size += write.size(); self.writes.modifies.push(write); Some(released) } fn put_value(&mut self, key: Key, ts: TimeStamp, value: Value) { let write = Modify::Put(CF_DEFAULT, key.append_ts(ts), value); self.write_size += write.size(); self.writes.modifies.push(write); } fn delete_value(&mut self, key: Key, ts: TimeStamp) { let write = Modify::Delete(CF_DEFAULT, key.append_ts(ts)); self.write_size += write.size(); self.writes.modifies.push(write); } fn put_write(&mut self, key: Key, ts: TimeStamp, value: Value) { let write = Modify::Put(CF_WRITE, key.append_ts(ts), value); 
self.write_size += write.size(); self.writes.modifies.push(write); } fn delete_write(&mut self, key: Key, ts: TimeStamp) { let write = Modify::Delete(CF_WRITE, key.append_ts(ts)); self.write_size += write.size(); self.writes.modifies.push(write); } fn key_exist(&mut self, key: &Key, ts: TimeStamp) -> Result<bool> { Ok(self.reader.get_write(&key, ts)?.is_some()) } fn prewrite_key_value( &mut self, key: Key, lock_type: LockType, primary: &[u8], value: Option<Value>, lock_ttl: u64, for_update_ts: TimeStamp, txn_size: u64, min_commit_ts: TimeStamp, ) { let mut lock = Lock::new( lock_type, primary.to_vec(), self.start_ts, lock_ttl, None, for_update_ts, txn_size, min_commit_ts, ); if let Some(value) = value { if is_short_value(&value) { // If the value is short, embed it in Lock. lock.short_value = Some(value); } else { // value is long self.put_value(key.clone(), self.start_ts, value); } } self.put_lock(key, &lock); } fn rollback_lock( &mut self, key: Key, lock: &Lock, is_pessimistic_txn: bool, ) -> Result<Option<ReleasedLock>> { // If prewrite type is DEL or LOCK or PESSIMISTIC, it is no need to delete value. if lock.short_value.is_none() && lock.lock_type == LockType::Put { self.delete_value(key.clone(), lock.ts); } // Only the primary key of a pessimistic transaction needs to be protected. let protected: bool = is_pessimistic_txn && key.is_encoded_from(&lock.primary); let write = Write::new_rollback(self.start_ts, protected); self.put_write(key.clone(), self.start_ts, write.as_ref().to_bytes()); if self.collapse_rollback { self.collapse_prev_rollback(key.clone())?; } Ok(self.unlock_key(key, is_pessimistic_txn)) } /// Checks the existence of the key according to `should_not_exist`. /// If not, returns an `AlreadyExist` error. 
// Fails with `AlreadyExist` when `should_not_exist` is set and `write`
// (committed at `write_commit_ts`) shows the key currently exists.
fn check_data_constraint(
    &mut self,
    should_not_exist: bool,
    write: &Write,
    write_commit_ts: TimeStamp,
    key: &Key,
) -> Result<()> {
    if !should_not_exist || write.write_type == WriteType::Delete {
        return Ok(());
    }

    // The current key exists under any of the following conditions:
    // 1. The current write type is `PUT`
    // 2. The current write type is `Rollback` or `Lock`, and the key has an older version.
    if write.write_type == WriteType::Put || self.key_exist(&key, write_commit_ts.prev())? {
        return Err(ErrorInner::AlreadyExist { key: key.to_raw()? }.into());
    }
    Ok(())
}

// Pessimistic transactions only acquire pessimistic locks on row keys.
// The corresponding index keys are not locked until pessimistic prewrite.
// It's possible that lock conflict occurs on them, but the isolation is
// guaranteed by pessimistic locks on row keys, so let TiDB resolve these
// locks immediately.
fn handle_non_pessimistic_lock_conflict(&self, key: Key, lock: Lock) -> Result<()> {
    // The previous pessimistic transaction has been committed or aborted.
    // Resolve it immediately.
    //
    // Because the row key is locked, the optimistic transaction will
    // abort. Resolve it immediately.
    let mut info = lock.into_lock_info(key.into_raw()?);
    // Set ttl to 0 so TiDB will resolve lock immediately.
    info.set_lock_ttl(0);
    Err(ErrorInner::KeyIsLocked(info).into())
}

/// Acquires a pessimistic lock on `key` for this transaction.
///
/// Overwrites an existing lock of this transaction when `for_update_ts`
/// advanced; conflicts with other transactions' locks, newer commits, and
/// rollbacks of this transaction are reported as errors. When `need_value`
/// is set, also returns the value visible to this lock acquisition.
pub fn acquire_pessimistic_lock(
    &mut self,
    key: Key,
    primary: &[u8],
    should_not_exist: bool,
    lock_ttl: u64,
    for_update_ts: TimeStamp,
    need_value: bool,
    min_commit_ts: TimeStamp,
) -> Result<Option<Value>> {
    fail_point!("acquire_pessimistic_lock", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    // Helper constructing the pessimistic lock record to be written.
    fn pessimistic_lock(
        primary: &[u8],
        start_ts: TimeStamp,
        lock_ttl: u64,
        for_update_ts: TimeStamp,
        min_commit_ts: TimeStamp,
    ) -> Lock {
        Lock::new(
            LockType::Pessimistic,
            primary.to_vec(),
            start_ts,
            lock_ttl,
            None,
            for_update_ts,
            0,
            min_commit_ts,
        )
    }

    let mut val = None;
    if let Some(lock) = self.reader.load_lock(&key)?
    {
        // Another transaction holds the lock: report the conflict.
        if lock.ts != self.start_ts {
            return Err(ErrorInner::KeyIsLocked(lock.into_lock_info(key.into_raw()?)).into());
        }
        // Our own lock but not pessimistic: the key was already prewritten.
        if lock.lock_type != LockType::Pessimistic {
            return Err(ErrorInner::LockTypeNotMatch {
                start_ts: self.start_ts,
                key: key.into_raw()?,
                pessimistic: false,
            }
            .into());
        }
        if need_value {
            val = self.reader.get(&key, for_update_ts, true)?;
        }
        // Overwrite the lock with small for_update_ts
        if for_update_ts > lock.for_update_ts {
            let lock = pessimistic_lock(
                primary,
                self.start_ts,
                lock_ttl,
                for_update_ts,
                min_commit_ts,
            );
            self.put_lock(key, &lock);
        } else {
            // Re-acquisition with an older for_update_ts is a duplicated command.
            MVCC_DUPLICATE_CMD_COUNTER_VEC
                .acquire_pessimistic_lock
                .inc();
        }
        return Ok(val);
    }

    if let Some((commit_ts, write)) = self.reader.seek_write(&key, TimeStamp::max())? {
        // The isolation level of pessimistic transactions is RC. `for_update_ts` is
        // the commit_ts of the data this transaction read. If there exists a commit version
        // whose commit timestamp is larger than current `for_update_ts`, the
        // transaction should retry to get the latest data.
        if commit_ts > for_update_ts {
            MVCC_CONFLICT_COUNTER
                .acquire_pessimistic_lock_conflict
                .inc();
            return Err(ErrorInner::WriteConflict {
                start_ts: self.start_ts,
                conflict_start_ts: write.start_ts,
                conflict_commit_ts: commit_ts,
                key: key.into_raw()?,
                primary: primary.to_vec(),
            }
            .into());
        }

        // Handle rollback.
        // If the start timestamp of write is equal to transaction's start timestamp
        // as well as commit timestamp, the lock is already rolled back.
        if write.start_ts == self.start_ts && commit_ts == self.start_ts {
            assert!(write.write_type == WriteType::Rollback);
            return Err(ErrorInner::PessimisticLockRolledBack {
                start_ts: self.start_ts,
                key: key.into_raw()?,
            }
            .into());
        }
        // If `commit_ts` we seek is already before `start_ts`, the rollback must not exist.
        if commit_ts > self.start_ts {
            if let Some((commit_ts, write)) = self.reader.seek_write(&key, self.start_ts)?
            {
                if write.start_ts == self.start_ts {
                    // A record at exactly start_ts by this txn can only be a rollback.
                    assert!(
                        commit_ts == self.start_ts && write.write_type == WriteType::Rollback
                    );
                    return Err(ErrorInner::PessimisticLockRolledBack {
                        start_ts: self.start_ts,
                        key: key.into_raw()?,
                    }
                    .into());
                }
            }
        }

        // Check data constraint when acquiring pessimistic lock.
        self.check_data_constraint(should_not_exist, &write, commit_ts, &key)?;

        if need_value {
            val = match write.write_type {
                // If it's a valid Write, no need to read again.
                WriteType::Put => Some(self.reader.load_data(&key, write)?),
                WriteType::Delete => None,
                WriteType::Lock | WriteType::Rollback => {
                    self.reader.get(&key, commit_ts.prev(), true)?
                }
            };
        }
    }

    let lock = pessimistic_lock(
        primary,
        self.start_ts,
        lock_ttl,
        for_update_ts,
        min_commit_ts,
    );
    self.put_lock(key, &lock);

    Ok(val)
}

/// Prewrites `mutation` in a pessimistic transaction.
///
/// `is_pessimistic_lock` tells whether a pessimistic lock is expected to
/// already exist on this key; when it is missing the lock may be amended
/// if `pipelined_pessimistic_lock` is enabled.
pub fn pessimistic_prewrite(
    &mut self,
    mutation: Mutation,
    primary: &[u8],
    is_pessimistic_lock: bool,
    mut lock_ttl: u64,
    for_update_ts: TimeStamp,
    txn_size: u64,
    mut min_commit_ts: TimeStamp,
    pipelined_pessimistic_lock: bool,
) -> Result<()> {
    if mutation.should_not_write() {
        return Err(box_err!(
            "cannot handle checkNotExists in pessimistic prewrite"
        ));
    }
    let mutation_type = mutation.mutation_type();
    let lock_type = LockType::from_mutation(&mutation);
    let (key, value) = mutation.into_key_value();

    fail_point!("pessimistic_prewrite", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    if let Some(lock) = self.reader.load_lock(&key)? {
        if lock.ts != self.start_ts {
            // Abort on lock belonging to other transaction if
            // prewrites a pessimistic lock.
            if is_pessimistic_lock {
                warn!(
                    "prewrite failed (pessimistic lock not found)";
                    "start_ts" => self.start_ts,
                    "key" => %key,
                    "lock_ts" => lock.ts
                );
                return Err(ErrorInner::PessimisticLockNotFound {
                    start_ts: self.start_ts,
                    key: key.into_raw()?,
                }
                .into());
            }
            return self.handle_non_pessimistic_lock_conflict(key, lock);
        } else {
            if lock.lock_type != LockType::Pessimistic {
                // Duplicated command. No need to overwrite the lock and data.
                MVCC_DUPLICATE_CMD_COUNTER_VEC.prewrite.inc();
                return Ok(());
            }
            // The lock is pessimistic and owned by this txn, go through to overwrite it.
            // The ttl and min_commit_ts of the lock may have been pushed forward.
            lock_ttl = std::cmp::max(lock_ttl, lock.ttl);
            min_commit_ts = std::cmp::max(min_commit_ts, lock.min_commit_ts);
        }
    } else if is_pessimistic_lock {
        // The expected pessimistic lock is missing; try to amend it.
        self.amend_pessimistic_lock(pipelined_pessimistic_lock, &key)?;
    }

    self.check_extra_op(&key, mutation_type, None)?;
    // No need to check data constraint, it's resolved by pessimistic locks.
    self.prewrite_key_value(
        key,
        lock_type.unwrap(),
        primary,
        value,
        lock_ttl,
        for_update_ts,
        txn_size,
        min_commit_ts,
    );
    Ok(())
}

/// Handles a missing pessimistic lock during pessimistic prewrite.
///
/// With pipelined lock acquisition, the lock may have been lost; the
/// prewrite can proceed as long as no other transaction has written the key
/// at or after our start_ts. Otherwise the transaction is aborted.
fn amend_pessimistic_lock(
    &mut self,
    pipelined_pessimistic_lock: bool,
    key: &Key,
) -> Result<()> {
    if !pipelined_pessimistic_lock {
        // Pessimistic lock does not exist, the transaction should be aborted.
        warn!(
            "prewrite failed (pessimistic lock not found)";
            "start_ts" => self.start_ts,
            "key" => %key
        );
        return Err(ErrorInner::PessimisticLockNotFound {
            start_ts: self.start_ts,
            key: key.clone().into_raw()?,
        }
        .into());
    }
    if let Some((commit_ts, _)) = self.reader.seek_write(key, TimeStamp::max())? {
        if commit_ts >= self.start_ts {
            warn!(
                "prewrite failed (pessimistic lock not found)";
                "start_ts" => self.start_ts,
                "commit_ts" => commit_ts,
                "key" => %key
            );
            MVCC_CONFLICT_COUNTER
                .pipelined_acquire_pessimistic_lock_amend_fail
                .inc();
            return Err(ErrorInner::PessimisticLockNotFound {
                start_ts: self.start_ts,
                key: key.clone().into_raw()?,
            }
            .into());
        }
    }
    // Used pipelined pessimistic lock acquiring in this txn but failed
    // Luckily no other txn modified this lock, amend it by treating it as an optimistic txn.
    MVCC_CONFLICT_COUNTER
        .pipelined_acquire_pessimistic_lock_amend_success
        .inc();
    Ok(())
}

/// Prewrites `mutation` in an optimistic transaction: checks for lock and
/// write conflicts (unless `skip_constraint_check`), then writes the lock
/// (and long value) for the key.
pub fn prewrite(
    &mut self,
    mutation: Mutation,
    primary: &[u8],
    skip_constraint_check: bool,
    lock_ttl: u64,
    txn_size: u64,
    min_commit_ts: TimeStamp,
) -> Result<()> {
    let lock_type = LockType::from_mutation(&mutation);
    // For the insert/checkNotExists operation, the old key should not be in the system.
    let should_not_exist = mutation.should_not_exists();
    let should_not_write = mutation.should_not_write();
    let mutation_type = mutation.mutation_type();
    let (key, value) = mutation.into_key_value();

    fail_point!("prewrite", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    // Check whether the current key is locked at any timestamp.
    if let Some(lock) = self.reader.load_lock(&key)? {
        if lock.ts != self.start_ts {
            return Err(ErrorInner::KeyIsLocked(lock.into_lock_info(key.into_raw()?)).into());
        }
        // TODO: remove it in future
        if lock.lock_type == LockType::Pessimistic {
            return Err(ErrorInner::LockTypeNotMatch {
                start_ts: self.start_ts,
                key: key.into_raw()?,
                pessimistic: true,
            }
            .into());
        }
        // Duplicated command. No need to overwrite the lock and data.
        MVCC_DUPLICATE_CMD_COUNTER_VEC.prewrite.inc();
        return Ok(());
    }

    let mut prev_write = None;
    // Check whether there is a newer version.
    if !skip_constraint_check {
        if let Some((commit_ts, write)) = self.reader.seek_write(&key, TimeStamp::max())? {
            // Abort on writes after our start timestamp ...
            // If there exists a commit version whose commit timestamp is larger than or equal to
            // current start timestamp, we should abort current prewrite, even if the commit
            // type is Rollback.
            if commit_ts >= self.start_ts {
                MVCC_CONFLICT_COUNTER.prewrite_write_conflict.inc();
                return Err(ErrorInner::WriteConflict {
                    start_ts: self.start_ts,
                    conflict_start_ts: write.start_ts,
                    conflict_commit_ts: commit_ts,
                    key: key.into_raw()?,
                    primary: primary.to_vec(),
                }
                .into());
            }
            // Should check it when no lock exists, otherwise it can report error when there is
            // a lock belonging to a committed transaction which deletes the key.
            self.check_data_constraint(should_not_exist, &write, commit_ts, &key)?;
            prev_write = Some(write);
        }
    }
    if should_not_write {
        // checkNotExists only validates; it never writes a lock.
        return Ok(());
    }
    self.check_extra_op(&key, mutation_type, prev_write)?;
    self.prewrite_key_value(
        key,
        lock_type.unwrap(),
        primary,
        value,
        lock_ttl,
        TimeStamp::zero(),
        txn_size,
        min_commit_ts,
    );
    Ok(())
}

/// Commits this transaction's lock on `key` at `commit_ts`, converting the
/// lock into a write record and releasing the lock.
pub fn commit(&mut self, key: Key, commit_ts: TimeStamp) -> Result<Option<ReleasedLock>> {
    fail_point!("commit", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    let (lock_type, short_value, is_pessimistic_txn) = match self.reader.load_lock(&key)? {
        Some(ref mut lock) if lock.ts == self.start_ts => {
            // A lock with larger min_commit_ts than current commit_ts can't be committed
            if commit_ts < lock.min_commit_ts {
                info!(
                    "trying to commit with smaller commit_ts than min_commit_ts";
                    "key" => %key,
                    "start_ts" => self.start_ts,
                    "commit_ts" => commit_ts,
                    "min_commit_ts" => lock.min_commit_ts,
                );
                return Err(ErrorInner::CommitTsExpired {
                    start_ts: self.start_ts,
                    commit_ts,
                    key: key.into_raw()?,
                    min_commit_ts: lock.min_commit_ts,
                }
                .into());
            }

            // It's an abnormal routine since pessimistic locks shouldn't be committed in our
            // transaction model. But a pessimistic lock will be left if the pessimistic
            // rollback request fails to send and the transaction need not to acquire
            // this lock again(due to WriteConflict). If the transaction is committed, we
            // should commit this pessimistic lock too.
if lock.lock_type == LockType::Pessimistic {
    warn!(
        "commit a pessimistic lock with Lock type";
        "key" => %key,
        "start_ts" => self.start_ts,
        "commit_ts" => commit_ts,
    );
    // Commit with WriteType::Lock.
    lock.lock_type = LockType::Lock;
}
(
    lock.lock_type,
    lock.short_value.take(),
    !lock.for_update_ts.is_zero(),
)
}
_ => {
    // No lock of ours on the key: decide between duplicated commit and
    // real lock-not-found from the write history.
    return match self.reader.get_txn_commit_info(&key, self.start_ts)? {
        Some((_, WriteType::Rollback)) | None => {
            MVCC_CONFLICT_COUNTER.commit_lock_not_found.inc();
            // None: related Rollback has been collapsed.
            // Rollback: rollback by concurrent transaction.
            info!(
                "txn conflict (lock not found)";
                "key" => %key,
                "start_ts" => self.start_ts,
                "commit_ts" => commit_ts,
            );
            Err(ErrorInner::TxnLockNotFound {
                start_ts: self.start_ts,
                commit_ts,
                key: key.into_raw()?,
            }
            .into())
        }
        // Committed by concurrent transaction.
        Some((_, WriteType::Put))
        | Some((_, WriteType::Delete))
        | Some((_, WriteType::Lock)) => {
            MVCC_DUPLICATE_CMD_COUNTER_VEC.commit.inc();
            Ok(None)
        }
    };
}
};
// Materialize the commit record and release the lock.
let write = Write::new(
    WriteType::from_lock_type(lock_type).unwrap(),
    self.start_ts,
    short_value,
);
self.put_write(key.clone(), commit_ts, write.as_ref().to_bytes());
Ok(self.unlock_key(key, is_pessimistic_txn))
}

/// Rolls back this transaction on `key` without any TTL check.
pub fn rollback(&mut self, key: Key) -> Result<Option<ReleasedLock>> {
    fail_point!("rollback", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    // Rollback is called only if the transaction is known to fail. Under the circumstances,
    // the rollback record needn't be protected.
    self.cleanup(key, TimeStamp::zero(), false)
}

/// Determines a transaction's status from the write history when its lock
/// is absent, optionally writing a rollback record when nothing is found.
fn check_txn_status_missing_lock(
    &mut self,
    primary_key: Key,
    rollback_if_not_exist: bool,
    protect_rollback: bool,
) -> Result<TxnStatus> {
    MVCC_CHECK_TXN_STATUS_COUNTER_VEC.get_commit_info.inc();
    match self
        .reader
        .get_txn_commit_info(&primary_key, self.start_ts)?
    {
        Some((ts, write_type)) => {
            if write_type == WriteType::Rollback {
                Ok(TxnStatus::RolledBack)
            } else {
                Ok(TxnStatus::committed(ts))
            }
        }
        None => {
            if rollback_if_not_exist {
                let ts = self.start_ts;

                // Collapse previous rollback if it exists.
                if self.collapse_rollback {
                    self.collapse_prev_rollback(primary_key.clone())?;
                }

                // Insert a Rollback to Write CF in case that a stale prewrite
                // command is received after a cleanup command.
                let write = Write::new_rollback(ts, protect_rollback);
                self.put_write(primary_key, ts, write.as_ref().to_bytes());
                MVCC_CHECK_TXN_STATUS_COUNTER_VEC.rollback.inc();

                Ok(TxnStatus::LockNotExist)
            } else {
                Err(ErrorInner::TxnNotFound {
                    start_ts: self.start_ts,
                    key: primary_key.into_raw()?,
                }
                .into())
            }
        }
    }
}

/// Cleanup the lock if its TTL has expired, comparing with `current_ts`. If `current_ts` is 0,
/// cleanup the lock without checking TTL. If the lock is the primary lock of a pessimistic
/// transaction, the rollback record is protected from being collapsed.
///
/// Returns whether the lock is a pessimistic lock. Returns error if the key has already been
/// committed.
pub fn cleanup(
    &mut self,
    key: Key,
    current_ts: TimeStamp,
    protect_rollback: bool,
) -> Result<Option<ReleasedLock>> {
    fail_point!("cleanup", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    match self.reader.load_lock(&key)? {
        Some(ref lock) if lock.ts == self.start_ts => {
            // If current_ts is not 0, check the Lock's TTL.
            // If the lock is not expired, do not rollback it but report key is locked.
            if !current_ts.is_zero() && lock.ts.physical() + lock.ttl >= current_ts.physical()
            {
                return Err(ErrorInner::KeyIsLocked(
                    lock.clone().into_lock_info(key.into_raw()?),
                )
                .into());
            }

            let is_pessimistic_txn = !lock.for_update_ts.is_zero();
            self.rollback_lock(key, lock, is_pessimistic_txn)
        }
        _ => match self.check_txn_status_missing_lock(key, true, protect_rollback)?
        {
            TxnStatus::Committed { commit_ts } => {
                MVCC_CONFLICT_COUNTER.rollback_committed.inc();
                Err(ErrorInner::Committed { commit_ts }.into())
            }
            TxnStatus::RolledBack => {
                // Return Ok when the Rollback already exists.
                MVCC_DUPLICATE_CMD_COUNTER_VEC.rollback.inc();
                Ok(None)
            }
            TxnStatus::LockNotExist => Ok(None),
            // check_txn_status_missing_lock with rollback_if_not_exist=true
            // cannot yield any other status.
            _ => unreachable!(),
        },
    }
}

/// Delete any pessimistic lock with small for_update_ts that belongs to this transaction.
pub fn pessimistic_rollback(
    &mut self,
    key: Key,
    for_update_ts: TimeStamp,
) -> Result<Option<ReleasedLock>> {
    fail_point!("pessimistic_rollback", |err| Err(make_txn_error(
        err,
        &key,
        self.start_ts,
    )
    .into()));

    if let Some(lock) = self.reader.load_lock(&key)? {
        if lock.lock_type == LockType::Pessimistic
            && lock.ts == self.start_ts
            && lock.for_update_ts <= for_update_ts
        {
            return Ok(self.unlock_key(key, true));
        }
    }
    Ok(None)
}

/// Removes the previous unprotected rollback record of this transaction on
/// `key`, if any, so only the newest rollback is kept.
fn collapse_prev_rollback(&mut self, key: Key) -> Result<()> {
    if let Some((commit_ts, write)) = self.reader.seek_write(&key, self.start_ts)? {
        if write.write_type == WriteType::Rollback && !write.as_ref().is_protected() {
            self.delete_write(key, commit_ts);
        }
    }
    Ok(())
}

/// Update a primary key's TTL if `advise_ttl > lock.ttl`.
///
/// Returns the new TTL.
pub fn txn_heart_beat(&mut self, primary_key: Key, advise_ttl: u64) -> Result<u64> {
    fail_point!("txn_heart_beat", |err| Err(make_txn_error(
        err,
        &primary_key,
        self.start_ts,
    )
    .into()));

    if let Some(mut lock) = self.reader.load_lock(&primary_key)?
    {
        if lock.ts == self.start_ts {
            if lock.ttl < advise_ttl {
                lock.ttl = advise_ttl;
                self.put_lock(primary_key, &lock);
            } else {
                debug!(
                    "txn_heart_beat with advise_ttl not large than current ttl";
                    "primary_key" => %primary_key,
                    "start_ts" => self.start_ts,
                    "advise_ttl" => advise_ttl,
                    "current_ttl" => lock.ttl,
                );
            }
            return Ok(lock.ttl);
        }
    }

    debug!(
        "txn_heart_beat invoked but lock is absent";
        "primary_key" => %primary_key,
        "start_ts" => self.start_ts,
        "advise_ttl" => advise_ttl,
    );
    Err(ErrorInner::TxnLockNotFound {
        start_ts: self.start_ts,
        commit_ts: TimeStamp::zero(),
        key: primary_key.into_raw()?,
    }
    .into())
}

/// Check the status of a transaction.
///
/// This operation checks whether a transaction has expired its primary lock's TTL, rollback the
/// transaction if expired, or update the transaction's min_commit_ts according to the metadata
/// in the primary lock.
///
/// When transaction T1 meets T2's lock, it may invoke this on T2's primary key. In this
/// situation, `self.start_ts` is T2's `start_ts`, `caller_start_ts` is T1's `start_ts`, and
/// the `current_ts` is literally the timestamp when this function is invoked. It may not be
/// accurate.
///
/// Returns (`lock_ttl`, `commit_ts`, `is_pessimistic_txn`).
/// After checking, if the lock is still alive, it retrieves the Lock's TTL; if the transaction
/// is committed, get the commit_ts; otherwise, if the transaction is rolled back or there's
/// no information about the transaction, results will be both 0.
pub fn check_txn_status(
    &mut self,
    primary_key: Key,
    caller_start_ts: TimeStamp,
    current_ts: TimeStamp,
    rollback_if_not_exist: bool,
) -> Result<(TxnStatus, Option<ReleasedLock>)> {
    fail_point!("check_txn_status", |err| Err(make_txn_error(
        err,
        &primary_key,
        self.start_ts,
    )
    .into()));

    match self.reader.load_lock(&primary_key)?
{
    Some(ref mut lock) if lock.ts == self.start_ts => {
        let is_pessimistic_txn = !lock.for_update_ts.is_zero();

        if lock.ts.physical() + lock.ttl < current_ts.physical() {
            // If the lock is expired, clean it up.
            let released = self.rollback_lock(primary_key, lock, is_pessimistic_txn)?;
            MVCC_CHECK_TXN_STATUS_COUNTER_VEC.rollback.inc();
            return Ok((TxnStatus::TtlExpire, released));
        }

        // If lock.min_commit_ts is 0, it's not a large transaction and we can't push forward
        // its min_commit_ts otherwise the transaction can't be committed by old version TiDB
        // during rolling update.
        if !lock.min_commit_ts.is_zero()
            // If the caller_start_ts is max, it's a point get in the autocommit transaction.
            // We don't push forward lock's min_commit_ts and the point get can ignore the lock
            // next time because it's not committed.
            && !caller_start_ts.is_max()
            // Push forward the min_commit_ts so that reading won't be blocked by locks.
            && caller_start_ts >= lock.min_commit_ts
        {
            lock.min_commit_ts = caller_start_ts.next();

            if lock.min_commit_ts < current_ts {
                lock.min_commit_ts = current_ts;
            }

            self.put_lock(primary_key, lock);
            MVCC_CHECK_TXN_STATUS_COUNTER_VEC.update_ts.inc();
        }

        Ok((TxnStatus::uncommitted(lock.ttl, lock.min_commit_ts), None))
    }
    // The rollback must be protected, see more on
    // [issue #7364](https://github.com/tikv/tikv/issues/7364)
    _ => self
        .check_txn_status_missing_lock(primary_key, rollback_if_not_exist, true)
        .map(|s| (s, None)),
}
}

/// Garbage-collects versions of `key`, like `gc`, but additionally removes
/// everything committed at or before `expiry` regardless of write type.
///
/// NOTE(review): this loop duplicates `gc` below except for the `expiry`
/// branch — consider extracting a shared helper.
pub fn gc_ttl(&mut self, key: Key, expiry: TimeStamp, safe_point: TimeStamp) -> Result<GcInfo> {
    let mut remove_older = false;
    let mut ts = TimeStamp::max();
    let mut found_versions = 0;
    let mut deleted_versions = 0;
    let mut latest_delete = None;
    let mut is_completed = true;
    // Walk write records from newest to oldest.
    while let Some((commit, write)) = self.reader.seek_write(&key, ts)? {
        ts = commit.prev();
        found_versions += 1;

        if self.write_size >= MAX_TXN_WRITE_SIZE {
            // Cannot remove latest delete when we haven't iterated all versions.
            latest_delete = None;
            is_completed = false;
            break;
        }

        if remove_older {
            self.delete_write(key.clone(), commit);
            if write.write_type == WriteType::Put && write.short_value.is_none() {
                // A long Put also has data in the default CF to remove.
                self.delete_value(key.clone(), write.start_ts);
            }
            deleted_versions += 1;
            continue;
        }

        if commit > safe_point {
            continue;
        }

        if commit <= expiry {
            // delete everything that's older than ttl
            remove_older = true;
            self.delete_write(key.clone(), commit);
            deleted_versions += 1;
            continue;
        }

        // Set `remove_older` after we find the latest value.
        match write.write_type {
            WriteType::Put | WriteType::Delete => {
                remove_older = true;
            }
            WriteType::Rollback | WriteType::Lock => {}
        }

        // Latest write before `safe_point` can be deleted if its type is Delete,
        // Rollback or Lock.
        match write.write_type {
            WriteType::Delete => {
                latest_delete = Some(commit);
            }
            WriteType::Rollback | WriteType::Lock => {
                self.delete_write(key.clone(), commit);
                deleted_versions += 1;
            }
            WriteType::Put => {}
        }
    }
    if let Some(commit) = latest_delete {
        self.delete_write(key, commit);
        deleted_versions += 1;
    }
    MVCC_VERSIONS_HISTOGRAM.observe(found_versions as f64);
    if deleted_versions > 0 {
        GC_DELETE_VERSIONS_HISTOGRAM.observe(deleted_versions as f64);
    }
    Ok(GcInfo {
        found_versions,
        deleted_versions,
        is_completed,
    })
}

/// Garbage-collects versions of `key` older than the newest version at or
/// before `safe_point`, bounded by `MAX_TXN_WRITE_SIZE` per invocation.
pub fn gc(&mut self, key: Key, safe_point: TimeStamp) -> Result<GcInfo> {
    let mut remove_older = false;
    let mut ts = TimeStamp::max();
    let mut found_versions = 0;
    let mut deleted_versions = 0;
    let mut latest_delete = None;
    let mut is_completed = true;
    // Walk write records from newest to oldest.
    while let Some((commit, write)) = self.reader.seek_write(&key, ts)? {
        ts = commit.prev();
        found_versions += 1;

        if self.write_size >= MAX_TXN_WRITE_SIZE {
            // Cannot remove latest delete when we haven't iterated all versions.
            latest_delete = None;
            is_completed = false;
            break;
        }

        if remove_older {
            self.delete_write(key.clone(), commit);
            if write.write_type == WriteType::Put && write.short_value.is_none() {
                // A long Put also has data in the default CF to remove.
                self.delete_value(key.clone(), write.start_ts);
            }
            deleted_versions += 1;
            continue;
        }

        if commit > safe_point {
            continue;
        }

        // Set `remove_older` after we find the latest value.
        match write.write_type {
            WriteType::Put | WriteType::Delete => {
                remove_older = true;
            }
            WriteType::Rollback | WriteType::Lock => {}
        }

        // Latest write before `safe_point` can be deleted if its type is Delete,
        // Rollback or Lock.
        match write.write_type {
            WriteType::Delete => {
                latest_delete = Some(commit);
            }
            WriteType::Rollback | WriteType::Lock => {
                self.delete_write(key.clone(), commit);
                deleted_versions += 1;
            }
            WriteType::Put => {}
        }
    }
    if let Some(commit) = latest_delete {
        self.delete_write(key, commit);
        deleted_versions += 1;
    }
    MVCC_VERSIONS_HISTOGRAM.observe(found_versions as f64);
    if deleted_versions > 0 {
        GC_DELETE_VERSIONS_HISTOGRAM.observe(deleted_versions as f64);
    }
    Ok(GcInfo {
        found_versions,
        deleted_versions,
        is_completed,
    })
}

// Check and execute the extra operation.
// Currently we use it only for reading the old value for CDC.
fn check_extra_op(
    &mut self,
    key: &Key,
    mutation_type: MutationType,
    prev_write: Option<Write>,
) -> Result<()> {
    use crate::storage::mvcc::reader::seek_for_valid_write;

    if self.extra_op == ExtraOp::ReadOldValue
        && (mutation_type == MutationType::Put || mutation_type == MutationType::Delete)
    {
        let old_value = if let Some(w) = prev_write {
            // If write is Rollback or Lock, seek for valid write record.
            if w.write_type == WriteType::Rollback || w.write_type == WriteType::Lock {
                let write_cursor = self.reader.write_cursor.as_mut().unwrap();
                // Skip the current write record.
                write_cursor.next(&mut self.reader.statistics.write);
                let write = seek_for_valid_write(
                    write_cursor,
                    key,
                    self.start_ts,
                    &mut self.reader.statistics,
                )?;
                write.map(|w| OldValue {
                    short_value: w.short_value,
                    start_ts: w.start_ts,
                })
            } else {
                Some(OldValue {
                    short_value: w.short_value,
                    start_ts: w.start_ts,
                })
            }
        } else {
            None
        };
        // If write is None or cannot find a previously valid write record.
        self.writes.extra.add_old_value(
            key.clone().append_ts(self.start_ts),
            old_value,
            mutation_type,
        );
    }
    Ok(())
}
}

impl<S: Snapshot> fmt::Debug for MvccTxn<S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "txn @{}", self.start_ts)
    }
}

/// Maps a failpoint-injected error string to the corresponding `ErrorInner`
/// variant, for failure testing only (gated on the `failpoints` feature).
#[cfg(feature = "failpoints")]
fn make_txn_error(s: Option<String>, key: &Key, start_ts: TimeStamp) -> ErrorInner {
    if let Some(s) = s {
        match s.to_ascii_lowercase().as_str() {
            "keyislocked" => {
                let mut info = kvproto::kvrpcpb::LockInfo::default();
                info.set_key(key.to_raw().unwrap());
                info.set_primary_lock(key.to_raw().unwrap());
                info.set_lock_ttl(3000);
                ErrorInner::KeyIsLocked(info)
            }
            "committed" => ErrorInner::Committed {
                commit_ts: TimeStamp::zero(),
            },
            "pessimisticlockrolledback" => ErrorInner::PessimisticLockRolledBack {
                start_ts,
                key: key.to_raw().unwrap(),
            },
            "txnlocknotfound" => ErrorInner::TxnLockNotFound {
                start_ts,
                commit_ts: TimeStamp::zero(),
                key: key.to_raw().unwrap(),
            },
            "txnnotfound" => ErrorInner::TxnNotFound {
                start_ts,
                key: key.to_raw().unwrap(),
            },
            "locktypenotmatch" => ErrorInner::LockTypeNotMatch {
                start_ts,
                key: key.to_raw().unwrap(),
                pessimistic: false,
            },
            "writeconflict" => ErrorInner::WriteConflict {
                start_ts,
                conflict_start_ts: TimeStamp::zero(),
                conflict_commit_ts: TimeStamp::zero(),
                key: key.to_raw().unwrap(),
                primary: vec![],
            },
            "deadlock" => ErrorInner::Deadlock {
                start_ts,
                lock_ts: TimeStamp::zero(),
                lock_key: key.to_raw().unwrap(),
                deadlock_key_hash: 0,
            },
            "alreadyexist" => ErrorInner::AlreadyExist {
                key: key.to_raw().unwrap(),
            },
            "committsexpired" => ErrorInner::CommitTsExpired {
start_ts,
commit_ts: TimeStamp::zero(),
key: key.to_raw().unwrap(),
min_commit_ts: TimeStamp::zero(),
},
"pessimisticlocknotfound" => ErrorInner::PessimisticLockNotFound {
    start_ts,
    key: key.to_raw().unwrap(),
},
_ => ErrorInner::Other(box_err!("unexpected error string")),
}
} else {
ErrorInner::Other(box_err!("empty error string"))
}
}

/// Create a new MvccTxn using a u64 literal for the timestamp.
///
/// Intended to only be used in test code.
#[macro_export]
macro_rules! new_txn {
    ($ss: expr, $ts: literal, $fill_cache: expr) => {
        $crate::storage::mvcc::MvccTxn::new($ss, $ts.into(), $fill_cache)
    };
}

#[cfg(test)]
mod tests {
    use super::*;

    use crate::storage::kv::{Engine, TestEngineBuilder};
    use crate::storage::mvcc::tests::*;
    use crate::storage::mvcc::{Error, ErrorInner, MvccReader};

    use kvproto::kvrpcpb::Context;
    use txn_types::{TimeStamp, SHORT_VALUE_MAX_LEN};

    // Shared read-visibility scenario, run with both short and long values.
    fn test_mvcc_txn_read_imp(k1: &[u8], k2: &[u8], v: &[u8]) {
        let engine = TestEngineBuilder::new().build().unwrap();

        must_get_none(&engine, k1, 1);

        must_prewrite_put(&engine, k1, v, k1, 2);
        must_rollback(&engine, k1, 2);
        // should ignore rollback
        must_get_none(&engine, k1, 3);

        must_prewrite_lock(&engine, k1, k1, 3);
        must_commit(&engine, k1, 3, 4);
        // should ignore read lock
        must_get_none(&engine, k1, 5);

        must_prewrite_put(&engine, k1, v, k1, 5);
        must_prewrite_put(&engine, k2, v, k1, 5);
        // should not be affected by later locks
        must_get_none(&engine, k1, 4);
        // should read pending locks
        must_get_err(&engine, k1, 7);
        // should ignore the primary lock and get none when reading the latest record
        must_get_none(&engine, k1, u64::max_value());
        // should read secondary locks even when reading the latest record
        must_get_err(&engine, k2, u64::max_value());

        must_commit(&engine, k1, 5, 10);
        must_commit(&engine, k2, 5, 10);
        must_get_none(&engine, k1, 3);
        // should not read with ts < commit_ts
        must_get_none(&engine, k1, 7);
        // should read with ts > commit_ts
        must_get(&engine, k1, 13, v);
        // should read the latest record if `ts == u64::max_value()`
        must_get(&engine, k1, u64::max_value(), v);

        must_prewrite_delete(&engine, k1, k1, 15);
        // should ignore the lock and get previous record when reading the latest record
        must_get(&engine, k1, u64::max_value(), v);
        must_commit(&engine, k1, 15, 20);
        must_get_none(&engine, k1, 3);
        must_get_none(&engine, k1, 7);
        must_get(&engine, k1, 13, v);
        must_get(&engine, k1, 17, v);
        must_get_none(&engine, k1, 23);

        // intersecting timestamps with pessimistic txn
        // T1: start_ts = 25, commit_ts = 27
        // T2: start_ts = 23, commit_ts = 31
        must_prewrite_put(&engine, k1, v, k1, 25);
        must_commit(&engine, k1, 25, 27);
        must_acquire_pessimistic_lock(&engine, k1, k1, 23, 29);
        must_get(&engine, k1, 30, v);
        must_pessimistic_prewrite_delete(&engine, k1, k1, 23, 29, true);
        must_get_err(&engine, k1, 30);
        // should read the latest record when `ts == u64::max_value()`
        // even if lock.start_ts(23) < latest write.commit_ts(27)
        must_get(&engine, k1, u64::max_value(), v);
        must_commit(&engine, k1, 23, 31);
        must_get(&engine, k1, 30, v);
        must_get_none(&engine, k1, 32);
    }

    #[test]
    fn test_mvcc_txn_read() {
        test_mvcc_txn_read_imp(b"k1", b"k2", b"v1");

        // Exceed SHORT_VALUE_MAX_LEN to exercise the long-value path too.
        let long_value = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes();
        test_mvcc_txn_read_imp(b"k1", b"k2", &long_value);
    }

    // Shared prewrite scenario, run with both short and long values.
    fn test_mvcc_txn_prewrite_imp(k: &[u8], v: &[u8]) {
        let engine = TestEngineBuilder::new().build().unwrap();

        must_prewrite_put(&engine, k, v, k, 5);
        // Key is locked.
        must_locked(&engine, k, 5);
        // Retry prewrite.
        must_prewrite_put(&engine, k, v, k, 5);
        // Conflict.
        must_prewrite_lock_err(&engine, k, k, 6);

        must_commit(&engine, k, 5, 10);
        must_written(&engine, k, 5, 10, WriteType::Put);
        // Delayed prewrite request after committing should do nothing.
        must_prewrite_put_err(&engine, k, v, k, 5);
        must_unlocked(&engine, k);
        // Write conflict.
        must_prewrite_lock_err(&engine, k, k, 6);
        must_unlocked(&engine, k);
        // Not conflict.
        must_prewrite_lock(&engine, k, k, 12);
        must_locked(&engine, k, 12);
        must_rollback(&engine, k, 12);
        must_unlocked(&engine, k);
        must_written(&engine, k, 12, 12, WriteType::Rollback);
        // Cannot retry Prewrite after rollback.
        must_prewrite_lock_err(&engine, k, k, 12);
        // Can prewrite after rollback.
        must_prewrite_delete(&engine, k, k, 13);
        must_rollback(&engine, k, 13);
        must_unlocked(&engine, k);
    }

    #[test]
    fn test_mvcc_txn_prewrite_insert() {
        let engine = TestEngineBuilder::new().build().unwrap();
        let (k1, v1, v2, v3) = (b"k1", b"v1", b"v2", b"v3");
        must_prewrite_put(&engine, k1, v1, k1, 1);
        must_commit(&engine, k1, 1, 2);

        // Asserts that `x` is an error matching `err_matcher`.
        fn expect_error<T, F>(x: Result<T>, err_matcher: F)
        where
            F: FnOnce(Error) + Send + 'static,
        {
            match x {
                Err(e) => err_matcher(e),
                _ => panic!("expect result to be an error"),
            }
        }

        // "k1" already exists, returns AlreadyExist error.
        expect_error(try_prewrite_insert(&engine, k1, v2, k1, 3), |e| match e {
            Error(box ErrorInner::AlreadyExist { .. }) => (),
            _ => panic!("unexpected error: {:?}", e),
        });

        // Delete "k1"
        must_prewrite_delete(&engine, k1, k1, 4);

        // There is a lock, returns KeyIsLocked error.
        expect_error(try_prewrite_insert(&engine, k1, v2, k1, 6), |e| match e {
            Error(box ErrorInner::KeyIsLocked(_)) => (),
            _ => panic!("unexpected error: {:?}", e),
        });

        must_commit(&engine, k1, 4, 5);

        // After delete "k1", insert returns ok.
        assert!(try_prewrite_insert(&engine, k1, v2, k1, 6).is_ok());
        must_commit(&engine, k1, 6, 7);

        // Rollback
        must_prewrite_put(&engine, k1, v3, k1, 8);
        must_rollback(&engine, k1, 8);

        expect_error(try_prewrite_insert(&engine, k1, v3, k1, 9), |e| match e {
            Error(box ErrorInner::AlreadyExist { .. }) => (),
            _ => panic!("unexpected error: {:?}", e),
        });

        // Delete "k1" again
        must_prewrite_delete(&engine, k1, k1, 10);
        must_commit(&engine, k1, 10, 11);

        // Rollback again
        must_prewrite_put(&engine, k1, v3, k1, 12);
        must_rollback(&engine, k1, 12);

        // After delete "k1", insert returns ok.
        assert!(try_prewrite_insert(&engine, k1, v2, k1, 13).is_ok());
        must_commit(&engine, k1, 13, 14);
    }

    #[test]
    fn test_mvcc_txn_prewrite_check_not_exist() {
        let engine = TestEngineBuilder::new().build().unwrap();
        let (k1, v1, v2, v3) = (b"k1", b"v1", b"v2", b"v3");
        must_prewrite_put(&engine, k1, v1, k1, 1);
        must_commit(&engine, k1, 1, 2);

        // "k1" already exists, returns AlreadyExist error.
        assert!(try_prewrite_check_not_exists(&engine, k1, k1, 3).is_err());

        // Delete "k1"
        must_prewrite_delete(&engine, k1, k1, 4);
        must_commit(&engine, k1, 4, 5);

        // After delete "k1", check_not_exists returns ok.
        assert!(try_prewrite_check_not_exists(&engine, k1, k1, 6).is_ok());

        assert!(try_prewrite_insert(&engine, k1, v2, k1, 7).is_ok());
        must_commit(&engine, k1, 7, 8);

        // Rollback
        must_prewrite_put(&engine, k1, v3, k1, 9);
        must_rollback(&engine, k1, 9);
        assert!(try_prewrite_check_not_exists(&engine, k1, k1, 10).is_err());

        // Delete "k1" again
        must_prewrite_delete(&engine, k1, k1, 11);
        must_commit(&engine, k1, 11, 12);

        // Rollback again
        must_prewrite_put(&engine, k1, v3, k1, 13);
        must_rollback(&engine, k1, 13);

        // After delete "k1", check_not_exists returns ok.
        assert!(try_prewrite_check_not_exists(&engine, k1, k1, 14).is_ok());
    }

    #[test]
    fn test_mvcc_txn_pessmistic_prewrite_check_not_exist() {
        let engine = TestEngineBuilder::new().build().unwrap();
        let k = b"k1";
        // checkNotExists is rejected in pessimistic prewrite.
        assert!(try_pessimistic_prewrite_check_not_exists(&engine, k, k, 3).is_err())
    }

    #[test]
    fn test_rollback_lock_optimistic() {
        let engine = TestEngineBuilder::new().build().unwrap();

        let (k, v) = (b"k1", b"v1");
        must_prewrite_put(&engine, k, v, k, 5);
        must_commit(&engine, k, 5, 10);

        // Lock
        must_prewrite_lock(&engine, k, k, 15);
        must_locked(&engine, k, 15);

        // Rollback lock
        must_rollback(&engine, k, 15);
        // Rollbacks of optimistic transactions needn't be protected
        must_get_rollback_protected(&engine, k, 15, false);
    }

    #[test]
    fn test_rollback_lock_pessimistic() {
        let engine = TestEngineBuilder::new().build().unwrap();

        let (k1, k2, v) = (b"k1", b"k2", b"v1");
        must_acquire_pessimistic_lock(&engine, k1, k1, 5, 5);
        must_acquire_pessimistic_lock(&engine, k2, k1, 5, 7);
        must_rollback(&engine, k1, 5);
        must_rollback(&engine, k2, 5);
        // The rollback of the primary key should be protected
        must_get_rollback_protected(&engine, k1, 5, true);
        // The rollback of the secondary key needn't be protected
        must_get_rollback_protected(&engine, k2, 5, false);

        must_acquire_pessimistic_lock(&engine, k1, k1, 15, 15);
        must_acquire_pessimistic_lock(&engine, k2, k1, 15, 17);
        must_pessimistic_prewrite_put(&engine, k1, v, k1, 15, 17, true);
        must_pessimistic_prewrite_put(&engine, k2, v, k1, 15, 17, true);
        must_rollback(&engine, k1, 15);
        must_rollback(&engine, k2, 15);
        // The rollback of the primary key should be protected
        must_get_rollback_protected(&engine, k1, 15, true);
        // The rollback of the secondary key needn't be protected
        must_get_rollback_protected(&engine, k2, 15, false);
    }

    #[test]
    fn test_rollback_del() {
        let engine = TestEngineBuilder::new().build().unwrap();
        let (k, v) = (b"k1", b"v1");
        must_prewrite_put(&engine, k, v, k, 5);
        must_commit(&engine, k, 5, 10);

        // Prewrite delete
must_prewrite_delete(&engine, k, k, 15); must_locked(&engine, k, 15); // Rollback delete must_rollback(&engine, k, 15); } #[test] fn test_cleanup() { // Cleanup's logic is mostly similar to rollback, except the TTL check. Tests that not // related to TTL check should be covered by other test cases. let engine = TestEngineBuilder::new().build().unwrap(); // Shorthand for composing ts. let ts = TimeStamp::compose; let (k, v) = (b"k", b"v"); must_prewrite_put(&engine, k, v, k, ts(10, 0)); must_locked(&engine, k, ts(10, 0)); must_txn_heart_beat(&engine, k, ts(10, 0), 100, 100); // Check the last txn_heart_beat has set the lock's TTL to 100. must_txn_heart_beat(&engine, k, ts(10, 0), 90, 100); // TTL not expired. Do nothing but returns an error. must_cleanup_err(&engine, k, ts(10, 0), ts(20, 0)); must_locked(&engine, k, ts(10, 0)); // Try to cleanup another transaction's lock. Does nothing. must_cleanup(&engine, k, ts(10, 1), ts(120, 0)); // If there is no exisiting lock when cleanup, it may be a pessimistic transaction, // so the rollback should be protected. must_get_rollback_protected(&engine, k, ts(10, 1), true); must_locked(&engine, k, ts(10, 0)); // TTL expired. The lock should be removed. 
must_cleanup(&engine, k, ts(10, 0), ts(120, 0)); must_unlocked(&engine, k); // Rollbacks of optimistic transactions needn't be protected must_get_rollback_protected(&engine, k, ts(10, 0), false); must_get_rollback_ts(&engine, k, ts(10, 0)); // Rollbacks of primary keys in pessimistic transactions should be protected must_acquire_pessimistic_lock(&engine, k, k, ts(11, 1), ts(12, 1)); must_cleanup(&engine, k, ts(11, 1), ts(120, 0)); must_get_rollback_protected(&engine, k, ts(11, 1), true); must_acquire_pessimistic_lock(&engine, k, k, ts(13, 1), ts(14, 1)); must_pessimistic_prewrite_put(&engine, k, v, k, ts(13, 1), ts(14, 1), true); must_cleanup(&engine, k, ts(13, 1), ts(120, 0)); must_get_rollback_protected(&engine, k, ts(13, 1), true); } #[test] fn test_mvcc_txn_prewrite() { test_mvcc_txn_prewrite_imp(b"k1", b"v1"); let long_value = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_mvcc_txn_prewrite_imp(b"k2", &long_value); } fn test_mvcc_txn_commit_ok_imp(k1: &[u8], v1: &[u8], k2: &[u8], k3: &[u8]) { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, k1, v1, k1, 10); must_prewrite_lock(&engine, k2, k1, 10); must_prewrite_delete(&engine, k3, k1, 10); must_locked(&engine, k1, 10); must_locked(&engine, k2, 10); must_locked(&engine, k3, 10); must_commit(&engine, k1, 10, 15); must_commit(&engine, k2, 10, 15); must_commit(&engine, k3, 10, 15); must_written(&engine, k1, 10, 15, WriteType::Put); must_written(&engine, k2, 10, 15, WriteType::Lock); must_written(&engine, k3, 10, 15, WriteType::Delete); // commit should be idempotent must_commit(&engine, k1, 10, 15); must_commit(&engine, k2, 10, 15); must_commit(&engine, k3, 10, 15); } #[test] fn test_mvcc_txn_commit_ok() { test_mvcc_txn_commit_ok_imp(b"x", b"v", b"y", b"z"); let long_value = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_mvcc_txn_commit_ok_imp(b"x", &long_value, b"y", b"z"); } fn test_mvcc_txn_commit_err_imp(k: &[u8], v: &[u8]) { let engine = 
TestEngineBuilder::new().build().unwrap(); // Not prewrite yet must_commit_err(&engine, k, 1, 2); must_prewrite_put(&engine, k, v, k, 5); // start_ts not match must_commit_err(&engine, k, 4, 5); must_rollback(&engine, k, 5); // commit after rollback must_commit_err(&engine, k, 5, 6); } #[test] fn test_mvcc_txn_commit_err() { test_mvcc_txn_commit_err_imp(b"k", b"v"); let long_value = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_mvcc_txn_commit_err_imp(b"k2", &long_value); } #[test] fn test_min_commit_ts() { let engine = TestEngineBuilder::new().build().unwrap(); let (k, v) = (b"k", b"v"); // Shortcuts let ts = TimeStamp::compose; let uncommitted = super::TxnStatus::uncommitted; must_prewrite_put_for_large_txn(&engine, k, v, k, ts(10, 0), 100, 0); must_check_txn_status( &engine, k, ts(10, 0), ts(20, 0), ts(20, 0), true, uncommitted(100, ts(20, 1)), ); // The the min_commit_ts should be ts(20, 1) must_commit_err(&engine, k, ts(10, 0), ts(15, 0)); must_commit_err(&engine, k, ts(10, 0), ts(20, 0)); must_commit(&engine, k, ts(10, 0), ts(20, 1)); must_prewrite_put_for_large_txn(&engine, k, v, k, ts(30, 0), 100, 0); must_check_txn_status( &engine, k, ts(30, 0), ts(40, 0), ts(40, 0), true, uncommitted(100, ts(40, 1)), ); must_commit(&engine, k, ts(30, 0), ts(50, 0)); // If the min_commit_ts of the pessimistic lock is greater than prewrite's, use it. must_acquire_pessimistic_lock_for_large_txn(&engine, k, k, ts(60, 0), ts(60, 0), 100); must_check_txn_status( &engine, k, ts(60, 0), ts(70, 0), ts(70, 0), true, uncommitted(100, ts(70, 1)), ); must_prewrite_put_impl( &engine, k, v, k, ts(60, 0), true, 50, ts(60, 0), 1, ts(60, 1), ); // The min_commit_ts is ts(70, 0) other than ts(60, 1) in prewrite request. 
must_large_txn_locked(&engine, k, ts(60, 0), 100, ts(70, 1), false); must_commit_err(&engine, k, ts(60, 0), ts(65, 0)); must_commit(&engine, k, ts(60, 0), ts(80, 0)); } #[test] fn test_mvcc_txn_rollback_after_commit() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k"; let v = b"v"; let t1 = 1; let t2 = 10; let t3 = 20; let t4 = 30; must_prewrite_put(&engine, k, v, k, t1); must_rollback(&engine, k, t2); must_rollback(&engine, k, t2); must_rollback(&engine, k, t4); must_commit(&engine, k, t1, t3); // The rollback should be failed since the transaction // was committed before. must_rollback_err(&engine, k, t1); must_get(&engine, k, t4, v); } fn test_mvcc_txn_rollback_imp(k: &[u8], v: &[u8]) { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, k, v, k, 5); must_rollback(&engine, k, 5); // Rollback should be idempotent must_rollback(&engine, k, 5); // Lock should be released after rollback must_unlocked(&engine, k); must_prewrite_lock(&engine, k, k, 10); must_rollback(&engine, k, 10); // data should be dropped after rollback must_get_none(&engine, k, 20); // Can't rollback committed transaction. 
must_prewrite_put(&engine, k, v, k, 25); must_commit(&engine, k, 25, 30); must_rollback_err(&engine, k, 25); must_rollback_err(&engine, k, 25); // Can't rollback other transaction's lock must_prewrite_delete(&engine, k, k, 35); must_rollback(&engine, k, 34); must_rollback(&engine, k, 36); must_written(&engine, k, 34, 34, WriteType::Rollback); must_written(&engine, k, 36, 36, WriteType::Rollback); must_locked(&engine, k, 35); must_commit(&engine, k, 35, 40); must_get(&engine, k, 39, v); must_get_none(&engine, k, 41); } #[test] fn test_mvcc_txn_rollback() { test_mvcc_txn_rollback_imp(b"k", b"v"); let long_value = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_mvcc_txn_rollback_imp(b"k2", &long_value); } #[test] fn test_mvcc_txn_rollback_before_prewrite() { let engine = TestEngineBuilder::new().build().unwrap(); let key = b"key"; must_rollback(&engine, key, 5); must_prewrite_lock_err(&engine, key, key, 5); } fn test_gc_imp(k: &[u8], v1: &[u8], v2: &[u8], v3: &[u8], v4: &[u8]) { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, k, v1, k, 5); must_commit(&engine, k, 5, 10); must_prewrite_put(&engine, k, v2, k, 15); must_commit(&engine, k, 15, 20); must_prewrite_delete(&engine, k, k, 25); must_commit(&engine, k, 25, 30); must_prewrite_put(&engine, k, v3, k, 35); must_commit(&engine, k, 35, 40); must_prewrite_lock(&engine, k, k, 45); must_commit(&engine, k, 45, 50); must_prewrite_put(&engine, k, v4, k, 55); must_rollback(&engine, k, 55); // Transactions: // startTS commitTS Command // -- // 55 - PUT "x55" (Rollback) // 45 50 LOCK // 35 40 PUT "x35" // 25 30 DELETE // 15 20 PUT "x15" // 5 10 PUT "x5" // CF data layout: // ts CFDefault CFWrite // -- // 55 Rollback(PUT,50) // 50 Commit(LOCK,45) // 45 // 40 Commit(PUT,35) // 35 x35 // 30 Commit(Delete,25) // 25 // 20 Commit(PUT,15) // 15 x15 // 10 Commit(PUT,5) // 5 x5 must_gc(&engine, k, 12); must_get(&engine, k, 12, v1); must_gc(&engine, k, 22); must_get(&engine, k, 22, v2); 
must_get_none(&engine, k, 12); must_gc(&engine, k, 32); must_get_none(&engine, k, 22); must_get_none(&engine, k, 35); must_gc(&engine, k, 60); must_get(&engine, k, 62, v3); } #[test] fn test_gc() { test_gc_imp(b"k1", b"v1", b"v2", b"v3", b"v4"); let v1 = "x".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); let v2 = "y".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); let v3 = "z".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); let v4 = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_gc_imp(b"k2", &v1, &v2, &v3, &v4); } fn test_write_imp(k: &[u8], v: &[u8], k2: &[u8]) { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, k, v, k, 5); must_seek_write_none(&engine, k, 5); must_commit(&engine, k, 5, 10); must_seek_write(&engine, k, TimeStamp::max(), 5, 10, WriteType::Put); must_seek_write_none(&engine, k2, TimeStamp::max()); must_get_commit_ts(&engine, k, 5, 10); must_prewrite_delete(&engine, k, k, 15); must_rollback(&engine, k, 15); must_seek_write(&engine, k, TimeStamp::max(), 15, 15, WriteType::Rollback); must_get_commit_ts(&engine, k, 5, 10); must_get_commit_ts_none(&engine, k, 15); must_prewrite_lock(&engine, k, k, 25); must_commit(&engine, k, 25, 30); must_seek_write(&engine, k, TimeStamp::max(), 25, 30, WriteType::Lock); must_get_commit_ts(&engine, k, 25, 30); } #[test] fn test_write() { test_write_imp(b"kk", b"v1", b"k"); let v2 = "x".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_write_imp(b"kk", &v2, b"k"); } fn test_scan_keys_imp(keys: Vec<&[u8]>, values: Vec<&[u8]>) { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, keys[0], values[0], keys[0], 1); must_commit(&engine, keys[0], 1, 10); must_prewrite_lock(&engine, keys[1], keys[1], 1); must_commit(&engine, keys[1], 1, 5); must_prewrite_delete(&engine, keys[2], keys[2], 1); must_commit(&engine, keys[2], 1, 20); must_prewrite_put(&engine, keys[3], values[1], keys[3], 1); must_prewrite_lock(&engine, keys[4], keys[4], 10); 
must_prewrite_delete(&engine, keys[5], keys[5], 5); must_scan_keys(&engine, None, 100, vec![keys[0], keys[1], keys[2]], None); must_scan_keys(&engine, None, 3, vec![keys[0], keys[1], keys[2]], None); must_scan_keys(&engine, None, 2, vec![keys[0], keys[1]], Some(keys[1])); must_scan_keys(&engine, Some(keys[1]), 1, vec![keys[1]], Some(keys[1])); } #[test] fn test_scan_keys() { test_scan_keys_imp(vec![b"a", b"c", b"e", b"b", b"d", b"f"], vec![b"a", b"b"]); let v1 = "x".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); let v4 = "v".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_scan_keys_imp(vec![b"a", b"c", b"e", b"b", b"d", b"f"], vec![&v1, &v4]); } fn test_write_size_imp(k: &[u8], v: &[u8], pk: &[u8]) { let engine = TestEngineBuilder::new().build().unwrap(); let ctx = Context::default(); let snapshot = engine.snapshot(&ctx).unwrap(); let mut txn = new_txn!(snapshot, 10, true); let key = Key::from_raw(k); assert_eq!(txn.write_size(), 0); txn.prewrite( Mutation::Put((key.clone(), v.to_vec())), pk, false, 0, 0, TimeStamp::default(), ) .unwrap(); assert!(txn.write_size() > 0); engine .write(&ctx, WriteData::from_modifies(txn.into_modifies())) .unwrap(); let snapshot = engine.snapshot(&ctx).unwrap(); let mut txn = new_txn!(snapshot, 10, true); txn.commit(key, 15.into()).unwrap(); assert!(txn.write_size() > 0); engine .write(&ctx, WriteData::from_modifies(txn.into_modifies())) .unwrap(); } #[test] fn test_write_size() { test_write_size_imp(b"key", b"value", b"pk"); let v = "x".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes(); test_write_size_imp(b"key", &v, b"pk"); } #[test] fn test_skip_constraint_check() { let engine = TestEngineBuilder::new().build().unwrap(); let (key, value) = (b"key", b"value"); must_prewrite_put(&engine, key, value, key, 5); must_commit(&engine, key, 5, 10); let ctx = Context::default(); let snapshot = engine.snapshot(&ctx).unwrap(); let mut txn = new_txn!(snapshot, 5, true); assert!(txn .prewrite( Mutation::Put((Key::from_raw(key), value.to_vec())), 
key, false, 0, 0, TimeStamp::default() ) .is_err()); let ctx = Context::default(); let snapshot = engine.snapshot(&ctx).unwrap(); let mut txn = new_txn!(snapshot, 5, true); assert!(txn .prewrite( Mutation::Put((Key::from_raw(key), value.to_vec())), key, true, 0, 0, TimeStamp::default() ) .is_ok()); } #[test] fn test_read_commit() { let engine = TestEngineBuilder::new().build().unwrap(); let (key, v1, v2) = (b"key", b"v1", b"v2"); must_prewrite_put(&engine, key, v1, key, 5); must_commit(&engine, key, 5, 10); must_prewrite_put(&engine, key, v2, key, 15); must_get_err(&engine, key, 20); must_get_rc(&engine, key, 12, v1); must_get_rc(&engine, key, 20, v1); } #[test] fn test_collapse_prev_rollback() { let engine = TestEngineBuilder::new().build().unwrap(); let (key, value) = (b"key", b"value"); // Add a Rollback whose start ts is 1. must_prewrite_put(&engine, key, value, key, 1); must_rollback_collapsed(&engine, key, 1); must_get_rollback_ts(&engine, key, 1); // Add a Rollback whose start ts is 2, the previous Rollback whose // start ts is 1 will be collapsed. must_prewrite_put(&engine, key, value, key, 2); must_rollback_collapsed(&engine, key, 2); must_get_none(&engine, key, 2); must_get_rollback_ts(&engine, key, 2); must_get_rollback_ts_none(&engine, key, 1); // Rollback arrive before Prewrite, it will collapse the // previous rollback whose start ts is 2. 
must_rollback_collapsed(&engine, key, 3); must_get_none(&engine, key, 3); must_get_rollback_ts(&engine, key, 3); must_get_rollback_ts_none(&engine, key, 2); } #[test] fn test_scan_values_in_default() { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put( &engine, &[2], "v".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[2], 3, ); must_commit(&engine, &[2], 3, 3); must_prewrite_put( &engine, &[3], "a".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[3], 3, ); must_commit(&engine, &[3], 3, 4); must_prewrite_put( &engine, &[3], "b".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[3], 5, ); must_commit(&engine, &[3], 5, 5); must_prewrite_put( &engine, &[6], "x".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[6], 3, ); must_commit(&engine, &[6], 3, 6); let snapshot = engine.snapshot(&Context::default()).unwrap(); let mut reader = MvccReader::new(snapshot, Some(ScanMode::Forward), true, IsolationLevel::Si); let v = reader.scan_values_in_default(&Key::from_raw(&[3])).unwrap(); assert_eq!(v.len(), 2); assert_eq!( v[1], (3.into(), "a".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes()) ); assert_eq!( v[0], (5.into(), "b".repeat(SHORT_VALUE_MAX_LEN + 1).into_bytes()) ); } #[test] fn test_seek_ts() { let engine = TestEngineBuilder::new().build().unwrap(); must_prewrite_put(&engine, &[2], b"vv", &[2], 3); must_commit(&engine, &[2], 3, 3); must_prewrite_put( &engine, &[3], "a".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[3], 4, ); must_commit(&engine, &[3], 4, 4); must_prewrite_put( &engine, &[5], "b".repeat(SHORT_VALUE_MAX_LEN + 1).as_bytes(), &[5], 2, ); must_commit(&engine, &[5], 2, 5); must_prewrite_put(&engine, &[6], b"xxx", &[6], 3); must_commit(&engine, &[6], 3, 6); let snapshot = engine.snapshot(&Context::default()).unwrap(); let mut reader = MvccReader::new(snapshot, Some(ScanMode::Forward), true, IsolationLevel::Si); assert_eq!( reader.seek_ts(3.into()).unwrap().unwrap(), Key::from_raw(&[2]) ); } #[test] fn test_pessimistic_lock() { let engine = 
TestEngineBuilder::new().build().unwrap(); let k = b"k1"; let v = b"v1"; // TODO: Some corner cases don't give proper results. Although they are not important, we // should consider whether they are better to be fixed. // Normal must_acquire_pessimistic_lock(&engine, k, k, 1, 1); must_pessimistic_locked(&engine, k, 1, 1); must_pessimistic_prewrite_put(&engine, k, v, k, 1, 1, true); must_locked(&engine, k, 1); must_commit(&engine, k, 1, 2); must_unlocked(&engine, k); // Lock conflict must_prewrite_put(&engine, k, v, k, 3); must_acquire_pessimistic_lock_err(&engine, k, k, 4, 4); must_cleanup(&engine, k, 3, 0); must_unlocked(&engine, k); must_acquire_pessimistic_lock(&engine, k, k, 5, 5); must_prewrite_lock_err(&engine, k, k, 6); must_acquire_pessimistic_lock_err(&engine, k, k, 6, 6); must_cleanup(&engine, k, 5, 0); must_unlocked(&engine, k); // Data conflict must_prewrite_put(&engine, k, v, k, 7); must_commit(&engine, k, 7, 9); must_unlocked(&engine, k); must_prewrite_lock_err(&engine, k, k, 8); must_acquire_pessimistic_lock_err(&engine, k, k, 8, 8); must_acquire_pessimistic_lock(&engine, k, k, 8, 9); must_pessimistic_prewrite_put(&engine, k, v, k, 8, 8, true); must_commit(&engine, k, 8, 10); must_unlocked(&engine, k); // Rollback must_acquire_pessimistic_lock(&engine, k, k, 11, 11); must_pessimistic_locked(&engine, k, 11, 11); must_cleanup(&engine, k, 11, 0); must_acquire_pessimistic_lock_err(&engine, k, k, 11, 11); must_pessimistic_prewrite_put_err(&engine, k, v, k, 11, 11, true); must_prewrite_lock_err(&engine, k, k, 11); must_unlocked(&engine, k); must_acquire_pessimistic_lock(&engine, k, k, 12, 12); must_pessimistic_prewrite_put(&engine, k, v, k, 12, 12, true); must_locked(&engine, k, 12); must_cleanup(&engine, k, 12, 0); must_acquire_pessimistic_lock_err(&engine, k, k, 12, 12); must_pessimistic_prewrite_put_err(&engine, k, v, k, 12, 12, true); must_prewrite_lock_err(&engine, k, k, 12); must_unlocked(&engine, k); // Duplicated 
must_acquire_pessimistic_lock(&engine, k, k, 13, 13); must_pessimistic_locked(&engine, k, 13, 13); must_acquire_pessimistic_lock(&engine, k, k, 13, 13); must_pessimistic_locked(&engine, k, 13, 13); must_pessimistic_prewrite_put(&engine, k, v, k, 13, 13, true); must_locked(&engine, k, 13); must_pessimistic_prewrite_put(&engine, k, v, k, 13, 13, true); must_locked(&engine, k, 13); must_commit(&engine, k, 13, 14); must_unlocked(&engine, k); must_commit(&engine, k, 13, 14); must_unlocked(&engine, k); // Pessimistic lock doesn't block reads. must_acquire_pessimistic_lock(&engine, k, k, 15, 15); must_pessimistic_locked(&engine, k, 15, 15); must_get(&engine, k, 16, v); must_pessimistic_prewrite_delete(&engine, k, k, 15, 15, true); must_get_err(&engine, k, 16); must_commit(&engine, k, 15, 17); // Rollback must_acquire_pessimistic_lock(&engine, k, k, 18, 18); must_rollback(&engine, k, 18); must_unlocked(&engine, k); must_prewrite_put(&engine, k, v, k, 19); must_commit(&engine, k, 19, 20); must_acquire_pessimistic_lock_err(&engine, k, k, 18, 21); must_unlocked(&engine, k); // Prewrite non-exist pessimistic lock must_pessimistic_prewrite_put_err(&engine, k, v, k, 22, 22, true); // LockTypeNotMatch must_prewrite_put(&engine, k, v, k, 23); must_locked(&engine, k, 23); must_acquire_pessimistic_lock_err(&engine, k, k, 23, 23); must_cleanup(&engine, k, 23, 0); must_acquire_pessimistic_lock(&engine, k, k, 24, 24); must_pessimistic_locked(&engine, k, 24, 24); must_prewrite_put_err(&engine, k, v, k, 24); must_rollback(&engine, k, 24); // Acquire lock on a prewritten key should fail. must_acquire_pessimistic_lock(&engine, k, k, 26, 26); must_pessimistic_locked(&engine, k, 26, 26); must_pessimistic_prewrite_delete(&engine, k, k, 26, 26, true); must_locked(&engine, k, 26); must_acquire_pessimistic_lock_err(&engine, k, k, 26, 26); must_locked(&engine, k, 26); // Acquire lock on a committed key should fail. 
must_commit(&engine, k, 26, 27); must_unlocked(&engine, k); must_get_none(&engine, k, 28); must_acquire_pessimistic_lock_err(&engine, k, k, 26, 26); must_unlocked(&engine, k); must_get_none(&engine, k, 28); // Pessimistic prewrite on a committed key should fail. must_pessimistic_prewrite_put_err(&engine, k, v, k, 26, 26, true); must_unlocked(&engine, k); must_get_none(&engine, k, 28); // Currently we cannot avoid this. must_acquire_pessimistic_lock(&engine, k, k, 26, 29); must_pessimistic_rollback(&engine, k, 26, 29); must_unlocked(&engine, k); // Non pessimistic key in pessimistic transaction. must_pessimistic_prewrite_put(&engine, k, v, k, 30, 30, false); must_locked(&engine, k, 30); must_commit(&engine, k, 30, 31); must_unlocked(&engine, k); must_get_commit_ts(&engine, k, 30, 31); // Rollback collapsed. must_rollback_collapsed(&engine, k, 32); must_rollback_collapsed(&engine, k, 33); must_acquire_pessimistic_lock_err(&engine, k, k, 32, 32); // Currently we cannot avoid this. must_acquire_pessimistic_lock(&engine, k, k, 32, 34); must_pessimistic_rollback(&engine, k, 32, 34); must_unlocked(&engine, k); // Acquire lock when there is lock with different for_update_ts. must_acquire_pessimistic_lock(&engine, k, k, 35, 36); must_pessimistic_locked(&engine, k, 35, 36); must_acquire_pessimistic_lock(&engine, k, k, 35, 35); must_pessimistic_locked(&engine, k, 35, 36); must_acquire_pessimistic_lock(&engine, k, k, 35, 37); must_pessimistic_locked(&engine, k, 35, 37); // Cannot prewrite when there is another transaction's pessimistic lock. must_pessimistic_prewrite_put_err(&engine, k, v, k, 36, 36, true); must_pessimistic_prewrite_put_err(&engine, k, v, k, 36, 38, true); must_pessimistic_locked(&engine, k, 35, 37); // Cannot prewrite when there is another transaction's non-pessimistic lock. 
must_pessimistic_prewrite_put(&engine, k, v, k, 35, 37, true); must_locked(&engine, k, 35); must_pessimistic_prewrite_put_err(&engine, k, v, k, 36, 38, true); must_locked(&engine, k, 35); // Commit pessimistic transaction's key but with smaller commit_ts than for_update_ts. // Currently not checked, so in this case it will actually be successfully committed. must_commit(&engine, k, 35, 36); must_unlocked(&engine, k); must_get_commit_ts(&engine, k, 35, 36); // Prewrite meets pessimistic lock on a non-pessimistic key. // Currently not checked, so prewrite will success. must_acquire_pessimistic_lock(&engine, k, k, 40, 40); must_pessimistic_locked(&engine, k, 40, 40); must_pessimistic_prewrite_put(&engine, k, v, k, 40, 40, false); must_locked(&engine, k, 40); must_commit(&engine, k, 40, 41); must_unlocked(&engine, k); // Prewrite with different for_update_ts. // Currently not checked. must_acquire_pessimistic_lock(&engine, k, k, 42, 45); must_pessimistic_locked(&engine, k, 42, 45); must_pessimistic_prewrite_put(&engine, k, v, k, 42, 43, true); must_locked(&engine, k, 42); must_commit(&engine, k, 42, 45); must_unlocked(&engine, k); must_acquire_pessimistic_lock(&engine, k, k, 46, 47); must_pessimistic_locked(&engine, k, 46, 47); must_pessimistic_prewrite_put(&engine, k, v, k, 46, 48, true); must_locked(&engine, k, 46); must_commit(&engine, k, 46, 49); must_unlocked(&engine, k); // Prewrite on non-pessimistic key meets write with larger commit_ts than current // for_update_ts (non-pessimistic data conflict). // Normally non-pessimistic keys in pessimistic transactions are used when we are sure that // there won't be conflicts. So this case is also not checked, and prewrite will succeeed. must_pessimistic_prewrite_put(&engine, k, v, k, 47, 48, false); must_locked(&engine, k, 47); must_cleanup(&engine, k, 47, 0); must_unlocked(&engine, k); // The rollback of the primary key in a pessimistic transaction should be protected from // being collapsed. 
must_acquire_pessimistic_lock(&engine, k, k, 49, 60); must_pessimistic_prewrite_put(&engine, k, v, k, 49, 60, true); must_locked(&engine, k, 49); must_cleanup(&engine, k, 49, 0); must_get_rollback_protected(&engine, k, 49, true); must_prewrite_put(&engine, k, v, k, 51); must_rollback_collapsed(&engine, k, 51); must_acquire_pessimistic_lock_err(&engine, k, k, 49, 60); // start_ts and commit_ts interlacing for start_ts in &[140, 150, 160] { let for_update_ts = start_ts + 48; let commit_ts = start_ts + 50; must_acquire_pessimistic_lock(&engine, k, k, *start_ts, for_update_ts); must_pessimistic_prewrite_put(&engine, k, v, k, *start_ts, for_update_ts, true); must_commit(&engine, k, *start_ts, commit_ts); must_get(&engine, k, commit_ts + 1, v); } must_rollback(&engine, k, 170); // Now the data should be like: (start_ts -> commit_ts) // 140 -> 190 // 150 -> 200 // 160 -> 210 // 170 -> rollback must_get_commit_ts(&engine, k, 140, 190); must_get_commit_ts(&engine, k, 150, 200); must_get_commit_ts(&engine, k, 160, 210); must_get_rollback_ts(&engine, k, 170); } #[test] fn test_pessimistic_txn_ttl() { let engine = TestEngineBuilder::new().build().unwrap(); let (k, v) = (b"k", b"v"); // Pessimistic prewrite keeps the larger TTL of the prewrite request and the original // pessimisitic lock. must_acquire_pessimistic_lock_with_ttl(&engine, k, k, 10, 10, 100); must_pessimistic_locked(&engine, k, 10, 10); must_pessimistic_prewrite_put_with_ttl(&engine, k, v, k, 10, 10, true, 110); must_locked_with_ttl(&engine, k, 10, 110); must_rollback(&engine, k, 10); // TTL not changed if the pessimistic lock's TTL is larger than that provided in the // prewrite request. 
must_acquire_pessimistic_lock_with_ttl(&engine, k, k, 20, 20, 100); must_pessimistic_locked(&engine, k, 20, 20); must_pessimistic_prewrite_put_with_ttl(&engine, k, v, k, 20, 20, true, 90); must_locked_with_ttl(&engine, k, 20, 100); } #[test] fn test_pessimistic_rollback() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k1"; let v = b"v1"; // Normal must_acquire_pessimistic_lock(&engine, k, k, 1, 1); must_pessimistic_locked(&engine, k, 1, 1); must_pessimistic_rollback(&engine, k, 1, 1); must_unlocked(&engine, k); must_get_commit_ts_none(&engine, k, 1); // Pessimistic rollback is idempotent must_pessimistic_rollback(&engine, k, 1, 1); must_unlocked(&engine, k); must_get_commit_ts_none(&engine, k, 1); // Succeed if the lock doesn't exist. must_pessimistic_rollback(&engine, k, 2, 2); // Do nothing if meets other transaction's pessimistic lock must_acquire_pessimistic_lock(&engine, k, k, 2, 3); must_pessimistic_rollback(&engine, k, 1, 1); must_pessimistic_rollback(&engine, k, 1, 2); must_pessimistic_rollback(&engine, k, 1, 3); must_pessimistic_rollback(&engine, k, 1, 4); must_pessimistic_rollback(&engine, k, 3, 3); must_pessimistic_rollback(&engine, k, 4, 4); // Succeed if for_update_ts is larger; do nothing if for_update_ts is smaller. must_pessimistic_locked(&engine, k, 2, 3); must_pessimistic_rollback(&engine, k, 2, 2); must_pessimistic_locked(&engine, k, 2, 3); must_pessimistic_rollback(&engine, k, 2, 4); must_unlocked(&engine, k); // Do nothing if rollbacks a non-pessimistic lock. 
must_prewrite_put(&engine, k, v, k, 3); must_locked(&engine, k, 3); must_pessimistic_rollback(&engine, k, 3, 3); must_locked(&engine, k, 3); // Do nothing if meets other transaction's optimistic lock must_pessimistic_rollback(&engine, k, 2, 2); must_pessimistic_rollback(&engine, k, 2, 3); must_pessimistic_rollback(&engine, k, 2, 4); must_pessimistic_rollback(&engine, k, 4, 4); must_locked(&engine, k, 3); // Do nothing if committed must_commit(&engine, k, 3, 4); must_unlocked(&engine, k); must_get_commit_ts(&engine, k, 3, 4); must_pessimistic_rollback(&engine, k, 3, 3); must_pessimistic_rollback(&engine, k, 3, 4); must_pessimistic_rollback(&engine, k, 3, 5); } #[test] fn test_overwrite_pessimistic_lock() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k1"; must_acquire_pessimistic_lock(&engine, k, k, 1, 2); must_pessimistic_locked(&engine, k, 1, 2); must_acquire_pessimistic_lock(&engine, k, k, 1, 1); must_pessimistic_locked(&engine, k, 1, 2); must_acquire_pessimistic_lock(&engine, k, k, 1, 3); must_pessimistic_locked(&engine, k, 1, 3); } #[test] fn test_txn_heart_beat() { let engine = TestEngineBuilder::new().build().unwrap(); let (k, v) = (b"k1", b"v1"); let test = |ts| { // Do nothing if advise_ttl is less smaller than current TTL. must_txn_heart_beat(&engine, k, ts, 90, 100); // Return the new TTL if the TTL when the TTL is updated. must_txn_heart_beat(&engine, k, ts, 110, 110); // The lock's TTL is updated and persisted into the db. must_txn_heart_beat(&engine, k, ts, 90, 110); // Heart beat another transaction's lock will lead to an error. must_txn_heart_beat_err(&engine, k, ts - 1, 150); must_txn_heart_beat_err(&engine, k, ts + 1, 150); // The existing lock is not changed. must_txn_heart_beat(&engine, k, ts, 90, 110); }; // No lock. must_txn_heart_beat_err(&engine, k, 5, 100); // Create a lock with TTL=100. // The initial TTL will be set to 0 after calling must_prewrite_put. Update it first. 
must_prewrite_put(&engine, k, v, k, 5); must_locked(&engine, k, 5); must_txn_heart_beat(&engine, k, 5, 100, 100); test(5); must_locked(&engine, k, 5); must_commit(&engine, k, 5, 10); must_unlocked(&engine, k); // No lock. must_txn_heart_beat_err(&engine, k, 5, 100); must_txn_heart_beat_err(&engine, k, 10, 100); must_acquire_pessimistic_lock(&engine, k, k, 8, 15); must_pessimistic_locked(&engine, k, 8, 15); must_txn_heart_beat(&engine, k, 8, 100, 100); test(8); must_pessimistic_locked(&engine, k, 8, 15); } fn test_check_txn_status_impl(rollback_if_not_exist: bool) { let engine = TestEngineBuilder::new().build().unwrap(); let (k, v) = (b"k1", b"v1"); let ts = TimeStamp::compose; // Shortcuts use super::TxnStatus::*; let committed = TxnStatus::committed; let uncommitted = TxnStatus::uncommitted; let r = rollback_if_not_exist; // Try to check a not exist thing. if r { must_check_txn_status(&engine, k, ts(3, 0), ts(3, 1), ts(3, 2), r, LockNotExist); // A protected rollback record will be written. must_get_rollback_protected(&engine, k, ts(3, 0), true); } else { must_check_txn_status_err(&engine, k, ts(3, 0), ts(3, 1), ts(3, 2), r); } // Lock the key with TTL=100. must_prewrite_put_for_large_txn(&engine, k, v, k, ts(5, 0), 100, 0); // The initial min_commit_ts is start_ts + 1. must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(5, 1), false); // Update min_commit_ts to current_ts. must_check_txn_status( &engine, k, ts(5, 0), ts(6, 0), ts(7, 0), r, uncommitted(100, ts(7, 0)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(7, 0), false); // Update min_commit_ts to caller_start_ts + 1 if current_ts < caller_start_ts. // This case should be impossible. But if it happens, we prevents it. 
must_check_txn_status( &engine, k, ts(5, 0), ts(9, 0), ts(8, 0), r, uncommitted(100, ts(9, 1)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(9, 1), false); // caller_start_ts < lock.min_commit_ts < current_ts // When caller_start_ts < lock.min_commit_ts, no need to update it. must_check_txn_status( &engine, k, ts(5, 0), ts(8, 0), ts(10, 0), r, uncommitted(100, ts(9, 1)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(9, 1), false); // current_ts < lock.min_commit_ts < caller_start_ts must_check_txn_status( &engine, k, ts(5, 0), ts(11, 0), ts(9, 0), r, uncommitted(100, ts(11, 1)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(11, 1), false); // For same caller_start_ts and current_ts, update min_commit_ts to caller_start_ts + 1 must_check_txn_status( &engine, k, ts(5, 0), ts(12, 0), ts(12, 0), r, uncommitted(100, ts(12, 1)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(12, 1), false); // Logical time is also considered in the comparing must_check_txn_status( &engine, k, ts(5, 0), ts(13, 1), ts(13, 3), r, uncommitted(100, ts(13, 3)), ); must_large_txn_locked(&engine, k, ts(5, 0), 100, ts(13, 3), false); must_commit(&engine, k, ts(5, 0), ts(15, 0)); must_unlocked(&engine, k); // Check committed key will get the commit ts. must_check_txn_status( &engine, k, ts(5, 0), ts(12, 0), ts(12, 0), r, committed(ts(15, 0)), ); must_unlocked(&engine, k); must_prewrite_put_for_large_txn(&engine, k, v, k, ts(20, 0), 100, 0); // Check a committed transaction when there is another lock. Expect getting the commit ts. must_check_txn_status( &engine, k, ts(5, 0), ts(12, 0), ts(12, 0), r, committed(ts(15, 0)), ); // Check a not existing transaction, the result depends on whether `rollback_if_not_exist` // is set. if r { must_check_txn_status(&engine, k, ts(6, 0), ts(12, 0), ts(12, 0), r, LockNotExist); // And a rollback record will be written. 
must_seek_write( &engine, k, ts(6, 0), ts(6, 0), ts(6, 0), WriteType::Rollback, ); } else { must_check_txn_status_err(&engine, k, ts(6, 0), ts(12, 0), ts(12, 0), r); } // TTL check is based on physical time (in ms). When logical time's difference is larger // than TTL, the lock won't be resolved. must_check_txn_status( &engine, k, ts(20, 0), ts(21, 105), ts(21, 105), r, uncommitted(100, ts(21, 106)), ); must_large_txn_locked(&engine, k, ts(20, 0), 100, ts(21, 106), false); // If physical time's difference exceeds TTL, lock will be resolved. must_check_txn_status(&engine, k, ts(20, 0), ts(121, 0), ts(121, 0), r, TtlExpire); must_unlocked(&engine, k); must_seek_write( &engine, k, TimeStamp::max(), ts(20, 0), ts(20, 0), WriteType::Rollback, ); // Push the min_commit_ts of pessimistic locks. must_acquire_pessimistic_lock_for_large_txn(&engine, k, k, ts(4, 0), ts(130, 0), 200); must_large_txn_locked(&engine, k, ts(4, 0), 200, ts(130, 1), true); must_check_txn_status( &engine, k, ts(4, 0), ts(135, 0), ts(135, 0), r, uncommitted(200, ts(135, 1)), ); must_large_txn_locked(&engine, k, ts(4, 0), 200, ts(135, 1), true); // Commit the key. must_pessimistic_prewrite_put(&engine, k, v, k, ts(4, 0), ts(130, 0), true); must_commit(&engine, k, ts(4, 0), ts(140, 0)); must_unlocked(&engine, k); must_get_commit_ts(&engine, k, ts(4, 0), ts(140, 0)); // Now the transactions are intersecting: // T1: start_ts = 5, commit_ts = 15 // T2: start_ts = 20, rollback // T3: start_ts = 4, commit_ts = 140 must_check_txn_status( &engine, k, ts(4, 0), ts(10, 0), ts(10, 0), r, committed(ts(140, 0)), ); must_check_txn_status( &engine, k, ts(5, 0), ts(10, 0), ts(10, 0), r, committed(ts(15, 0)), ); must_check_txn_status(&engine, k, ts(20, 0), ts(10, 0), ts(10, 0), r, RolledBack); // Rollback expired pessimistic lock. 
must_acquire_pessimistic_lock_for_large_txn(&engine, k, k, ts(150, 0), ts(150, 0), 100); must_check_txn_status( &engine, k, ts(150, 0), ts(160, 0), ts(160, 0), r, uncommitted(100, ts(160, 1)), ); must_large_txn_locked(&engine, k, ts(150, 0), 100, ts(160, 1), true); must_check_txn_status(&engine, k, ts(150, 0), ts(160, 0), ts(260, 0), r, TtlExpire); must_unlocked(&engine, k); // Rolling back a pessimistic lock should leave Rollback mark. must_seek_write( &engine, k, TimeStamp::max(), ts(150, 0), ts(150, 0), WriteType::Rollback, ); // Rollback when current_ts is u64::max_value() must_prewrite_put_for_large_txn(&engine, k, v, k, ts(270, 0), 100, 0); must_large_txn_locked(&engine, k, ts(270, 0), 100, ts(270, 1), false); must_check_txn_status( &engine, k, ts(270, 0), ts(271, 0), TimeStamp::max(), r, TtlExpire, ); must_unlocked(&engine, k); must_seek_write( &engine, k, TimeStamp::max(), ts(270, 0), ts(270, 0), WriteType::Rollback, ); must_acquire_pessimistic_lock_for_large_txn(&engine, k, k, ts(280, 0), ts(280, 0), 100); must_large_txn_locked(&engine, k, ts(280, 0), 100, ts(280, 1), true); must_check_txn_status( &engine, k, ts(280, 0), ts(281, 0), TimeStamp::max(), r, TtlExpire, ); must_unlocked(&engine, k); must_seek_write( &engine, k, TimeStamp::max(), ts(280, 0), ts(280, 0), WriteType::Rollback, ); // Don't push forward the min_commit_ts if the min_commit_ts of the lock is 0. 
must_acquire_pessimistic_lock_with_ttl(&engine, k, k, ts(290, 0), ts(290, 0), 100); must_check_txn_status( &engine, k, ts(290, 0), ts(300, 0), ts(300, 0), r, uncommitted(100, TimeStamp::zero()), ); must_large_txn_locked(&engine, k, ts(290, 0), 100, TimeStamp::zero(), true); must_pessimistic_rollback(&engine, k, ts(290, 0), ts(290, 0)); must_prewrite_put_impl( &engine, k, v, k, ts(300, 0), false, 100, TimeStamp::zero(), 1, /* min_commit_ts */ TimeStamp::zero(), ); must_check_txn_status( &engine, k, ts(300, 0), ts(310, 0), ts(310, 0), r, uncommitted(100, TimeStamp::zero()), ); must_large_txn_locked(&engine, k, ts(300, 0), 100, TimeStamp::zero(), false); must_rollback(&engine, k, ts(300, 0)); must_prewrite_put_for_large_txn(&engine, k, v, k, ts(310, 0), 100, 0); must_large_txn_locked(&engine, k, ts(310, 0), 100, ts(310, 1), false); // Don't push forward the min_commit_ts if caller_start_ts is max. must_check_txn_status( &engine, k, ts(310, 0), TimeStamp::max(), ts(320, 0), r, uncommitted(100, ts(310, 1)), ); must_commit(&engine, k, ts(310, 0), ts(315, 0)); must_check_txn_status( &engine, k, ts(310, 0), TimeStamp::max(), ts(320, 0), r, committed(ts(315, 0)), ); } #[test] fn test_check_txn_status() { test_check_txn_status_impl(false); test_check_txn_status_impl(true); } #[test] fn test_constraint_check_with_overlapping_txn() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k1"; let v = b"v1"; must_prewrite_put(&engine, k, v, k, 10); must_commit(&engine, k, 10, 11); must_acquire_pessimistic_lock(&engine, k, k, 5, 12); must_pessimistic_prewrite_lock(&engine, k, k, 5, 12, true); must_commit(&engine, k, 5, 15); // Now in write cf: // start_ts = 10, commit_ts = 11, Put("v1") // start_ts = 5, commit_ts = 15, Lock must_get(&engine, k, 19, v); assert!(try_prewrite_insert(&engine, k, v, k, 20).is_err()); } #[test] fn test_lock_info_validation() { use kvproto::kvrpcpb::{LockInfo, Op}; let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k"; let v = 
b"v"; let assert_lock_info_eq = |e, expected_lock_info: &LockInfo| match e { Error(box ErrorInner::KeyIsLocked(info)) => assert_eq!(info, *expected_lock_info), _ => panic!("unexpected error"), }; for is_optimistic in &[false, true] { let mut expected_lock_info = LockInfo::default(); expected_lock_info.set_primary_lock(k.to_vec()); expected_lock_info.set_lock_version(10); expected_lock_info.set_key(k.to_vec()); expected_lock_info.set_lock_ttl(3); if *is_optimistic { expected_lock_info.set_txn_size(10); expected_lock_info.set_lock_type(Op::Put); // Write an optimistic lock. must_prewrite_put_impl( &engine, expected_lock_info.get_key(), v, expected_lock_info.get_primary_lock(), expected_lock_info.get_lock_version(), false, expected_lock_info.get_lock_ttl(), TimeStamp::zero(), expected_lock_info.get_txn_size(), TimeStamp::zero(), ); } else { expected_lock_info.set_lock_type(Op::PessimisticLock); expected_lock_info.set_lock_for_update_ts(10); // Write a pessimistic lock. must_acquire_pessimistic_lock_impl( &engine, expected_lock_info.get_key(), expected_lock_info.get_primary_lock(), expected_lock_info.get_lock_version(), expected_lock_info.get_lock_ttl(), expected_lock_info.get_lock_for_update_ts(), false, TimeStamp::zero(), ); } assert_lock_info_eq( must_prewrite_put_err(&engine, k, v, k, 20), &expected_lock_info, ); assert_lock_info_eq( must_acquire_pessimistic_lock_err(&engine, k, k, 30, 30), &expected_lock_info, ); // If the lock is not expired, cleanup will return the lock info. 
assert_lock_info_eq(must_cleanup_err(&engine, k, 10, 1), &expected_lock_info); expected_lock_info.set_lock_ttl(0); assert_lock_info_eq( must_pessimistic_prewrite_put_err(&engine, k, v, k, 40, 40, false), &expected_lock_info, ); // Delete the lock if *is_optimistic { must_rollback(&engine, k, expected_lock_info.get_lock_version()); } else { must_pessimistic_rollback( &engine, k, expected_lock_info.get_lock_version(), expected_lock_info.get_lock_for_update_ts(), ); } } } #[test] fn test_non_pessimistic_lock_conflict_with_optimistic_txn() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k1"; let v = b"v1"; must_prewrite_put(&engine, k, v, k, 2); must_locked(&engine, k, 2); must_pessimistic_prewrite_put_err(&engine, k, v, k, 1, 1, false); must_pessimistic_prewrite_put_err(&engine, k, v, k, 3, 3, false); } #[test] fn test_non_pessimistic_lock_conflict_with_pessismitic_txn() { let engine = TestEngineBuilder::new().build().unwrap(); // k1 is a row key, k2 is the corresponding index key. let (k1, v1) = (b"k1", b"v1"); let (k2, v2) = (b"k2", b"v2"); let (k3, v3) = (b"k3", b"v3"); // Commit k3 at 20. must_prewrite_put(&engine, k3, v3, k3, 1); must_commit(&engine, k3, 1, 20); // Txn-10 acquires pessimistic locks on k1 and k3. must_acquire_pessimistic_lock(&engine, k1, k1, 10, 10); must_acquire_pessimistic_lock_err(&engine, k3, k1, 10, 10); // Update for_update_ts to 20 due to write conflict must_acquire_pessimistic_lock(&engine, k3, k1, 10, 20); must_pessimistic_prewrite_put(&engine, k1, v1, k1, 10, 20, true); must_pessimistic_prewrite_put(&engine, k3, v3, k1, 10, 20, true); // Write a non-pessimistic lock with for_update_ts 20. must_pessimistic_prewrite_put(&engine, k2, v2, k1, 10, 20, false); // Roll back the primary key due to timeout, but the non-pessimistic lock is not rolled // back. must_rollback(&engine, k1, 10); // Txn-15 acquires pessimistic locks on k1. 
must_acquire_pessimistic_lock(&engine, k1, k1, 15, 15); must_pessimistic_prewrite_put(&engine, k1, v1, k1, 15, 15, true); // There is a non-pessimistic lock conflict here. match must_pessimistic_prewrite_put_err(&engine, k2, v2, k1, 15, 15, false) { Error(box ErrorInner::KeyIsLocked(info)) => assert_eq!(info.get_lock_ttl(), 0), e => panic!("unexpected error: {}", e), }; } #[test] fn test_commit_pessimistic_lock() { let engine = TestEngineBuilder::new().build().unwrap(); let k = b"k"; must_acquire_pessimistic_lock(&engine, k, k, 10, 10); must_commit_err(&engine, k, 20, 30); must_commit(&engine, k, 10, 20); must_seek_write(&engine, k, 30, 10, 20, WriteType::Lock); } #[test] fn test_pessimistic_lock_return_value() { let engine = TestEngineBuilder::new().build().unwrap(); let (k, v) = (b"k", b"v"); assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 10, 10), None ); must_pessimistic_locked(&engine, k, 10, 10); must_pessimistic_rollback(&engine, k, 10, 10); // Put must_prewrite_put(&engine, k, v, k, 10); // KeyIsLocked match must_acquire_pessimistic_lock_return_value_err(&engine, k, k, 20, 20) { Error(box ErrorInner::KeyIsLocked(_)) => (), e => panic!("unexpected error: {}", e), }; must_commit(&engine, k, 10, 20); // WriteConflict match must_acquire_pessimistic_lock_return_value_err(&engine, k, k, 15, 15) { Error(box ErrorInner::WriteConflict { .. 
}) => (), e => panic!("unexpected error: {}", e), }; assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 25, 25), Some(v.to_vec()) ); must_pessimistic_locked(&engine, k, 25, 25); must_pessimistic_rollback(&engine, k, 25, 25); // Skip Write::Lock must_prewrite_lock(&engine, k, k, 30); must_commit(&engine, k, 30, 40); assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 45, 45), Some(v.to_vec()) ); must_pessimistic_locked(&engine, k, 45, 45); must_pessimistic_rollback(&engine, k, 45, 45); // Skip Write::Rollback must_rollback(&engine, k, 50); assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 55, 55), Some(v.to_vec()) ); must_pessimistic_locked(&engine, k, 55, 55); must_pessimistic_rollback(&engine, k, 55, 55); // Delete must_prewrite_delete(&engine, k, k, 60); must_commit(&engine, k, 60, 70); assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 75, 75), None ); // Duplicated command assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 75, 75), None ); assert_eq!( must_acquire_pessimistic_lock_return_value(&engine, k, k, 75, 55), Some(v.to_vec()) ); must_pessimistic_locked(&engine, k, 75, 75); must_pessimistic_rollback(&engine, k, 75, 75); } #[test] fn test_extra_op_old_value() { let engine = TestEngineBuilder::new().build().unwrap(); let key = Key::from_raw(b"key"); let ctx = Context::default(); let new_old_value = |short_value, start_ts| OldValue { short_value, start_ts, }; let cases = vec![ ( Mutation::Put((key.clone(), b"v0".to_vec())), false, 5, 5, None, true, ), ( Mutation::Put((key.clone(), b"v1".to_vec())), false, 6, 6, Some(new_old_value(Some(b"v0".to_vec()), 5.into())), true, ), (Mutation::Lock(key.clone()), false, 7, 7, None, false), ( Mutation::Lock(key.clone()), false, 8, 8, Some(new_old_value(Some(b"v1".to_vec()), 6.into())), false, ), ( Mutation::Put((key.clone(), vec![b'0'; 5120])), false, 9, 9, Some(new_old_value(Some(b"v1".to_vec()), 6.into())), true, ), ( 
Mutation::Put((key.clone(), b"v3".to_vec())), false, 10, 10, Some(new_old_value(None, 9.into())), true, ), ( Mutation::Put((key.clone(), b"v4".to_vec())), true, 11, 11, None, true, ), ]; let write = |modifies| { engine.write(&ctx, modifies).unwrap(); }; let new_txn = |start_ts| { let snapshot = engine.snapshot(&ctx).unwrap(); MvccTxn::new(snapshot, start_ts, true) }; for case in cases { let (mutation, is_pessimistic, start_ts, commit_ts, old_value, check_old_value) = case; let mutation_type = mutation.mutation_type(); let mut txn = new_txn(start_ts.into()); txn.extra_op = ExtraOp::ReadOldValue; if is_pessimistic { txn.acquire_pessimistic_lock( key.clone(), b"key", false, 0, start_ts.into(), false, TimeStamp::zero(), ) .unwrap(); write(WriteData::from_modifies(txn.into_modifies())); txn = new_txn(start_ts.into()); txn.extra_op = ExtraOp::ReadOldValue; txn.pessimistic_prewrite( mutation, b"key", true, 0, start_ts.into(), 0, TimeStamp::zero(), false, ) .unwrap(); } else { txn.prewrite(mutation, b"key", false, 0, 0, TimeStamp::default()) .unwrap(); } if check_old_value { let extra = txn.take_extra(); let ts_key = key.clone().append_ts(start_ts.into()); assert!( extra.get_old_values().get(&ts_key).is_some(), "{}", start_ts ); assert_eq!(extra.get_old_values()[&ts_key], (old_value, mutation_type)); } write(WriteData::from_modifies(txn.into_modifies())); let mut txn = new_txn(start_ts.into()); txn.commit(key.clone(), commit_ts.into()).unwrap(); engine .write(&ctx, WriteData::from_modifies(txn.into_modifies())) .unwrap(); } } }
36.436392
103
0.53451
1a79230b399c40af1633de020da14f0e862eceb8
5,462
use crate::phases::OpaqueRenderPhase; use crate::shaders; use fnv::FnvHashMap; use glam::Vec3; use rafx::api::RafxResult; use rafx::assets::{ AssetManager, BufferAsset, DefaultAssetTypeHandler, DefaultAssetTypeLoadHandler, ImageAsset, MaterialInstanceAsset, }; use rafx::distill::loader::handle::Handle; use rafx::framework::{BufferResource, DescriptorSetArc, ResourceArc}; use serde::{Deserialize, Serialize}; use std::sync::Arc; use type_uuid::*; pub type LevelUid = i64; pub type TileSetUid = i64; #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LdtkLayerDrawCallData { pub vertex_data_offset_in_bytes: u32, pub index_data_offset_in_bytes: u32, pub index_count: u32, pub z_pos: f32, } #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LdtkLayerData { pub material_instance: Handle<MaterialInstanceAsset>, pub draw_call_data: Vec<LdtkLayerDrawCallData>, pub z_pos: f32, pub world_x_pos: i64, pub world_y_pos: i64, pub grid_width: i64, pub grid_height: i64, pub grid_size: i64, } #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LdtkLevelData { pub layer_data: Vec<LdtkLayerData>, pub vertex_data: Option<Handle<BufferAsset>>, pub index_data: Option<Handle<BufferAsset>>, } #[derive(Serialize, Deserialize, Clone, Debug)] pub struct LdtkTileSet { pub image: Handle<ImageAsset>, pub material_instance: Handle<MaterialInstanceAsset>, pub image_width: u32, pub image_height: u32, } #[derive(TypeUuid, Serialize, Deserialize, Clone, Debug)] #[uuid = "98c635e3-b277-422f-bd6a-bf0b83814211"] pub struct LdtkAssetData { pub tilesets: FnvHashMap<TileSetUid, LdtkTileSet>, pub levels: FnvHashMap<LevelUid, LdtkLevelData>, } #[derive(Clone, Debug)] pub struct LdtkLayer { pub per_layer_descriptor_set: DescriptorSetArc, pub width: i64, pub height: i64, pub center: Vec3, } #[derive(Clone, Debug)] pub struct LdtkLevel { pub layers: Vec<LdtkLayer>, pub vertex_buffer: Option<ResourceArc<BufferResource>>, pub index_buffer: Option<ResourceArc<BufferResource>>, } #[derive(Debug)] 
pub struct LdtkProjectAssetInner { pub data: LdtkAssetData, pub levels: FnvHashMap<LevelUid, LdtkLevel>, } #[derive(TypeUuid, Clone, Debug)] #[uuid = "231e4fd5-add2-4024-a479-d8181b3e52a3"] pub struct LdtkProjectAsset { pub inner: Arc<LdtkProjectAssetInner>, } pub struct LdtkLoadHandler; impl DefaultAssetTypeLoadHandler<LdtkAssetData, LdtkProjectAsset> for LdtkLoadHandler { #[profiling::function] fn load( asset_manager: &mut AssetManager, ldtk_asset: LdtkAssetData, ) -> RafxResult<LdtkProjectAsset> { let mut levels = FnvHashMap::<LevelUid, LdtkLevel>::default(); for (&level_uid, level_data) in &ldtk_asset.levels { let mut layers = Vec::default(); for layer_data in &level_data.layer_data { let material_instance = asset_manager .latest_asset(&layer_data.material_instance) .unwrap(); let opaque_phase_pass_index = material_instance .material .find_pass_by_phase::<OpaqueRenderPhase>() .expect("tileset material must have pass for opaque phase"); let tileset_image_set_index = shaders::tile_layer_frag::TEX_DESCRIPTOR_SET_INDEX; let descriptor_set = material_instance.material_descriptor_sets [opaque_phase_pass_index][tileset_image_set_index] .clone() .unwrap(); let width = layer_data.grid_width * layer_data.grid_size; let height = layer_data.grid_height * layer_data.grid_size; layers.push(LdtkLayer { per_layer_descriptor_set: descriptor_set, center: Vec3::new( (layer_data.world_x_pos + (width / 2)) as f32, (layer_data.world_y_pos + (height / 2)) as f32, layer_data.z_pos, ), width, height, }); } let mut vertex_buffer = None; let mut index_buffer = None; if let (Some(vertex_data_handle), Some(index_data_handle)) = (&level_data.vertex_data, &level_data.index_data) { vertex_buffer = Some( asset_manager .latest_asset(&vertex_data_handle) .unwrap() .buffer .clone(), ); index_buffer = Some( asset_manager .latest_asset(&index_data_handle) .unwrap() .buffer .clone(), ); } levels.insert( level_uid, LdtkLevel { layers, vertex_buffer, index_buffer, }, ); } let inner = 
LdtkProjectAssetInner { data: ldtk_asset, levels, }; Ok(LdtkProjectAsset { inner: Arc::new(inner), }) } } pub type LdtkAssetType = DefaultAssetTypeHandler<LdtkAssetData, LdtkProjectAsset, LdtkLoadHandler>;
31.94152
99
0.601611
8ff35d8ddf10fc8e68421eb7429c298af0a2b408
357
// run-pass #![feature(generators)] fn main() { static || { loop { // Test that `opt` is not live across the yield, even when borrowed in a loop // See https://github.com/rust-lang/rust/issues/52792 let opt = { yield; true }; &opt; } }; }
19.833333
89
0.434174
4bbf25cbe553a32fda745690f12d2d5809198818
7,389
use luminance::context::GraphicsContext; use luminance::framebuffer::{DepthSlot, Framebuffer}; use luminance::pipeline::PipelineState; use luminance::pixel::{Depth32F, Pixel}; use luminance::texture::{Dim2, Texture}; use luminance_glfw::{ Action, CursorMode, GlfwSurface, Key, Surface as _, WindowDim, WindowEvent, WindowOpt, }; use cgmath::{EuclideanSpace, InnerSpace, Point3, Quaternion, Rad, Rotation3, Vector3}; use std::path::Path; use std::process::exit; use std::time::Instant; mod engine; pub mod terrain; const X_DEFAULT_SIZE: u32 = 1000; const Y_DEFAULT_SIZE: u32 = 1000; fn main() { //let t = terrain::generate(100,100); // println!("{:?} {:?}", Vector3 { // x: 0., // y: 0., // z: 0. // }, Vector3 { // x: 0., // y: 0., // z: 0. // } + Vector3::<f32>::unit_x()); // return; let surface = GlfwSurface::new( WindowDim::Windowed(X_DEFAULT_SIZE, Y_DEFAULT_SIZE), "Hello, world!", WindowOpt::default().set_cursor_mode(CursorMode::Disabled), ); let res = match surface { Ok(surface) => { eprintln!("graphics surface created"); main_loop(surface); 0 } Err(e) => { eprintln!("cannot create graphics surface:\n{}", e); 1 } }; if res == 1 { exit(1); } } fn main_loop(mut surface: GlfwSurface) { let mut file_loader = engine::FileLoader::new(); let mut size = [X_DEFAULT_SIZE, Y_DEFAULT_SIZE]; let start_t = Instant::now(); let entity = engine::hud::Entity::load( &mut file_loader, &mut surface, &[ engine::hud::VertexPosition::new([0, 0]), engine::hud::VertexPosition::new([100, 0]), engine::hud::VertexPosition::new([100, 200]), engine::hud::VertexPosition::new([0, 200]), ], &[0, 1, 2, 0, 2, 3], Path::new("texture.ron"), ) .expect("Error creeating entity"); let font = engine::text::Font::new( "Roboto", engine::text::FontWeight::Black, engine::text::FontStyle::Regular, 20., ); // let mut rt_font = engine::text_rusttype::Font::new("Roboto", engine::text_rusttype::FontWeight::Black, engine::text_rusttype::FontStyle::Regular, 20.); // font.set_color(engine::RgbaColor::new(255,0,0,170)); let 
mut entity2 = engine::hud::Entity::new_entity_from_string( &mut file_loader, &mut surface, "Lies & deception".into(), &font, ) .unwrap(); entity2.set_pos([100, 100]); entity2.set_depth(-1.0); let mut back_buffer = surface.back_buffer().unwrap(); let depth_map_size = [1024, 1024]; let depth_fb: Framebuffer<Dim2, (), Depth32F> = Framebuffer::new(&mut surface, depth_map_size, 0, Default::default()).unwrap(); let depth_e = engine::hud::DepthEntity::new( &mut surface, &[ engine::hud::Vertex::new( engine::hud::VertexPosition::new([0, 0]), engine::hud::VertexUV::new([0, 0]), ), engine::hud::Vertex::new( engine::hud::VertexPosition::new([500, 0]), engine::hud::VertexUV::new([depth_map_size[0], 0]), ), engine::hud::Vertex::new( engine::hud::VertexPosition::new([0, 500]), engine::hud::VertexUV::new([0, depth_map_size[1]]), ), engine::hud::Vertex::new( engine::hud::VertexPosition::new([500, 500]), engine::hud::VertexUV::new(depth_map_size), ), ], &[0, 1, 2, 1, 2, 3], size, ); let pos = [0, 0]; let mut hud_registry = engine::EntityRegistry::new(); //hud_registry.register(&"Playeer", entity); hud_registry.register(&"Text", entity2); // hud_registry.register(&"Shadow", depth_e); let renderer = engine::hud::Renderer::new(); let mut key_registry = engine::KeyRegistry::new(); let mut spatial_renderer = engine::spatial::Renderer::new(&mut file_loader, &mut surface, size, depth_map_size); // let mut depth_renderer = engine::spatial::depth::Renderer::new(&mut file_loader, &mut surface, size); let mut last_pos = [0.0; 2]; let mut f = true; 'app: loop { let mut resized = false; // handle events for event in surface.poll_events() { key_registry.event(&event); match &event { WindowEvent::Close | WindowEvent::Key(Key::Escape, _, Action::Release, _) => { break 'app } WindowEvent::Key(Key::K, _, Action::Press, _) => { // hud_registry // .get_mut(&"Playeer") // .unwrap() // .set_state("2") // .expect("Error setting state"); } WindowEvent::CursorPos(x, y) => { if !f { let move_x = -(*x - 
last_pos[0]); let move_y = *y - last_pos[1]; // let e = Euler { // x: Rad(0.005 * move_y).normalize_signed(), // y: Rad(0.005) * move_x, // z: Rad(0.), // }; let qy = Quaternion::from_angle_y(Rad(0.005 * move_x)) .cast() .unwrap(); let qx = Quaternion::from_angle_x(Rad(0.005 * move_y)) .cast() .unwrap(); spatial_renderer.camera.rot = spatial_renderer.camera.rot * qy * qx; spatial_renderer.camera.update_dir(); // let e: Euler<Rad<f32>> = spatial_renderer.camera.rot.into(); // dbg!(e, Rad(0.005 * move_x)); } else { f = false; } last_pos = [*x, *y]; } WindowEvent::FramebufferSize(x, y) => { size = [*x as u32, *y as u32]; resized = true; } _ => (), } } key_registry.for_pressed_keys(|key| { let mut fd_scale = 0.0; let mut rt_scale = 0.0; let speed = 0.5; match key { Key::W => { //pos[1] += 10; fd_scale = 1.; } Key::S => { //pos[1] -= 10; fd_scale = -1.; } Key::A => { //pos[0] -= 10; rt_scale = -1.; } Key::D => { //pos[0] += 10; rt_scale = 1.; } _ => (), }; let mut fd = spatial_renderer.camera.dir.clone().normalize(); //fd.y = 0.; let mut rt = Vector3::new(-fd.z, 0.0, fd.x); fd = fd.normalize_to(fd_scale * speed); rt = rt.normalize_to(rt_scale * speed); spatial_renderer.camera.pos += fd + rt; }); spatial_renderer.camera.update(); let light_pos = spatial_renderer.camera.pos + Vector3::new(0.0, 10., 0.); spatial_renderer.depth_camera.pos = light_pos; spatial_renderer .depth_camera .look_at(spatial_renderer.mesh.pos); if resized { back_buffer = surface.back_buffer().unwrap(); } // println!("{:?}", d.size()); // entity.set_pos(pos.clone()); // hud_registry // .get_mut(&"Playeer") // .unwrap() // .set_pos(pos.clone()); // rendering code goes here let t = start_t.elapsed().as_millis() as f32 * 1e-3; // hud_registry.get_mut(&"Text").unwrap().update_text(&mut surface, &format!("{:.2}", t), &font).expect("Error updating text"); // Dynamic text rendering let color = [t.cos(), t.sin(), 0.5, 1.]; // draw the shadows surface.pipeline_builder().pipeline( &depth_fb, 
&PipelineState::default(), |pipeline, mut shd_gate| { // let light_pos = (-1.1,2.,3.).into(); spatial_renderer.render_depth(&mut shd_gate, &pipeline, &size); }, ); surface.pipeline_builder().pipeline( &back_buffer, &PipelineState::default().set_clear_color(color), |pipeline, mut shd_gate| { spatial_renderer.render(&mut shd_gate, &pipeline, &size); // Render the HUD last renderer.render( &hud_registry, &mut shd_gate, &pipeline, &size, &depth_e, depth_fb.depth_slot(), ); }, ); // let tex = engine::depth_texture_to_color(&mut surface, ); // hud_registry // .get_mut(&"Shadow") // .unwrap() // .update_tex(*(depth_fb.depth_slot().clone())); // swap buffer chains surface.swap_buffers(); } }
25.926316
155
0.608472
01e988c1822ca6b2e989c6ac75e417094b21c256
14,606
//! Utility functions for working with Cap'n Proto Raft messages. #![allow(dead_code)] use std::net::SocketAddr; use std::rc::Rc; use std::collections::HashMap; use capnp::message::{Builder, HeapAllocator}; use {ClientId, Term, LogIndex, ServerId, LogId, TransactionId}; use messages_capnp::{client_request, client_response, connection_preamble, message}; use transaction; // ConnectionPreamble pub fn server_connection_preamble(id: ServerId, addr: &SocketAddr, community_string: &str, peers: &HashMap<ServerId, SocketAddr>) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut server = message.init_root::<connection_preamble::Builder>() .init_id() .init_server(); server.set_addr(&format!("{}", addr)); server.set_id(id.as_u64()); server.set_community(community_string); let mut entry_list = server.init_peers(peers.len() as u32); for (n, entry) in peers.iter().enumerate() { let mut slot = entry_list.borrow().get(n as u32); slot.set_id(entry.0.as_u64()); slot.set_addr(&format!("{}", entry.1)); } } Rc::new(message) } pub fn client_connection_preamble(id: ClientId, username: &str, password: &str) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut client = message.init_root::<connection_preamble::Builder>() .init_id() .init_client(); client.set_username(username); client.set_password(password); client.set_id(id.as_bytes()); } Rc::new(message) } pub fn server_add(id: ServerId, community_string: &str, addr: &SocketAddr) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut message = message.init_root::<connection_preamble::Builder>() .init_id() .init_server_add(); message.set_id(id.as_u64()); message.set_community(community_string); message.set_addr(&format!("{}", addr)); } Rc::new(message) } // AppendEntries pub fn append_entries_request(term: Term, prev_log_index: LogIndex, prev_log_term: Term, entries: &[(Term, &[u8])], leader_commit: LogIndex, lid: &LogId) -> 
Rc<Builder<HeapAllocator>> { let bytes = &lid.as_bytes(); assert!(bytes.len() > 0); let mut message = Builder::new_default(); { let mut request = message.init_root::<message::Builder>(); request.set_log_id(bytes); let mut request = request.init_append_entries_request(); request.set_term(term.as_u64()); request.set_prev_log_index(prev_log_index.as_u64()); request.set_prev_log_term(prev_log_term.as_u64()); request.set_leader_commit(leader_commit.as_u64()); let mut entry_list = request.init_entries(entries.len() as u32); for (n, entry) in entries.iter().enumerate() { let mut slot = entry_list.borrow().get(n as u32); slot.set_term(entry.0.into()); slot.set_data(entry.1); } } Rc::new(message) } pub fn append_entries_response_success(term: Term, log_index: LogIndex, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let bytes = &lid.as_bytes(); assert!(bytes.len() > 0); let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(bytes); let mut response = response.init_append_entries_response(); response.set_term(term.as_u64()); response.set_success(log_index.as_u64()); } Rc::new(message) } pub fn append_entries_response_stale_term(term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_append_entries_response(); response.set_term(term.as_u64()); response.set_stale_term(()); } Rc::new(message) } pub fn append_entries_response_inconsistent_prev_entry(term: Term, index: LogIndex, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_append_entries_response(); response.set_term(term.as_u64()); response.set_inconsistent_prev_entry(index.into()); } Rc::new(message) } pub fn 
append_entries_response_internal_error(term: Term, error: &str, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_append_entries_response(); response.set_term(term.as_u64()); response.set_internal_error(error); } Rc::new(message) } // RequestVote pub fn request_vote_request(term: Term, last_log_index: LogIndex, last_log_term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut request = message.init_root::<message::Builder>(); request.set_log_id(&lid.as_bytes()); let mut request = request.init_request_vote_request(); request.set_term(term.as_u64()); request.set_last_log_index(last_log_index.as_u64()); request.set_last_log_term(last_log_term.as_u64()); } Rc::new(message) } pub fn request_vote_response_granted(term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_request_vote_response(); response.set_term(term.as_u64()); response.set_granted(()); } Rc::new(message) } pub fn request_vote_response_stale_term(term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_request_vote_response(); response.set_term(term.as_u64()); response.set_stale_term(()); } Rc::new(message) } pub fn request_vote_response_already_voted(term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_request_vote_response(); response.set_term(term.as_u64()); 
response.set_already_voted(()); } Rc::new(message) } pub fn request_vote_response_inconsistent_log(term: Term, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_request_vote_response(); response.set_term(term.as_u64()); response.set_inconsistent_log(()); } Rc::new(message) } pub fn request_vote_response_internal_error(term: Term, error: &str, lid: &LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<message::Builder>(); response.set_log_id(&lid.as_bytes()); let mut response = response.init_request_vote_response(); response.set_term(term.as_u64()); response.set_internal_error(error); } Rc::new(message) } // Ping pub fn ping_request(session: TransactionId, lid: &LogId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); let mut request = request.init_ping(); request.set_session(&session.as_bytes()); } message } // Query pub fn query_request(entry: &[u8], lid: &LogId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); let mut request = request.init_query(); request.set_query(entry); } message } // Proposal pub fn proposal_request(session: TransactionId, entry: &[u8], lid: LogId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); let mut request = request.init_proposal(); request.set_entry(entry); request.set_session(&session.as_bytes()); } message } // Query / Proposal Response pub fn command_response_success(data: &[u8], lid: LogId) -> Rc<Builder<HeapAllocator>> { let 
mut message = Builder::new_default(); { let mut response = message.init_root::<client_response::Builder>(); response.set_log_id(&lid.as_bytes()); response.init_proposal() .set_success(data); } Rc::new(message) } pub fn command_response_unknown_leader(lid: LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<client_response::Builder>(); response.set_log_id(&lid.as_bytes()); response.init_proposal() .set_unknown_leader(()); } Rc::new(message) } pub fn command_response_not_leader(leader_hint: &SocketAddr, lid: LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<client_response::Builder>(); response.set_log_id(&lid.as_bytes()); response.init_proposal() .set_not_leader(&format!("{}", leader_hint)); } Rc::new(message) } // Transaction pub fn transaction_begin(lid: LogId, session: TransactionId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut request = message.init_root::<message::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_begin().set_session(&session.as_bytes()); } Rc::new(message) } pub fn transaction_commit(lid: LogId, session: TransactionId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut request = message.init_root::<message::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_commit().set_session(&session.as_bytes()); } Rc::new(message) } pub fn client_transaction_begin(lid: LogId, session: TransactionId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_begin() .set_session(&session.as_bytes()); } message } pub fn client_transaction_commit(lid: LogId, session: TransactionId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut 
request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_commit() .set_session(&session.as_bytes()); } message } pub fn client_transaction_rollback(lid: LogId, session: TransactionId) -> Builder<HeapAllocator> { let mut message = Builder::new_default(); { let mut request = message.init_root::<client_request::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_rollback().set_session(&session.as_bytes()); } message } pub fn command_transaction_success(data: &[u8], lid: LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<client_response::Builder>(); response.set_log_id(&lid.as_bytes()); response.init_proposal() .set_success(data); } Rc::new(message) } pub fn command_transaction_failure(error: transaction::TransactionError, lid: LogId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut response = message.init_root::<client_response::Builder>(); response.set_log_id(&lid.as_bytes()); response.init_transaction() .set_failure(format!("{}", error).as_bytes()); } Rc::new(message) } pub fn transaction_rollback(lid: LogId, session: TransactionId) -> Rc<Builder<HeapAllocator>> { let mut message = Builder::new_default(); { let mut request = message.init_root::<message::Builder>(); request.set_log_id(&lid.as_bytes()); request.init_transaction_rollback().set_session(&session.as_bytes()); } Rc::new(message) }
35.026379
99
0.589895
bb223b782938c4e6d232cb467f3795e8c0a8921d
4,146
use std::ffi::CString; use std::ops::Deref; use super::{Ass, Bitmap, Flags, Text, Type}; use crate::ffi::*; use libc::c_int; pub enum RectMut<'a> { None(*mut AVSubtitleRect), Bitmap(BitmapMut<'a>), Text(TextMut<'a>), Ass(AssMut<'a>), } impl<'a> RectMut<'a> { pub unsafe fn wrap(ptr: *mut AVSubtitleRect) -> Self { match Type::from((*ptr).type_) { Type::None => RectMut::None(ptr), Type::Bitmap => RectMut::Bitmap(BitmapMut::wrap(ptr)), Type::Text => RectMut::Text(TextMut::wrap(ptr)), Type::Ass => RectMut::Ass(AssMut::wrap(ptr)), } } pub unsafe fn as_ptr(&self) -> *const AVSubtitleRect { match *self { RectMut::None(ptr) => ptr as *const _, RectMut::Bitmap(ref b) => b.as_ptr(), RectMut::Text(ref t) => t.as_ptr(), RectMut::Ass(ref a) => a.as_ptr(), } } pub unsafe fn as_mut_ptr(&mut self) -> *mut AVSubtitleRect { match *self { RectMut::None(ptr) => ptr, RectMut::Bitmap(ref mut b) => b.as_mut_ptr(), RectMut::Text(ref mut t) => t.as_mut_ptr(), RectMut::Ass(ref mut a) => a.as_mut_ptr(), } } } impl<'a> RectMut<'a> { pub fn flags(&self) -> Flags { unsafe { Flags::from_bits_truncate(match *self { RectMut::None(ptr) => (*ptr).flags, RectMut::Bitmap(ref b) => (*b.as_ptr()).flags, RectMut::Text(ref t) => (*t.as_ptr()).flags, RectMut::Ass(ref a) => (*a.as_ptr()).flags, }) } } } pub struct BitmapMut<'a> { immutable: Bitmap<'a>, } impl<'a> BitmapMut<'a> { pub unsafe fn wrap(ptr: *mut AVSubtitleRect) -> Self { BitmapMut { immutable: Bitmap::wrap(ptr as *const _), } } pub unsafe fn as_mut_ptr(&mut self) -> *mut AVSubtitleRect { self.as_ptr() as *mut _ } } impl<'a> BitmapMut<'a> { pub fn set_x(&mut self, value: usize) { unsafe { (*self.as_mut_ptr()).x = value as c_int; } } pub fn set_y(&mut self, value: usize) { unsafe { (*self.as_mut_ptr()).y = value as c_int; } } pub fn set_width(&mut self, value: u32) { unsafe { (*self.as_mut_ptr()).w = value as c_int; } } pub fn set_height(&mut self, value: u32) { unsafe { (*self.as_mut_ptr()).h = value as c_int; } } pub fn set_colors(&mut self, 
value: usize) { unsafe { (*self.as_mut_ptr()).nb_colors = value as c_int; } } } impl<'a> Deref for BitmapMut<'a> { type Target = Bitmap<'a>; fn deref(&self) -> &Self::Target { &self.immutable } } pub struct TextMut<'a> { immutable: Text<'a>, } impl<'a> TextMut<'a> { pub unsafe fn wrap(ptr: *mut AVSubtitleRect) -> Self { TextMut { immutable: Text::wrap(ptr as *const _), } } pub unsafe fn as_mut_ptr(&mut self) -> *mut AVSubtitleRect { self.as_ptr() as *mut _ } } impl<'a> TextMut<'a> { pub fn set(&mut self, value: &str) { let value = CString::new(value).unwrap(); unsafe { (*self.as_mut_ptr()).text = av_strdup(value.as_ptr()); } } } impl<'a> Deref for TextMut<'a> { type Target = Text<'a>; fn deref(&self) -> &Self::Target { &self.immutable } } pub struct AssMut<'a> { immutable: Ass<'a>, } impl<'a> AssMut<'a> { pub unsafe fn wrap(ptr: *mut AVSubtitleRect) -> Self { AssMut { immutable: Ass::wrap(ptr), } } pub unsafe fn as_mut_ptr(&mut self) -> *mut AVSubtitleRect { self.as_ptr() as *mut _ } } impl<'a> AssMut<'a> { pub fn set(&mut self, value: &str) { let value = CString::new(value).unwrap(); unsafe { (*self.as_mut_ptr()).ass = av_strdup(value.as_ptr()); } } } impl<'a> Deref for AssMut<'a> { type Target = Ass<'a>; fn deref(&self) -> &Self::Target { &self.immutable } }
23.033333
66
0.51206
9be1374f85d99af66d4c126e0e9034b2f3793525
883
#![feature(const_raw_ptr_to_usize_cast, const_compare_raw_pointers, const_raw_ptr_deref)] fn main() {} // unconst and bad, will thus error in miri const X: bool = unsafe { &1 as *const i32 == &2 as *const i32 }; //~ ERROR any use of this // unconst and bad, will thus error in miri const X2: bool = unsafe { 42 as *const i32 == 43 as *const i32 }; //~ ERROR any use of this // unconst and fine const Y: usize = unsafe { 42usize as *const i32 as usize + 1 }; // unconst and bad, will thus error in miri const Y2: usize = unsafe { &1 as *const i32 as usize + 1 }; //~ ERROR any use of this // unconst and fine const Z: i32 = unsafe { *(&1 as *const i32) }; // unconst and bad, will thus error in miri const Z2: i32 = unsafe { *(42 as *const i32) }; //~ ERROR any use of this value will cause const Z3: i32 = unsafe { *(44 as *const i32) }; //~ ERROR any use of this value will cause
49.055556
91
0.672707
21def2b705c2414d70b4546e3ad4c7837da09abb
184
#[cfg(target_os = "macos")] mod macos; #[cfg(target_os = "macos")] pub use macos::Sudo; #[cfg(target_os = "windows")] mod windows; #[cfg(target_os="windows")] pub use windows::Sudo;
16.727273
29
0.663043
bfb6786fdd2449a22a7983619e8924b74b43bf0c
3,580
#pragma version(1) #pragma rs java_package_name(layers) rs_allocation In_Blob; rs_allocation Kernel_Blob; rs_allocation Bias_Blob; rs_allocation Out_Alloc; int c_i; int h_i; int w_i; int n_k; int c_k; int h_k; int w_k; int h_o; int w_o; int pad_x; int pad_y; int stride_x; int stride_y; int group; void root(float4* out, uint32_t x) { float4 sum1, sum2; sum1.x = sum1.y = sum1.z = sum1.w = 0; sum2.x = sum2.y = sum2.z = sum2.w = 0; int kernel_num = x % (n_k / 8); int g = (kernel_num * 8) / (n_k / group); int channel_offset = g * c_k / 4; int h_num = (x * 8) / (w_o * n_k); int w_num = (x % (w_o * n_k / 8)) / (n_k / 8); int c_k_new = c_k / 4; for (int h = 0 ; h < h_k ; h++){ for (int w = 0 ; w < w_k ; w++){ for (int i = 0 ; i < c_k_new ; i++) { int cur_x = h_num * stride_x + h; //should take care of the strides(Be careful) int cur_y = w_num * stride_y + w; //should take care of the strides(Be careful) if (cur_x < pad_x || cur_x >= (pad_x + h_i)) continue; else if (cur_y < pad_y || cur_y >= (pad_y + w_i)) continue; else { int frame_index = (cur_x - pad_x) * w_i * c_i / 4 + (cur_y - pad_y) * c_i / 4 + (i + channel_offset); float4 frame_value = rsGetElementAt_float4(In_Blob,frame_index); float4 kernel_value1, kernel_value2; int kernel_size = h_k * w_k * c_k_new; int kernel_offset = 2 * kernel_size; int kernel_index = kernel_num * 8 * kernel_size + h * w_k * c_k_new + w * c_k_new + i; kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index); kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size); sum1.x += dot(frame_value, kernel_value1); sum2.x += dot(frame_value, kernel_value2); kernel_index += kernel_offset; kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index); kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size); sum1.y += dot(frame_value, kernel_value1); sum2.y += dot(frame_value, kernel_value2); kernel_index += kernel_offset; kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index); 
kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size); sum1.z += dot(frame_value, kernel_value1); sum2.z += dot(frame_value, kernel_value2); kernel_index += kernel_offset; kernel_value1 = rsGetElementAt_float4(Kernel_Blob, kernel_index); kernel_value2 = rsGetElementAt_float4(Kernel_Blob, kernel_index + kernel_size); sum1.w += dot(frame_value, kernel_value1); sum2.w += dot(frame_value, kernel_value2); } } } } float4 bias1 = rsGetElementAt_float4(Bias_Blob,kernel_num * 2); float4 bias2 = rsGetElementAt_float4(Bias_Blob,kernel_num * 2 + 1); sum1.x += bias1.x; sum2.x += bias1.y; sum1.y += bias1.z; sum2.y += bias1.w; sum1.z += bias2.x; sum2.z += bias2.y; sum1.w += bias2.z; sum2.w += bias2.w; rsSetElementAt_float4(Out_Alloc, sum2, x); (*out) = sum1; }
33.457944
121
0.554469
c1d3a1fff312c37328686466e33eb147d062d0e8
2,451
use std::convert::TryInto; use crate::{ board::{CastleRight, Chess}, Color::*, {Piece, PieceType::*} }; use anyhow::{bail, Result}; pub fn parse(fen: &str) -> Result<Chess> { let mut chess = Chess { board: [None; 64], white_castle: CastleRight::new(), black_castle: CastleRight::new(), en_passant: None, turn: White, }; let tokens: Vec<&str> = fen.split_ascii_whitespace().collect(); if tokens.len() != 6 { bail!("Error parsing FEN: {}", fen) } for (i, rank) in tokens[0].split("/").enumerate() { let mut col = 0; for chr in rank.chars() { if let Some(num) = chr.to_digit(10) { col += num } else { let index = i * 8 + col as usize; chess.board[index] = Some(match chr { 'P' => {Piece::new(Pawn, index.into(), White)} 'N' => {Piece::new(Knight, index.into(), White)} 'B' => {Piece::new(Bishop, index.into(), White)} 'R' => {Piece::new(Rook, index.into(), White)} 'Q' => {Piece::new(Queen, index.into(), White)} 'K' => {Piece::new(King, index.into(), White)} 'p' => {Piece::new(Pawn, index.into(), Black)} 'n' => {Piece::new(Knight, index.into(), Black)} 'b' => {Piece::new(Bishop, index.into(), Black)} 'r' => {Piece::new(Rook, index.into(), Black)} 'q' => {Piece::new(Queen, index.into(), Black)} 'k' => {Piece::new(King, index.into(), Black)} _ => bail!("found illegal char: {}", chr), }); col += 1 } } } chess.turn = match tokens[1].as_bytes()[0] as char { 'w' => White, 'b' => Black, chr @ _ => bail!("found illegal char: {}", chr) }; if tokens[2].contains('K') { chess.white_castle.set_kingside_castle_on() } if tokens[2].contains('Q') { chess.white_castle.set_queenside_castle_on() } if tokens[2].contains('k') { chess.black_castle.set_kingside_castle_on() } if tokens[2].contains('q') { chess.black_castle.set_queenside_castle_on() } if let Ok(en_passant) = tokens[3].try_into() { chess.en_passant = Some(en_passant) } Ok(chess) }
31.831169
68
0.478172
fe5cc31e6df453f96ef45d01ebf38a722aac45e6
1,101
#![allow(warnings)] // NOT RECOMMENDED // #[derive(Debug)] // enum Color { // Red, // Green, // Blue, // } #[derive(Debug)] enum Color { Red=0xff0000, Green=0x00ff00, Blue=0x0000ff, } enum Message { Quit, Move { x: i32, y: i32 }, Write(String), ChangeColor(Color), } type Msg = Message; fn main() { let color = Color::Red; match color { Color::Red => println!("The color is Red!"), Color::Green => println!("The color is Green!"), Color::Blue => println!("The color is Blue!"), } println!("{:06x}", color as u32); // let msg = Message::ChangeColor(Color::Blue); // let msg = Msg::Write(String::from("Hello, world!")); use Message::*; let msg = Write(String::from("Hello, world!")); match msg { Message::Quit => println!("The Quit message was sent!"), Message::Move { x, y } => { println!("x = {} and y = {}", x, y); } Message::Write(text) => println!("text: {}", text), Message::ChangeColor(color) => println!("color to: {:?}", color), } }
23.425532
73
0.526794
deaaf52879545b035521fd18f4d282a329dc2fb2
1,242
use std::collections::VecDeque; // 55. Jump Game, Medium // https://leetcode.com/problems/jump-game/ impl Solution { pub fn can_jump(mut nums: Vec<i32>) -> bool { let n = nums.len() - 1; let mut pos: VecDeque<usize> = VecDeque::new(); pos.push_back(0); while !pos.is_empty() && pos.back().unwrap() < &n { let curr = *pos.back().unwrap(); let next = nums[curr] as usize; if next == 0 { pos.pop_back(); } else { nums[curr] -= 1; pos.push_back(curr + next); } } !pos.is_empty() && pos.back().unwrap() >= &n } } struct Solution {} #[cfg(test)] mod tests { use super::*; use crate::{vec_string, vec_vec_i32, vec_vec_string}; #[test] fn test_can_jump() { assert_eq!(Solution::can_jump(vec![2, 3, 1, 1, 4]), true); } #[test] fn test_can_jump2() { assert_eq!(Solution::can_jump(vec![3, 2, 1, 0, 4]), false); } #[test] fn test_can_jump3() { assert_eq!(Solution::can_jump(vec![3, 0, 8, 2, 0, 0, 1]), true); } #[test] fn test_can_jump4() { assert_eq!(Solution::can_jump(vec![0]), true); } }
23
72
0.504831
e63dd1a8a87d376b3f8ee5f0c3b0c37e77014a45
3,892
//! A crate for using the Kamar api to fetch notices //! //! # Sample usage //! ```rust //! use kni_rs::Portal; //! //! #[tokio::main] //! async fn main() { //! let portal = Portal::new("https://demo.school.kiwi/api/api.php"); //! let notices = portal.get_notices_today().await.unwrap(); //! println!("{:?}", notices) //! } //! ``` #[cfg(test)] mod tests; /// Structures used to represent the data from the Kamar API pub mod response; use hyper::{body::HttpBody as _, Client, Request, Body}; use hyper_tls::HttpsConnector; use chrono::prelude::*; use crate::response::{NoticesResults, NoticesError}; use serde_xml_rs::from_reader; /// Struct used to access the Kamar API. pub struct Portal { url: String, auth_key: String } impl Portal { /// Creates a new `Portal` struct with the given url and the key vtku /// # Params /// - `url`: The full url of the portal api, i.e. `https://demo.school.kiwi/api/api.php` pub fn new(url: &str) -> Self { Portal { url: url.into(), auth_key: "vtku".into() } } /// Creates a new `Portal` struct with the given url and key /// # Params /// - `url`: The full url of the portal api, i.e. 
`https://demo.school.kiwi/api/api.php` /// - `key`: The authentication key from the API, if the default does not work pub fn with_key(url: &str, key: &str) -> Self { Portal { url: url.into(), auth_key: key.into() } } /// Gets the notices for today from the specified portal pub async fn get_notices_today(&self) -> Result<NoticesResults, NoticesError> { let now = chrono::Utc::now(); self.get_notices(&now).await } /// Gets the notices for the specified date from the specified portal /// # Params /// - `date`: The date that you would like to get the notices for pub async fn get_notices(&self, date: &chrono::DateTime<Utc>) -> Result<NoticesResults, NoticesError> { // Create a new HTTPS hyper client let https = HttpsConnector::new(); let client = Client::builder().build::<_, hyper::Body>(https); // Format the date let formatted = date.format("%d/%m/%Y").to_string(); // Create kamar request let request = Request::builder() .method("POST") .header("Content-Type", "application/x-www-form-urlencoded") .header("User-Agent", "KAMAR/ CFNetwork/ Darwin/") .uri(self.url.clone()) .body(Body::from(format!("Command=GetNotices&Key={}&Date={}", self.auth_key, formatted))) .unwrap(); // Get the response let mut res = client.request(request).await.unwrap(); // Read the data let mut data: Vec<u8> = vec!(); while let Some(chunk) = res.body_mut().data().await { let mut b: Vec<u8> = chunk.unwrap().as_ref().iter().cloned().collect(); data.append(&mut b); } // Pass and return the response let passed: Result<NoticesResults, serde_xml_rs::Error> = from_reader(data.as_slice()); return match passed { Ok(result) => { Ok(result) }, Err(_) => { if let Ok(result) = from_reader::<&[u8], NoticesError>(data.as_slice()) { return Err(result); } Ok(from_reader(data.as_slice()).unwrap()) } } } } /// Turns date string into `DateTime<Utc>` /// # Params /// - `date`: The date you want to pass, expected format: %Y-%m-%d pub fn parse_date(date: &str) -> DateTime<Utc> { let adjusted_date = format!("{}T12:00:00", 
date); let naive = NaiveDateTime::parse_from_str(adjusted_date.as_str(), "%Y-%m-%dT%H:%M:%S").unwrap(); let date_time: DateTime<Local> = Local.from_local_datetime(&naive).unwrap(); date_time.with_timezone(&Utc) }
33.551724
107
0.584018
89a8934db5d2a7d4eb701811befcad7e2a820acc
465
//! test deriving on various struct types use keypath::Keyable; #[derive(Keyable)] struct PlainStruct; #[derive(Keyable)] struct EmptyTupleStruct(); #[derive(Keyable)] struct SingleTupleStruct(bool); #[derive(Keyable)] struct MultiTupleStruct(bool, i64, String); #[derive(Keyable)] struct EmptyFieldStruct {} #[derive(Keyable)] struct SingleFieldStruct { a: bool, } #[derive(Keyable)] struct MultiFieldStruct { a: bool, b: i64, c: String, }
15
43
0.707527
219af4d9791017e017175c2def048f38f2b5162e
1,658
pub struct IconSdCard { props: crate::Props, } impl yew::Component for IconSdCard { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M18 2h-7.17c-.53 0-1.04.21-1.42.59L4.6 7.42c-.37.37-.6.88-.6 1.4V20c0 1.1.9 2 2 2h12c1.1 0 2-.9 2-2V4c0-1.1-.9-2-2-2zm-7 6c-.55 0-1-.45-1-1V5c0-.55.45-1 1-1s1 .45 1 1v2c0 .55-.45 1-1 1zm3 0c-.55 0-1-.45-1-1V5c0-.55.45-1 1-1s1 .45 1 1v2c0 .55-.45 1-1 1zm3 0c-.55 0-1-.45-1-1V5c0-.55.45-1 1-1s1 .45 1 1v2c0 .55-.45 1-1 1z"/></svg> </svg> } } }
36.043478
471
0.57117
6414ce24255564faa13830e3e6a3192e38c80731
2,071
use crate::lookup_value; use serde::{Deserialize, Serialize}; use std::borrow::Cow; use std::fmt; /// A vaccination entry. /// /// It provides all the necessary detail regarding a vaccination record. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct Vaccination { /// Targeted Disease or agent #[serde(rename = "tg")] pub targeted_disease: Cow<'static, str>, /// Vaccine or prophylaxis #[serde(rename = "vp")] pub vaccine_prophylaxis: Cow<'static, str>, /// Vaccine medicinal product #[serde(rename = "mp")] pub medicinal_product: Cow<'static, str>, /// Marketing Authorization Holder - if no MAH present, then manufacturer #[serde(rename = "ma")] pub manufacturer: Cow<'static, str>, /// Dose Number #[serde(rename = "dn")] pub dose_number: usize, /// Total Series of Doses #[serde(rename = "sd")] pub total_doses: usize, /// ISO8601 complete date: Date of Vaccination #[serde(rename = "dt")] pub date: Cow<'static, str>, /// Country of Vaccination #[serde(rename = "co")] pub country: Cow<'static, str>, /// Certificate Issuer #[serde(rename = "is")] pub issuer: Cow<'static, str>, /// Unique Certificate Identifier: UVCI #[serde(rename = "ci")] pub id: Cow<'static, str>, } impl Vaccination { /// Updates all the ids in the vaccination entry with their descriptive counterparts using /// the official valueset. pub fn expand_values(&mut self) { lookup_value(&mut self.targeted_disease); lookup_value(&mut self.vaccine_prophylaxis); lookup_value(&mut self.medicinal_product); lookup_value(&mut self.manufacturer); lookup_value(&mut self.country); } } impl fmt::Display for Vaccination { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "Vaccinated against {} with {} of {} doses on {}. Issued by {}", self.targeted_disease, self.dose_number, self.total_doses, self.date, self.issuer ) } }
32.359375
94
0.63351
90b2da11e0424dab5464aa26ed8473ad5028079d
11,894
use std::borrow::Cow;
use std::collections::HashMap;
use std::io;
use std::io::prelude::*;

use crate::lang::surface::{
    Constant, ItemData, Module, Pattern, PatternData, StructType, Term, TermData,
};
use crate::pass::surface_to_pretty::Prec;

/// Rendering context for the surface-language → HTML documentation pass.
/// Tracks anchor ids so that name references can link back to the item or
/// field that defines them.
pub struct Context {
    // Top-level items, keyed by name.
    items: HashMap<String, ItemMeta>,
    // Innermost-last stack of local bindings (struct fields during a
    // struct's rendering); searched before `items`.
    locals: Vec<(String, LocalMeta)>,
}

// HTML anchor id of a top-level item.
struct ItemMeta {
    id: String,
}

// HTML anchor id of a local binding.
struct LocalMeta {
    id: String,
}

impl Context {
    /// Creates an empty rendering context.
    pub fn new() -> Context {
        Context {
            items: HashMap::new(),
            locals: Vec::new(),
        }
    }

    /// Resolves `name` to its anchor id, innermost locals first.
    fn get_id(&self, name: &str) -> Option<&str> {
        if let Some((_, meta)) = self.locals.iter().rev().find(|(n, _)| n == name) {
            return Some(meta.id.as_str());
        }
        if let Some(meta) = self.items.get(name) {
            return Some(meta.id.as_str());
        }
        // TODO: globals
        None
    }

    /// Renders a whole module as a standalone HTML page into `writer`.
    /// Registers each item's anchor as it goes so later items can link to
    /// earlier ones; clears the item table on completion.
    #[allow(clippy::write_literal)]
    pub fn from_module(&mut self, writer: &mut impl Write, module: &Module) -> io::Result<()> {
        write!(
            writer,
            r##"<!-- This file is automatically @generated by {pkg_name} {pkg_version} It is not intended for manual editing. --> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta http-equiv="X-UA-Compatible" content="ie=edge"> <title>{module_name}</title> <style> {minireset} {style} </style> </head> <body> <section class="module"> "##,
            pkg_name = env!("CARGO_PKG_NAME"),
            pkg_version = env!("CARGO_PKG_VERSION"),
            module_name = "", // TODO: module name
            // CSS is baked into the page so the output is a single file.
            minireset = include_str!("./surface_to_doc/minireset.min.css").trim(),
            style = include_str!("./surface_to_doc/style.css").trim(),
        )?;

        if !module.doc.is_empty() {
            writeln!(writer, r##" <section class="doc">"##)?;
            from_doc_lines(writer, " ", &module.doc)?;
            writeln!(writer, r##" </section>"##)?;
        }

        writeln!(writer, r##" <dl class="items">"##)?;
        for item in &module.items {
            let (name, item) = match &item.data {
                ItemData::Constant(constant) => self.from_constant(writer, constant)?,
                ItemData::StructType(struct_type) => self.from_struct_type(writer, struct_type)?,
            };
            self.items.insert(name, item);
        }
        write!(
            writer,
            r##" </dl> </section> </body> </html> "##
        )?;

        self.items.clear();
        Ok(())
    }

    /// Renders a constant item as a `<dt>`/`<dd>` pair; returns the name and
    /// anchor metadata to register in `items`.
    fn from_constant(
        &self,
        writer: &mut impl Write,
        constant: &Constant,
    ) -> io::Result<(String, ItemMeta)> {
        let id = format!("items[{}]", constant.name.data);
        writeln!(
            writer,
            r##" <dt id="{id}" class="item constant">"##,
            id = id,
        )?;
        // The `const` keyword is only shown when an annotation is present.
        match &constant.type_ {
            None => writeln!(
                writer,
                r##" <a href="#{id}">{name}</a>"##,
                id = id,
                name = constant.name.data,
            )?,
            Some(r#type) => writeln!(
                writer,
                r##" const <a href="#{id}">{name}</a> : {type_}"##,
                id = id,
                name = constant.name.data,
                type_ = self.from_term_prec(r#type, Prec::Term),
            )?,
        }
        write!(
            writer,
            r##" </dt> <dd class="item constant"> "##
        )?;
        if !constant.doc.is_empty() {
            writeln!(writer, r##" <section class="doc">"##)?;
            from_doc_lines(writer, " ", &constant.doc)?;
            writeln!(writer, r##" </section>"##)?;
        }
        let term = self.from_term_prec(&constant.term, Prec::Term);
        write!(
            writer,
            r##" <section class="term"> {} </section> </dd> "##,
            term
        )?;
        Ok((constant.name.data.clone(), ItemMeta { id }))
    }

    /// Renders a struct-type item and its fields; fields are pushed onto
    /// `locals` while rendering (so later fields may reference earlier ones)
    /// and popped again before returning.
    fn from_struct_type(
        &mut self,
        writer: &mut impl Write,
        struct_type: &StructType,
    ) -> io::Result<(String, ItemMeta)> {
        let id = format!("items[{}]", struct_type.name.data);
        writeln!(
            writer,
            r##" <dt id="{id}" class="item struct">"##,
            id = id
        )?;
        // TODO: params
        match &struct_type.type_ {
            None => writeln!(
                writer,
                r##" struct <a href="#{id}">{name}</a>"##,
                id = id,
                name = struct_type.name.data,
            )?,
            Some(r#type) => writeln!(
                writer,
                r##" struct <a href="#{id}">{name}</a> : {type_}"##,
                id = id,
                name = struct_type.name.data,
                type_ = self.from_term_prec(&r#type, Prec::Term),
            )?,
        }
        writeln!(writer, r##" </dt>"##)?;
        writeln!(writer, r##" <dd class="item struct">"##)?;
        if !struct_type.doc.is_empty() {
            writeln!(writer, r##" <section class="doc">"##)?;
            from_doc_lines(writer, " ", &struct_type.doc)?;
            writeln!(writer, r##" </section>"##)?;
        }
        if !struct_type.fields.is_empty() {
            writeln!(writer, r##" <dl class="fields">"##)?;
            for field in &struct_type.fields {
                let field_id = format!("{}.fields[{}]", id, field.label.data);
                let r#type = self.from_term_prec(&field.type_, Prec::Term);
                write!(
                    writer,
                    r##" <dt id="{id}" class="field"> <a href="#{id}">{name}</a> : {type_} </dt> <dd class="field"> <section class="doc"> "##,
                    id = field_id,
                    name = field.label.data,
                    type_ = r#type,
                )?;
                from_doc_lines(writer, " ", &field.doc)?;
                write!(
                    writer,
                    r##" </section> </dd> "##
                )?;
                self.locals
                    .push((field.label.data.clone(), LocalMeta { id: field_id }));
            }
            writeln!(writer, r##" </dl>"##)?;
        }
        // Pop exactly the bindings this struct pushed; saturating_sub guards
        // against underflow if the stack was somehow shorter.
        self.locals
            .truncate(self.locals.len().saturating_sub(struct_type.fields.len()));
        writeln!(writer, r##" </dd>"##)?;
        Ok((struct_type.name.data.clone(), ItemMeta { id }))
    }

    /// Pretty-prints a term as an HTML fragment, inserting parentheses
    /// according to `prec` (the precedence of the surrounding context).
    fn from_term_prec<'term>(&self, term: &'term Term, prec: Prec) -> Cow<'term, str> {
        use itertools::Itertools;

        match &term.data {
            // Names become links to their defining anchor (empty href if
            // unresolved).
            TermData::Name(name) => format!(
                r##"<var><a href="#{id}">{name}</a></var>"##,
                id = self.get_id(name).unwrap_or(""),
                name = name
            )
            .into(),

            TermData::KindType => "Kind".into(),
            TermData::TypeType => "Type".into(),

            TermData::Ann(term, r#type) => format!(
                "{lparen}{term} : {type}{rparen}",
                lparen = if prec > Prec::Term { "(" } else { "" },
                rparen = if prec > Prec::Term { ")" } else { "" },
                term = self.from_term_prec(term, Prec::Arrow),
                type = self.from_term_prec(r#type, Prec::Term),
            )
            .into(),

            TermData::FunctionType(param_type, body_type) => format!(
                "{lparen}{param_type} &rarr; {body_type}{rparen}",
                lparen = if prec > Prec::Arrow { "(" } else { "" },
                rparen = if prec > Prec::Arrow { ")" } else { "" },
                param_type = self.from_term_prec(param_type, Prec::App),
                body_type = self.from_term_prec(body_type, Prec::Arrow),
            )
            .into(),

            TermData::FunctionElim(head, arguments) => format!(
                // TODO: multiline formatting!
                "{lparen}{head} {arguments}{rparen}",
                lparen = if prec > Prec::App { "(" } else { "" },
                rparen = if prec > Prec::App { ")" } else { "" },
                head = self.from_term_prec(head, Prec::Atomic),
                arguments = arguments
                    .iter()
                    .map(|argument| self.from_term_prec(argument, Prec::Atomic))
                    .format(" "),
            )
            .into(),

            TermData::StructTerm(field_definitions) => format!(
                // TODO: multiline formatting!
                "struct {{ {field_definitions} }}",
                field_definitions = field_definitions
                    .iter()
                    .map(|field_definition| format!(
                        "{} = {}",
                        &field_definition.label.data,
                        self.from_term_prec(&field_definition.term, Prec::Term)
                    ))
                    .format(", "),
            )
            .into(),

            TermData::StructElim(head, label) => format!(
                "{head}.{label}",
                head = self.from_term_prec(head, Prec::Atomic),
                label = &label.data,
            )
            .into(),

            TermData::SequenceTerm(elem_terms) => format!(
                // TODO: multiline formatting!
                "[{elems}]",
                elems = elem_terms
                    .iter()
                    .map(|elem_term| self.from_term_prec(elem_term, Prec::Term))
                    .format(", "),
            )
            .into(),

            TermData::NumberLiteral(literal) => format!("{}", literal).into(),

            TermData::If(head, if_true, if_false) => format!(
                // TODO: multiline formatting!
                "if {head} {{ {if_true} }} else {{ {if_false} }}",
                head = self.from_term_prec(head, Prec::Term),
                if_true = self.from_term_prec(if_true, Prec::Term),
                if_false = self.from_term_prec(if_false, Prec::Term),
            )
            .into(),

            TermData::Match(head, branches) => format!(
                // TODO: multiline formatting!
                "match {head} {{ {branches} }}",
                head = self.from_term_prec(head, Prec::Term),
                branches = branches
                    .iter()
                    .map(|(pattern, term)| format!(
                        "{pattern} &rArr; {term}",
                        pattern = self.from_pattern(pattern),
                        term = self.from_term_prec(term, Prec::Term),
                    ))
                    .format(", "),
            )
            .into(),

            TermData::FormatType => "Format".into(),
            TermData::Repr => "repr".into(),

            TermData::Error => r##"<strong>(invalid data description)</strong>"##.into(),
        }
    }

    /// Pretty-prints a pattern as an HTML fragment.
    fn from_pattern<'term>(&self, pattern: &'term Pattern) -> Cow<'term, str> {
        match &pattern.data {
            PatternData::Name(name) => format!(r##"<a href="#">{}</a>"##, name).into(), // TODO: add local binding
            PatternData::NumberLiteral(literal) => format!("{}", literal).into(),
        }
    }
}

/// Writes doc lines with `prefix` prepended, stripping one leading space
/// (the space after `///`) from each line.
fn from_doc_lines(writer: &mut impl Write, prefix: &str, doc_lines: &[String]) -> io::Result<()> {
    // TODO: parse markdown
    for doc_line in doc_lines.iter() {
        let doc_line = match doc_line {
            line if line.starts_with(' ') => &line[" ".len()..],
            line => &line[..],
        };
        writeln!(writer, "{}{}", prefix, doc_line)?;
    }
    Ok(())
}
32.408719
114
0.445855
4ad729f1bc60a114b34bcb591d9a33d321582223
680
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:issue-5521.rs // pretty-expanded FIXME #23616 extern crate issue_5521 as foo; fn bar(a: foo::map) { if false { panic!(); } else { let _b = &(*a)[&2]; } } fn main() {}
25.185185
68
0.673529
560ea09b057f50ad7ae9887e16ff2119218a8de2
85
// Resolver module wiring: each submodule provides one price-resolver
// implementation, re-exported here so callers can name them without
// reaching into the submodules.
mod example;
mod v0;

pub use example::ExampleResolver;
pub use v0::V0PriceResolver;
14.166667
33
0.776471
bf4c9de7416208b0780e0337e896e80667f1084a
198
// Address-book module wiring: `backend` holds the storage abstraction,
// `types` the data model plus transport defaults. Everything callers
// need is re-exported at this level.
mod backend;
mod types;

pub use self::backend::Backend;
pub use self::types::{
    AddressBook, AddressBookBackend, Contact, DEFAULT_GRINBOX_PORT, DEFAULT_MWCMQS_PORT,
    DEFAULT_MWCMQS_DOMAIN,
};
24.75
69
0.772727
f740259649aa0317893564986a7ac9c1d41d9cd4
719
use serde_derive::{Deserialize, Serialize};

use config::Config;

/// Minimal settings struct used to exercise how `#[serde(default)]`
/// interacts with the `config` crate's (de)serialization.
#[derive(Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct Settings {
    pub db_host: String,
}

impl Default for Settings {
    fn default() -> Self {
        Settings {
            db_host: "default".to_owned(),
        }
    }
}

/// Deserializing an empty `Config` must fall back to the struct defaults.
#[test]
fn set_defaults() {
    let empty = Config::default();
    let settings: Settings = empty.try_deserialize().expect("Deserialization failed");

    assert_eq!(settings.db_host, "default");
}

/// Round-trip: a `Config` built from the default `Settings` must
/// deserialize back to the same values.
#[test]
fn try_from_defaults() {
    let source = Config::try_from(&Settings::default()).expect("Serialization failed");
    let settings: Settings = source.try_deserialize().expect("Deserialization failed");

    assert_eq!(settings.db_host, "default");
}
21.787879
82
0.64395
4a027ebf5f26137b1e8081258502c4923fd90599
4,348
use combine::{
    Parser,
    Stream,
    error::ParseError,
    parser,
    token,
    look_ahead,
    optional,
    choice
};

use crate::parser::*;
use crate::glyph_class::*;

use super::value_record::*;
use super::glyph_class::*;
use super::lookup::*;
use super::util::*;

/// One glyph class in a pattern, optionally carrying a value record
/// and/or a contextual lookup reference.
#[derive(Debug)]
pub struct GlyphPatternItem {
    pub class: GlyphClass,
    pub value_record: Option<ValueRecord>,
    pub lookup: Option<Lookup>
}

/// A FEA glyph pattern split into backtrack (`prefix`), marked run
/// (`glyphs`), and lookahead (`suffix`).
#[derive(Debug, Default)]
pub struct GlyphPattern {
    pub prefix: Vec<GlyphPatternItem>,
    pub glyphs: Vec<GlyphPatternItem>,
    pub suffix: Vec<GlyphPatternItem>,
    // True once any glyph was marked with `'`.
    pub has_marks: bool,
    // NOTE(review): this counter is initialised/reset but never
    // incremented anywhere in this parser, even when a value record is
    // parsed — confirm whether callers rely on it before fixing.
    pub num_value_records: usize,
    pub num_lookups: usize
}

impl GlyphPattern {
    fn new() -> Self {
        GlyphPattern {
            prefix: Vec::new(),
            glyphs: Vec::new(),
            suffix: Vec::new(),
            has_marks: false,
            num_value_records: 0,
            num_lookups: 0
        }
    }
}

// eats trailing whitespace because otherwise we'd have to lookahead *all* of the whitespace
pub(crate) fn glyph_pattern<Input>() -> impl Parser<FeaRsStream<Input>, Output = GlyphPattern>
    where Input: Stream<Token = u8, Position = SourcePosition>,
          Input::Error: ParseError<Input::Token, Input::Range, Input::Position>
{
    // Result of parsing one pattern element: an unmarked class, a class
    // marked with `'` (possibly with an inline lookup), or a terminator.
    #[derive(Debug)]
    enum Next {
        GlyphClass(GlyphClass, Option<ValueRecord>),
        MarkedGlyphClass(GlyphClass, Option<ValueRecord>, Option<Lookup>),
        End
    }

    parser(|input| {
        // Repeatedly parse elements until a terminator ("by"/"from"/';'/',')
        // is *peeked* — terminators are not consumed, the caller handles them.
        let mut parse_iter = optional_whitespace()
            .with(choice((
                peek(literal_ignore_case("by"))
                    .map(|_| Next::End),
                peek(literal_ignore_case("from"))
                    .map(|_| Next::End),
                look_ahead(token(b';'))
                    .map(|_| Next::End),
                look_ahead(token(b','))
                    .map(|_| Next::End),
                glyph_class_or_glyph()
                    .skip(optional_whitespace())
                    // An optional `'` mark, then an optional `lookup …`.
                    .and(optional(token(b'\''))
                        .skip(optional_whitespace())
                        .and(optional(
                            peek(literal_ignore_case("lookup"))
                                .with(lookup()))))
                    .skip(optional_whitespace())
                    .and(optional(value_record()))
                    .map(|((gc, (mark, lookup)), vr)| match mark {
                        Some(_) => Next::MarkedGlyphClass(gc, vr, lookup),
                        None => Next::GlyphClass(gc, vr)
                    })
            )))
            .iter(input);

        let mut pattern = GlyphPattern::new();

        for next in &mut parse_iter {
            match next
            {
                Next::GlyphClass(gc, vr) => {
                    let item = GlyphPatternItem {
                        class: gc,
                        value_record: vr,
                        lookup: None
                    };

                    // Unmarked classes before the marked run are backtrack,
                    // after it they are lookahead.
                    if pattern.glyphs.len() > 0 {
                        pattern.suffix.push(item);
                    } else {
                        pattern.prefix.push(item);
                    }
                },

                Next::MarkedGlyphClass(gc, vr, lookup) => {
                    // FIXME: raise error if a second run of marked characters occurs.
                    // i.e. if we're marked but there's already characters in suffix

                    pattern.has_marks = true;

                    if lookup.is_some() {
                        pattern.num_lookups += 1;
                    }

                    let item = GlyphPatternItem {
                        class: gc,
                        value_record: vr,
                        lookup,
                    };

                    pattern.glyphs.push(item);
                },

                Next::End => break,
            };
        }

        // A pattern with no marks ended up entirely in `prefix`: reinterpret
        // it as the glyph run itself (non-contextual pattern).
        if pattern.glyphs.is_empty() && pattern.suffix.is_empty() {
            let pattern = GlyphPattern {
                prefix: Vec::new(),
                glyphs: pattern.prefix,
                suffix: Vec::new(),
                ..pattern
            };

            parse_iter.into_result(pattern)
        } else {
            parse_iter.into_result(pattern)
        }
    })
}
28.233766
94
0.466421
69941d088b8ce00c9986615b75240807e7167380
43,957
// Copyright 2020-2021, The Tremor Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use super::super::pb; use super::common; use super::id; use super::resource; use crate::errors::Result; use tremor_otelapis::opentelemetry::proto::{ collector::metrics::v1::ExportMetricsServiceRequest, metrics::v1::{ double_summary_data_point::ValueAtQuantile, metric::{self, Data}, DoubleDataPoint, DoubleExemplar, DoubleGauge, DoubleHistogram, DoubleHistogramDataPoint, DoubleSum, DoubleSummary, DoubleSummaryDataPoint, InstrumentationLibraryMetrics, IntDataPoint, IntExemplar, IntGauge, IntHistogram, IntHistogramDataPoint, IntSum, Metric, ResourceMetrics, }, }; use tremor_value::literal; use tremor_value::StaticNode; use tremor_value::Value; use value_trait::ValueAccess; pub(crate) fn int_exemplars_to_json<'event>(data: Vec<IntExemplar>) -> Value<'event> { let mut json: Vec<Value> = Vec::new(); for exemplar in data { json.push(literal!({ "span_id": id::hex_span_id_to_json(&exemplar.span_id), "trace_id": id::hex_trace_id_to_json(&exemplar.trace_id), "filtered_labels": common::string_key_value_to_json(exemplar.filtered_labels), "time_unix_nano": exemplar.time_unix_nano, "value": exemplar.value })) } Value::Array(json) } pub(crate) fn int_exemplars_to_pb(json: Option<&Value<'_>>) -> Result<Vec<IntExemplar>> { if let Some(Value::Array(json)) = json { let mut pb = Vec::new(); for data in json { let filtered_labels = common::string_key_value_to_pb(data.get("filtered_labels"))?; let span_id 
= id::hex_span_id_to_pb(data.get("span_id"))?; let trace_id = id::hex_trace_id_to_pb(data.get("trace_id"))?; let time_unix_nano = pb::maybe_int_to_pbu64(data.get("time_unix_nano"))?; let value = pb::maybe_int_to_pbi64(data.get("value"))?; pb.push(IntExemplar { filtered_labels, time_unix_nano, value, span_id, trace_id, }); } return Ok(pb); } Err("Unable to map json value to Exemplars pb".into()) } pub(crate) fn double_exemplars_to_json<'event>(data: Vec<DoubleExemplar>) -> Value<'event> { let mut json: Vec<Value> = Vec::new(); for exemplar in data { json.push(literal!({ "span_id": id::hex_span_id_to_json(&exemplar.span_id), "trace_id": id::hex_trace_id_to_json(&exemplar.trace_id), "filtered_labels": common::string_key_value_to_json(exemplar.filtered_labels), "time_unix_nano": exemplar.time_unix_nano, "value": exemplar.value })) } Value::Array(json) } pub(crate) fn double_exemplars_to_pb(json: Option<&Value<'_>>) -> Result<Vec<DoubleExemplar>> { if let Some(Value::Array(json)) = json { let mut pb = Vec::with_capacity(json.len()); for data in json { let filtered_labels = common::string_key_value_to_pb(data.get("filtered_labels"))?; let span_id = id::hex_span_id_to_pb(data.get("span_id"))?; let trace_id = id::hex_trace_id_to_pb(data.get("trace_id"))?; let time_unix_nano = pb::maybe_int_to_pbu64(data.get("time_unix_nano"))?; let value = pb::maybe_double_to_pb(data.get("value"))?; pb.push(DoubleExemplar { filtered_labels, time_unix_nano, value, span_id, trace_id, }); } return Ok(pb); } Err("Unable to map json value to Exemplars pb".into()) } pub(crate) fn quantile_values_to_json<'event>(data: Vec<ValueAtQuantile>) -> Value<'event> { let mut json: Vec<Value> = Vec::with_capacity(data.len()); for data in data { json.push(literal!({ "value": data.value, "quantile": data.quantile, })) } Value::Array(json) } pub(crate) fn quantile_values_to_pb(json: Option<&Value<'_>>) -> Result<Vec<ValueAtQuantile>> { if let Some(Value::Array(json)) = json { let mut arr = 
Vec::with_capacity(json.len()); for data in json { let value = pb::maybe_double_to_pb(data.get("value"))?; let quantile = pb::maybe_double_to_pb(data.get("quantile"))?; arr.push(ValueAtQuantile { quantile, value }); } return Ok(arr); } Err("Unable to map json value to ValueAtQuantiles".into()) } pub(crate) fn int_data_points_to_json<'event>(pb: Vec<IntDataPoint>) -> Value<'event> { let mut json = Vec::with_capacity(pb.len()); for data in pb { let labels = common::string_key_value_to_json(data.labels); let exemplars = int_exemplars_to_json(data.exemplars); let v: Value = literal!({ "value": data.value, "start_time_unix_nano": data.start_time_unix_nano, "time_unix_nano": data.time_unix_nano, "labels": labels, "exemplars": exemplars, }); json.push(v); } Value::Array(json) } pub(crate) fn int_data_points_to_pb(json: Option<&Value<'_>>) -> Result<Vec<IntDataPoint>> { if let Some(Value::Array(data)) = json { let mut pb = Vec::with_capacity(data.len()); for item in data { let labels = common::string_key_value_to_pb(item.get("labels"))?; let exemplars = int_exemplars_to_pb(item.get("exemplars"))?; let time_unix_nano = pb::maybe_int_to_pbu64(item.get("time_unix_nano"))?; let start_time_unix_nano = pb::maybe_int_to_pbu64(item.get("start_time_unix_nano"))?; let value = pb::maybe_int_to_pbi64(item.get("value"))?; pb.push(IntDataPoint { labels, start_time_unix_nano, time_unix_nano, value, exemplars, }) } return Ok(pb); }; Err("Unable to map json value to otel pb IntDataPoint list".into()) } pub(crate) fn double_data_points_to_json<'event>(pb: Vec<DoubleDataPoint>) -> Value<'event> { let mut json = Vec::with_capacity(pb.len()); for data in pb { let labels = common::string_key_value_to_json(data.labels); let exemplars = double_exemplars_to_json(data.exemplars); let v: Value = literal!({ "value": data.value, "start_time_unix_nano": data.start_time_unix_nano, "time_unix_nano": data.time_unix_nano, "labels": labels, "exemplars": exemplars, }); json.push(v); } Value::Array(json) } 
/// Maps a tremor value array back to pb `DoubleDataPoint`s.
///
/// # Errors
/// If `json` is not an array of well-formed data-point records.
pub(crate) fn double_data_points_to_pb(json: Option<&Value<'_>>) -> Result<Vec<DoubleDataPoint>> {
    if let Some(Value::Array(data)) = json {
        let mut pb = Vec::with_capacity(data.len());
        for item in data {
            let labels = common::string_key_value_to_pb(item.get("labels"))?;
            let exemplars = double_exemplars_to_pb(item.get("exemplars"))?;
            let time_unix_nano = pb::maybe_int_to_pbu64(item.get("time_unix_nano"))?;
            let start_time_unix_nano = pb::maybe_int_to_pbu64(item.get("start_time_unix_nano"))?;
            let value = pb::maybe_double_to_pb(item.get("value"))?;
            pb.push(DoubleDataPoint {
                labels,
                start_time_unix_nano,
                time_unix_nano,
                value,
                exemplars,
            })
        }
        return Ok(pb);
    };
    Err("Unable to map json value to otel pb DoubleDataPoint list".into())
}

/// Maps pb `DoubleHistogramDataPoint`s into a tremor value array.
pub(crate) fn double_histo_data_points_to_json<'event>(
    pb: Vec<DoubleHistogramDataPoint>,
) -> Value<'event> {
    let mut json = Vec::with_capacity(pb.len());
    for points in pb {
        let labels = common::string_key_value_to_json(points.labels);
        let exemplars = double_exemplars_to_json(points.exemplars);
        let v: Value = literal!({
            "start_time_unix_nano": points.start_time_unix_nano,
            "time_unix_nano": points.time_unix_nano,
            "labels": labels,
            "exemplars": exemplars,
            "sum": points.sum,
            "count": points.count,
            "explicit_bounds": points.explicit_bounds,
            "bucket_counts": points.bucket_counts,
        });
        json.push(v);
    }
    Value::Array(json)
}

/// Maps a tremor value array back to pb `DoubleHistogramDataPoint`s.
///
/// # Errors
/// If `json` is not an array of well-formed histogram data-point records.
pub(crate) fn double_histo_data_points_to_pb(
    json: Option<&Value<'_>>,
) -> Result<Vec<DoubleHistogramDataPoint>> {
    if let Some(Value::Array(data)) = json {
        let mut pb = Vec::with_capacity(data.len());
        for item in data {
            let labels = common::string_key_value_to_pb(item.get("labels"))?;
            let time_unix_nano = pb::maybe_int_to_pbu64(item.get("time_unix_nano"))?;
            let start_time_unix_nano = pb::maybe_int_to_pbu64(item.get("start_time_unix_nano"))?;
            let sum = pb::maybe_double_to_pb(item.get("sum"))?;
            let count = pb::maybe_int_to_pbu64(item.get("count"))?;
            let exemplars = double_exemplars_to_pb(item.get("exemplars"))?;
            let explicit_bounds = pb::f64_repeated_to_pb(item.get("explicit_bounds"))?;
            // FIX: previously read the "explicit_bounds" key again
            // (copy/paste), silently dropping the real bucket counts on
            // json -> pb round-trips.
            let bucket_counts = pb::u64_repeated_to_pb(item.get("bucket_counts"))?;
            pb.push(DoubleHistogramDataPoint {
                labels,
                start_time_unix_nano,
                time_unix_nano,
                count,
                sum,
                bucket_counts,
                explicit_bounds,
                exemplars,
            })
        }
        return Ok(pb);
    };
    Err("Unable to map json value to otel pb DoubleHistogramDataPoint list".into())
}

/// Maps pb `DoubleSummaryDataPoint`s into a tremor value array.
pub(crate) fn double_summary_data_points_to_json<'event>(
    pb: Vec<DoubleSummaryDataPoint>,
) -> Value<'event> {
    let mut json = Vec::with_capacity(pb.len());
    for points in pb {
        let labels = common::string_key_value_to_json(points.labels);
        let quantile_values = quantile_values_to_json(points.quantile_values);
        let v: Value = literal!({
            "start_time_unix_nano": points.start_time_unix_nano,
            "time_unix_nano": points.time_unix_nano,
            "labels": labels,
            "quantile_values": quantile_values,
            "sum": points.sum,
            "count": points.count,
        });
        json.push(v);
    }
    Value::Array(json)
}

/// Maps a tremor value array back to pb `DoubleSummaryDataPoint`s.
///
/// # Errors
/// If `json` is not an array of well-formed summary data-point records.
pub(crate) fn double_summary_data_points_to_pb(
    json: Option<&Value<'_>>,
) -> Result<Vec<DoubleSummaryDataPoint>> {
    if let Some(Value::Array(data)) = json {
        let mut pb = Vec::with_capacity(data.len());
        for item in data {
            let labels = common::string_key_value_to_pb(item.get("labels"))?;
            let time_unix_nano = pb::maybe_int_to_pbu64(item.get("time_unix_nano"))?;
            let start_time_unix_nano = pb::maybe_int_to_pbu64(item.get("start_time_unix_nano"))?;
            let sum = pb::maybe_double_to_pb(item.get("sum"))?;
            let count = pb::maybe_int_to_pbu64(item.get("count"))?;
            let quantile_values = quantile_values_to_pb(item.get("quantile_values"))?;
            pb.push(DoubleSummaryDataPoint {
                labels,
                start_time_unix_nano,
                time_unix_nano,
                count,
                sum,
                quantile_values,
            })
        }
        return Ok(pb);
    };
    Err("Unable to map json value to otel pb DoubleSummaryDataPoint list".into())
}

/// Maps pb `IntHistogramDataPoint`s into a tremor value array.
pub(crate) fn int_histo_data_points_to_json<'event>(
    pb: Vec<IntHistogramDataPoint>,
) -> Value<'event> {
    let mut json = Vec::with_capacity(pb.len());
    for points in pb {
        let labels = common::string_key_value_to_json(points.labels);
        let exemplars = int_exemplars_to_json(points.exemplars);
        let v: Value = literal!({
            "start_time_unix_nano": points.start_time_unix_nano,
            "time_unix_nano": points.time_unix_nano,
            "labels": labels,
            "exemplars": exemplars,
            "sum": points.sum,
            "count": points.count,
            "explicit_bounds": points.explicit_bounds,
            "bucket_counts": points.bucket_counts,
        });
        json.push(v);
    }
    Value::Array(json)
}

/// Maps a tremor value array back to pb `IntHistogramDataPoint`s.
///
/// # Errors
/// If `json` is not an array of well-formed histogram data-point records.
pub(crate) fn int_histo_data_points_to_pb(
    json: Option<&Value<'_>>,
) -> Result<Vec<IntHistogramDataPoint>> {
    if let Some(Value::Array(data)) = json {
        let mut pb = Vec::with_capacity(data.len());
        for item in data {
            let labels = common::string_key_value_to_pb(item.get("labels"))?;
            let time_unix_nano = pb::maybe_int_to_pbu64(item.get("time_unix_nano"))?;
            let start_time_unix_nano = pb::maybe_int_to_pbu64(item.get("start_time_unix_nano"))?;
            let sum = pb::maybe_int_to_pbi64(item.get("sum"))?;
            let count = pb::maybe_int_to_pbu64(item.get("count"))?;
            let exemplars = int_exemplars_to_pb(item.get("exemplars"))?;
            let explicit_bounds = pb::f64_repeated_to_pb(item.get("explicit_bounds"))?;
            // FIX: same copy/paste defect as the double variant — was
            // reading "explicit_bounds" for the bucket counts.
            let bucket_counts = pb::u64_repeated_to_pb(item.get("bucket_counts"))?;
            pb.push(IntHistogramDataPoint {
                labels,
                start_time_unix_nano,
                time_unix_nano,
                count,
                sum,
                bucket_counts,
                explicit_bounds,
                exemplars,
            })
        }
        return Ok(pb);
    };
    Err("Unable to map json value to otel pb IntHistogramDataPoint list".into())
}

/// Int sums share the plain int data-point representation.
pub(crate) fn int_sum_data_points_to_json<'event>(pb: Vec<IntDataPoint>) -> Value<'event> {
    int_data_points_to_json(pb)
}

/// Maps pb metric data to its tremor value form; `None` maps to `null`.
pub(crate) fn metrics_data_to_json<'event>(pb: Option<metric::Data>) -> Value<'event> {
    if let Some(pb) = pb {
        let json: Value = match pb {
            Data::IntGauge(data) => literal!({
                "int-gauge": {
                "data_points": int_data_points_to_json(data.data_points)
            }}),
            Data::DoubleSum(data) => literal!({
                "double-sum": {
                "is_monotonic": data.is_monotonic,
                "data_points": double_data_points_to_json(data.data_points),
                "aggregation_temporality":
data.aggregation_temporality, }}), Data::DoubleGauge(data) => literal!({ "double-gauge": { "data_points": double_data_points_to_json(data.data_points), }}), Data::DoubleHistogram(data) => literal!({ "double-histogram": { "data_points": double_histo_data_points_to_json(data.data_points), "aggregation_temporality": data.aggregation_temporality, }}), Data::DoubleSummary(data) => literal!({ "double-summary": { "data_points": double_summary_data_points_to_json(data.data_points), }}), Data::IntHistogram(data) => literal!({ "int-histogram": { "data_points": int_histo_data_points_to_json(data.data_points), "aggregation_temporality": data.aggregation_temporality, }}), Data::IntSum(data) => literal!({ "int-sum": { "is_monotonic": data.is_monotonic, "data_points": int_sum_data_points_to_json(data.data_points), "aggregation_temporality": data.aggregation_temporality, } }), }; json } else { Value::Static(StaticNode::Null) } } #[allow(clippy::too_many_lines)] pub(crate) fn metrics_data_to_pb(data: Option<&Value<'_>>) -> Result<metric::Data> { if let Some(Value::Object(json)) = data { if let Some(Value::Object(json)) = json.get("int-gauge") { let data_points = int_data_points_to_pb(json.get("data_points"))?; return Ok(metric::Data::IntGauge(IntGauge { data_points })); } else if let Some(Value::Object(json)) = json.get("double-gauge") { let data_points = double_data_points_to_pb(json.get("data_points"))?; return Ok(metric::Data::DoubleGauge(DoubleGauge { data_points })); } else if let Some(Value::Object(json)) = json.get("int-sum") { let data_points = int_data_points_to_pb(json.get("data_points"))?; let is_monotonic = pb::maybe_bool_to_pb(json.get("is_monotonic"))?; let aggregation_temporality = pb::maybe_int_to_pbi32(json.get("aggregation_temporality"))?; return Ok(metric::Data::IntSum(IntSum { data_points, aggregation_temporality, is_monotonic, })); } else if let Some(Value::Object(json)) = json.get("double-sum") { let data_points = 
double_data_points_to_pb(json.get("data_points"))?; let is_monotonic = pb::maybe_bool_to_pb(json.get("is_monotonic"))?; let aggregation_temporality = pb::maybe_int_to_pbi32(json.get("aggregation_temporality"))?; return Ok(metric::Data::DoubleSum(DoubleSum { data_points, aggregation_temporality, is_monotonic, })); } else if let Some(Value::Object(json)) = json.get("int-histogram") { let data_points = int_histo_data_points_to_pb(json.get("data_points"))?; let aggregation_temporality = pb::maybe_int_to_pbi32(json.get("aggregation_temporality"))?; return Ok(metric::Data::IntHistogram(IntHistogram { data_points, aggregation_temporality, })); } else if let Some(Value::Object(json)) = json.get("double-histogram") { let data_points = double_histo_data_points_to_pb(json.get("data_points"))?; let aggregation_temporality = pb::maybe_int_to_pbi32(json.get("aggregation_temporality"))?; return Ok(metric::Data::DoubleHistogram(DoubleHistogram { data_points, aggregation_temporality, })); } else if let Some(Value::Object(json)) = json.get("double-summary") { let data_points = double_summary_data_points_to_pb(json.get("data_points"))?; return Ok(metric::Data::DoubleSummary(DoubleSummary { data_points })); } } Err("Invalid metric data point type - cannot convert to pb".into()) } pub(crate) fn instrumentation_library_metrics_to_json<'event>( pb: Vec<tremor_otelapis::opentelemetry::proto::metrics::v1::InstrumentationLibraryMetrics>, ) -> Value<'event> { let mut json = Vec::with_capacity(pb.len()); for data in pb { let mut metrics = Vec::new(); for metric in data.metrics { let data = metrics_data_to_json(metric.data); metrics.push(literal!({ "name": metric.name, "description": metric.description, "data": data, "unit": metric.unit, })); } json.push(literal!({ "instrumentation_library": common::maybe_instrumentation_library_to_json(data.instrumentation_library), "metrics": metrics })); } literal!(json) } pub(crate) fn instrumentation_library_metrics_to_pb( data: Option<&Value<'_>>, ) -> 
Result<Vec<InstrumentationLibraryMetrics>> { if let Some(Value::Array(data)) = data { let mut pb = Vec::with_capacity(data.len()); for ilm in data { if let Value::Object(data) = ilm { let mut metrics = Vec::new(); if let Some(Value::Array(data)) = data.get("metrics") { for metric in data { let name: String = pb::maybe_string_to_pb(metric.get("name"))?; let description: String = pb::maybe_string_to_pb(metric.get("description"))?; let unit: String = pb::maybe_string_to_pb(metric.get("unit"))?; let metric_data: Option<metric::Data> = Some(metrics_data_to_pb(metric.get("data"))?); metrics.push(Metric { name, description, unit, data: metric_data, }); } } let il = data.get("instrumentation_library"); let e = InstrumentationLibraryMetrics { instrumentation_library: common::maybe_instrumentation_library_to_pb(il)?, metrics, }; pb.push(e); } } return Ok(pb); } Err("Invalid json mapping for InstrumentationLibraryMetrics".into()) } pub(crate) fn resource_metrics_to_json<'event>( request: ExportMetricsServiceRequest, ) -> Result<Value<'event>> { let mut metrics: Vec<Value> = Vec::with_capacity(request.resource_metrics.len()); for metric in request.resource_metrics { let ilm = instrumentation_library_metrics_to_json(metric.instrumentation_library_metrics); metrics.push(literal!({ "instrumentation_library_metrics": ilm, "resource": resource::resource_to_json(metric.resource)?, })); } Ok(literal!({ "metrics": metrics })) } pub(crate) fn resource_metrics_to_pb(json: Option<&Value<'_>>) -> Result<Vec<ResourceMetrics>> { if let Some(Value::Object(json)) = json { if let Some(Value::Array(json)) = json.get("metrics") { let mut pb = Vec::with_capacity(json.len()); for json in json { if let Value::Object(json) = json { let instrumentation_library_metrics = instrumentation_library_metrics_to_pb( json.get("instrumentation_library_metrics"), )?; let resource = Some(resource::maybe_resource_to_pb(json.get("resource"))?); let item = ResourceMetrics { resource, 
instrumentation_library_metrics, }; pb.push(item); } } return Ok(pb); } } Err("Invalid json mapping for otel metrics message - cannot convert to pb".into()) } #[cfg(test)] mod tests { use tremor_otelapis::opentelemetry::proto::{ common::v1::InstrumentationLibrary, resource::v1::Resource, }; use super::*; #[test] fn int_exemplars() -> Result<()> { let nanos = tremor_common::time::nanotime(); let span_id_pb = id::random_span_id_bytes(nanos); let span_id_json = id::test::pb_span_id_to_json(&span_id_pb); let trace_id_json = id::random_trace_id_value(nanos); let trace_id_pb = id::test::json_trace_id_to_pb(Some(&trace_id_json))?; let pb = vec![IntExemplar { span_id: span_id_pb.clone(), trace_id: trace_id_pb, time_unix_nano: 0, filtered_labels: vec![], value: 42, }]; let json = int_exemplars_to_json(pb.clone()); let back_again = int_exemplars_to_pb(Some(&json))?; let expected: Value = literal!([{ "time_unix_nano": 0, "span_id": span_id_json, "trace_id": trace_id_json, "filtered_labels": {}, "value": 42 }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = int_exemplars_to_json(vec![]); let back_again = int_exemplars_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn double_exemplars() -> Result<()> { let nanos = tremor_common::time::nanotime(); let span_id_pb = id::random_span_id_bytes(nanos); let span_id_json = id::test::pb_span_id_to_json(&span_id_pb); let trace_id_json = id::random_trace_id_value(nanos); let trace_id_pb = id::test::json_trace_id_to_pb(Some(&trace_id_json))?; let pb = vec![DoubleExemplar { span_id: span_id_pb.clone(), trace_id: trace_id_pb, time_unix_nano: 0, filtered_labels: vec![], value: 42.42, }]; let json = double_exemplars_to_json(pb.clone()); let back_again = double_exemplars_to_pb(Some(&json))?; let expected: Value = literal!([{ "time_unix_nano": 0, "span_id": span_id_json, "trace_id": trace_id_json, "filtered_labels": {}, "value": 
42.42 }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = double_exemplars_to_json(vec![]); let back_again = double_exemplars_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn quantile_values() -> Result<()> { let pb = vec![ValueAtQuantile { value: 42.42, quantile: 0.3, }]; let json = quantile_values_to_json(pb.clone()); let back_again = quantile_values_to_pb(Some(&json))?; let expected: Value = literal!([{ "value": 42.42, "quantile": 0.3, }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = quantile_values_to_json(vec![]); let back_again = quantile_values_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn int_data_points() -> Result<()> { let pb = vec![IntDataPoint { value: 42, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }]; let json = int_data_points_to_json(pb.clone()); let back_again = int_data_points_to_pb(Some(&json))?; let expected: Value = literal!([{ "value": 42, "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [] }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = int_data_points_to_json(vec![]); let back_again = int_data_points_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn double_data_points() -> Result<()> { let pb = vec![DoubleDataPoint { value: 42.42, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }]; let json = double_data_points_to_json(pb.clone()); let back_again = double_data_points_to_pb(Some(&json))?; let expected: Value = literal!([{ "value": 42.42, "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [] }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = 
double_data_points_to_json(vec![]); let back_again = double_data_points_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn int_histo_data_points() -> Result<()> { let pb = vec![IntHistogramDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], sum: 0, count: 0, explicit_bounds: vec![], bucket_counts: vec![], }]; let json = int_histo_data_points_to_json(pb.clone()); let back_again = int_histo_data_points_to_pb(Some(&json))?; let expected: Value = literal!([{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "sum": 0, "count": 0, "explicit_bounds": [], "bucket_counts": [], }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = int_histo_data_points_to_json(vec![]); let back_again = int_data_points_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn double_histo_data_points() -> Result<()> { let pb = vec![DoubleHistogramDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], sum: 0.0, count: 0, explicit_bounds: vec![], bucket_counts: vec![], }]; let json = double_histo_data_points_to_json(pb.clone()); let back_again = double_histo_data_points_to_pb(Some(&json))?; let expected: Value = literal!([{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "sum": 0.0, "count": 0, "explicit_bounds": [], "bucket_counts": [], }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = double_histo_data_points_to_json(vec![]); let back_again = double_histo_data_points_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn double_summary_data_points() -> Result<()> { let pb = vec![DoubleSummaryDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: 
vec![], sum: 0.0, count: 0, quantile_values: vec![ValueAtQuantile { value: 0.1, quantile: 0.2, }], }]; let json = double_summary_data_points_to_json(pb.clone()); let back_again = double_summary_data_points_to_pb(Some(&json))?; let expected: Value = literal!([{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "sum": 0.0, "count": 0, "quantile_values": [ { "value": 0.1, "quantile": 0.2 }] }]); assert_eq!(expected, json); assert_eq!(pb, back_again); // Empty let json = double_summary_data_points_to_json(vec![]); let back_again = double_summary_data_points_to_pb(Some(&json))?; let expected: Value = literal!([]); assert_eq!(expected, json); assert_eq!(back_again, vec![]); Ok(()) } #[test] fn metrics_data_int_gauge() -> Result<()> { let pb = Some(metric::Data::IntGauge(IntGauge { data_points: vec![IntDataPoint { value: 42, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "int-gauge": { "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 42 }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_double_sum() -> Result<()> { let pb = Some(metric::Data::DoubleSum(DoubleSum { is_monotonic: false, aggregation_temporality: 0, data_points: vec![DoubleDataPoint { value: 43.43, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "double-sum": { "is_monotonic": false, "aggregation_temporality": 0, "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 43.43 }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_double_gauge() -> Result<()> { let 
pb = Some(metric::Data::DoubleGauge(DoubleGauge { data_points: vec![DoubleDataPoint { value: 43.43, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "double-gauge": { "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 43.43 }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_double_histo() -> Result<()> { let pb = Some(metric::Data::DoubleHistogram(DoubleHistogram { aggregation_temporality: 0, data_points: vec![DoubleHistogramDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], count: 5, sum: 10.0, bucket_counts: vec![], explicit_bounds: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "double-histogram": { "aggregation_temporality": 0, "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "sum": 10.0, "count": 5, "bucket_counts": [], "explicit_bounds": [] }] } }); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_double_summary() -> Result<()> { let pb = Some(metric::Data::DoubleSummary(DoubleSummary { data_points: vec![DoubleSummaryDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], count: 0, sum: 0.0, quantile_values: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "double-summary": { "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "count": 0, "sum": 0.0, "quantile_values": [] }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_int_histo() -> Result<()> { let pb = 
Some(metric::Data::IntHistogram(IntHistogram { aggregation_temporality: 0, data_points: vec![IntHistogramDataPoint { start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], count: 5, sum: 10, bucket_counts: vec![], explicit_bounds: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "int-histogram": { "aggregation_temporality": 0, "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "count": 5, "sum": 10, "bucket_counts": [], "explicit_bounds": [] }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn metrics_data_int_sum() -> Result<()> { let pb = Some(metric::Data::IntSum(IntSum { is_monotonic: false, aggregation_temporality: 0, data_points: vec![IntDataPoint { value: 4, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })); let json = metrics_data_to_json(pb.clone()); let back_again = metrics_data_to_pb(Some(&json))?; let expected: Value = literal!({ "int-sum": { "is_monotonic": false, "aggregation_temporality": 0, "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 4 }] }}); assert_eq!(expected, json); assert_eq!(pb, Some(back_again)); Ok(()) } #[test] fn instrumentation_library_metrics() -> Result<()> { let pb = vec![InstrumentationLibraryMetrics { instrumentation_library: Some(InstrumentationLibrary { name: "name".into(), version: "v0.1.2".into(), }), // TODO For now its an error for this to be None - may need to revisit metrics: vec![Metric { name: "test".into(), description: "blah blah blah blah".into(), unit: "badgerfeet".into(), data: Some(metric::Data::IntGauge(IntGauge { data_points: vec![IntDataPoint { value: 42, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })), }], }]; let json = instrumentation_library_metrics_to_json(pb.clone()); let 
back_again = instrumentation_library_metrics_to_pb(Some(&json))?; let expected: Value = literal!([{ "instrumentation_library": { "name": "name", "version": "v0.1.2" }, "metrics": [{ "name": "test", "description": "blah blah blah blah", "unit": "badgerfeet", "data": { "int-gauge": { "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 42 }] } }, }] }]); assert_eq!(expected, json); assert_eq!(pb, back_again); Ok(()) } #[test] fn resource_metrics() -> Result<()> { let pb = ExportMetricsServiceRequest { resource_metrics: vec![ResourceMetrics { resource: Some(Resource { attributes: vec![], dropped_attributes_count: 8, }), instrumentation_library_metrics: vec![InstrumentationLibraryMetrics { instrumentation_library: Some(InstrumentationLibrary { name: "name".into(), version: "v0.1.2".into(), }), // TODO For now its an error for this to be None - may need to revisit metrics: vec![Metric { name: "test".into(), description: "blah blah blah blah".into(), unit: "badgerfeet".into(), data: Some(metric::Data::IntGauge(IntGauge { data_points: vec![IntDataPoint { value: 42, start_time_unix_nano: 0, time_unix_nano: 0, labels: vec![], exemplars: vec![], }], })), }], }], }], }; let json = resource_metrics_to_json(pb.clone())?; let back_again = resource_metrics_to_pb(Some(&json))?; let expected: Value = literal!({ "metrics": [ { "resource": { "attributes": {}, "dropped_attributes_count": 8 }, "instrumentation_library_metrics": [{ "instrumentation_library": { "name": "name", "version": "v0.1.2" }, "metrics": [{ "name": "test", "description": "blah blah blah blah", "unit": "badgerfeet", "data": { "int-gauge": { "data_points": [{ "start_time_unix_nano": 0, "time_unix_nano": 0, "labels": {}, "exemplars": [], "value": 42 }] } }, }] }] } ] }); assert_eq!(expected, json); assert_eq!(pb.resource_metrics, back_again); Ok(()) } }
36.328099
115
0.525445
b99ff56129cbd8cbda1eb03920506a88c9cb59f8
8,560
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or // http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or // http://opensource.org/licenses/MIT>, at your option. This file may not be // copied, modified, or distributed except according to those terms. use crate::lexer::preprocessor::context::PreprocContext; use crate::lexer::{Lexer, LocToken, Token}; use crate::parser::declarations::{decl::DeclSpecifierParser, pointer::PointerDeclaratorParser}; use crate::parser::expression; use crate::parser::r#type::r#type::Type; #[derive(Clone, Debug, PartialEq)] pub enum Operator { Op(expression::Operator), UD(String), Conv(Type), } impl Operator { pub fn is_conv(&self) -> bool { match self { Operator::Op(_) => false, _ => true, } } } pub(crate) struct OperatorParser<'a, 'b, PC: PreprocContext> { lexer: &'b mut Lexer<'a, PC>, } impl<'a, 'b, PC: PreprocContext> OperatorParser<'a, 'b, PC> { pub(crate) fn new(lexer: &'b mut Lexer<'a, PC>) -> Self { Self { lexer } } pub(crate) fn parse(self, tok: Option<LocToken>) -> (Option<LocToken>, Option<Operator>) { let tok = tok.unwrap_or_else(|| self.lexer.next_useful()); if tok.tok != Token::Operator { return (Some(tok), None); } let tok = self.lexer.next_useful(); match tok.tok { Token::LiteralString(_) => { let tok = self.lexer.next_useful(); if let Token::Identifier(id) = tok.tok { (None, Some(Operator::UD(id))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } } Token::LiteralStringUD(s_ud) => { let (_, ud) = *s_ud; (None, Some(Operator::UD(ud))) } Token::New => { let tok = self.lexer.next_useful(); if tok.tok == Token::LeftBrack { let tok = self.lexer.next_useful(); if tok.tok == Token::RightBrack { (None, Some(Operator::Op(expression::Operator::NewArray))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } } else { (Some(tok), Some(Operator::Op(expression::Operator::New))) } } Token::Delete => { let tok = self.lexer.next_useful(); if tok.tok == 
Token::LeftBrack { let tok = self.lexer.next_useful(); if tok.tok == Token::RightBrack { (None, Some(Operator::Op(expression::Operator::DeleteArray))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } } else { (Some(tok), Some(Operator::Op(expression::Operator::Delete))) } } Token::CoAwait => (None, Some(Operator::Op(expression::Operator::CoAwait))), Token::LeftParen => { let tok = self.lexer.next_useful(); if tok.tok == Token::RightParen { (None, Some(Operator::Op(expression::Operator::Call))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } } Token::LeftBrack => { let tok = self.lexer.next_useful(); if tok.tok == Token::RightBrack { (None, Some(Operator::Op(expression::Operator::Subscript))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } } Token::Arrow => (None, Some(Operator::Op(expression::Operator::Arrow))), Token::ArrowStar => ( None, Some(Operator::Op(expression::Operator::ArrowIndirection)), ), Token::Tilde => (None, Some(Operator::Op(expression::Operator::BitNeg))), Token::Not => (None, Some(Operator::Op(expression::Operator::Not))), Token::Plus => (None, Some(Operator::Op(expression::Operator::Plus))), Token::Minus => (None, Some(Operator::Op(expression::Operator::Minus))), Token::Star => (None, Some(Operator::Op(expression::Operator::Indirection))), Token::Divide => (None, Some(Operator::Op(expression::Operator::Div))), Token::Modulo => (None, Some(Operator::Op(expression::Operator::Mod))), Token::Xor => (None, Some(Operator::Op(expression::Operator::BitXor))), Token::And => (None, Some(Operator::Op(expression::Operator::AddressOf))), Token::Or => (None, Some(Operator::Op(expression::Operator::BitOr))), Token::Equal => (None, Some(Operator::Op(expression::Operator::Assign))), Token::PlusEqual => (None, Some(Operator::Op(expression::Operator::AddAssign))), Token::MinusEqual => (None, Some(Operator::Op(expression::Operator::SubAssign))), Token::StarEqual => (None, 
Some(Operator::Op(expression::Operator::MulAssign))), Token::DivideEqual => (None, Some(Operator::Op(expression::Operator::DivAssign))), Token::ModuloEqual => (None, Some(Operator::Op(expression::Operator::ModAssign))), Token::XorEqual => (None, Some(Operator::Op(expression::Operator::XorAssign))), Token::AndEqual => (None, Some(Operator::Op(expression::Operator::AndAssign))), Token::OrEqual => (None, Some(Operator::Op(expression::Operator::OrAssign))), Token::EqualEqual => (None, Some(Operator::Op(expression::Operator::Eq))), Token::NotEqual => (None, Some(Operator::Op(expression::Operator::Neq))), Token::Lower => (None, Some(Operator::Op(expression::Operator::Lt))), Token::Greater => (None, Some(Operator::Op(expression::Operator::Gt))), Token::LowerEqual => (None, Some(Operator::Op(expression::Operator::Leq))), Token::GreaterEqual => (None, Some(Operator::Op(expression::Operator::Geq))), Token::LowerEqualGreater => { (None, Some(Operator::Op(expression::Operator::ThreeWayComp))) } Token::AndAnd => (None, Some(Operator::Op(expression::Operator::And))), Token::OrOr => (None, Some(Operator::Op(expression::Operator::Or))), Token::LeftShift => (None, Some(Operator::Op(expression::Operator::LShift))), Token::RightShift => (None, Some(Operator::Op(expression::Operator::RShift))), Token::LeftShiftEqual => (None, Some(Operator::Op(expression::Operator::LShiftAssign))), Token::RightShiftEqual => { (None, Some(Operator::Op(expression::Operator::RShiftAssign))) } Token::PlusPlus => (None, Some(Operator::Op(expression::Operator::PreInc))), Token::MinusMinus => (None, Some(Operator::Op(expression::Operator::PreDec))), Token::Comma => (None, Some(Operator::Op(expression::Operator::Comma))), _ => { let ctp = ConversionTypeParser::new(self.lexer); let (tok, typ) = ctp.parse(Some(tok)); if let Some(typ) = typ { (tok, Some(Operator::Conv(typ))) } else { unreachable!("Invalid token in operator name: {:?}", tok); } // TODO: add operator literal: 
http://eel.is/c++draft/over.literal#nt:literal-operator-id } } } } pub struct ConversionTypeParser<'a, 'b, PC: PreprocContext> { lexer: &'b mut Lexer<'a, PC>, } impl<'a, 'b, PC: PreprocContext> ConversionTypeParser<'a, 'b, PC> { pub(crate) fn new(lexer: &'b mut Lexer<'a, PC>) -> Self { Self { lexer } } pub(crate) fn parse(self, tok: Option<LocToken>) -> (Option<LocToken>, Option<Type>) { let dsp = DeclSpecifierParser::new(self.lexer); let (tok, (_, typ, _)) = dsp.parse(tok, None); let mut typ = if let Some(typ) = typ { typ } else { return (tok, None); }; // Pointer: *, &, && let pdp = PointerDeclaratorParser::new(self.lexer); let (tok, ptrs) = pdp.parse(tok, None); typ.pointers = ptrs; (tok, Some(typ)) } }
45.775401
105
0.536565
edab160e68f9cff7638b5bed706b334d2094d1b3
5,518
use amethyst::winit::VirtualKeyCode; use derivative::Derivative; use serde::{Deserialize, Serialize}; /// Keyboard layout variants. #[derive(Clone, Copy, Debug, Derivative, Deserialize, Hash, PartialEq, Eq, Serialize)] #[derivative(Default)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub enum KeyboardLayout { /// US keyboard layout #[derivative(Default)] Us, } impl KeyboardLayout { /// Returns the keyboard buttons present for this keyboard layout. /// /// Development note: This is not called `keys()` as it is ambiguous with [`HashMap::keys`]. pub fn buttons(self) -> Vec<VirtualKeyCode> { match self { KeyboardLayout::Us => { vec![ // Bottom row. VirtualKeyCode::LControl, VirtualKeyCode::LWin, VirtualKeyCode::LAlt, VirtualKeyCode::Space, VirtualKeyCode::RAlt, VirtualKeyCode::RWin, VirtualKeyCode::Apps, // Context menu VirtualKeyCode::RControl, VirtualKeyCode::Left, VirtualKeyCode::Down, VirtualKeyCode::Right, VirtualKeyCode::Numpad0, VirtualKeyCode::NumpadComma, VirtualKeyCode::NumpadEnter, // Second row. VirtualKeyCode::LShift, VirtualKeyCode::Z, VirtualKeyCode::X, VirtualKeyCode::C, VirtualKeyCode::V, VirtualKeyCode::B, VirtualKeyCode::N, VirtualKeyCode::M, VirtualKeyCode::Comma, VirtualKeyCode::Period, VirtualKeyCode::Slash, VirtualKeyCode::RShift, VirtualKeyCode::Up, VirtualKeyCode::Numpad1, VirtualKeyCode::Numpad2, VirtualKeyCode::Numpad3, // Third row. VirtualKeyCode::Capital, VirtualKeyCode::A, VirtualKeyCode::S, VirtualKeyCode::D, VirtualKeyCode::F, VirtualKeyCode::G, VirtualKeyCode::H, VirtualKeyCode::J, VirtualKeyCode::K, VirtualKeyCode::L, VirtualKeyCode::Semicolon, // Colon VirtualKeyCode::Apostrophe, // Quote / double quote. VirtualKeyCode::Return, // Enter VirtualKeyCode::Numpad4, VirtualKeyCode::Numpad5, VirtualKeyCode::Numpad6, VirtualKeyCode::Add, // Fourth row. 
VirtualKeyCode::Tab, VirtualKeyCode::Q, VirtualKeyCode::W, VirtualKeyCode::E, VirtualKeyCode::R, VirtualKeyCode::T, VirtualKeyCode::Y, VirtualKeyCode::U, VirtualKeyCode::I, VirtualKeyCode::O, VirtualKeyCode::P, VirtualKeyCode::LBracket, VirtualKeyCode::RBracket, VirtualKeyCode::Backslash, VirtualKeyCode::End, VirtualKeyCode::Delete, VirtualKeyCode::PageDown, VirtualKeyCode::Numpad7, VirtualKeyCode::Numpad8, VirtualKeyCode::Numpad9, // Fifth row. VirtualKeyCode::Grave, VirtualKeyCode::Key1, VirtualKeyCode::Key2, VirtualKeyCode::Key3, VirtualKeyCode::Key4, VirtualKeyCode::Key5, VirtualKeyCode::Key6, VirtualKeyCode::Key7, VirtualKeyCode::Key8, VirtualKeyCode::Key9, VirtualKeyCode::Key0, VirtualKeyCode::Minus, // Underline VirtualKeyCode::Equals, VirtualKeyCode::Back, VirtualKeyCode::Insert, VirtualKeyCode::Home, VirtualKeyCode::PageUp, VirtualKeyCode::Numlock, VirtualKeyCode::Divide, VirtualKeyCode::Multiply, VirtualKeyCode::Subtract, // NumpadSubtract // Top row. VirtualKeyCode::Escape, VirtualKeyCode::F1, VirtualKeyCode::F2, VirtualKeyCode::F3, VirtualKeyCode::F4, VirtualKeyCode::F5, VirtualKeyCode::F6, VirtualKeyCode::F7, VirtualKeyCode::F8, VirtualKeyCode::F9, VirtualKeyCode::F10, VirtualKeyCode::F11, VirtualKeyCode::F12, VirtualKeyCode::Snapshot, // Print Screen VirtualKeyCode::Sysrq, VirtualKeyCode::Scroll, VirtualKeyCode::Pause, ] } } } }
39.697842
96
0.455781
bba45c4eb40a60df9c2e1f7c48bd652edb460213
21,150
//#![allow(unused_imports)] // Last edit: 00:30 - 21/11/2021 use teloxide::{prelude::*, types::{ChatPermissions, Me}, utils::command::BotCommand}; //use teloxide::utils::command::ParseError; use std::env; use std::error::Error; use std::str; use std::str::FromStr; use std::process::Command; use chrono::{DateTime, Duration, NaiveDateTime, Utc}; use teloxide::types::Message; //use std::io::{stdin, stdout, Write}; /* fn custom_parse_function(s: String) -> Result<(u8, String), ParseError> { let vec = s.split_whitespace().collect::<Vec<_>>(); let (left, right) = match vec.as_slice() { [l, r] => (l, r), _ => return Err(ParseError::IncorrectFormat("might be 2 arguments!".into())), }; left.parse::<u8>() .map(|res| (res, (*right).to_string())) .map_err(|_| ParseError::Custom("First argument must be a integer!".to_owned().into())) } */ #[derive(BotCommand)] #[command(rename = "lowercase", description = "Lista comandi")] enum Commands { #[command(description = "Mostra lista comandi.")] Help, #[command(description = "Gestisci una macro.", parse_with = "split")] Macro {option: String, macro_str: String}, #[command(description = "Banna un utente da un gruppo.")] Ban {reason: String}, #[command(description = "Kicka un utente da un gruppo.")] Kick {reason_k: String}, #[command(description = "Muta un utente da un gruppo.", parse_with = "split")] Mute {time: u64, unit: UnitOfTime}, #[command(description = "Annulla il ban ad un utente di un gruppo.")] Unban, #[command(description = "Controlla i log.")] Logs, #[command(description = "Il bot risponde?")] Ping, #[command(description = "La mia pagina github.")] Info, #[command(description = "Effettua un calcolo.", parse_with = "split")] Calc {x: u32, y: u32, operator: String}, #[command(description = "Lista contatti utili.")] Contatti {who: String}, #[command(description = "Consulta gli orari")] Orari, #[command(description = "Cerca un'aula virtuale webex", parse_with = "split")] Webex {nome: String, cognome: String}, 
#[command(description = "Effettua una ricerca su google")] Google {query: String}, #[command(description = "Cerca sulla wiki di Arch Linux")] Wiki {query_arch: String}, #[command(description = "Cerca sulle pagine del manuale")] Man {query_man: String}, } enum UnitOfTime { Seconds, Minutes, Hours, } impl FromStr for UnitOfTime { type Err = &'static str; fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> { match s { "h" | "hours" => Ok(UnitOfTime::Hours), "m" | "minutes" => Ok(UnitOfTime::Minutes), "s" | "seconds" => Ok(UnitOfTime::Seconds), _ => Err("Allowed units: h, m, s"), } } } struct Cinfo { email: String, sito: String, telefono: String, } struct Didattica { email: String, sito: String, telefono: String, } // Calculates time of user restriction. fn calc_restrict_time(time: u64, unit: UnitOfTime) -> Duration { match unit { UnitOfTime::Hours => Duration::hours(time as i64), UnitOfTime::Minutes => Duration::minutes(time as i64), UnitOfTime::Seconds => Duration::seconds(time as i64), } } type Cx = UpdateWithCx<AutoSend<Bot>, Message>; //Muta un utente rispondendo a un suo messaggio //Aggiungere welcome_message e macro personalizzabili async fn mute_user(cx: &Cx, time: Duration) -> Result<(), Box<dyn Error + Send + Sync>> { match cx.update.reply_to_message() { Some(msg1) => { // Controlliamo i permessi di chi invoca il comando // Che ruolo ha? 
Amministratore o utente let member_mute = cx.requester.get_chat_member(cx.update.chat_id(), cx.update.from().unwrap().id).send().await?; let _member_mute = member_mute.is_privileged(); match _member_mute { true => { // Se entra qui dentro vuol dire che un amministratore sta invocando il comando // Utente che "subisce" il comando // controlliamo se e' un admin o un normale utente let to_mute = cx.requester.get_chat_member(cx.update.chat_id(), msg1.from().unwrap().id).send().await?; let _to_mute = to_mute.is_privileged(); match _to_mute { // L'utente che "subisce" il comando e' un amministratore true => { cx.reply_to("Non posso usare questo comando su un amministratore").send().await?; } // L'utente che "subisce" il comando e' un utente normale // Si puo' procedere con il comando false => { cx.requester .restrict_chat_member( cx.update.chat_id(), msg1.from().expect("Must be MessageKind::Common").id, ChatPermissions::default(), ) .until_date( DateTime::<Utc>::from_utc( NaiveDateTime::from_timestamp(cx.update.date as i64, 0), Utc, ) + time, ) .await?; cx.reply_to(format!("{} e' stato mutato fino al {}", msg1.from().unwrap().first_name, DateTime::<Utc>::from_utc( NaiveDateTime::from_timestamp(cx.update.date as i64, 0), Utc, ) + time)).send().await?; } } } false => { // Un utente normale sta cercando di usare un comando per soli amministratori cx.reply_to("Non hai i permessi necessari per usare questo comando").send().await?; } } } None => { // Non viene specificato nessun messaggio a cui rispondere cx.reply_to("Usa questo comando in risposta ad un messaggio").send().await?; } } Ok(()) } // Kicka un utente async fn kick_user(cx: &Cx, str_msg: &str, reason_k: String) -> Result<(), Box<dyn Error + Send + Sync>> { match cx.update.reply_to_message() { Some(mes) => { // Controlliamo i permessi di chi invoca il comando // Che ruolo ha? 
Amministratore o utente let member_kick = cx.requester.get_chat_member(cx.update.chat_id(), cx.update.from().unwrap().id).send().await?; let _member_kick = member_kick.is_privileged(); match _member_kick { true => { // Se entra qui dentro vuol dire che un amministratore sta invocando il comando // Utente che "subisce" il comando // controlliamo se e' un admin o un normale utente let to_kick = cx.requester.get_chat_member(cx.update.chat_id(), mes.from().unwrap().id).send().await?; let _to_kick = to_kick.is_privileged(); match _to_kick { // L'utente che "subisce" il comando e' un amministratore true => { cx.reply_to("Non posso usare questo comando su un amministratore").send().await?; } // L'utente che "subisce" il comando e' un utente normale // Si puo' procedere con il comando false => { let mut rsn_k = ""; let mut r_k = rsn_k.to_owned(); cx.requester .unban_chat_member(cx.update.chat_id(), mes.from().unwrap().id) .send() .await?; if reason_k.is_empty() == false { rsn_k = "Motivo: "; r_k = rsn_k.to_owned() + &reason_k; } cx.reply_to(format!("{} {}\n{}", mes.from().unwrap().first_name, str_msg, r_k)).send().await?; //cx.answer(format!("Utente {} kickato", mes.from().unwrap().id)).await?; } } } false => { // Un utente normale sta cercando di usare un comando per soli amministratori cx.reply_to("Non hai i permessi necessari per usare questo comando").send().await?; } } } None => { // Non viene specificato nessun messaggio a cui rispondere cx.reply_to("Usa questo comando in risposta ad un messaggio").send().await?; } } Ok(()) } // Banna un utente async fn ban_user(cx: &Cx, reason: String) -> Result<(), Box<dyn Error + Send + Sync>> { match cx.update.reply_to_message() { Some(message) => { // Controlliamo i permessi di chi invoca il comando // Che ruolo ha? 
Amministratore o utente let member_ban = cx.requester.get_chat_member(cx.update.chat_id(), cx.update.from().unwrap().id).send().await?; let _member_ban = member_ban.is_privileged(); match _member_ban { true => { // Se entra qui dentro vuol dire che un amministratore sta invocando il comando // Utente che "subisce" il comando // controlliamo se e' un admin o un normale utente let to_ban = cx.requester.get_chat_member(cx.update.chat_id(), message.from().unwrap().id).send().await?; let _to_ban= to_ban.is_privileged(); match _to_ban { // L'utente che "subisce" il comando e' un amministratore true => { cx.reply_to("Non posso usare questo comando su un amministratore").send().await?; } // L'utente che "subisce" il comando e' un utente normale // Si puo' procedere con il comando false => { let mut rsn = ""; let mut r = rsn.to_owned(); cx.requester .kick_chat_member( cx.update.chat_id(), message.from().expect("Must be MessageKind::Common").id, ).await?; if reason.is_empty() == false { rsn = "Motivo: "; r = rsn.to_owned() + &reason; } cx.reply_to(format!("{} e' stato bannato\n{}", message.from().unwrap().first_name, r)).send().await?; } } } false => { cx.reply_to("Non hai i permessi necessari per usare questo comando").send().await?; } } } None => { cx.reply_to("Usa questo comando in risposta ad un messaggio").send().await?; } } Ok(()) } async fn action(cx: UpdateWithCx<AutoSend<Bot>, Message>, command: Commands) -> Result<(), Box<dyn Error + Send + Sync>> { match command { Commands::Help => { //cx.reply_to(format!("{}", &Commands::descriptions())).send().await?; print_(&cx, &Commands::descriptions()).await?; } Commands::Logs => { print_(&cx, "@rootinit controlla i log").await?; } Commands::Ping => { print_(&cx, "pong").await?; } Commands::Orari => { print_(&cx, "https://orarilezioni.unicam.it").await?; } Commands::Webex{nome, cognome} => { cx.reply_to(format!("https://unicam.webex.com/meet/{}.{}", nome.to_lowercase(), cognome.to_lowercase())).send().await?; } 
Commands::Google{query} => { //let q = String::from(query); //assert!(query.contains(char::is_whitespace)); let result = query.replace(" ", "+"); cx.reply_to(format!("https://www.google.com/search?q={}", result)).send().await?; } Commands::Wiki{query_arch} => { let result_arch = query_arch.replace(" ", "+"); cx.reply_to(format!("https://wiki.archlinux.org/index.php?search={}", result_arch)).send().await?; } Commands::Man{query_man} => { cx.reply_to(format!("https://man.archlinux.org/search?q={}&go=Go", query_man)).send().await?; } Commands::Contatti{who} => { match who.as_str() { "cinfo" => { let cinfo = Cinfo { email: String::from("cinfo@unicam.it"), sito: String::from("https://cinfo.unicam.it"), telefono: String::from("0737402113"), }; cx.reply_to(format!("Email: {}\nSito web: {}\nTelefono: {}", cinfo.email, cinfo.sito, cinfo.telefono)).send().await?; } "segreteria" => { let segreteria = Didattica { email: String::from("segreteriastudenti.scienze@unicam.it"), sito: String::from("https://www.unicam.it/studente/segreterie-studenti"), telefono: String::from("0737637336"), }; cx.reply_to(format!("Email: {}\nSito web: {}\nTelefono: {}", segreteria.email, segreteria.sito, segreteria.telefono)).send().await?; cx.reply_to(format!("ORARI\nLunedi': 10:30 - 13:00\nMartedi': 15:00 - 17:00\nMercoledi': 10:30 - 13:00\nGiovedi': 15:00 - 17:00\nVenerdi': 10:30 - 13:00")).send().await?; } _ => { cx.reply_to(format!("Devi specificare di chi vuoi i contatti (segreteria, cinfo, ecc...)")).send().await?; } }; } Commands::Calc{x, y, operator} => { match operator.as_str() { "+" | "add" => { let a = x.checked_add(y); match a { Some(_v_add) => { print_op(&cx, "", x+y).await?; } None => { print_(&cx, "Ops, non sono stato in grado di effettuare il calcolo, riprova").await?; } }; } "-" | "sub" => { let s = x.checked_sub(y); match s { Some(_v_sub) => { print_op(&cx, "", x-y).await?; } None => { print_(&cx, "Ops, non sono stato in grado di effettuare il calcolo, riprova").await?; } }; } "x" | 
"mul" => { let m = x.checked_add(y); match m { Some(_v_mul) => { print_op(&cx, "", x*y).await?; } None => { print_(&cx, "Ops, non sono stato in grado di effettuare il calcolo, riprova").await?; } }; } "/" | "div" => { let d = x.checked_add(y); match d { Some(_v_div) => { print_op(&cx, "", x/y).await?; } None => { print_(&cx, "Ops, non sono stato in grado di effettuare il calcolo, riprova").await?; } }; } "**"| "pow" => { //let p = x.pow(y); let p = x.checked_pow(y); match p { Some(_v) => { print_op(&cx, "", x.pow(y)).await?; } None => { print_(&cx, "Ops, non sono stato in grado di effettuare il calcolo, riprova").await?; } }; } _ => { print_(&cx, "Non ho capito che operazione devo fare ").await?; } } } Commands::Info => { print_(&cx, "https://github.com/Gasu16/HacktoberBot").await?; } Commands::Unban => { kick_user(&cx, "e' stato sbannato", "".to_string()).await?; } Commands::Ban{reason} => { ban_user(&cx, reason).await?; //cx.reply_to(format!("Motivo: {}", reason)).send().await?; } Commands::Kick{reason_k} => { kick_user(&cx, "e' stato kickato", reason_k).await?; } Commands::Mute{time, unit} => { mute_user(&cx, calc_restrict_time(time, unit)).await?; } Commands::Macro{option, macro_str} => { match option.as_str() { "-a" | "--add" => { print_(&cx, "Macro aggiunta").await?; } "-e" | "--edit" => { print_(&cx, "Macro editata").await?; } "-r" | "--remove" => { print_(&cx, "Macro rimossa").await?; } "-c" | "--to-ascii" => { let mut cmd = Command::new("sh"); let j = ["echo", macro_str.as_str()].join(" "); cmd.arg("-c").arg(j); let _cmd = cmd.output().expect("Comando non letto correttamente"); print_with(&cx, "", _cmd.stdout).await?; } _ => { print_(&cx, "Comando non valido").await?; } } } }; Ok(()) } #[tokio::main] async fn main() { run().await; } async fn print_(cx: &Cx, to_print: &str) -> Result<(), Box<dyn Error + Send + Sync>> { if let Err(e) = cx.reply_to(format!("{}", to_print)).send().await { println!("Error: {}", e.to_string()); } Ok(()) } async fn print_with(cx: 
&Cx, to_print_with: &str, to_arg_with: Vec<u8>) -> Result<(), Box<dyn Error + Send + Sync>> { if let Err(er) = cx.reply_to(format!("{} {:?}", to_print_with, to_arg_with)).send().await { println!("Error: {}", er.to_string()); } Ok(()) } async fn print_op(cx: &Cx, to_print_op: &str, to_arg_op: u32) -> Result<(), Box<dyn Error + Send + Sync>> { if let Err(op_err) = cx.reply_to(format!("{} {:?}", to_print_op, to_arg_op)).send().await { println!("Error: {}", op_err.to_string()); } Ok(()) } async fn run() { teloxide::enable_logging!(); log::info!("Starting simple_commands_bot..."); let _bot = Bot::from_env().auto_send(); let Me {user: _bot_user, ..} = _bot.get_me().await.unwrap(); let _bot_name: String = "INIT.D".into(); teloxide::commands_repl(_bot, _bot_name, action).await; }
37.170475
190
0.440804
38327e4e38a2740013683f2c2022d2199bf0c90c
26,865
// * This file is part of the uutils coreutils package. // * // * (c) Dorota Kapturkiewicz <dokaptur@gmail.com> // * // * For the full copyright and license information, please view the LICENSE // * file that was distributed with this source code. // spell-checker:ignore (ToDOs) corasick memchr Roff trunc oset iset #[macro_use] extern crate uucore; use clap::{App, Arg}; use regex::Regex; use std::cmp; use std::collections::{BTreeSet, HashMap, HashSet}; use std::default::Default; use std::fs::File; use std::io::{stdin, stdout, BufRead, BufReader, BufWriter, Read, Write}; static NAME: &str = "ptx"; static VERSION: &str = env!("CARGO_PKG_VERSION"); static BRIEF: &str = "Usage: ptx [OPTION]... [INPUT]... (without -G) or: \ ptx -G [OPTION]... [INPUT [OUTPUT]] \n Output a permuted index, \ including context, of the words in the input files. \n\n Mandatory \ arguments to long options are mandatory for short options too.\n With no FILE, or when FILE is -, read standard input. \ Default is '-F /'."; #[derive(Debug)] enum OutFormat { Dumb, Roff, Tex, } #[derive(Debug)] struct Config { format: OutFormat, gnu_ext: bool, auto_ref: bool, input_ref: bool, right_ref: bool, ignore_case: bool, macro_name: String, trunc_str: String, context_regex: String, line_width: usize, gap_size: usize, } impl Default for Config { fn default() -> Config { Config { format: OutFormat::Dumb, gnu_ext: true, auto_ref: false, input_ref: false, right_ref: false, ignore_case: false, macro_name: "xx".to_owned(), trunc_str: "/".to_owned(), context_regex: "\\w+".to_owned(), line_width: 72, gap_size: 3, } } } fn read_word_filter_file(matches: &clap::ArgMatches, option: &str) -> HashSet<String> { let filename = matches .value_of(option) .expect("parsing options failed!") .to_string(); let reader = BufReader::new(crash_if_err!(1, File::open(filename))); let mut words: HashSet<String> = HashSet::new(); for word in reader.lines() { words.insert(crash_if_err!(1, word)); } words } #[derive(Debug)] struct 
WordFilter { only_specified: bool, ignore_specified: bool, only_set: HashSet<String>, ignore_set: HashSet<String>, word_regex: String, } impl WordFilter { fn new(matches: &clap::ArgMatches, config: &Config) -> WordFilter { let (o, oset): (bool, HashSet<String>) = if matches.is_present(options::ONLY_FILE) { (true, read_word_filter_file(matches, options::ONLY_FILE)) } else { (false, HashSet::new()) }; let (i, iset): (bool, HashSet<String>) = if matches.is_present(options::IGNORE_FILE) { (true, read_word_filter_file(matches, options::IGNORE_FILE)) } else { (false, HashSet::new()) }; if matches.is_present(options::BREAK_FILE) { crash!(1, "-b not implemented yet"); } // Ignore empty string regex from cmd-line-args let arg_reg: Option<String> = if matches.is_present(options::WORD_REGEXP) { match matches.value_of(options::WORD_REGEXP) { Some(v) => match v.is_empty() { true => None, false => Some(v.to_string()), }, None => None, } } else { None }; let reg = match arg_reg { Some(arg_reg) => arg_reg, None => { if config.gnu_ext { "\\w+".to_owned() } else { "[^ \t\n]+".to_owned() } } }; WordFilter { only_specified: o, ignore_specified: i, only_set: oset, ignore_set: iset, word_regex: reg, } } } #[derive(Debug, PartialOrd, PartialEq, Eq, Ord)] struct WordRef { word: String, global_line_nr: usize, local_line_nr: usize, position: usize, position_end: usize, filename: String, } fn get_config(matches: &clap::ArgMatches) -> Config { let mut config: Config = Default::default(); let err_msg = "parsing options failed"; if matches.is_present(options::TRADITIONAL) { config.gnu_ext = false; config.format = OutFormat::Roff; config.context_regex = "[^ \t\n]+".to_owned(); } else { crash!(1, "GNU extensions not implemented yet"); } if matches.is_present(options::SENTENCE_REGEXP) { crash!(1, "-S not implemented yet"); } config.auto_ref = matches.is_present(options::AUTO_REFERENCE); config.input_ref = matches.is_present(options::REFERENCES); config.right_ref &= 
matches.is_present(options::RIGHT_SIDE_REFS); config.ignore_case = matches.is_present(options::IGNORE_CASE); if matches.is_present(options::MACRO_NAME) { config.macro_name = matches .value_of(options::MACRO_NAME) .expect(err_msg) .to_string(); } if matches.is_present(options::FLAG_TRUNCATION) { config.trunc_str = matches .value_of(options::FLAG_TRUNCATION) .expect(err_msg) .to_string(); } if matches.is_present(options::WIDTH) { let width_str = matches.value_of(options::WIDTH).expect(err_msg).to_string(); config.line_width = crash_if_err!(1, usize::from_str_radix(&width_str, 10)); } if matches.is_present(options::GAP_SIZE) { let gap_str = matches .value_of(options::GAP_SIZE) .expect(err_msg) .to_string(); config.gap_size = crash_if_err!(1, usize::from_str_radix(&gap_str, 10)); } if matches.is_present(options::FORMAT_ROFF) { config.format = OutFormat::Roff; } if matches.is_present(options::FORMAT_TEX) { config.format = OutFormat::Tex; } config } struct FileContent { lines: Vec<String>, chars_lines: Vec<Vec<char>>, offset: usize, } type FileMap = HashMap<String, FileContent>; fn read_input(input_files: &[String], config: &Config) -> FileMap { let mut file_map: FileMap = HashMap::new(); let mut files = Vec::new(); if input_files.is_empty() { files.push("-"); } else if config.gnu_ext { for file in input_files { files.push(&file); } } else { files.push(&input_files[0]); } let mut offset: usize = 0; for filename in files { let reader: BufReader<Box<dyn Read>> = BufReader::new(if filename == "-" { Box::new(stdin()) } else { let file = crash_if_err!(1, File::open(filename)); Box::new(file) }); let lines: Vec<String> = reader.lines().map(|x| crash_if_err!(1, x)).collect(); // Indexing UTF-8 string requires walking from the beginning, which can hurts performance badly when the line is long. // Since we will be jumping around the line a lot, we dump the content into a Vec<char>, which can be indexed in constant time. 
let chars_lines: Vec<Vec<char>> = lines.iter().map(|x| x.chars().collect()).collect(); let size = lines.len(); file_map.insert( filename.to_owned(), FileContent { lines, chars_lines, offset, }, ); offset += size } file_map } /// Go through every lines in the input files and record each match occurance as a `WordRef`. fn create_word_set(config: &Config, filter: &WordFilter, file_map: &FileMap) -> BTreeSet<WordRef> { let reg = Regex::new(&filter.word_regex).unwrap(); let ref_reg = Regex::new(&config.context_regex).unwrap(); let mut word_set: BTreeSet<WordRef> = BTreeSet::new(); for (file, lines) in file_map.iter() { let mut count: usize = 0; let offs = lines.offset; for line in &lines.lines { // if -r, exclude reference from word set let (ref_beg, ref_end) = match ref_reg.find(line) { Some(x) => (x.start(), x.end()), None => (0, 0), }; // match words with given regex for mat in reg.find_iter(line) { let (beg, end) = (mat.start(), mat.end()); if config.input_ref && ((beg, end) == (ref_beg, ref_end)) { continue; } let mut word = line[beg..end].to_owned(); if filter.only_specified && !(filter.only_set.contains(&word)) { continue; } if filter.ignore_specified && filter.ignore_set.contains(&word) { continue; } if config.ignore_case { word = word.to_lowercase(); } word_set.insert(WordRef { word, filename: file.clone(), global_line_nr: offs + count, local_line_nr: count, position: beg, position_end: end, }); } count += 1; } } word_set } fn get_reference(config: &Config, word_ref: &WordRef, line: &str, context_reg: &Regex) -> String { if config.auto_ref { format!("{}:{}", word_ref.filename, word_ref.local_line_nr + 1) } else if config.input_ref { let (beg, end) = match context_reg.find(line) { Some(x) => (x.start(), x.end()), None => (0, 0), }; line[beg..end].to_string() } else { String::new() } } fn assert_str_integrity(s: &[char], beg: usize, end: usize) { assert!(beg <= end); assert!(end <= s.len()); } fn trim_broken_word_left(s: &[char], beg: usize, end: usize) -> usize 
{ assert_str_integrity(s, beg, end); if beg == end || beg == 0 || s[beg].is_whitespace() || s[beg - 1].is_whitespace() { return beg; } let mut b = beg; while b < end && !s[b].is_whitespace() { b += 1; } b } fn trim_broken_word_right(s: &[char], beg: usize, end: usize) -> usize { assert_str_integrity(s, beg, end); if beg == end || end == s.len() || s[end - 1].is_whitespace() || s[end].is_whitespace() { return end; } let mut e = end; while beg < e && !s[e - 1].is_whitespace() { e -= 1; } e } fn trim_idx(s: &[char], beg: usize, end: usize) -> (usize, usize) { assert_str_integrity(s, beg, end); let mut b = beg; let mut e = end; while b < e && s[b].is_whitespace() { b += 1; } while b < e && s[e - 1].is_whitespace() { e -= 1; } (b, e) } fn get_output_chunks( all_before: &[char], keyword: &str, all_after: &[char], config: &Config, ) -> (String, String, String, String) { // Chunk size logics are mostly copied from the GNU ptx source. // https://github.com/MaiZure/coreutils-8.3/blob/master/src/ptx.c#L1234 let half_line_size = (config.line_width / 2) as usize; let max_before_size = cmp::max(half_line_size as isize - config.gap_size as isize, 0) as usize; let max_after_size = cmp::max( half_line_size as isize - (2 * config.trunc_str.len()) as isize - keyword.len() as isize - 1, 0, ) as usize; // Allocate plenty space for all the chunks. let mut head = String::with_capacity(half_line_size); let mut before = String::with_capacity(half_line_size); let mut after = String::with_capacity(half_line_size); let mut tail = String::with_capacity(half_line_size); // the before chunk // trim whitespace away from all_before to get the index where the before chunk should end. let (_, before_end) = trim_idx(all_before, 0, all_before.len()); // the minimum possible begin index of the before_chunk is the end index minus the length. let before_beg = cmp::max(before_end as isize - max_before_size as isize, 0) as usize; // in case that falls in the middle of a word, trim away the word. 
let before_beg = trim_broken_word_left(all_before, before_beg, before_end); // trim away white space. let (before_beg, before_end) = trim_idx(all_before, before_beg, before_end); // and get the string. let before_str: String = all_before[before_beg..before_end].iter().collect(); before.push_str(&before_str); assert!(max_before_size >= before.len()); // the after chunk // must be no longer than the minimum between the max size and the total available string. let after_end = cmp::min(max_after_size, all_after.len()); // in case that falls in the middle of a word, trim away the word. let after_end = trim_broken_word_right(all_after, 0, after_end); // trim away white space. let (_, after_end) = trim_idx(all_after, 0, after_end); // and get the string let after_str: String = all_after[0..after_end].iter().collect(); after.push_str(&after_str); assert!(max_after_size >= after.len()); // the tail chunk // max size of the tail chunk = max size of left half - space taken by before chunk - gap size. let max_tail_size = cmp::max( max_before_size as isize - before.len() as isize - config.gap_size as isize, 0, ) as usize; // the tail chunk takes text starting from where the after chunk ends (with whitespaces trimmed). let (tail_beg, _) = trim_idx(all_after, after_end, all_after.len()); // end = begin + max length let tail_end = cmp::min(all_after.len(), tail_beg + max_tail_size) as usize; // in case that falls in the middle of a word, trim away the word. let tail_end = trim_broken_word_right(all_after, tail_beg, tail_end); // trim away whitespace again. let (tail_beg, tail_end) = trim_idx(all_after, tail_beg, tail_end); // and get the string let tail_str: String = all_after[tail_beg..tail_end].iter().collect(); tail.push_str(&tail_str); // the head chunk // max size of the head chunk = max size of right half - space taken by after chunk - gap size. 
let max_head_size = cmp::max( max_after_size as isize - after.len() as isize - config.gap_size as isize, 0, ) as usize; // the head chunk takes text from before the before chunk let (_, head_end) = trim_idx(all_before, 0, before_beg); // begin = end - max length let head_beg = cmp::max(head_end as isize - max_head_size as isize, 0) as usize; // in case that falls in the middle of a word, trim away the word. let head_beg = trim_broken_word_left(all_before, head_beg, head_end); // trim away white space again. let (head_beg, head_end) = trim_idx(all_before, head_beg, head_end); // and get the string. let head_str: String = all_before[head_beg..head_end].iter().collect(); head.push_str(&head_str); // put right context truncation string if needed if after_end != all_after.len() && tail_beg == tail_end { after.push_str(&config.trunc_str); } else if after_end != all_after.len() && tail_end != all_after.len() { tail.push_str(&config.trunc_str); } // put left context truncation string if needed if before_beg != 0 && head_beg == head_end { before = format!("{}{}", config.trunc_str, before); } else if before_beg != 0 && head_beg != 0 { head = format!("{}{}", config.trunc_str, head); } (tail, before, after, head) } fn tex_mapper(x: char) -> String { match x { '\\' => "\\backslash{}".to_owned(), '$' | '%' | '#' | '&' | '_' => format!("\\{}", x), '}' | '{' => format!("$\\{}$", x), _ => x.to_string(), } } /// Escape special characters for TeX. 
fn format_tex_field(s: &str) -> String { let mapped_chunks: Vec<String> = s.chars().map(tex_mapper).collect(); mapped_chunks.join("") } fn format_tex_line( config: &Config, word_ref: &WordRef, line: &str, chars_line: &[char], reference: &str, ) -> String { let mut output = String::new(); output.push_str(&format!("\\{} ", config.macro_name)); let all_before = if config.input_ref { let before = &line[0..word_ref.position]; let before_start_trimoff = word_ref.position - before.trim_start_matches(reference).trim_start().len(); let before_end_index = before.len(); &chars_line[before_start_trimoff..cmp::max(before_end_index, before_start_trimoff)] } else { let before_chars_trim_idx = (0, word_ref.position); &chars_line[before_chars_trim_idx.0..before_chars_trim_idx.1] }; let keyword = &line[word_ref.position..word_ref.position_end]; let after_chars_trim_idx = (word_ref.position_end, chars_line.len()); let all_after = &chars_line[after_chars_trim_idx.0..after_chars_trim_idx.1]; let (tail, before, after, head) = get_output_chunks(&all_before, &keyword, &all_after, &config); output.push_str(&format!( "{5}{0}{6}{5}{1}{6}{5}{2}{6}{5}{3}{6}{5}{4}{6}", format_tex_field(&tail), format_tex_field(&before), format_tex_field(keyword), format_tex_field(&after), format_tex_field(&head), "{", "}" )); if config.auto_ref || config.input_ref { output.push_str(&format!("{}{}{}", "{", format_tex_field(&reference), "}")); } output } fn format_roff_field(s: &str) -> String { s.replace("\"", "\"\"") } fn format_roff_line( config: &Config, word_ref: &WordRef, line: &str, chars_line: &[char], reference: &str, ) -> String { let mut output = String::new(); output.push_str(&format!(".{}", config.macro_name)); let all_before = if config.input_ref { let before = &line[0..word_ref.position]; let before_start_trimoff = word_ref.position - before.trim_start_matches(reference).trim_start().len(); let before_end_index = before.len(); &chars_line[before_start_trimoff..cmp::max(before_end_index, 
before_start_trimoff)] } else { let before_chars_trim_idx = (0, word_ref.position); &chars_line[before_chars_trim_idx.0..before_chars_trim_idx.1] }; let keyword = &line[word_ref.position..word_ref.position_end]; let after_chars_trim_idx = (word_ref.position_end, chars_line.len()); let all_after = &chars_line[after_chars_trim_idx.0..after_chars_trim_idx.1]; let (tail, before, after, head) = get_output_chunks(&all_before, &keyword, &all_after, &config); output.push_str(&format!( " \"{}\" \"{}\" \"{}{}\" \"{}\"", format_roff_field(&tail), format_roff_field(&before), format_roff_field(keyword), format_roff_field(&after), format_roff_field(&head) )); if config.auto_ref || config.input_ref { output.push_str(&format!(" \"{}\"", format_roff_field(&reference))); } output } fn write_traditional_output( config: &Config, file_map: &FileMap, words: &BTreeSet<WordRef>, output_filename: &str, ) { let mut writer: BufWriter<Box<dyn Write>> = BufWriter::new(if output_filename == "-" { Box::new(stdout()) } else { let file = crash_if_err!(1, File::create(output_filename)); Box::new(file) }); let context_reg = Regex::new(&config.context_regex).unwrap(); for word_ref in words.iter() { let file_map_value: &FileContent = file_map .get(&(word_ref.filename)) .expect("Missing file in file map"); let FileContent { ref lines, ref chars_lines, offset: _, } = *(file_map_value); let reference = get_reference( config, word_ref, &lines[word_ref.local_line_nr], &context_reg, ); let output_line: String = match config.format { OutFormat::Tex => format_tex_line( config, word_ref, &lines[word_ref.local_line_nr], &chars_lines[word_ref.local_line_nr], &reference, ), OutFormat::Roff => format_roff_line( config, word_ref, &lines[word_ref.local_line_nr], &chars_lines[word_ref.local_line_nr], &reference, ), OutFormat::Dumb => crash!(1, "There is no dumb format with GNU extensions disabled"), }; crash_if_err!(1, writeln!(writer, "{}", output_line)); } } mod options { pub static FILE: &str = "file"; pub static 
AUTO_REFERENCE: &str = "auto-reference"; pub static TRADITIONAL: &str = "traditional"; pub static FLAG_TRUNCATION: &str = "flag-truncation"; pub static MACRO_NAME: &str = "macro-name"; pub static FORMAT_ROFF: &str = "format=roff"; pub static RIGHT_SIDE_REFS: &str = "right-side-refs"; pub static SENTENCE_REGEXP: &str = "sentence-regexp"; pub static FORMAT_TEX: &str = "format=tex"; pub static WORD_REGEXP: &str = "word-regexp"; pub static BREAK_FILE: &str = "break-file"; pub static IGNORE_CASE: &str = "ignore-case"; pub static GAP_SIZE: &str = "gap-size"; pub static IGNORE_FILE: &str = "ignore-file"; pub static ONLY_FILE: &str = "only-file"; pub static REFERENCES: &str = "references"; pub static WIDTH: &str = "width"; } pub fn uumain(args: impl uucore::Args) -> i32 { let args = args.collect_str(); // let mut opts = Options::new(); let matches = App::new(executable!()) .name(NAME) .version(VERSION) .usage(BRIEF) .arg(Arg::with_name(options::FILE).hidden(true).multiple(true)) .arg( Arg::with_name(options::AUTO_REFERENCE) .short("A") .long(options::AUTO_REFERENCE) .help("output automatically generated references") .takes_value(false), ) .arg( Arg::with_name(options::TRADITIONAL) .short("G") .long(options::TRADITIONAL) .help("behave more like System V 'ptx'"), ) .arg( Arg::with_name(options::FLAG_TRUNCATION) .short("F") .long(options::FLAG_TRUNCATION) .help("use STRING for flagging line truncations") .value_name("STRING") .takes_value(true), ) .arg( Arg::with_name(options::MACRO_NAME) .short("M") .long(options::MACRO_NAME) .help("macro name to use instead of 'xx'") .value_name("STRING") .takes_value(true), ) .arg( Arg::with_name(options::FORMAT_ROFF) .short("O") .long(options::FORMAT_ROFF) .help("generate output as roff directives"), ) .arg( Arg::with_name(options::RIGHT_SIDE_REFS) .short("R") .long(options::RIGHT_SIDE_REFS) .help("put references at right, not counted in -w") .takes_value(false), ) .arg( Arg::with_name(options::SENTENCE_REGEXP) .short("S") 
.long(options::SENTENCE_REGEXP) .help("for end of lines or end of sentences") .value_name("REGEXP") .takes_value(true), ) .arg( Arg::with_name(options::FORMAT_TEX) .short("T") .long(options::FORMAT_TEX) .help("generate output as TeX directives"), ) .arg( Arg::with_name(options::WORD_REGEXP) .short("W") .long(options::WORD_REGEXP) .help("use REGEXP to match each keyword") .value_name("REGEXP") .takes_value(true), ) .arg( Arg::with_name(options::BREAK_FILE) .short("b") .long(options::BREAK_FILE) .help("word break characters in this FILE") .value_name("FILE") .takes_value(true), ) .arg( Arg::with_name(options::IGNORE_CASE) .short("f") .long(options::IGNORE_CASE) .help("fold lower case to upper case for sorting") .takes_value(false), ) .arg( Arg::with_name(options::GAP_SIZE) .short("g") .long(options::GAP_SIZE) .help("gap size in columns between output fields") .value_name("NUMBER") .takes_value(true), ) .arg( Arg::with_name(options::IGNORE_FILE) .short("i") .long(options::IGNORE_FILE) .help("read ignore word list from FILE") .value_name("FILE") .takes_value(true), ) .arg( Arg::with_name(options::ONLY_FILE) .short("o") .long(options::ONLY_FILE) .help("read only word list from this FILE") .value_name("FILE") .takes_value(true), ) .arg( Arg::with_name(options::REFERENCES) .short("r") .long(options::REFERENCES) .help("first field of each line is a reference") .value_name("FILE") .takes_value(false), ) .arg( Arg::with_name(options::WIDTH) .short("w") .long(options::WIDTH) .help("output width in columns, reference excluded") .value_name("NUMBER") .takes_value(true), ) .get_matches_from(args); let input_files: Vec<String> = match &matches.values_of(options::FILE) { Some(v) => v.clone().map(|v| v.to_owned()).collect(), None => vec!["-".to_string()], }; let config = get_config(&matches); let word_filter = WordFilter::new(&matches, &config); let file_map = read_input(&input_files, &config); let word_set = create_word_set(&config, &word_filter, &file_map); let output_file = if 
!config.gnu_ext && matches.args.len() == 2 { matches.value_of(options::FILE).unwrap_or("-").to_string() } else { "-".to_owned() }; write_traditional_output(&config, &file_map, &word_set, &output_file); 0 }
34.486521
135
0.569514
e403451ee67e1cf249358b8b1efc734325454665
5,948
#[doc = r" Value read from the register"] pub struct R { bits: u16, } #[doc = r" Value to write to the register"] pub struct W { bits: u16, } impl super::UCB0IE { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `UCRXIE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum UCRXIER { #[doc = r" Reserved"] _Reserved(bool), } impl UCRXIER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { UCRXIER::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> UCRXIER { match value { i => UCRXIER::_Reserved(i), } } } #[doc = "Possible values of the field `UCTXIE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum UCTXIER { #[doc = r" Reserved"] _Reserved(bool), } impl UCTXIER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { 
UCTXIER::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> UCTXIER { match value { i => UCTXIER::_Reserved(i), } } } #[doc = "Values that can be written to the field `UCRXIE`"] pub enum UCRXIEW {} impl UCRXIEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self {} } } #[doc = r" Proxy"] pub struct _UCRXIEW<'a> { w: &'a mut W, } impl<'a> _UCRXIEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: UCRXIEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u16) << OFFSET); self.w.bits |= ((value & MASK) as u16) << OFFSET; self.w } } #[doc = "Values that can be written to the field `UCTXIE`"] pub enum UCTXIEW {} impl UCTXIEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self {} } } #[doc = r" Proxy"] pub struct _UCTXIEW<'a> { w: &'a mut W, } impl<'a> _UCTXIEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: UCTXIEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u16) << OFFSET); self.w.bits |= ((value & MASK) as u16) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u16 { self.bits } #[doc = "Bit 0 - USCI Receive Interrupt 
Enable"] #[inline] pub fn ucrxie(&self) -> UCRXIER { UCRXIER::_from({ const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u16) != 0 }) } #[doc = "Bit 1 - USCI Transmit Interrupt Enable"] #[inline] pub fn uctxie(&self) -> UCTXIER { UCTXIER::_from({ const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u16) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u16) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - USCI Receive Interrupt Enable"] #[inline] pub fn ucrxie(&mut self) -> _UCRXIEW { _UCRXIEW { w: self } } #[doc = "Bit 1 - USCI Transmit Interrupt Enable"] #[inline] pub fn uctxie(&mut self) -> _UCTXIEW { _UCTXIEW { w: self } } }
24.887029
59
0.505548
116ca6ce18881dfc1346110e1e58f180b0777b78
33,944
// This file is part of Substrate. // Copyright (C) 2018-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! # Contract Pallet //! //! The Contract module provides functionality for the runtime to deploy and execute WebAssembly smart-contracts. //! //! - [`Config`] //! - [`Call`] //! //! ## Overview //! //! This module extends accounts based on the [`Currency`] trait to have smart-contract functionality. It can //! be used with other modules that implement accounts based on [`Currency`]. These "smart-contract accounts" //! have the ability to instantiate smart-contracts and make calls to other contract and non-contract accounts. //! //! The smart-contract code is stored once in a code cache, and later retrievable via its hash. //! This means that multiple smart-contracts can be instantiated from the same hash, without replicating //! the code each time. //! //! When a smart-contract is called, its associated code is retrieved via the code hash and gets executed. //! This call can alter the storage entries of the smart-contract account, instantiate new smart-contracts, //! or call other smart-contracts. //! //! Finally, when an account is reaped, its associated code and storage of the smart-contract account //! will also be deleted. //! //! ### Gas //! //! Senders must specify a gas limit with every call, as all instructions invoked by the smart-contract require gas. //! 
Unused gas is refunded after the call, regardless of the execution outcome. //! //! If the gas limit is reached, then all calls and state changes (including balance transfers) are only //! reverted at the current call's contract level. For example, if contract A calls B and B runs out of gas mid-call, //! then all of B's calls are reverted. Assuming correct error handling by contract A, A's other calls and state //! changes still persist. //! //! ### Notable Scenarios //! //! Contract call failures are not always cascading. When failures occur in a sub-call, they do not "bubble up", //! and the call will only revert at the specific contract level. For example, if contract A calls contract B, and B //! fails, A can decide how to handle that failure, either proceeding or reverting A's changes. //! //! ## Interface //! //! ### Dispatchable functions //! //! * [`Pallet::instantiate_with_code`] - Deploys a new contract from the supplied wasm binary, //! optionally transferring //! some balance. This instantiates a new smart contract account with the supplied code and //! calls its constructor to initialize the contract. //! * [`Pallet::instantiate`] - The same as `instantiate_with_code` but instead of uploading new //! code an existing `code_hash` is supplied. //! * [`Pallet::call`] - Makes a call to an account, optionally transferring some balance. //! * [`Pallet::claim_surcharge`] - Evict a contract that cannot pay rent anymore. //! //! ## Usage //! //! The Contract module is a work in progress. The following examples show how this Contract module //! can be used to instantiate and call contracts. //! //! * [`ink`](https://github.com/paritytech/ink) is //! an [`eDSL`](https://wiki.haskell.org/Embedded_domain_specific_language) that enables writing //! WebAssembly based smart contracts in the Rust programming language. This is a work in progress. 
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(feature = "runtime-benchmarks", recursion_limit="512")] #[macro_use] mod gas; mod storage; mod exec; mod wasm; mod rent; mod benchmarking; mod schedule; mod migration; pub mod chain_extension; pub mod weights; #[cfg(test)] mod tests; pub use crate::{ pallet::*, schedule::{Schedule, Limits, InstructionWeights, HostFnWeights}, exec::Frame, }; use crate::{ gas::GasMeter, exec::{Stack as ExecStack, Executable}, rent::Rent, storage::{Storage, DeletedContract, ContractInfo, AliveContractInfo, TombstoneContractInfo}, weights::WeightInfo, wasm::PrefabWasmModule, }; use sp_core::{Bytes, crypto::UncheckedFrom}; use sp_std::prelude::*; use sp_runtime::{ traits::{ Hash, StaticLookup, Convert, Saturating, Zero, }, Perbill, }; use frame_support::{ traits::{OnUnbalanced, Currency, Get, Time, Randomness, Filter}, weights::{Weight, PostDispatchInfo, WithPostDispatchInfo, GetDispatchInfo}, dispatch::Dispatchable, }; use frame_system::Pallet as System; use pallet_contracts_primitives::{ RentProjectionResult, GetStorageResult, ContractAccessError, ContractExecResult, ContractInstantiateResult, Code, InstantiateReturnValue, }; type CodeHash<T> = <T as frame_system::Config>::Hash; type TrieId = Vec<u8>; type BalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::Balance; type NegativeImbalanceOf<T> = <<T as Config>::Currency as Currency<<T as frame_system::Config>::AccountId>>::NegativeImbalance; #[frame_support::pallet] pub mod pallet { use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; use super::*; #[pallet::config] pub trait Config: frame_system::Config { /// The time implementation used to supply timestamps to conntracts through `seal_now`. type Time: Time; /// The generator used to supply randomness to contracts through `seal_random`. type Randomness: Randomness<Self::Hash, Self::BlockNumber>; /// The currency in which fees are paid and contract balances are held. 
type Currency: Currency<Self::AccountId>; /// The overarching event type. type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; /// The overarching call type. type Call: Dispatchable<Origin=Self::Origin, PostInfo=PostDispatchInfo> + GetDispatchInfo + codec::Decode + IsType<<Self as frame_system::Config>::Call>; /// Filter that is applied to calls dispatched by contracts. /// /// Use this filter to control which dispatchables are callable by contracts. /// This is applied in **addition** to [`frame_system::Config::BaseCallFilter`]. /// It is recommended to treat this as a whitelist. /// /// # Subsistence Threshold /// /// The runtime **must** make sure that any allowed dispatchable makes sure that the /// `total_balance` of the contract stays above [`Pallet::subsistence_threshold()`]. /// Otherwise contracts can clutter the storage with their tombstones without /// deposting the correct amount of balance. /// /// # Stability /// /// The runtime **must** make sure that all dispatchables that are callable by /// contracts remain stable. In addition [`Self::Call`] itself must remain stable. /// This means that no existing variants are allowed to switch their positions. /// /// # Note /// /// Note that dispatchables that are called via contracts do not spawn their /// own wasm instance for each call (as opposed to when called via a transaction). /// Therefore please make sure to be restrictive about which dispatchables are allowed /// in order to not introduce a new DoS vector like memory allocation patterns that can /// be exploited to drive the runtime into a panic. type CallFilter: Filter<<Self as frame_system::Config>::Call>; /// Handler for rent payments. type RentPayment: OnUnbalanced<NegativeImbalanceOf<Self>>; /// Used to answer contracts' queries regarding the current weight price. This is **not** /// used to calculate the actual fee and is only for informational purposes. 
type WeightPrice: Convert<Weight, BalanceOf<Self>>; /// Describes the weights of the dispatchables of this module and is also used to /// construct a default cost schedule. type WeightInfo: WeightInfo; /// Type that allows the runtime authors to add new host functions for a contract to call. type ChainExtension: chain_extension::ChainExtension<Self>; /// Cost schedule and limits. #[pallet::constant] type Schedule: Get<Schedule<Self>>; /// Number of block delay an extrinsic claim surcharge has. /// /// When claim surcharge is called by an extrinsic the rent is checked /// for current_block - delay #[pallet::constant] type SignedClaimHandicap: Get<Self::BlockNumber>; /// The minimum amount required to generate a tombstone. #[pallet::constant] type TombstoneDeposit: Get<BalanceOf<Self>>; /// The balance every contract needs to deposit to stay alive indefinitely. /// /// This is different from the [`Self::TombstoneDeposit`] because this only needs to be /// deposited while the contract is alive. Costs for additional storage are added to /// this base cost. /// /// This is a simple way to ensure that contracts with empty storage eventually get deleted by /// making them pay rent. This creates an incentive to remove them early in order to save rent. #[pallet::constant] type DepositPerContract: Get<BalanceOf<Self>>; /// The balance a contract needs to deposit per storage byte to stay alive indefinitely. /// /// Let's suppose the deposit is 1,000 BU (balance units)/byte and the rent is 1 BU/byte/day, /// then a contract with 1,000,000 BU that uses 1,000 bytes of storage would pay no rent. /// But if the balance reduced to 500,000 BU and the storage stayed the same at 1,000, /// then it would pay 500 BU/day. #[pallet::constant] type DepositPerStorageByte: Get<BalanceOf<Self>>; /// The balance a contract needs to deposit per storage item to stay alive indefinitely. /// /// It works the same as [`Self::DepositPerStorageByte`] but for storage items. 
#[pallet::constant] type DepositPerStorageItem: Get<BalanceOf<Self>>; /// The fraction of the deposit that should be used as rent per block. /// /// When a contract hasn't enough balance deposited to stay alive indefinitely it needs /// to pay per block for the storage it consumes that is not covered by the deposit. /// This determines how high this rent payment is per block as a fraction of the deposit. #[pallet::constant] type RentFraction: Get<Perbill>; /// Reward that is received by the party whose touch has led /// to removal of a contract. #[pallet::constant] type SurchargeReward: Get<BalanceOf<Self>>; /// The type of the call stack determines the maximum nesting depth of contract calls. /// /// The allowed depth is `CallStack::size() + 1`. /// Therefore a size of `0` means that a contract cannot use call or instantiate. /// In other words only the origin called "root contract" is allowed to execute then. type CallStack: smallvec::Array<Item=Frame<Self>>; /// The maximum number of tries that can be queued for deletion. #[pallet::constant] type DeletionQueueDepth: Get<u32>; /// The maximum amount of weight that can be consumed per block for lazy trie removal. #[pallet::constant] type DeletionWeightLimit: Get<Weight>; } #[pallet::pallet] pub struct Pallet<T>(PhantomData<T>); #[pallet::hooks] impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> where T::AccountId: UncheckedFrom<T::Hash>, T::AccountId: AsRef<[u8]>, { fn on_initialize(_block: T::BlockNumber) -> Weight { // We do not want to go above the block limit and rather avoid lazy deletion // in that case. This should only happen on runtime upgrades. 
let weight_limit = T::BlockWeights::get().max_block .saturating_sub(System::<T>::block_weight().total()) .min(T::DeletionWeightLimit::get()); Storage::<T>::process_deletion_queue_batch(weight_limit) .saturating_add(T::WeightInfo::on_initialize()) } fn on_runtime_upgrade() -> Weight { migration::migrate::<T>() } } #[pallet::call] impl<T: Config> Pallet<T> where T::AccountId: UncheckedFrom<T::Hash>, T::AccountId: AsRef<[u8]>, { /// Makes a call to an account, optionally transferring some balance. /// /// * If the account is a smart-contract account, the associated code will be /// executed and any value will be transferred. /// * If the account is a regular account, any value will be transferred. /// * If no account exists and the call value is not less than `existential_deposit`, /// a regular account will be created and any value will be transferred. #[pallet::weight(T::WeightInfo::call().saturating_add(*gas_limit))] pub fn call( origin: OriginFor<T>, dest: <T::Lookup as StaticLookup>::Source, #[pallet::compact] value: BalanceOf<T>, #[pallet::compact] gas_limit: Weight, data: Vec<u8> ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let result = ExecStack::<T, PrefabWasmModule<T>>::run_call( origin, dest, &mut gas_meter, &schedule, value, data, None, ); gas_meter.into_dispatch_result(result, T::WeightInfo::call()) } /// Instantiates a new contract from the supplied `code` optionally transferring /// some balance. /// /// This is the only function that can deploy new code to the chain. /// /// # Parameters /// /// * `endowment`: The balance to transfer from the `origin` to the newly created contract. /// * `gas_limit`: The gas limit enforced when executing the constructor. /// * `code`: The contract code to deploy in raw bytes. /// * `data`: The input data to pass to the contract constructor. 
/// * `salt`: Used for the address derivation. See [`Pallet::contract_address`]. /// /// Instantiation is executed as follows: /// /// - The supplied `code` is instrumented, deployed, and a `code_hash` is created for that code. /// - If the `code_hash` already exists on the chain the underlying `code` will be shared. /// - The destination address is computed based on the sender, code_hash and the salt. /// - The smart-contract account is created at the computed address. /// - The `endowment` is transferred to the new account. /// - The `deploy` function is executed in the context of the newly-created account. #[pallet::weight( T::WeightInfo::instantiate_with_code( code.len() as u32 / 1024, salt.len() as u32 / 1024, ) .saturating_add(*gas_limit) )] pub fn instantiate_with_code( origin: OriginFor<T>, #[pallet::compact] endowment: BalanceOf<T>, #[pallet::compact] gas_limit: Weight, code: Vec<u8>, data: Vec<u8>, salt: Vec<u8>, ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let code_len = code.len() as u32; ensure!(code_len <= T::Schedule::get().limits.code_len, Error::<T>::CodeTooLarge); let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let executable = PrefabWasmModule::from_code(code, &schedule)?; let code_len = executable.code_len(); ensure!(code_len <= T::Schedule::get().limits.code_len, Error::<T>::CodeTooLarge); let result = ExecStack::<T, PrefabWasmModule<T>>::run_instantiate( origin, executable, &mut gas_meter, &schedule, endowment, data, &salt, None, ).map(|(_address, output)| output); gas_meter.into_dispatch_result( result, T::WeightInfo::instantiate_with_code(code_len / 1024, salt.len() as u32 / 1024) ) } /// Instantiates a contract from a previously deployed wasm binary. /// /// This function is identical to [`Self::instantiate_with_code`] but without the /// code deployment step. Instead, the `code_hash` of an on-chain deployed wasm binary /// must be supplied. 
#[pallet::weight( T::WeightInfo::instantiate(salt.len() as u32 / 1024).saturating_add(*gas_limit) )] pub fn instantiate( origin: OriginFor<T>, #[pallet::compact] endowment: BalanceOf<T>, #[pallet::compact] gas_limit: Weight, code_hash: CodeHash<T>, data: Vec<u8>, salt: Vec<u8>, ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let executable = PrefabWasmModule::from_storage(code_hash, &schedule, &mut gas_meter)?; let result = ExecStack::<T, PrefabWasmModule<T>>::run_instantiate( origin, executable, &mut gas_meter, &schedule, endowment, data, &salt, None, ).map(|(_address, output)| output); gas_meter.into_dispatch_result( result, T::WeightInfo::instantiate(salt.len() as u32 / 1024), ) } /// Allows block producers to claim a small reward for evicting a contract. If a block /// producer fails to do so, a regular users will be allowed to claim the reward. /// /// In case of a successful eviction no fees are charged from the sender. However, the /// reward is capped by the total amount of rent that was paid by the contract while /// it was alive. /// /// If contract is not evicted as a result of this call, [`Error::ContractNotEvictable`] /// is returned and the sender is not eligible for the reward. 
#[pallet::weight(T::WeightInfo::claim_surcharge(T::Schedule::get().limits.code_len / 1024))] pub fn claim_surcharge( origin: OriginFor<T>, dest: T::AccountId, aux_sender: Option<T::AccountId> ) -> DispatchResultWithPostInfo { let origin = origin.into(); let (signed, rewarded) = match (origin, aux_sender) { (Ok(frame_system::RawOrigin::Signed(account)), None) => { (true, account) }, (Ok(frame_system::RawOrigin::None), Some(aux_sender)) => { (false, aux_sender) }, _ => Err(Error::<T>::InvalidSurchargeClaim)?, }; // Add some advantage for block producers (who send unsigned extrinsics) by // adding a handicap: for signed extrinsics we use a slightly older block number // for the eviction check. This can be viewed as if we pushed regular users back in past. let handicap = if signed { T::SignedClaimHandicap::get() } else { Zero::zero() }; // If poking the contract has lead to eviction of the contract, give out the rewards. match Rent::<T, PrefabWasmModule<T>>::try_eviction(&dest, handicap)? { (Some(rent_paid), code_len) => { T::Currency::deposit_into_existing( &rewarded, T::SurchargeReward::get().min(rent_paid), ) .map(|_| PostDispatchInfo { actual_weight: Some(T::WeightInfo::claim_surcharge(code_len / 1024)), pays_fee: Pays::No, }) .map_err(Into::into) } (None, code_len) => Err(Error::<T>::ContractNotEvictable.with_weight( T::WeightInfo::claim_surcharge(code_len / 1024) )), } } } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash", BalanceOf<T> = "Balance")] pub enum Event<T: Config> { /// Contract deployed by address at the specified address. \[deployer, contract\] Instantiated(T::AccountId, T::AccountId), /// Contract has been evicted and is now in tombstone state. \[contract\] Evicted(T::AccountId), /// Contract has been terminated without leaving a tombstone. /// \[contract, beneficiary\] /// /// # Params /// /// - `contract`: The contract that was terminated. 
/// - `beneficiary`: The account that received the contracts remaining balance. /// /// # Note /// /// The only way for a contract to be removed without a tombstone and emitting /// this event is by calling `seal_terminate`. Terminated(T::AccountId, T::AccountId), /// Restoration of a contract has been successful. /// \[restorer, dest, code_hash, rent_allowance\] /// /// # Params /// /// - `restorer`: Account ID of the restoring contract. /// - `dest`: Account ID of the restored contract. /// - `code_hash`: Code hash of the restored contract. /// - `rent_allowance`: Rent allowance of the restored contract. Restored(T::AccountId, T::AccountId, T::Hash, BalanceOf<T>), /// Code with the specified hash has been stored. \[code_hash\] CodeStored(T::Hash), /// Triggered when the current schedule is updated. /// \[version\] /// /// # Params /// /// - `version`: The version of the newly set schedule. ScheduleUpdated(u32), /// A custom event emitted by the contract. /// \[contract, data\] /// /// # Params /// /// - `contract`: The contract that emitted the event. /// - `data`: Data supplied by the contract. Metadata generated during contract /// compilation is needed to decode it. ContractEmitted(T::AccountId, Vec<u8>), /// A code with the specified hash was removed. /// \[code_hash\] /// /// This happens when the last contract that uses this code hash was removed or evicted. CodeRemoved(T::Hash), } #[pallet::error] pub enum Error<T> { /// A new schedule must have a greater version than the current one. InvalidScheduleVersion, /// An origin must be signed or inherent and auxiliary sender only provided on inherent. InvalidSurchargeClaim, /// Cannot restore from nonexisting or tombstone contract. InvalidSourceContract, /// Cannot restore to nonexisting or alive contract. InvalidDestinationContract, /// Tombstones don't match. InvalidTombstone, /// An origin TrieId written in the current block. InvalidContractOrigin, /// The executed contract exhausted its gas limit. 
OutOfGas, /// The output buffer supplied to a contract API call was too small. OutputBufferTooSmall, /// Performing the requested transfer would have brought the contract below /// the subsistence threshold. No transfer is allowed to do this in order to allow /// for a tombstone to be created. Use `seal_terminate` to remove a contract without /// leaving a tombstone behind. BelowSubsistenceThreshold, /// The newly created contract is below the subsistence threshold after executing /// its contructor. No contracts are allowed to exist below that threshold. NewContractNotFunded, /// Performing the requested transfer failed for a reason originating in the /// chosen currency implementation of the runtime. Most probably the balance is /// too low or locks are placed on it. TransferFailed, /// Performing a call was denied because the calling depth reached the limit /// of what is specified in the schedule. MaxCallDepthReached, /// No contract was found at the specified address. ContractNotFound, /// A tombstone exist at the specified address. /// /// Tombstone cannot be called. Anyone can use `seal_restore_to` in order to revive /// the contract, though. ContractIsTombstone, /// The called contract does not have enough balance to pay for its storage. /// /// The contract ran out of balance and is therefore eligible for eviction into a /// tombstone. Anyone can evict the contract by submitting a `claim_surcharge` /// extrinsic. Alternatively, a plain balance transfer can be used in order to /// increase the contracts funds so that it can be called again. RentNotPaid, /// The code supplied to `instantiate_with_code` exceeds the limit specified in the /// current schedule. CodeTooLarge, /// No code could be found at the supplied code hash. CodeNotFound, /// A buffer outside of sandbox memory was passed to a contract API function. OutOfBounds, /// Input passed to a contract API function failed to decode as expected type. 
DecodingFailed, /// Contract trapped during execution. ContractTrapped, /// The size defined in `T::MaxValueSize` was exceeded. ValueTooLarge, /// Termination of a contract is not allowed while the contract is already /// on the call stack. Can be triggered by `seal_terminate` or `seal_restore_to. TerminatedWhileReentrant, /// `seal_call` forwarded this contracts input. It therefore is no longer available. InputForwarded, /// The subject passed to `seal_random` exceeds the limit. RandomSubjectTooLong, /// The amount of topics passed to `seal_deposit_events` exceeds the limit. TooManyTopics, /// The topics passed to `seal_deposit_events` contains at least one duplicate. DuplicateTopics, /// The chain does not provide a chain extension. Calling the chain extension results /// in this error. Note that this usually shouldn't happen as deploying such contracts /// is rejected. NoChainExtension, /// Removal of a contract failed because the deletion queue is full. /// /// This can happen when either calling [`Pallet::claim_surcharge`] or `seal_terminate`. /// The queue is filled by deleting contracts and emptied by a fixed amount each block. /// Trying again during another block is the only way to resolve this issue. DeletionQueueFull, /// A contract could not be evicted because it has enough balance to pay rent. /// /// This can be returned from [`Pallet::claim_surcharge`] because the target /// contract has enough balance to pay for its rent. ContractNotEvictable, /// A storage modification exhausted the 32bit type that holds the storage size. /// /// This can either happen when the accumulated storage in bytes is too large or /// when number of storage items is too large. StorageExhausted, /// A contract with the same AccountId already exists. DuplicateContract, /// A contract self destructed in its constructor. /// /// This can be triggered by a call to `seal_terminate` or `seal_restore_to`. 
TerminatedInConstructor, /// The debug message specified to `seal_debug_message` does contain invalid UTF-8. DebugMessageInvalidUTF8, /// A call tried to invoke a contract that is flagged as non-reentrant. ReentranceDenied, } /// A mapping from an original code hash to the original code, untouched by instrumentation. #[pallet::storage] pub(crate) type PristineCode<T: Config> = StorageMap<_, Identity, CodeHash<T>, Vec<u8>>; /// A mapping between an original code hash and instrumented wasm code, ready for execution. #[pallet::storage] pub(crate) type CodeStorage<T: Config> = StorageMap<_, Identity, CodeHash<T>, PrefabWasmModule<T>>; /// The subtrie counter. #[pallet::storage] pub(crate) type AccountCounter<T: Config> = StorageValue<_, u64, ValueQuery>; /// The code associated with a given account. /// /// TWOX-NOTE: SAFE since `AccountId` is a secure hash. #[pallet::storage] pub(crate) type ContractInfoOf<T: Config> = StorageMap<_, Twox64Concat, T::AccountId, ContractInfo<T>>; /// Evicted contracts that await child trie deletion. /// /// Child trie deletion is a heavy operation depending on the amount of storage items /// stored in said trie. Therefore this operation is performed lazily in `on_initialize`. #[pallet::storage] pub(crate) type DeletionQueue<T: Config> = StorageValue<_, Vec<DeletedContract>, ValueQuery>; } impl<T: Config> Pallet<T> where T::AccountId: UncheckedFrom<T::Hash> + AsRef<[u8]>, { /// Perform a call to a specified contract. /// /// This function is similar to [`Self::call`], but doesn't perform any address lookups /// and better suitable for calling directly from Rust. /// /// # Note /// /// `debug` should only ever be set to `true` when executing as an RPC because /// it adds allocations and could be abused to drive the runtime into an OOM panic. /// If set to `true` it returns additional human readable debugging information. /// /// It returns the execution result and the amount of used weight. 
pub fn bare_call( origin: T::AccountId, dest: T::AccountId, value: BalanceOf<T>, gas_limit: Weight, input_data: Vec<u8>, debug: bool, ) -> ContractExecResult { let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let mut debug_message = if debug { Some(Vec::new()) } else { None }; let result = ExecStack::<T, PrefabWasmModule<T>>::run_call( origin, dest, &mut gas_meter, &schedule, value, input_data, debug_message.as_mut(), ); ContractExecResult { result: result.map_err(|r| r.error), gas_consumed: gas_meter.gas_consumed(), gas_required: gas_meter.gas_required(), debug_message: debug_message.unwrap_or_default(), } } /// Instantiate a new contract. /// /// This function is similar to [`Self::instantiate`], but doesn't perform any address lookups /// and better suitable for calling directly from Rust. /// /// It returns the execution result, account id and the amount of used weight. /// /// If `compute_projection` is set to `true` the result also contains the rent projection. /// This is optional because some non trivial and stateful work is performed to compute /// the projection. See [`Self::rent_projection`]. /// /// # Note /// /// `debug` should only ever be set to `true` when executing as an RPC because /// it adds allocations and could be abused to drive the runtime into an OOM panic. /// If set to `true` it returns additional human readable debugging information. 
pub fn bare_instantiate( origin: T::AccountId, endowment: BalanceOf<T>, gas_limit: Weight, code: Code<CodeHash<T>>, data: Vec<u8>, salt: Vec<u8>, compute_projection: bool, debug: bool, ) -> ContractInstantiateResult<T::AccountId, T::BlockNumber> { let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let executable = match code { Code::Upload(Bytes(binary)) => PrefabWasmModule::from_code(binary, &schedule), Code::Existing(hash) => PrefabWasmModule::from_storage(hash, &schedule, &mut gas_meter), }; let executable = match executable { Ok(executable) => executable, Err(error) => return ContractInstantiateResult { result: Err(error.into()), gas_consumed: gas_meter.gas_consumed(), gas_required: gas_meter.gas_required(), debug_message: Vec::new(), } }; let mut debug_message = if debug { Some(Vec::new()) } else { None }; let result = ExecStack::<T, PrefabWasmModule<T>>::run_instantiate( origin, executable, &mut gas_meter, &schedule, endowment, data, &salt, debug_message.as_mut(), ).and_then(|(account_id, result)| { let rent_projection = if compute_projection { Some(Rent::<T, PrefabWasmModule<T>>::compute_projection(&account_id) .map_err(|_| <Error<T>>::NewContractNotFunded)?) } else { None }; Ok(InstantiateReturnValue { result, account_id, rent_projection, }) }); ContractInstantiateResult { result: result.map_err(|e| e.error), gas_consumed: gas_meter.gas_consumed(), gas_required: gas_meter.gas_required(), debug_message: debug_message.unwrap_or_default(), } } /// Query storage of a specified contract under a specified key. pub fn get_storage(address: T::AccountId, key: [u8; 32]) -> GetStorageResult { let contract_info = ContractInfoOf::<T>::get(&address) .ok_or(ContractAccessError::DoesntExist)? 
.get_alive() .ok_or(ContractAccessError::IsTombstone)?; let maybe_value = Storage::<T>::read(&contract_info.trie_id, &key); Ok(maybe_value) } /// Query how many blocks the contract stays alive given that the amount endowment /// and consumed storage does not change. pub fn rent_projection(address: T::AccountId) -> RentProjectionResult<T::BlockNumber> { Rent::<T, PrefabWasmModule<T>>::compute_projection(&address) } /// Determine the address of a contract, /// /// This is the address generation function used by contract instantiation. Its result /// is only dependend on its inputs. It can therefore be used to reliably predict the /// address of a contract. This is akin to the formular of eth's CREATE2 opcode. There /// is no CREATE equivalent because CREATE2 is strictly more powerful. /// /// Formula: `hash(deploying_address ++ code_hash ++ salt)` pub fn contract_address( deploying_address: &T::AccountId, code_hash: &CodeHash<T>, salt: &[u8], ) -> T::AccountId { let buf: Vec<_> = deploying_address.as_ref().iter() .chain(code_hash.as_ref()) .chain(salt) .cloned() .collect(); UncheckedFrom::unchecked_from(T::Hashing::hash(&buf)) } /// Subsistence threshold is the extension of the minimum balance (aka existential deposit) /// by the tombstone deposit, required for leaving a tombstone. /// /// Rent or any contract initiated balance transfer mechanism cannot make the balance lower /// than the subsistence threshold in order to guarantee that a tombstone is created. /// /// The only way to completely kill a contract without a tombstone is calling `seal_terminate`. pub fn subsistence_threshold() -> BalanceOf<T> { T::Currency::minimum_balance().saturating_add(T::TombstoneDeposit::get()) } /// The in-memory size in bytes of the data structure associated with each contract. /// /// The data structure is also put into storage for each contract. 
The in-storage size /// is never larger than the in-memory representation and usually smaller due to compact /// encoding and lack of padding. /// /// # Note /// /// This returns the in-memory size because the in-storage size (SCALE encoded) cannot /// be efficiently determined. Treat this as an upper bound of the in-storage size. pub fn contract_info_size() -> u32 { sp_std::mem::size_of::<ContractInfo<T>>() as u32 } /// Store code for benchmarks which does not check nor instrument the code. #[cfg(feature = "runtime-benchmarks")] fn store_code_raw(code: Vec<u8>) -> frame_support::dispatch::DispatchResult { let schedule = T::Schedule::get(); PrefabWasmModule::store_code_unchecked(code, &schedule)?; Ok(()) } /// This exists so that benchmarks can determine the weight of running an instrumentation. #[cfg(feature = "runtime-benchmarks")] fn reinstrument_module( module: &mut PrefabWasmModule<T>, schedule: &Schedule<T> ) -> frame_support::dispatch::DispatchResult { self::wasm::reinstrument(module, schedule) } }
39.700585
117
0.711083
9c865a1b0d59817f13d996decb5bfa8b2fa92e1c
19,234
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::loaded_data::{struct_def::StructDef, types::Type}; use std::{ cell::{Ref, RefCell}, ops::Add, rc::Rc, }; use types::{ access_path::AccessPath, account_address::{AccountAddress, ADDRESS_LENGTH}, byte_array::ByteArray, contract_event::ContractEvent, }; use vm::{ errors::*, gas_schedule::{ words_in, AbstractMemorySize, GasAlgebra, GasCarrier, CONST_SIZE, REFERENCE_SIZE, STRUCT_SIZE, }, }; #[cfg(test)] #[path = "unit_tests/value_prop_tests.rs"] mod value_prop_tests; #[cfg(test)] #[path = "unit_tests/value_tests.rs"] mod value_tests; #[cfg(test)] #[path = "unit_tests/vm_types.rs"] mod vm_types; #[derive(Debug, Clone)] pub enum Value { Address(AccountAddress), U64(u64), Bool(bool), String(String), Struct(Vec<MutVal>), ByteArray(ByteArray), } impl Value { fn size(&self) -> AbstractMemorySize<GasCarrier> { match self { Value::U64(_) | Value::Bool(_) => *CONST_SIZE, Value::Address(_) => AbstractMemorySize::new(ADDRESS_LENGTH as u64), // Possible debate topic: Should we charge based upon the size of the string. // At this moment, we take the view that you should be charged as though you are // copying the string onto the stack here. This doesn't replicate // the semantics that we utilize currently, but this string may // need to be copied at some later time, so we need to charge based // upon the size of the memory that will possibly need to be accessed. Value::String(s) => words_in(AbstractMemorySize::new(s.len() as u64)), Value::Struct(vals) => vals .iter() .fold(*STRUCT_SIZE, |acc, vl| acc.map2(vl.size(), Add::add)), Value::ByteArray(key) => AbstractMemorySize::new(key.len() as u64), } } /// Normal code should always know what type this value has. This is made available only for /// tests. 
#[allow(non_snake_case)] #[doc(hidden)] pub fn to_struct_def_FOR_TESTING(&self) -> StructDef { let values = match self { Value::Struct(values) => values, _ => panic!("Value must be a struct {:?}", self), }; let fields = values .iter() .map(|mut_val| { let val = &*mut_val.peek(); match val { Value::Bool(_) => Type::Bool, Value::Address(_) => Type::Address, Value::U64(_) => Type::U64, Value::String(_) => Type::String, Value::ByteArray(_) => Type::ByteArray, Value::Struct(_) => Type::Struct(val.to_struct_def_FOR_TESTING()), } }) .collect(); StructDef::new(fields) } // Structural equality for Move values // Cannot use Rust's equality due to: // - Collections possibly having different representations but still being "equal" semantically pub fn equals(&self, v2: &Value) -> Result<bool, VMInvariantViolation> { Ok(match (self, v2) { (Value::Bool(b1), Value::Bool(b2)) => b1 == b2, (Value::Address(a1), Value::Address(a2)) => a1 == a2, (Value::U64(u1), Value::U64(u2)) => u1 == u2, (Value::String(s1), Value::String(s2)) => s1 == s2, (Value::Struct(s1), Value::Struct(s2)) => { if s1.len() != s2.len() { return Err(VMInvariantViolation::InternalTypeError); } for (mv1, mv2) in s1.iter().zip(s2) { if !MutVal::equals(mv1, mv2)? { return Ok(false); } } true } (Value::ByteArray(ba1), Value::ByteArray(ba2)) => ba1 == ba2, _ => return Err(VMInvariantViolation::InternalTypeError), }) } // Structural non-equality for Move values // Implemented by hand instead of `!equals` to allow for short circuiting pub fn not_equals(&self, v2: &Value) -> Result<bool, VMInvariantViolation> { Ok(match (self, v2) { (Value::Bool(b1), Value::Bool(b2)) => b1 != b2, (Value::Address(a1), Value::Address(a2)) => a1 != a2, (Value::U64(u1), Value::U64(u2)) => u1 != u2, (Value::String(s1), Value::String(s2)) => s1 != s2, (Value::Struct(s1), Value::Struct(s2)) => { if s1.len() != s2.len() { return Err(VMInvariantViolation::InternalTypeError); } for (mv1, mv2) in s1.iter().zip(s2) { if MutVal::not_equals(mv1, mv2)? 
{ return Ok(true); } } false } (Value::ByteArray(ba1), Value::ByteArray(ba2)) => ba1 != ba2, _ => return Err(VMInvariantViolation::InternalTypeError), }) } } pub trait Reference where Self: std::marker::Sized + Clone, { fn borrow_field(&self, idx: u32) -> Option<Self>; fn read_reference(self) -> MutVal; fn mutate_reference(self, v: MutVal); fn size(&self) -> AbstractMemorySize<GasCarrier>; } #[derive(Debug)] pub struct MutVal(pub Rc<RefCell<Value>>); #[derive(Debug)] pub enum Local { Ref(MutVal), GlobalRef(GlobalRef), Value(MutVal), Invalid, } /// Status for on chain data (published resources): /// CLEAN - the data was only read /// DIRTY - the data was changed anywhere in the data tree of the given resource /// DELETED - MoveFrom was called on the given AccessPath for the given resource #[rustfmt::skip] #[allow(non_camel_case_types)] #[derive(PartialEq, Eq, Debug, Clone)] enum GlobalDataStatus { CLEAN = 0, DIRTY = 1, DELETED = 2, } /// A root into an instance on chain. /// Holds flags about the status of the instance and a reference count to balance /// Borrow* and ReleaseRef #[derive(PartialEq, Eq, Debug, Clone)] pub struct RootAccessPath { status: GlobalDataStatus, ref_count: u64, ap: AccessPath, } /// A GlobalRef holds the reference to the data and a shared reference to the root so /// status flags and reference count can be properly managed #[derive(Debug, Clone)] pub struct GlobalRef { root: Rc<RefCell<RootAccessPath>>, reference: MutVal, } impl Clone for MutVal { fn clone(&self) -> Self { MutVal(Rc::new(RefCell::new(self.peek().clone()))) } } impl Clone for Local { fn clone(&self) -> Self { match self { Local::Ref(v) => Local::Ref(v.shallow_clone()), Local::GlobalRef(v) => Local::GlobalRef(v.shallow_clone()), Local::Value(v) => Local::Value(v.clone()), Local::Invalid => Local::Invalid, } } } impl MutVal { pub fn try_own(mv: Self) -> Result<Value, VMInvariantViolation> { match Rc::try_unwrap(mv.0) { Ok(cell) => Ok(cell.into_inner()), Err(_) => 
Err(VMInvariantViolation::LocalReferenceError), } } pub fn peek(&self) -> Ref<Value> { self.0.borrow() } pub fn new(v: Value) -> Self { MutVal(Rc::new(RefCell::new(v))) } fn shallow_clone(&self) -> Self { MutVal(Rc::clone(&self.0)) } fn address(addr: AccountAddress) -> Self { MutVal::new(Value::Address(addr)) } fn u64(i: u64) -> Self { MutVal::new(Value::U64(i)) } fn bool(b: bool) -> Self { MutVal::new(Value::Bool(b)) } fn string(s: String) -> Self { MutVal::new(Value::String(s)) } fn struct_(v: Vec<MutVal>) -> Self { MutVal::new(Value::Struct(v)) } fn bytearray(v: ByteArray) -> Self { MutVal::new(Value::ByteArray(v)) } fn size(&self) -> AbstractMemorySize<GasCarrier> { self.peek().size() } // Structural equality for Move values // Cannot use Rust's equality due to: // - Collections possibly having different representations but still being "equal" semantically pub fn equals(&self, mv2: &MutVal) -> Result<bool, VMInvariantViolation> { self.peek().equals(&mv2.peek()) } // Structural non-equality for Move values // Implemented by hand instead of `!equals` to allow for short circuiting pub fn not_equals(&self, mv2: &MutVal) -> Result<bool, VMInvariantViolation> { self.peek().not_equals(&mv2.peek()) } } impl Reference for MutVal { fn borrow_field(&self, idx: u32) -> Option<Self> { match &*self.peek() { Value::Struct(ref vec) => vec.get(idx as usize).map(MutVal::shallow_clone), _ => None, } } fn read_reference(self) -> MutVal { self.clone() } fn mutate_reference(self, v: MutVal) { self.0.replace(v.peek().clone()); } fn size(&self) -> AbstractMemorySize<GasCarrier> { words_in(*REFERENCE_SIZE) } } impl Local { pub fn address(addr: AccountAddress) -> Self { Local::Value(MutVal::address(addr)) } pub fn u64(i: u64) -> Self { Local::Value(MutVal::u64(i)) } pub fn bool(b: bool) -> Self { Local::Value(MutVal::bool(b)) } pub fn string(s: String) -> Self { Local::Value(MutVal::string(s)) } pub fn struct_(v: Vec<MutVal>) -> Self { Local::Value(MutVal::struct_(v)) } pub fn 
bytearray(v: ByteArray) -> Self { Local::Value(MutVal::bytearray(v)) } pub fn borrow_local(&self) -> Option<Self> { match self { Local::Value(v) => Some(Local::Ref(v.shallow_clone())), _ => None, } } pub fn borrow_field(&self, idx: u32) -> Option<Self> { match self { Local::Ref(v) => v.borrow_field(idx).map(Local::Ref), Local::GlobalRef(v) => v.borrow_field(idx).map(Local::GlobalRef), _ => None, } } pub fn read_reference(self) -> Option<Self> { match self { Local::Ref(r) => Some(Local::Value(r.read_reference())), Local::GlobalRef(gr) => Some(Local::Value(gr.read_reference())), _ => None, } } pub fn mutate_reference(self, v: MutVal) { match self { Local::Ref(r) => r.mutate_reference(v), Local::GlobalRef(r) => r.mutate_reference(v), _ => (), } } pub fn release_reference(self) -> Result<(), VMRuntimeError> { if let Local::GlobalRef(r) = self { r.release_reference() } else { Ok(()) } } pub fn emit_event_data(self, byte_array: ByteArray, data: MutVal) -> Option<ContractEvent> { if let Local::GlobalRef(r) = self { r.emit_event_data(byte_array, data) } else { None } } pub fn value(self) -> Option<MutVal> { match self { Local::Value(v) => Some(v), _ => None, } } pub fn size(&self) -> AbstractMemorySize<GasCarrier> { match self { Local::Ref(v) => v.size(), Local::GlobalRef(v) => v.size(), Local::Value(v) => v.size(), Local::Invalid => *CONST_SIZE, } } // Structural equality for Move values // Cannot use Rust's equality due to: // - Internal representation of references // - Collections possibly having different representations but still being "equal" semantically pub fn equals(self, l2: Local) -> Result<bool, VMInvariantViolation> { match (self, l2) { (Local::Ref(mv1), Local::Ref(mv2)) | (Local::Value(mv1), Local::Value(mv2)) => { mv1.equals(&mv2) } (Local::GlobalRef(gr1), Local::GlobalRef(gr2)) => { gr1.read_reference().equals(&gr2.read_reference()) } (Local::GlobalRef(gr), Local::Ref(mv)) => gr.read_reference().equals(&mv), (Local::Ref(mv), Local::GlobalRef(gr)) => 
mv.equals(&gr.read_reference()), (Local::Invalid, Local::Invalid) => Ok(true), _ => Err(VMInvariantViolation::InternalTypeError), } } // Structural non-equality for Move values // Implemented by hand instead of `!equals` to allow for short circuiting pub fn not_equals(self, l2: Local) -> Result<bool, VMInvariantViolation> { match (self, l2) { (Local::Ref(mv1), Local::Ref(mv2)) | (Local::Value(mv1), Local::Value(mv2)) => { mv1.not_equals(&mv2) } (Local::GlobalRef(gr1), Local::GlobalRef(gr2)) => { gr1.read_reference().not_equals(&gr2.read_reference()) } (Local::GlobalRef(gr), Local::Ref(mv)) => gr.read_reference().not_equals(&mv), (Local::Ref(mv), Local::GlobalRef(gr)) => mv.not_equals(&gr.read_reference()), (Local::Invalid, Local::Invalid) => Ok(false), _ => Err(VMInvariantViolation::InternalTypeError), } } } impl RootAccessPath { pub fn new(ap: AccessPath) -> Self { RootAccessPath { status: GlobalDataStatus::CLEAN, ref_count: 0, ap, } } fn mark_dirty(&mut self) { self.status = GlobalDataStatus::DIRTY; } fn mark_deleted(&mut self) { self.status = GlobalDataStatus::DELETED; } // REVIEW: check for overflow? 
fn inc_ref_count(&mut self) { self.ref_count += 1; } // the check that the ref_count is already 0 is done in release_ref fn dec_ref_count(&mut self) { self.ref_count -= 1; } fn emit_event_data( &mut self, byte_array: ByteArray, counter: u64, data: MutVal, ) -> Option<ContractEvent> { let blob = match data.peek().simple_serialize() { Some(data) => data, None => return None, }; let ap = AccessPath::new_for_event(self.ap.address, &self.ap.path, byte_array.as_bytes()); Some(ContractEvent::new(ap, counter, blob)) } } impl GlobalRef { pub fn make_root(ap: AccessPath, reference: MutVal) -> Self { GlobalRef { root: Rc::new(RefCell::new(RootAccessPath::new(ap))), reference, } } pub fn move_to(ap: AccessPath, reference: MutVal) -> Self { let mut root = RootAccessPath::new(ap); root.mark_dirty(); GlobalRef { root: Rc::new(RefCell::new(root)), reference, } } fn new_ref(root: &GlobalRef, reference: MutVal) -> Self { // increment the global ref count root.root.borrow_mut().inc_ref_count(); GlobalRef { root: Rc::clone(&root.root), reference, } } // Return the resource behind the reference. 
// If the reference is not exclusively held by the cache (ref count 0) returns None pub fn get_data(self) -> Option<Value> { if self.root.borrow().ref_count > 0 { None } else { match Rc::try_unwrap(self.root) { Ok(_) => match Rc::try_unwrap(self.reference.0) { Ok(res) => Some(res.into_inner()), Err(_) => None, }, Err(_) => None, } } } pub fn is_loadable(&self) -> bool { self.root.borrow().ref_count == 0 && !self.is_deleted() } pub fn is_dirty(&self) -> bool { self.root.borrow().status == GlobalDataStatus::DIRTY } pub fn is_deleted(&self) -> bool { self.root.borrow().status == GlobalDataStatus::DELETED } pub fn is_clean(&self) -> bool { self.root.borrow().status == GlobalDataStatus::CLEAN } pub fn move_from(&mut self) -> MutVal { self.root.borrow_mut().mark_deleted(); self.reference.shallow_clone() } pub fn shallow_clone(&self) -> Self { // increment the global ref count self.root.borrow_mut().inc_ref_count(); GlobalRef { root: Rc::clone(&self.root), reference: self.reference.shallow_clone(), } } fn release_reference(self) -> Result<(), VMRuntimeError> { if self.root.borrow().ref_count == 0 { Err(VMRuntimeError { loc: Location::new(), err: VMErrorKind::GlobalRefAlreadyReleased, }) } else { self.root.borrow_mut().dec_ref_count(); Ok(()) } } fn emit_event_data(self, byte_array: ByteArray, data: MutVal) -> Option<ContractEvent> { self.root.borrow_mut().dec_ref_count(); let counter = match &*self.reference.peek() { Value::U64(i) => *i, _ => return None, }; self.reference.mutate_reference(MutVal::u64(counter + 1)); self.root .borrow_mut() .emit_event_data(byte_array, counter, data) } fn size(&self) -> AbstractMemorySize<GasCarrier> { *REFERENCE_SIZE } } impl Reference for GlobalRef { fn borrow_field(&self, idx: u32) -> Option<Self> { match &*self.reference.peek() { Value::Struct(ref vec) => match vec.get(idx as usize) { Some(field_ref) => { self.root.borrow_mut().dec_ref_count(); Some(GlobalRef::new_ref(self, field_ref.shallow_clone())) } None => None, }, _ => None, } } 
fn read_reference(self) -> MutVal { self.root.borrow_mut().dec_ref_count(); self.reference.clone() } fn mutate_reference(self, v: MutVal) { self.root.borrow_mut().dec_ref_count(); self.root.borrow_mut().mark_dirty(); self.reference.mutate_reference(v); } fn size(&self) -> AbstractMemorySize<GasCarrier> { words_in(*REFERENCE_SIZE) } } // // Conversion routines for the interpreter // impl From<MutVal> for Option<u64> { fn from(value: MutVal) -> Option<u64> { match &*value.peek() { Value::U64(i) => Some(*i), _ => None, } } } impl From<MutVal> for Option<bool> { fn from(value: MutVal) -> Option<bool> { match &*value.peek() { Value::Bool(b) => Some(*b), _ => None, } } } impl From<MutVal> for Option<AccountAddress> { fn from(value: MutVal) -> Option<AccountAddress> { match *value.peek() { Value::Address(addr) => Some(addr), _ => None, } } } impl From<MutVal> for Option<ByteArray> { fn from(value: MutVal) -> Option<ByteArray> { match &*value.peek() { Value::ByteArray(blob) => Some(blob.clone()), _ => None, } } } impl From<GlobalRef> for Option<AccountAddress> { fn from(value: GlobalRef) -> Option<AccountAddress> { match *value.reference.peek() { Value::Address(addr) => Some(addr), _ => None, } } }
30.100156
99
0.549704
290d1c032a5b2fa2bc0c2324059b458a74554339
11,104
// Copyright (c) The XPeer Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::proof::{ definition::bitmap::{AccumulatorBitmap, SparseMerkleBitmap}, AccountStateProof, AccumulatorProof, EventProof, SignedTransactionProof, SparseMerkleProof, }; use crypto::{ hash::{TestOnlyHash, ACCUMULATOR_PLACEHOLDER_HASH, SPARSE_MERKLE_PLACEHOLDER_HASH}, HashValue, }; use proptest::{collection::vec, prelude::*}; use proto_conv::{test_helper::assert_protobuf_encode_decode, FromProto, IntoProto}; fn accumulator_bitmap_iterator_test(bitmap_value: u64, expected_bits: Vec<bool>) { let bitmap = AccumulatorBitmap::new(bitmap_value); let bits: Vec<_> = bitmap.iter().collect(); assert_eq!(bits, expected_bits); let bitmap2: AccumulatorBitmap = bits.into_iter().collect(); let bitmap_value2: u64 = bitmap2.into(); assert_eq!(bitmap_value, bitmap_value2); } #[test] fn test_accumulator_bitmap() { accumulator_bitmap_iterator_test(0b0, vec![]); accumulator_bitmap_iterator_test(0b1, vec![true]); accumulator_bitmap_iterator_test(0b10_1101, vec![true, false, true, true, false, true]); } fn sparse_merkle_bitmap_iterator_test(bitmap_value: Vec<u8>, expected_bits: Vec<bool>) { let bitmap = SparseMerkleBitmap::new(bitmap_value.clone()); let bits: Vec<_> = bitmap.iter().collect(); assert_eq!(bits, expected_bits); let bitmap2: SparseMerkleBitmap = bits.into_iter().collect(); let bitmap_value2: Vec<_> = bitmap2.into(); assert_eq!(bitmap_value, bitmap_value2); } #[test] fn test_sparse_merkle_bitmap() { sparse_merkle_bitmap_iterator_test(vec![], vec![]); sparse_merkle_bitmap_iterator_test(vec![0b1000_0000], vec![true]); sparse_merkle_bitmap_iterator_test(vec![0b0100_0000], vec![false, true]); sparse_merkle_bitmap_iterator_test(vec![0b1001_0000], vec![true, false, false, true]); sparse_merkle_bitmap_iterator_test( vec![0b0001_0011], vec![false, false, false, true, false, false, true, true], ); sparse_merkle_bitmap_iterator_test( vec![0b0001_0011, 0b0010_0000], vec![ false, false, false, true, 
false, false, true, true, false, false, true, ], ); sparse_merkle_bitmap_iterator_test( vec![0b1001_0011, 0b0010_0011], vec![ true, false, false, true, false, false, true, true, false, false, true, false, false, false, true, true, ], ); } fn accumulator_proof_protobuf_conversion_test( siblings: Vec<HashValue>, expected_bitmap: u64, expected_num_non_default_siblings: usize, ) { let proof = AccumulatorProof::new(siblings); let compressed_proof = proof.clone().into_proto(); assert_eq!(compressed_proof.get_bitmap(), expected_bitmap); assert_eq!( compressed_proof.get_non_default_siblings().len(), expected_num_non_default_siblings ); let decompressed_proof = AccumulatorProof::from_proto(compressed_proof).unwrap(); assert_eq!(decompressed_proof, proof); } #[test] fn test_convert_accumulator_proof_to_protobuf() { accumulator_proof_protobuf_conversion_test(vec![], 0b0, 0); accumulator_proof_protobuf_conversion_test(vec![b"0".test_only_hash()], 0b1, 1); accumulator_proof_protobuf_conversion_test( vec![ b"0".test_only_hash(), b"1".test_only_hash(), b"2".test_only_hash(), ], 0b111, 3, ); accumulator_proof_protobuf_conversion_test( vec![ b"0".test_only_hash(), *ACCUMULATOR_PLACEHOLDER_HASH, b"2".test_only_hash(), ], 0b101, 2, ); accumulator_proof_protobuf_conversion_test( vec![ b"0".test_only_hash(), *ACCUMULATOR_PLACEHOLDER_HASH, *ACCUMULATOR_PLACEHOLDER_HASH, ], 0b100, 1, ); } #[test] fn test_convert_accumulator_proof_wrong_number_of_siblings() { let sibling0 = b"0".test_only_hash(); let sibling1 = b"1".test_only_hash(); let mut compressed_proof = crate::proto::proof::AccumulatorProof::new(); compressed_proof.set_bitmap(0b100); compressed_proof .mut_non_default_siblings() .push(sibling0.to_vec()); compressed_proof .mut_non_default_siblings() .push(sibling1.to_vec()); assert!(AccumulatorProof::from_proto(compressed_proof).is_err()); } #[test] fn test_convert_accumulator_proof_malformed_hashes() { let mut sibling0 = b"0".test_only_hash().to_vec(); sibling0.push(1); let mut 
compressed_proof = crate::proto::proof::AccumulatorProof::new(); compressed_proof.set_bitmap(0b100); compressed_proof.mut_non_default_siblings().push(sibling0); assert!(AccumulatorProof::from_proto(compressed_proof).is_err()); } fn sparse_merkle_proof_protobuf_conversion_test( leaf: Option<(HashValue, HashValue)>, siblings: Vec<HashValue>, expected_bitmap: Vec<u8>, expected_num_non_default_siblings: usize, ) { let proof = SparseMerkleProof::new(leaf, siblings); let compressed_proof = proof.clone().into_proto(); assert_eq!(expected_bitmap, compressed_proof.get_bitmap()); assert_eq!( compressed_proof.get_non_default_siblings().len(), expected_num_non_default_siblings ); let decompressed_proof = SparseMerkleProof::from_proto(compressed_proof).unwrap(); assert_eq!(decompressed_proof, proof); } #[test] fn test_convert_sparse_merkle_proof_to_protobuf() { sparse_merkle_proof_protobuf_conversion_test(None, vec![], vec![], 0); sparse_merkle_proof_protobuf_conversion_test( None, vec![b"0".test_only_hash()], vec![0b1000_0000], 1, ); sparse_merkle_proof_protobuf_conversion_test( None, vec![ b"0".test_only_hash(), b"1".test_only_hash(), b"2".test_only_hash(), ], vec![0b1110_0000], 3, ); sparse_merkle_proof_protobuf_conversion_test( None, vec![*SPARSE_MERKLE_PLACEHOLDER_HASH, b"1".test_only_hash()], vec![0b0100_0000], 1, ); sparse_merkle_proof_protobuf_conversion_test( None, vec![ b"0".test_only_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH, b"2".test_only_hash(), ], vec![0b1010_0000], 2, ); sparse_merkle_proof_protobuf_conversion_test( None, vec![ b"0".test_only_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, b"7".test_only_hash(), ], vec![0b1000_0001], 2, ); sparse_merkle_proof_protobuf_conversion_test( None, vec![ b"0".test_only_hash(), *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, 
*SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, *SPARSE_MERKLE_PLACEHOLDER_HASH, b"7".test_only_hash(), b"8".test_only_hash(), ], vec![0b1000_0001, 0b1000_0000], 3, ); sparse_merkle_proof_protobuf_conversion_test( Some((HashValue::random(), HashValue::random())), vec![b"0".test_only_hash()], vec![0b1000_0000], 1, ); } #[test] fn test_convert_sparse_merkle_proof_wrong_number_of_siblings() { let sibling0 = b"0".test_only_hash(); let sibling1 = b"1".test_only_hash(); let mut compressed_proof = crate::proto::proof::SparseMerkleProof::new(); compressed_proof.mut_bitmap().push(0b1000_0000); compressed_proof .mut_non_default_siblings() .push(sibling0.to_vec()); compressed_proof .mut_non_default_siblings() .push(sibling1.to_vec()); assert!(SparseMerkleProof::from_proto(compressed_proof).is_err()); } #[test] fn test_convert_sparse_merkle_proof_malformed_hashes() { let mut sibling0 = b"0".test_only_hash().to_vec(); sibling0.push(1); let mut compressed_proof = crate::proto::proof::SparseMerkleProof::new(); compressed_proof.mut_bitmap().push(0b1000_0000); compressed_proof.mut_non_default_siblings().push(sibling0); assert!(SparseMerkleProof::from_proto(compressed_proof).is_err()); } #[test] fn test_convert_sparse_merkle_proof_malformed_leaf() { let sibling0 = b"0".test_only_hash().to_vec(); let mut compressed_proof = crate::proto::proof::SparseMerkleProof::new(); compressed_proof.set_leaf(vec![1, 2, 3]); compressed_proof.mut_bitmap().push(0b1000_0000); compressed_proof.mut_non_default_siblings().push(sibling0); assert!(SparseMerkleProof::from_proto(compressed_proof).is_err()); } proptest! 
{ #[test] fn test_accumulator_bitmap_iterator_roundtrip(value in any::<u64>()) { let bitmap = AccumulatorBitmap::new(value); let iter = bitmap.iter(); let bitmap2 = iter.collect(); prop_assert_eq!(bitmap, bitmap2); } #[test] fn test_accumulator_bitmap_iterator_inverse_roundtrip(mut value in vec(any::<bool>(), 0..63)) { value.insert(0, true); let bitmap: AccumulatorBitmap = value.iter().cloned().collect(); let value2: Vec<_> = bitmap.iter().collect(); prop_assert_eq!(value, value2); } #[test] fn test_sparse_merkle_bitmap_iterator_roundtrip(mut value in vec(any::<u8>(), 0..64)) { if !value.is_empty() && *value.last().unwrap() == 0 { *value.last_mut().unwrap() |= 0b100; } let bitmap = SparseMerkleBitmap::new(value); let iter = bitmap.iter(); let bitmap2 = iter.collect(); prop_assert_eq!(bitmap, bitmap2); } #[test] fn test_sparse_merkle_bitmap_iterator_inverse_roundtrip(mut value in vec(any::<bool>(), 0..255)) { value.push(true); let bitmap: SparseMerkleBitmap = value.iter().cloned().collect(); let value2: Vec<_> = bitmap.iter().collect(); prop_assert_eq!(value, value2); } #[test] fn test_accumulator_protobuf_conversion_roundtrip(proof in any::<AccumulatorProof>()) { assert_protobuf_encode_decode(&proof); } #[test] fn test_sparse_merkle_protobuf_conversion_roundtrip(proof in any::<SparseMerkleProof>()) { assert_protobuf_encode_decode(&proof); } #[test] fn test_signed_transaction_proof_protobuf_conversion_roundtrip(proof in any::<SignedTransactionProof>()) { assert_protobuf_encode_decode(&proof); } #[test] fn test_account_state_proof_protobuf_conversion_roundtrip(proof in any::<AccountStateProof>()) { assert_protobuf_encode_decode(&proof); } #[test] fn test_event_proof_protobuf_conversion_roundtrip(proof in any::<EventProof>()) { assert_protobuf_encode_decode(&proof); } }
33.546828
110
0.668498
482ee9366c74b980112dc319b17ed01b9ad27739
20,728
// This file is generated by rust-protobuf 2.20.0. Do not edit // @generated // https://github.com/rust-lang/rust-clippy/issues/702 #![allow(unknown_lints)] #![allow(clippy::all)] #![allow(unused_attributes)] #![rustfmt::skip] #![allow(box_pointers)] #![allow(dead_code)] #![allow(missing_docs)] #![allow(non_camel_case_types)] #![allow(non_snake_case)] #![allow(non_upper_case_globals)] #![allow(trivial_casts)] #![allow(unused_imports)] #![allow(unused_results)] //! Generated file from `cosmos/capability/v1beta1/capability.proto` /// Generated files are compatible only with the same version /// of protobuf runtime. // const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_20_0; #[derive(PartialEq,Clone,Default)] pub struct Capability { // message fields pub index: u64, // special fields pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a Capability { fn default() -> &'a Capability { <Capability as ::protobuf::Message>::default_instance() } } impl Capability { pub fn new() -> Capability { ::std::default::Default::default() } // uint64 index = 1; pub fn get_index(&self) -> u64 { self.index } pub fn clear_index(&mut self) { self.index = 0; } // Param is passed by value, moved pub fn set_index(&mut self, v: u64) { self.index = v; } } impl ::protobuf::Message for Capability { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { if wire_type != ::protobuf::wire_format::WireTypeVarint { return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type)); } let tmp = is.read_uint64()?; self.index = tmp; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; if self.index != 0 { my_size += ::protobuf::rt::value_size(1, self.index, ::protobuf::wire_format::WireTypeVarint); } my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { if self.index != 0 { os.write_uint64(1, self.index)?; } os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> Capability { Capability::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); 
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeUint64>( "index", |m: &Capability| { &m.index }, |m: &mut Capability| { &mut m.index }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<Capability>( "Capability", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static Capability { static instance: ::protobuf::rt::LazyV2<Capability> = ::protobuf::rt::LazyV2::INIT; instance.get(Capability::new) } } impl ::protobuf::Clear for Capability { fn clear(&mut self) { self.index = 0; self.unknown_fields.clear(); } } impl ::std::fmt::Debug for Capability { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for Capability { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } #[derive(PartialEq,Clone,Default)] pub struct Owner { // message fields pub module: ::std::string::String, pub name: ::std::string::String, // special fields pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a Owner { fn default() -> &'a Owner { <Owner as ::protobuf::Message>::default_instance() } } impl Owner { pub fn new() -> Owner { ::std::default::Default::default() } // string module = 1; pub fn get_module(&self) -> &str { &self.module } pub fn clear_module(&mut self) { self.module.clear(); } // Param is passed by value, moved pub fn set_module(&mut self, v: ::std::string::String) { self.module = v; } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. 
pub fn mut_module(&mut self) -> &mut ::std::string::String { &mut self.module } // Take field pub fn take_module(&mut self) -> ::std::string::String { ::std::mem::replace(&mut self.module, ::std::string::String::new()) } // string name = 2; pub fn get_name(&self) -> &str { &self.name } pub fn clear_name(&mut self) { self.name.clear(); } // Param is passed by value, moved pub fn set_name(&mut self, v: ::std::string::String) { self.name = v; } // Mutable pointer to the field. // If field is not initialized, it is initialized with default value first. pub fn mut_name(&mut self) -> &mut ::std::string::String { &mut self.name } // Take field pub fn take_name(&mut self) -> ::std::string::String { ::std::mem::replace(&mut self.name, ::std::string::String::new()) } } impl ::protobuf::Message for Owner { fn is_initialized(&self) -> bool { true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? { let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.module)?; }, 2 => { ::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; if !self.module.is_empty() { my_size += ::protobuf::rt::string_size(1, &self.module); } if !self.name.is_empty() { my_size += ::protobuf::rt::string_size(2, &self.name); } my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { if !self.module.is_empty() { os.write_string(1, &self.module)?; } if !self.name.is_empty() { 
os.write_string(2, &self.name)?; } os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> Owner { Owner::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( "module", |m: &Owner| { &m.module }, |m: &mut Owner| { &mut m.module }, )); fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>( "name", |m: &Owner| { &m.name }, |m: &mut Owner| { &mut m.name }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<Owner>( "Owner", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static Owner { static instance: ::protobuf::rt::LazyV2<Owner> = ::protobuf::rt::LazyV2::INIT; instance.get(Owner::new) } } impl ::protobuf::Clear for Owner { fn clear(&mut self) { self.module.clear(); self.name.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for Owner { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for Owner { fn 
as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } #[derive(PartialEq,Clone,Default)] pub struct CapabilityOwners { // message fields pub owners: ::protobuf::RepeatedField<Owner>, // special fields pub unknown_fields: ::protobuf::UnknownFields, pub cached_size: ::protobuf::CachedSize, } impl<'a> ::std::default::Default for &'a CapabilityOwners { fn default() -> &'a CapabilityOwners { <CapabilityOwners as ::protobuf::Message>::default_instance() } } impl CapabilityOwners { pub fn new() -> CapabilityOwners { ::std::default::Default::default() } // repeated .cosmos.capability.v1beta1.Owner owners = 1; pub fn get_owners(&self) -> &[Owner] { &self.owners } pub fn clear_owners(&mut self) { self.owners.clear(); } // Param is passed by value, moved pub fn set_owners(&mut self, v: ::protobuf::RepeatedField<Owner>) { self.owners = v; } // Mutable pointer to the field. pub fn mut_owners(&mut self) -> &mut ::protobuf::RepeatedField<Owner> { &mut self.owners } // Take field pub fn take_owners(&mut self) -> ::protobuf::RepeatedField<Owner> { ::std::mem::replace(&mut self.owners, ::protobuf::RepeatedField::new()) } } impl ::protobuf::Message for CapabilityOwners { fn is_initialized(&self) -> bool { for v in &self.owners { if !v.is_initialized() { return false; } }; true } fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> { while !is.eof()? 
{ let (field_number, wire_type) = is.read_tag_unpack()?; match field_number { 1 => { ::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.owners)?; }, _ => { ::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?; }, }; } ::std::result::Result::Ok(()) } // Compute sizes of nested messages #[allow(unused_variables)] fn compute_size(&self) -> u32 { let mut my_size = 0; for value in &self.owners { let len = value.compute_size(); my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len; }; my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields()); self.cached_size.set(my_size); my_size } fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> { for v in &self.owners { os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?; os.write_raw_varint32(v.get_cached_size())?; v.write_to_with_cached_sizes(os)?; }; os.write_unknown_fields(self.get_unknown_fields())?; ::std::result::Result::Ok(()) } fn get_cached_size(&self) -> u32 { self.cached_size.get() } fn get_unknown_fields(&self) -> &::protobuf::UnknownFields { &self.unknown_fields } fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields { &mut self.unknown_fields } fn as_any(&self) -> &dyn (::std::any::Any) { self as &dyn (::std::any::Any) } fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) { self as &mut dyn (::std::any::Any) } fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> { self } fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor { Self::descriptor_static() } fn new() -> CapabilityOwners { CapabilityOwners::new() } fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor { static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT; descriptor.get(|| { let mut fields = ::std::vec::Vec::new(); 
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<Owner>>( "owners", |m: &CapabilityOwners| { &m.owners }, |m: &mut CapabilityOwners| { &mut m.owners }, )); ::protobuf::reflect::MessageDescriptor::new_pb_name::<CapabilityOwners>( "CapabilityOwners", fields, file_descriptor_proto() ) }) } fn default_instance() -> &'static CapabilityOwners { static instance: ::protobuf::rt::LazyV2<CapabilityOwners> = ::protobuf::rt::LazyV2::INIT; instance.get(CapabilityOwners::new) } } impl ::protobuf::Clear for CapabilityOwners { fn clear(&mut self) { self.owners.clear(); self.unknown_fields.clear(); } } impl ::std::fmt::Debug for CapabilityOwners { fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { ::protobuf::text_format::fmt(self, f) } } impl ::protobuf::reflect::ProtobufValue for CapabilityOwners { fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef { ::protobuf::reflect::ReflectValueRef::Message(self) } } static file_descriptor_proto_data: &'static [u8] = b"\ \n*cosmos/capability/v1beta1/capability.proto\x12\x19cosmos.capability.v\ 1beta1\x1a\x14gogoproto/gogo.proto\":\n\nCapability\x12&\n\x05index\x18\ \x01\x20\x01(\x04R\x05indexB\x10\xf2\xde\x1f\x0cyaml:\"index\":\x04\x98\ \xa0\x1f\0\"a\n\x05Owner\x12)\n\x06module\x18\x01\x20\x01(\tR\x06moduleB\ \x11\xf2\xde\x1f\ryaml:\"module\"\x12#\n\x04name\x18\x02\x20\x01(\tR\x04\ nameB\x0f\xf2\xde\x1f\x0byaml:\"name\":\x08\x88\xa0\x1f\0\x98\xa0\x1f\0\ \"R\n\x10CapabilityOwners\x12>\n\x06owners\x18\x01\x20\x03(\x0b2\x20.cos\ mos.capability.v1beta1.OwnerR\x06ownersB\x04\xc8\xde\x1f\0B1Z/github.com\ /cosmos/cosmos-sdk/x/capability/typesJ\xbe\x07\n\x06\x12\x04\0\0\x1d\x01\ \n\x08\n\x01\x0c\x12\x03\0\0\x12\n\x08\n\x01\x02\x12\x03\x01\x08!\n\x08\ \n\x01\x08\x12\x03\x03\0F\n\t\n\x02\x08\x0b\x12\x03\x03\0F\n\t\n\x02\x03\ \0\x12\x03\x05\x07\x1d\n\x88\x01\n\x02\x04\0\x12\x04\t\0\r\x01\x1a|\x20C\ 
apability\x20defines\x20an\x20implementation\x20of\x20an\x20object\x20ca\ pability.\x20The\x20index\n\x20provided\x20to\x20a\x20Capability\x20must\ \x20be\x20globally\x20unique.\n\n\n\n\x03\x04\0\x01\x12\x03\t\x08\x12\n\ \n\n\x03\x04\0\x07\x12\x03\n\x02.\n\r\n\x06\x04\0\x07\x83\xf4\x03\x12\ \x03\n\x02.\n\x0b\n\x04\x04\0\x02\0\x12\x03\x0c\x02=\n\r\n\x05\x04\0\x02\ \0\x04\x12\x04\x0c\x02\n.\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03\x0c\x02\ \x08\n\x0c\n\x05\x04\0\x02\0\x01\x12\x03\x0c\t\x0e\n\x0c\n\x05\x04\0\x02\ \0\x03\x12\x03\x0c\x11\x12\n\x0c\n\x05\x04\0\x02\0\x08\x12\x03\x0c\x13<\ \n\x0f\n\x08\x04\0\x02\0\x08\xee\xfb\x03\x12\x03\x0c\x14;\nz\n\x02\x04\ \x01\x12\x04\x11\0\x17\x01\x1an\x20Owner\x20defines\x20a\x20single\x20ca\ pability\x20owner.\x20An\x20owner\x20is\x20defined\x20by\x20the\x20name\ \x20of\n\x20capability\x20and\x20the\x20module\x20name.\n\n\n\n\x03\x04\ \x01\x01\x12\x03\x11\x08\r\n\n\n\x03\x04\x01\x07\x12\x03\x12\x02.\n\r\n\ \x06\x04\x01\x07\x83\xf4\x03\x12\x03\x12\x02.\n\n\n\x03\x04\x01\x07\x12\ \x03\x13\x02.\n\r\n\x06\x04\x01\x07\x81\xf4\x03\x12\x03\x13\x02.\n\x0b\n\ \x04\x04\x01\x02\0\x12\x03\x15\x02?\n\r\n\x05\x04\x01\x02\0\x04\x12\x04\ \x15\x02\x13.\n\x0c\n\x05\x04\x01\x02\0\x05\x12\x03\x15\x02\x08\n\x0c\n\ \x05\x04\x01\x02\0\x01\x12\x03\x15\t\x0f\n\x0c\n\x05\x04\x01\x02\0\x03\ \x12\x03\x15\x12\x13\n\x0c\n\x05\x04\x01\x02\0\x08\x12\x03\x15\x14>\n\ \x0f\n\x08\x04\x01\x02\0\x08\xee\xfb\x03\x12\x03\x15\x15=\n\x0b\n\x04\ \x04\x01\x02\x01\x12\x03\x16\x02=\n\r\n\x05\x04\x01\x02\x01\x04\x12\x04\ \x16\x02\x15?\n\x0c\n\x05\x04\x01\x02\x01\x05\x12\x03\x16\x02\x08\n\x0c\ \n\x05\x04\x01\x02\x01\x01\x12\x03\x16\t\r\n\x0c\n\x05\x04\x01\x02\x01\ \x03\x12\x03\x16\x12\x13\n\x0c\n\x05\x04\x01\x02\x01\x08\x12\x03\x16\x14\ <\n\x0f\n\x08\x04\x01\x02\x01\x08\xee\xfb\x03\x12\x03\x16\x15;\nq\n\x02\ \x04\x02\x12\x04\x1b\0\x1d\x01\x1ae\x20CapabilityOwners\x20defines\x20a\ \x20set\x20of\x20owners\x20of\x20a\x20single\x20Capability.\x20The\x20se\ 
t\x20of\n\x20owners\x20must\x20be\x20unique.\n\n\n\n\x03\x04\x02\x01\x12\ \x03\x1b\x08\x18\n\x0b\n\x04\x04\x02\x02\0\x12\x03\x1c\x02;\n\x0c\n\x05\ \x04\x02\x02\0\x04\x12\x03\x1c\x02\n\n\x0c\n\x05\x04\x02\x02\0\x06\x12\ \x03\x1c\x0b\x10\n\x0c\n\x05\x04\x02\x02\0\x01\x12\x03\x1c\x11\x17\n\x0c\ \n\x05\x04\x02\x02\0\x03\x12\x03\x1c\x1a\x1b\n\x0c\n\x05\x04\x02\x02\0\ \x08\x12\x03\x1c\x1c:\n\x0f\n\x08\x04\x02\x02\0\x08\xe9\xfb\x03\x12\x03\ \x1c\x1d9b\x06proto3\ "; static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT; fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto { ::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap() } pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto { file_descriptor_proto_lazy.get(|| { parse_descriptor_proto() }) }
34.261157
136
0.592532
e6aa745d54e9d8c8d625a164437c67e84730c792
4,381
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use LinkerFlavor; use target::{LinkArgs, TargetOptions}; use std::default::Default; pub fn opts() -> TargetOptions { let mut pre_link_args = LinkArgs::new(); pre_link_args.insert(LinkerFlavor::Gcc, vec![ // And here, we see obscure linker flags #45. On windows, it has been // found to be necessary to have this flag to compile liblibc. // // First a bit of background. On Windows, the file format is not ELF, // but COFF (at least according to LLVM). COFF doesn't officially allow // for section names over 8 characters, apparently. Our metadata // section, ".note.rustc", you'll note is over 8 characters. // // On more recent versions of gcc on mingw, apparently the section name // is *not* truncated, but rather stored elsewhere in a separate lookup // table. On older versions of gcc, they apparently always truncated th // section names (at least in some cases). Truncating the section name // actually creates "invalid" objects [1] [2], but only for some // introspection tools, not in terms of whether it can be loaded. // // Long story short, passing this flag forces the linker to *not* // truncate section names (so we can find the metadata section after // it's compiled). The real kicker is that rust compiled just fine on // windows for quite a long time *without* this flag, so I have no idea // why it suddenly started failing for liblibc. Regardless, we // definitely don't want section name truncation, so we're keeping this // flag for windows. 
// // [1] - https://sourceware.org/bugzilla/show_bug.cgi?id=13130 // [2] - https://code.google.com/p/go/issues/detail?id=2139 "-Wl,--enable-long-section-names".to_string(), // Tell GCC to avoid linker plugins, because we are not bundling // them with Windows installer, and Rust does its own LTO anyways. "-fno-use-linker-plugin".to_string(), // Always enable DEP (NX bit) when it is available "-Wl,--nxcompat".to_string(), // Do not use the standard system startup files or libraries when linking "-nostdlib".to_string(), ]); let mut late_link_args = LinkArgs::new(); late_link_args.insert(LinkerFlavor::Gcc, vec![ "-lmingwex".to_string(), "-lmingw32".to_string(), "-lgcc".to_string(), // alas, mingw* libraries above depend on libgcc "-lmsvcrt".to_string(), "-luser32".to_string(), "-lkernel32".to_string(), ]); TargetOptions { // FIXME(#13846) this should be enabled for windows function_sections: false, linker: "gcc".to_string(), dynamic_linking: true, executables: true, dll_prefix: "".to_string(), dll_suffix: ".dll".to_string(), exe_suffix: ".exe".to_string(), staticlib_prefix: "".to_string(), staticlib_suffix: ".lib".to_string(), no_default_libraries: true, target_family: Some("windows".to_string()), is_like_windows: true, allows_weak_linkage: false, pre_link_args, pre_link_objects_exe: vec![ "crt2.o".to_string(), // mingw C runtime initialization for executables "rsbegin.o".to_string(), // Rust compiler runtime initialization, see rsbegin.rs ], pre_link_objects_dll: vec![ "dllcrt2.o".to_string(), // mingw C runtime initialization for dlls "rsbegin.o".to_string(), ], late_link_args, post_link_objects: vec![ "rsend.o".to_string() ], custom_unwind_resume: true, .. Default::default() } }
44.252525
92
0.614928
0e35500027ccd9baa06a0f4154fc5a8dae225912
37,307
//! Conversion of rust-analyzer specific types to lsp_types equivalents. use std::{ path::{self, Path}, sync::atomic::{AtomicU32, Ordering}, }; use ide::{ Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind, Documentation, FileId, FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlPunct, HlRange, HlTag, Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex, Markup, NavigationTarget, ReferenceAccess, RenameError, Runnable, Severity, SourceChange, SymbolKind, TextEdit, TextRange, TextSize, }; use itertools::Itertools; use crate::{ cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot, line_endings::LineEndings, lsp_ext, semantic_tokens, Result, }; pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position { let line_col = line_index.line_col(offset); lsp_types::Position::new(line_col.line, line_col.col_utf16) } pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range { let start = position(line_index, range.start()); let end = position(line_index, range.end()); lsp_types::Range::new(start, end) } pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind { match symbol_kind { SymbolKind::Function => lsp_types::SymbolKind::Function, SymbolKind::Struct => lsp_types::SymbolKind::Struct, SymbolKind::Enum => lsp_types::SymbolKind::Enum, SymbolKind::Variant => lsp_types::SymbolKind::EnumMember, SymbolKind::Trait => lsp_types::SymbolKind::Interface, SymbolKind::Macro => lsp_types::SymbolKind::Function, SymbolKind::Module => lsp_types::SymbolKind::Module, SymbolKind::TypeAlias | SymbolKind::TypeParam => lsp_types::SymbolKind::TypeParameter, SymbolKind::Field => lsp_types::SymbolKind::Field, SymbolKind::Static => lsp_types::SymbolKind::Constant, SymbolKind::Const => lsp_types::SymbolKind::Constant, SymbolKind::ConstParam => lsp_types::SymbolKind::Constant, SymbolKind::Impl => lsp_types::SymbolKind::Object, SymbolKind::Local | SymbolKind::SelfParam | 
SymbolKind::LifetimeParam | SymbolKind::ValueParam | SymbolKind::Label => lsp_types::SymbolKind::Variable, SymbolKind::Union => lsp_types::SymbolKind::Struct, } } pub(crate) fn document_highlight_kind( reference_access: ReferenceAccess, ) -> lsp_types::DocumentHighlightKind { match reference_access { ReferenceAccess::Read => lsp_types::DocumentHighlightKind::Read, ReferenceAccess::Write => lsp_types::DocumentHighlightKind::Write, } } pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { Severity::Error => lsp_types::DiagnosticSeverity::Error, Severity::WeakWarning => lsp_types::DiagnosticSeverity::Hint, } } pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation { let value = crate::markdown::format_docs(documentation.as_str()); let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value }; lsp_types::Documentation::MarkupContent(markup_content) } pub(crate) fn insert_text_format( insert_text_format: InsertTextFormat, ) -> lsp_types::InsertTextFormat { match insert_text_format { InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet, InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText, } } pub(crate) fn completion_item_kind( completion_item_kind: CompletionItemKind, ) -> lsp_types::CompletionItemKind { match completion_item_kind { CompletionItemKind::Keyword => lsp_types::CompletionItemKind::Keyword, CompletionItemKind::Snippet => lsp_types::CompletionItemKind::Snippet, CompletionItemKind::Module => lsp_types::CompletionItemKind::Module, CompletionItemKind::Function => lsp_types::CompletionItemKind::Function, CompletionItemKind::Struct => lsp_types::CompletionItemKind::Struct, CompletionItemKind::Enum => lsp_types::CompletionItemKind::Enum, CompletionItemKind::EnumVariant => lsp_types::CompletionItemKind::EnumMember, CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::Struct, CompletionItemKind::Binding => 
lsp_types::CompletionItemKind::Variable, CompletionItemKind::Field => lsp_types::CompletionItemKind::Field, CompletionItemKind::Trait => lsp_types::CompletionItemKind::Interface, CompletionItemKind::TypeAlias => lsp_types::CompletionItemKind::Struct, CompletionItemKind::Const => lsp_types::CompletionItemKind::Constant, CompletionItemKind::Static => lsp_types::CompletionItemKind::Value, CompletionItemKind::Method => lsp_types::CompletionItemKind::Method, CompletionItemKind::TypeParam => lsp_types::CompletionItemKind::TypeParameter, CompletionItemKind::Macro => lsp_types::CompletionItemKind::Method, CompletionItemKind::Attribute => lsp_types::CompletionItemKind::EnumMember, CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::Reference, } } pub(crate) fn text_edit( line_index: &LineIndex, line_endings: LineEndings, indel: Indel, ) -> lsp_types::TextEdit { let range = range(line_index, indel.delete); let new_text = match line_endings { LineEndings::Unix => indel.insert, LineEndings::Dos => indel.insert.replace('\n', "\r\n"), }; lsp_types::TextEdit { range, new_text } } pub(crate) fn snippet_text_edit( line_index: &LineIndex, line_endings: LineEndings, is_snippet: bool, indel: Indel, ) -> lsp_ext::SnippetTextEdit { let text_edit = text_edit(line_index, line_endings, indel); let insert_text_format = if is_snippet { Some(lsp_types::InsertTextFormat::Snippet) } else { None }; lsp_ext::SnippetTextEdit { range: text_edit.range, new_text: text_edit.new_text, insert_text_format, } } pub(crate) fn text_edit_vec( line_index: &LineIndex, line_endings: LineEndings, text_edit: TextEdit, ) -> Vec<lsp_types::TextEdit> { text_edit.into_iter().map(|indel| self::text_edit(line_index, line_endings, indel)).collect() } pub(crate) fn snippet_text_edit_vec( line_index: &LineIndex, line_endings: LineEndings, is_snippet: bool, text_edit: TextEdit, ) -> Vec<lsp_ext::SnippetTextEdit> { text_edit .into_iter() .map(|indel| self::snippet_text_edit(line_index, line_endings, 
is_snippet, indel)) .collect() } pub(crate) fn completion_item( line_index: &LineIndex, line_endings: LineEndings, completion_item: CompletionItem, ) -> Vec<lsp_types::CompletionItem> { fn set_score(res: &mut lsp_types::CompletionItem, label: &str) { res.preselect = Some(true); // HACK: sort preselect items first res.sort_text = Some(format!(" {}", label)); } let mut additional_text_edits = Vec::new(); let mut text_edit = None; // LSP does not allow arbitrary edits in completion, so we have to do a // non-trivial mapping here. let source_range = completion_item.source_range(); for indel in completion_item.text_edit().iter() { if indel.delete.contains_range(source_range) { text_edit = Some(if indel.delete == source_range { self::text_edit(line_index, line_endings, indel.clone()) } else { assert!(source_range.end() == indel.delete.end()); let range1 = TextRange::new(indel.delete.start(), source_range.start()); let range2 = source_range; let indel1 = Indel::replace(range1, String::new()); let indel2 = Indel::replace(range2, indel.insert.clone()); additional_text_edits.push(self::text_edit(line_index, line_endings, indel1)); self::text_edit(line_index, line_endings, indel2) }) } else { assert!(source_range.intersect(indel.delete).is_none()); let text_edit = self::text_edit(line_index, line_endings, indel.clone()); additional_text_edits.push(text_edit); } } let text_edit = text_edit.unwrap(); let mut res = lsp_types::CompletionItem { label: completion_item.label().to_string(), detail: completion_item.detail().map(|it| it.to_string()), filter_text: Some(completion_item.lookup().to_string()), kind: completion_item.kind().map(completion_item_kind), text_edit: Some(text_edit.into()), additional_text_edits: Some(additional_text_edits), documentation: completion_item.documentation().map(documentation), deprecated: Some(completion_item.deprecated()), ..Default::default() }; if completion_item.score().is_some() { set_score(&mut res, completion_item.label()); } if 
completion_item.deprecated() { res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated]) } if completion_item.trigger_call_info() { res.command = Some(lsp_types::Command { title: "triggerParameterHints".into(), command: "editor.action.triggerParameterHints".into(), arguments: None, }); } let mut all_results = match completion_item.ref_match() { Some(ref_match) => { let mut refed = res.clone(); let (mutability, _score) = ref_match; let label = format!("&{}{}", mutability.as_keyword_for_ref(), refed.label); set_score(&mut refed, &label); refed.label = label; vec![res, refed] } None => vec![res], }; for mut r in all_results.iter_mut() { r.insert_text_format = Some(insert_text_format(completion_item.insert_text_format())); } all_results } pub(crate) fn signature_help( call_info: CallInfo, concise: bool, label_offsets: bool, ) -> lsp_types::SignatureHelp { let (label, parameters) = match (concise, label_offsets) { (_, false) => { let params = call_info .parameter_labels() .map(|label| lsp_types::ParameterInformation { label: lsp_types::ParameterLabel::Simple(label.to_string()), documentation: None, }) .collect::<Vec<_>>(); let label = if concise { call_info.parameter_labels().join(", ") } else { call_info.signature }; (label, params) } (false, true) => { let params = call_info .parameter_ranges() .iter() .map(|it| [u32::from(it.start()).into(), u32::from(it.end()).into()]) .map(|label_offsets| lsp_types::ParameterInformation { label: lsp_types::ParameterLabel::LabelOffsets(label_offsets), documentation: None, }) .collect::<Vec<_>>(); (call_info.signature, params) } (true, true) => { let mut params = Vec::new(); let mut label = String::new(); let mut first = true; for param in call_info.parameter_labels() { if !first { label.push_str(", "); } first = false; let start = label.len() as u32; label.push_str(param); let end = label.len() as u32; params.push(lsp_types::ParameterInformation { label: lsp_types::ParameterLabel::LabelOffsets([start, end]), documentation: 
None, }); } (label, params) } }; let documentation = if concise { None } else { call_info.doc.map(|doc| { lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value: doc, }) }) }; let active_parameter = call_info.active_parameter.map(|it| it as u32); let signature = lsp_types::SignatureInformation { label, documentation, parameters: Some(parameters), active_parameter, }; lsp_types::SignatureHelp { signatures: vec![signature], active_signature: None, active_parameter, } } pub(crate) fn inlay_hint(line_index: &LineIndex, inlay_hint: InlayHint) -> lsp_ext::InlayHint { lsp_ext::InlayHint { label: inlay_hint.label.to_string(), range: range(line_index, inlay_hint.range), kind: match inlay_hint.kind { InlayKind::ParameterHint => lsp_ext::InlayKind::ParameterHint, InlayKind::TypeHint => lsp_ext::InlayKind::TypeHint, InlayKind::ChainingHint => lsp_ext::InlayKind::ChainingHint, }, } } static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1); pub(crate) fn semantic_tokens( text: &str, line_index: &LineIndex, highlights: Vec<HlRange>, ) -> lsp_types::SemanticTokens { let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string(); let mut builder = semantic_tokens::SemanticTokensBuilder::new(id); for highlight_range in highlights { if highlight_range.highlight.is_empty() { continue; } let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight); let token_index = semantic_tokens::type_index(type_); let modifier_bitset = mods.0; for mut text_range in line_index.lines(highlight_range.range) { if text[text_range].ends_with('\n') { text_range = TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n')); } let range = range(&line_index, text_range); builder.push(range, token_index, modifier_bitset); } } builder.build() } pub(crate) fn semantic_token_delta( previous: &lsp_types::SemanticTokens, current: &lsp_types::SemanticTokens, ) -> lsp_types::SemanticTokensDelta { let result_id = 
current.result_id.clone(); let edits = semantic_tokens::diff_tokens(&previous.data, &current.data); lsp_types::SemanticTokensDelta { result_id, edits } } fn semantic_token_type_and_modifiers( highlight: Highlight, ) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) { let mut mods = semantic_tokens::ModifierSet::default(); let type_ = match highlight.tag { HlTag::Symbol(symbol) => match symbol { SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE, SymbolKind::Impl => lsp_types::SemanticTokenType::TYPE, SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY, SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER, SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER, SymbolKind::LifetimeParam => semantic_tokens::LIFETIME, SymbolKind::Label => semantic_tokens::LABEL, SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER, SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD, SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE, SymbolKind::Function => { if highlight.mods.contains(HlMod::Associated) { lsp_types::SemanticTokenType::METHOD } else { lsp_types::SemanticTokenType::FUNCTION } } SymbolKind::Const => { mods |= semantic_tokens::CONSTANT; mods |= lsp_types::SemanticTokenModifier::STATIC; lsp_types::SemanticTokenType::VARIABLE } SymbolKind::Static => { mods |= lsp_types::SemanticTokenModifier::STATIC; lsp_types::SemanticTokenType::VARIABLE } SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT, SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM, SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER, SymbolKind::Union => semantic_tokens::UNION, SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS, SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE, SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO, }, HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE, HlTag::None => semantic_tokens::GENERIC, HlTag::ByteLiteral | HlTag::NumericLiteral => 
lsp_types::SemanticTokenType::NUMBER, HlTag::BoolLiteral => semantic_tokens::BOOLEAN, HlTag::CharLiteral | HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING, HlTag::Comment => lsp_types::SemanticTokenType::COMMENT, HlTag::Attribute => semantic_tokens::ATTRIBUTE, HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD, HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE, HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER, HlTag::Operator => lsp_types::SemanticTokenType::OPERATOR, HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE, HlTag::Punctuation(punct) => match punct { HlPunct::Bracket => semantic_tokens::BRACKET, HlPunct::Brace => semantic_tokens::BRACE, HlPunct::Parenthesis => semantic_tokens::PARENTHESIS, HlPunct::Angle => semantic_tokens::ANGLE, HlPunct::Comma => semantic_tokens::COMMA, HlPunct::Dot => semantic_tokens::DOT, HlPunct::Colon => semantic_tokens::COLON, HlPunct::Semi => semantic_tokens::SEMICOLON, HlPunct::Other => semantic_tokens::PUNCTUATION, }, }; for modifier in highlight.mods.iter() { let modifier = match modifier { HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER, HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION, HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION, HlMod::Injected => semantic_tokens::INJECTED, HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW, HlMod::Mutable => semantic_tokens::MUTABLE, HlMod::Consuming => semantic_tokens::CONSUMING, HlMod::Unsafe => semantic_tokens::UNSAFE, HlMod::Callable => semantic_tokens::CALLABLE, HlMod::Static => lsp_types::SemanticTokenModifier::STATIC, HlMod::Associated => continue, }; mods |= modifier; } (type_, mods) } pub(crate) fn folding_range( text: &str, line_index: &LineIndex, line_folding_only: bool, fold: Fold, ) -> lsp_types::FoldingRange { let kind = match fold.kind { FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), 
FoldKind::Mods | FoldKind::Block | FoldKind::ArgList => None, }; let range = range(line_index, fold.range); if line_folding_only { // Clients with line_folding_only == true (such as VSCode) will fold the whole end line // even if it contains text not in the folding range. To prevent that we exclude // range.end.line from the folding region if there is more text after range.end // on the same line. let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))] .chars() .take_while(|it| *it != '\n') .any(|it| !it.is_whitespace()); let end_line = if has_more_text_on_end_line { range.end.line.saturating_sub(1) } else { range.end.line }; lsp_types::FoldingRange { start_line: range.start.line, start_character: None, end_line, end_character: None, kind, } } else { lsp_types::FoldingRange { start_line: range.start.line, start_character: Some(range.start.character), end_line: range.end.line, end_character: Some(range.end.character), kind, } } } pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url { snap.file_id_to_url(file_id) } /// Returns a `Url` object from a given path, will lowercase drive letters if present. /// This will only happen when processing windows paths. /// /// When processing non-windows path, this is essentially the same as `Url::from_file_path`. 
pub(crate) fn url_from_abs_path(path: &Path) -> lsp_types::Url { assert!(path.is_absolute()); let url = lsp_types::Url::from_file_path(path).unwrap(); match path.components().next() { Some(path::Component::Prefix(prefix)) if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) => { // Need to lowercase driver letter } _ => return url, } let driver_letter_range = { let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() { Some(it) => it, None => return url, }; let start = scheme.len() + ':'.len_utf8(); start..(start + drive_letter.len()) }; // Note: lowercasing the `path` itself doesn't help, the `Url::parse` // machinery *also* canonicalizes the drive letter. So, just massage the // string in place. let mut url = url.into_string(); url[driver_letter_range].make_ascii_lowercase(); lsp_types::Url::parse(&url).unwrap() } pub(crate) fn optional_versioned_text_document_identifier( snap: &GlobalStateSnapshot, file_id: FileId, ) -> lsp_types::OptionalVersionedTextDocumentIdentifier { let url = url(snap, file_id); let version = snap.url_file_version(&url); lsp_types::OptionalVersionedTextDocumentIdentifier { uri: url, version } } pub(crate) fn location( snap: &GlobalStateSnapshot, frange: FileRange, ) -> Result<lsp_types::Location> { let url = url(snap, frange.file_id); let line_index = snap.analysis.file_line_index(frange.file_id)?; let range = range(&line_index, frange.range); let loc = lsp_types::Location::new(url, range); Ok(loc) } /// Perefer using `location_link`, if the client has the cap. 
pub(crate) fn location_from_nav( snap: &GlobalStateSnapshot, nav: NavigationTarget, ) -> Result<lsp_types::Location> { let url = url(snap, nav.file_id); let line_index = snap.analysis.file_line_index(nav.file_id)?; let range = range(&line_index, nav.full_range); let loc = lsp_types::Location::new(url, range); Ok(loc) } pub(crate) fn location_link( snap: &GlobalStateSnapshot, src: Option<FileRange>, target: NavigationTarget, ) -> Result<lsp_types::LocationLink> { let origin_selection_range = match src { Some(src) => { let line_index = snap.analysis.file_line_index(src.file_id)?; let range = range(&line_index, src.range); Some(range) } None => None, }; let (target_uri, target_range, target_selection_range) = location_info(snap, target)?; let res = lsp_types::LocationLink { origin_selection_range, target_uri, target_range, target_selection_range, }; Ok(res) } fn location_info( snap: &GlobalStateSnapshot, target: NavigationTarget, ) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> { let line_index = snap.analysis.file_line_index(target.file_id)?; let target_uri = url(snap, target.file_id); let target_range = range(&line_index, target.full_range); let target_selection_range = target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range); Ok((target_uri, target_range, target_selection_range)) } pub(crate) fn goto_definition_response( snap: &GlobalStateSnapshot, src: Option<FileRange>, targets: Vec<NavigationTarget>, ) -> Result<lsp_types::GotoDefinitionResponse> { if snap.config.location_link() { let links = targets .into_iter() .map(|nav| location_link(snap, src, nav)) .collect::<Result<Vec<_>>>()?; Ok(links.into()) } else { let locations = targets .into_iter() .map(|nav| { location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) }) .collect::<Result<Vec<_>>>()?; Ok(locations.into()) } } pub(crate) fn snippet_text_document_edit( snap: &GlobalStateSnapshot, is_snippet: bool, file_id: FileId, edit: TextEdit, ) -> 
Result<lsp_ext::SnippetTextDocumentEdit> { let text_document = optional_versioned_text_document_identifier(snap, file_id); let line_index = snap.analysis.file_line_index(file_id)?; let line_endings = snap.file_line_endings(file_id); let edits = edit .into_iter() .map(|it| snippet_text_edit(&line_index, line_endings, is_snippet, it)) .collect(); Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits }) } pub(crate) fn snippet_text_document_ops( snap: &GlobalStateSnapshot, file_system_edit: FileSystemEdit, ) -> Vec<lsp_ext::SnippetDocumentChangeOperation> { let mut ops = Vec::new(); match file_system_edit { FileSystemEdit::CreateFile { dst, initial_contents } => { let uri = snap.anchored_path(&dst); let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri: uri.clone(), options: None, annotation_id: None, }); ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(create_file)); if !initial_contents.is_empty() { let text_document = lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None }; let range = range(&LineIndex::new(""), TextRange::empty(TextSize::from(0))); let text_edit = lsp_ext::SnippetTextEdit { range, new_text: initial_contents, insert_text_format: Some(lsp_types::InsertTextFormat::PlainText), }; let edit_file = lsp_ext::SnippetTextDocumentEdit { text_document, edits: vec![text_edit] }; ops.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit_file)); } } FileSystemEdit::MoveFile { src, dst } => { let old_uri = snap.file_id_to_url(src); let new_uri = snap.anchored_path(&dst); let rename_file = lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None, }); ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(rename_file)) } } ops } pub(crate) fn snippet_workspace_edit( snap: &GlobalStateSnapshot, source_change: SourceChange, ) -> Result<lsp_ext::SnippetWorkspaceEdit> { let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new(); for op in 
source_change.file_system_edits { let ops = snippet_text_document_ops(snap, op); document_changes.extend_from_slice(&ops); } for (file_id, edit) in source_change.source_file_edits { let edit = snippet_text_document_edit(&snap, source_change.is_snippet, file_id, edit)?; document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit)); } let workspace_edit = lsp_ext::SnippetWorkspaceEdit { changes: None, document_changes: Some(document_changes) }; Ok(workspace_edit) } pub(crate) fn workspace_edit( snap: &GlobalStateSnapshot, source_change: SourceChange, ) -> Result<lsp_types::WorkspaceEdit> { assert!(!source_change.is_snippet); snippet_workspace_edit(snap, source_change).map(|it| it.into()) } impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit { fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit { lsp_types::WorkspaceEdit { changes: None, document_changes: snippet_workspace_edit.document_changes.map(|changes| { lsp_types::DocumentChanges::Operations( changes .into_iter() .map(|change| match change { lsp_ext::SnippetDocumentChangeOperation::Op(op) => { lsp_types::DocumentChangeOperation::Op(op) } lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => { lsp_types::DocumentChangeOperation::Edit( lsp_types::TextDocumentEdit { text_document: edit.text_document, edits: edit .edits .into_iter() .map(|edit| { lsp_types::OneOf::Left(lsp_types::TextEdit { range: edit.range, new_text: edit.new_text, }) }) .collect(), }, ) } }) .collect(), ) }), change_annotations: None, } } } pub(crate) fn call_hierarchy_item( snap: &GlobalStateSnapshot, target: NavigationTarget, ) -> Result<lsp_types::CallHierarchyItem> { let name = target.name.to_string(); let detail = target.description.clone(); let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::Function); let (uri, range, selection_range) = location_info(snap, target)?; Ok(lsp_types::CallHierarchyItem { name, kind, tags: None, detail, uri, range, 
selection_range, data: None, }) } pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind { match kind { AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY, AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX, AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR, AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT, AssistKind::RefactorInline => lsp_types::CodeActionKind::REFACTOR_INLINE, AssistKind::RefactorRewrite => lsp_types::CodeActionKind::REFACTOR_REWRITE, } } pub(crate) fn unresolved_code_action( snap: &GlobalStateSnapshot, code_action_params: lsp_types::CodeActionParams, assist: Assist, index: usize, ) -> Result<lsp_ext::CodeAction> { assert!(assist.source_change.is_none()); let res = lsp_ext::CodeAction { title: assist.label.to_string(), group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0), kind: Some(code_action_kind(assist.id.1)), edit: None, is_preferred: None, data: Some(lsp_ext::CodeActionData { id: format!("{}:{}", assist.id.0, index.to_string()), code_action_params, }), }; Ok(res) } pub(crate) fn resolved_code_action( snap: &GlobalStateSnapshot, assist: Assist, ) -> Result<lsp_ext::CodeAction> { let change = assist.source_change.unwrap(); let res = lsp_ext::CodeAction { edit: Some(snippet_workspace_edit(snap, change)?), title: assist.label.to_string(), group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0), kind: Some(code_action_kind(assist.id.1)), is_preferred: None, data: None, }; Ok(res) } pub(crate) fn runnable( snap: &GlobalStateSnapshot, file_id: FileId, runnable: Runnable, ) -> Result<lsp_ext::Runnable> { let config = snap.config.runnables(); let spec = CargoTargetSpec::for_file(snap, file_id)?; let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone()); let target = spec.as_ref().map(|s| s.target.clone()); let (cargo_args, executable_args) = CargoTargetSpec::runnable_args(snap, spec, 
&runnable.kind, &runnable.cfg)?; let label = runnable.label(target); let location = location_link(snap, None, runnable.nav)?; Ok(lsp_ext::Runnable { label, location: Some(location), kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { workspace_root: workspace_root.map(|it| it.into()), override_cargo: config.override_cargo, cargo_args, cargo_extra_args: config.cargo_extra_args, executable_args, expect_test: None, }, }) } pub(crate) fn markup_content(markup: Markup) -> lsp_types::MarkupContent { let value = crate::markdown::format_docs(markup.as_str()); lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value } } pub(crate) fn rename_error(err: RenameError) -> crate::LspError { crate::LspError { code: lsp_server::ErrorCode::InvalidParams as i32, message: err.to_string() } } #[cfg(test)] mod tests { use hir::PrefixKind; use ide::Analysis; use ide_db::helpers::{insert_use::InsertUseConfig, SnippetCap}; use super::*; #[test] fn test_completion_with_ref() { let fixture = r#" struct Foo; fn foo(arg: &Foo) {} fn main() { let arg = Foo; foo($0) }"#; let (offset, text) = test_utils::extract_offset(fixture); let line_index = LineIndex::new(&text); let (analysis, file_id) = Analysis::from_single_file(text); let completions: Vec<(String, Option<String>)> = analysis .completions( &ide::CompletionConfig { enable_postfix_completions: true, enable_imports_on_the_fly: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), insert_use: InsertUseConfig { merge: None, prefix_kind: PrefixKind::Plain }, }, ide_db::base_db::FilePosition { file_id, offset }, ) .unwrap() .unwrap() .into_iter() .filter(|c| c.label().ends_with("arg")) .map(|c| completion_item(&line_index, LineEndings::Unix, c)) .flat_map(|comps| comps.into_iter().map(|c| (c.label, c.sort_text))) .collect(); expect_test::expect![[r#" [ ( "arg", None, ), ( "&arg", Some( " &arg", ), ), ] "#]] .assert_debug_eq(&completions); } #[test] fn 
conv_fold_line_folding_only_fixup() { let text = r#"mod a; mod b; mod c; fn main() { if cond { a::do_a(); } else { b::do_b(); } }"#; let (analysis, file_id) = Analysis::from_single_file(text.to_string()); let folds = analysis.folding_ranges(file_id).unwrap(); assert_eq!(folds.len(), 4); let line_index = LineIndex::new(&text); let converted: Vec<lsp_types::FoldingRange> = folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect(); let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)]; assert_eq!(converted.len(), expected_lines.len()); for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) { assert_eq!(folding_range.start_line, *start_line); assert_eq!(folding_range.start_character, None); assert_eq!(folding_range.end_line, *end_line); assert_eq!(folding_range.end_character, None); } } // `Url` is not able to parse windows paths on unix machines. #[test] #[cfg(target_os = "windows")] fn test_lowercase_drive_letter_with_drive() { let url = url_from_abs_path(Path::new("C:\\Test")); assert_eq!(url.to_string(), "file:///c:/Test"); } #[test] #[cfg(target_os = "windows")] fn test_drive_without_colon_passthrough() { let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#)); assert_eq!(url.to_string(), "file://localhost/C$/my_dir"); } }
38.620083
130
0.61664
8a67e0eb66e09a139d1f0839e6f760d265f5006f
7,004
//! Entity is an abstraction over Profiles and Vaults, easing the use of these primitives in
//! authentication and authorization APIs.
#![deny(unsafe_code)]
#![warn(
    // prevented by big_array
    // missing_docs,
    trivial_casts,
    trivial_numeric_casts,
    unused_import_braces,
    unused_qualifications
)]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(feature = "std")]
extern crate core;

#[cfg(feature = "alloc")]
#[macro_use]
extern crate alloc;

use cfg_if::cfg_if;

// Re-export the public surface of the submodules so users can depend on
// `ockam_entity::*` without knowing the internal module layout.
pub use change::*;
pub use channel::*;
pub use contact::*;
pub use entity::*;
pub use entity_builder::*;
pub use error::*;
pub use identifiers::*;
pub use key_attributes::*;
pub use lease::*;
use ockam_channel::SecureChannelVault;
use ockam_core::compat::{collections::HashMap, string::String, vec::Vec};
use ockam_core::{AsyncTryClone, Decodable, Encodable, Result};
use ockam_vault::{Hasher, KeyIdVault, SecretVault, Signer, Verifier};
pub use profile::*;
pub use profile_state::*;
pub use traits::*;
pub use worker::*;

use crate::EntityError;

mod authentication;
mod change;
pub mod change_history;
mod channel;
mod contact;
mod entity;
mod entity_builder;
mod error;
mod identifiers;
mod key_attributes;
mod lease;
mod profile;
mod profile_state;
mod signature;
mod traits;
mod worker;

// The credential module (and its re-exports below) only exist when the
// "credentials" feature is enabled.
cfg_if! {
    if #[cfg(feature = "credentials")] {
        mod credential;
        pub use credential::*;
    }
}

/// Traits required for a Vault implementation suitable for use in a Profile
pub trait ProfileVault:
    SecretVault
    + SecureChannelVault
    + KeyIdVault
    + Hasher
    + Signer
    + Verifier
    + AsyncTryClone
    + Send
    + 'static
{
}

// Blanket impl: any type that satisfies all the component vault traits is
// automatically a ProfileVault; no manual opt-in is needed.
impl<D> ProfileVault for D where
    D: SecretVault
        + SecureChannelVault
        + KeyIdVault
        + Hasher
        + Signer
        + Verifier
        + AsyncTryClone
        + Send
        + 'static
{
}

/// Profile event attributes
pub type ProfileEventAttributes = HashMap<String, String>;
/// Contacts Database
pub type Contacts = HashMap<ProfileIdentifier, Contact>;

#[cfg(feature = "credentials")]
pub use signature_bbs_plus::{PublicKey as BbsPublicKey, SecretKey as BbsSecretKey};
#[cfg(feature = "credentials")]
pub use signature_bls::{PublicKey as BlsPublicKey, SecretKey as BlsSecretKey};

/// Stateless helper namespace for (de)serializing Contacts and change events.
pub struct ProfileSerializationUtil;

impl ProfileSerializationUtil {
    /// Serialize [`Contact`] in binary form for storing/transferring over the network
    pub fn serialize_contact(contact: &Contact) -> Result<Vec<u8>> {
        // Any codec failure is collapsed into the generic BareError.
        contact.encode().map_err(|_| EntityError::BareError.into())
    }

    /// Deserialize [`Contact`] from binary form
    pub fn deserialize_contact(contact: &[u8]) -> Result<Contact> {
        let contact = Contact::decode(contact).map_err(|_| EntityError::BareError)?;
        Ok(contact)
    }

    /// Serialize [`ProfileChangeEvent`]s to binary form for storing/transferring over the network
    pub fn serialize_change_events(change_events: &[ProfileChangeEvent]) -> Result<Vec<u8>> {
        change_events
            .encode()
            .map_err(|_| EntityError::BareError.into())
    }

    /// Deserialize [`ProfileChangeEvent`]s from binary form
    pub fn deserialize_change_events(change_events: &[u8]) -> Result<Vec<ProfileChangeEvent>> {
        let change_events =
            Vec::<ProfileChangeEvent>::decode(change_events).map_err(|_| EntityError::BareError)?;
        Ok(change_events)
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use ockam_core::Error;
    use ockam_node::Context;
    use ockam_vault_sync_core::Vault;

    // Helper: wrap a message into an Err so test failures carry a reason.
    fn test_error<S: Into<String>>(msg: S) -> Result<()> {
        Err(Error::new(0, msg.into()))
    }

    // Exercises key creation and root-key rotation on a single profile,
    // verifying the change history stays valid and keys actually change.
    async fn test_basic_profile_key_ops(profile: &mut (impl Identity + Sync)) -> Result<()> {
        if !profile.verify_changes().await? {
            return test_error("verify_changes failed");
        }
        let secret1 = profile.get_root_secret_key().await?;
        let public1 = profile.get_root_public_key().await?;
        profile.create_key("Truck management".to_string()).await?;
        if !profile.verify_changes().await? {
            return test_error("verify_changes failed");
        }
        let secret2 = profile
            .get_secret_key("Truck management".to_string())
            .await?;
        let public2 = profile.get_public_key("Truck management".into()).await?;
        if secret1 == secret2 {
            return test_error("secret did not change after create_key");
        }
        if public1 == public2 {
            return test_error("public did not change after create_key");
        }
        profile.rotate_root_secret_key().await?;
        if !profile.verify_changes().await? {
            return test_error("verify_changes failed");
        }
        let secret3 = profile.get_root_secret_key().await?;
        let public3 = profile.get_root_public_key().await?;
        // Rotate a second time to confirm repeated rotations keep the
        // change history verifiable.
        profile.rotate_root_secret_key().await?;
        if !profile.verify_changes().await? {
            return test_error("verify_changes failed");
        }
        if secret1 == secret3 {
            return test_error("secret did not change after rotate_key");
        }
        if public1 == public3 {
            return test_error("public did not change after rotate_key");
        }
        Ok(())
    }

    // Bob adds Alice as a contact, Alice rotates her root key, and Bob must
    // be able to verify and apply just the newest change event.
    async fn test_update_contact_after_change(
        alice: &mut (impl Identity + Sync),
        bob: &mut (impl Identity + Sync),
    ) -> Result<()> {
        let contact_alice = alice.as_contact().await?;
        let alice_id = contact_alice.identifier().clone();
        if !bob.verify_and_add_contact(contact_alice).await? {
            return test_error("bob failed to add alice");
        }
        alice.rotate_root_secret_key().await?;
        let alice_changes = alice.get_changes().await?;
        let last_change = alice_changes.last().unwrap().clone();
        if !bob
            .verify_and_update_contact(&alice_id, &[last_change])
            .await?
        {
            return test_error("bob failed to update alice");
        }
        Ok(())
    }

    #[ockam_macros::test]
    async fn async_tests(ctx: &mut Context) -> Result<()> {
        let alice_vault = Vault::create(ctx).await.expect("failed to create vault");
        let bob_vault = Vault::create(ctx).await.expect("failed to create vault");

        let entity_alice = Entity::create(ctx, &alice_vault).await?;
        let entity_bob = Entity::create(ctx, &bob_vault).await?;
        let mut alice = entity_alice.current_profile().await.unwrap().unwrap();
        let mut bob = entity_bob.current_profile().await.unwrap().unwrap();

        // Collect results first so the node is stopped even when a sub-test
        // fails; panics are raised only after ctx.stop().
        let mut results = vec![];
        results.push(test_basic_profile_key_ops(&mut alice).await);
        results.push(test_update_contact_after_change(&mut alice, &mut bob).await);
        ctx.stop().await?;
        for r in results {
            match r {
                Err(e) => panic!("{}", e.domain().clone()),
                _ => (),
            }
        }
        Ok(())
    }
}
28.356275
98
0.636922
286f73aed8d829281f7b5a3e37f6b0576a59fa75
5,690
//! Integration tests for the todo app: each test drives the Rocket instance
//! through a local client and inspects the database through `Task`.

use super::task::Task;
use rand::{Rng, thread_rng, distributions::Alphanumeric};
use rocket::local::asynchronous::Client;
use rocket::http::{Status, ContentType};

// We use a lock to synchronize between tests so DB operations don't collide.
// For now. In the future, we'll have a nice way to run each test in a DB
// transaction so we can regain concurrency.
static DB_LOCK: parking_lot::Mutex<()> = parking_lot::const_mutex(());

// Sets up a tracked client and a DB connection, clears the task table, then
// runs `$block` inside Rocket's async test executor while holding DB_LOCK.
macro_rules! run_test {
    (|$client:ident, $conn:ident| $block:expr) => ({
        let _lock = DB_LOCK.lock();

        rocket::async_test(async move {
            let $client = Client::tracked(super::rocket()).await.expect("Rocket client");
            let db = super::DbConn::get_one($client.rocket()).await;
            let $conn = db.expect("failed to get database connection for testing");
            Task::delete_all(&$conn).await.expect("failed to delete all tasks for testing");

            $block
        })
    })
}

#[test]
fn test_index() {
    // Uses the blocking client since no DB access is needed here.
    use rocket::local::blocking::Client;

    let _lock = DB_LOCK.lock();
    let client = Client::tracked(super::rocket()).unwrap();
    let response = client.get("/").dispatch();
    assert_eq!(response.status(), Status::Ok);
}

#[test]
fn test_insertion_deletion() {
    run_test!(|client, conn| {
        // Get the tasks before making changes.
        let init_tasks = Task::all(&conn).await.unwrap();

        // Issue a request to insert a new task.
        client.post("/todo")
            .header(ContentType::Form)
            .body("description=My+first+task")
            .dispatch()
            .await;

        // Ensure we have one more task in the database.
        let new_tasks = Task::all(&conn).await.unwrap();
        assert_eq!(new_tasks.len(), init_tasks.len() + 1);

        // Ensure the task is what we expect.
        assert_eq!(new_tasks[0].description, "My first task");
        assert_eq!(new_tasks[0].completed, false);

        // Issue a request to delete the task.
        let id = new_tasks[0].id.unwrap();
        client.delete(format!("/todo/{}", id)).dispatch().await;

        // Ensure it's gone.
        let final_tasks = Task::all(&conn).await.unwrap();
        assert_eq!(final_tasks.len(), init_tasks.len());
        if final_tasks.len() > 0 {
            assert_ne!(final_tasks[0].description, "My first task");
        }
    })
}

#[test]
fn test_toggle() {
    run_test!(|client, conn| {
        // Issue a request to insert a new task; ensure it's not yet completed.
        client.post("/todo")
            .header(ContentType::Form)
            .body("description=test_for_completion")
            .dispatch()
            .await;

        let task = Task::all(&conn).await.unwrap()[0].clone();
        assert_eq!(task.completed, false);

        // Issue a request to toggle the task; ensure it is completed.
        client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await;
        assert_eq!(Task::all(&conn).await.unwrap()[0].completed, true);

        // Issue a request to toggle the task; ensure it's not completed again.
        client.put(format!("/todo/{}", task.id.unwrap())).dispatch().await;
        assert_eq!(Task::all(&conn).await.unwrap()[0].completed, false);
    })
}

#[test]
fn test_many_insertions() {
    const ITER: usize = 100;

    run_test!(|client, conn| {
        // Get the number of tasks initially.
        let init_num = Task::all(&conn).await.unwrap().len();
        let mut descs = Vec::new();

        for i in 0..ITER {
            // Issue a request to insert a new task with a random description.
            let desc: String = thread_rng()
                .sample_iter(&Alphanumeric)
                .take(12)
                .map(char::from)
                .collect();

            client.post("/todo")
                .header(ContentType::Form)
                .body(format!("description={}", desc))
                .dispatch()
                .await;

            // Record the description we choose for this iteration.
            // Newest-first, matching the order Task::all returns below.
            descs.insert(0, desc);

            // Ensure the task was inserted properly and all other tasks remain.
            let tasks = Task::all(&conn).await.unwrap();
            assert_eq!(tasks.len(), init_num + i + 1);

            for j in 0..i {
                assert_eq!(descs[j], tasks[j].description);
            }
        }
    })
}

#[test]
fn test_bad_form_submissions() {
    run_test!(|client, _conn| {
        // Submit an empty form. We should get a 422 but no flash error.
        let res = client.post("/todo")
            .header(ContentType::Form)
            .dispatch()
            .await;

        let mut cookies = res.headers().get("Set-Cookie");
        assert_eq!(res.status(), Status::UnprocessableEntity);
        assert!(!cookies.any(|value| value.contains("error")));

        // Submit a form with an empty description. We look for 'error' in the
        // cookies which corresponds to flash message being set as an error.
        let res = client.post("/todo")
            .header(ContentType::Form)
            .body("description=")
            .dispatch()
            .await;

        let mut cookies = res.headers().get("Set-Cookie");
        assert!(cookies.any(|value| value.contains("error")));

        // Submit a form without a description. Expect a 422 but no flash error.
        let res = client.post("/todo")
            .header(ContentType::Form)
            .body("evil=smile")
            .dispatch()
            .await;

        let mut cookies = res.headers().get("Set-Cookie");
        assert_eq!(res.status(), Status::UnprocessableEntity);
        assert!(!cookies.any(|value| value.contains("error")));
    })
}
33.869048
92
0.574517
d75295a75ff28a3eb691fad3c387fe00cd0a86ca
4,510
use crate::*;
use ic_crypto_internal_hmac::{Hmac, Sha512};
use ic_types::PrincipalId;

/// A single step of a key-derivation path: either a standard BIP32 u32
/// index, or an arbitrary byte string (used e.g. for principal IDs).
#[derive(Debug, Clone)]
pub enum DerivationIndex {
    U32(u32),
    Generalized(Vec<u8>),
}

impl DerivationIndex {
    /// Return the BIP32 "next" derivation path
    ///
    /// This is only used very rarely. The +1 behavior for u32's matches
    /// standard BIP32. For the generalized case, the "next" value is not
    /// necessarily obvious, so instead we cause key derivation to fail.
    ///
    /// This does mean that with ~ 1/2**127 chance, a canister will not be
    /// able to derive a public key for itself.
    fn next(&self) -> ThresholdEcdsaResult<Self> {
        match self {
            Self::U32(i) => Ok(Self::U32(i + 1)),
            Self::Generalized(_) => Err(ThresholdEcdsaError::InvalidDerivationPath),
        }
    }
}

/// An ordered sequence of `DerivationIndex` steps applied to a master key.
#[derive(Debug, Clone)]
pub struct DerivationPath {
    path: Vec<DerivationIndex>,
}

impl DerivationPath {
    /// Create a standard BIP32 derivation path
    pub fn new_bip32(bip32: &[u32]) -> Self {
        let mut path = Vec::with_capacity(bip32.len());
        for n in bip32 {
            path.push(DerivationIndex::U32(*n));
        }
        Self::new_arbitrary(path)
    }

    /// Create a derivation path from a principal ID and a BIP32 path
    ///
    /// The principal becomes the first (generalized) index, followed by the
    /// caller-supplied BIP32 indices.
    pub fn new_with_principal(principal: PrincipalId, bip32: &[u32]) -> Self {
        let mut path = Vec::with_capacity(1 + bip32.len());
        path.push(DerivationIndex::Generalized(principal.to_vec()));
        for n in bip32 {
            path.push(DerivationIndex::U32(*n));
        }
        Self::new_arbitrary(path)
    }

    /// Create a free-form derivation path
    pub fn new_arbitrary(path: Vec<DerivationIndex>) -> Self {
        Self { path }
    }

    /// BIP32 Public parent key -> public child key (aka CKDpub)
    ///
    /// See <https://en.bitcoin.it/wiki/BIP_0032#Child_key_derivation_.28CKD.29_functions>
    ///
    /// Extended to support larger inputs, which is needed for
    /// deriving the canister public key
    ///
    /// Returns the child public key, the child chain key, and the scalar
    /// offset that was added to the parent key.
    fn bip32_ckdpub(
        public_key: &EccPoint,
        chain_key: &[u8],
        index: &DerivationIndex,
    ) -> ThresholdEcdsaResult<(EccPoint, Vec<u8>, EccScalar)> {
        // BIP32 is only defined for secp256k1
        if public_key.curve_type() != EccCurveType::K256 {
            return Err(ThresholdEcdsaError::CurveMismatch);
        }

        // I = HMAC-SHA512(chain_key, serialized parent key || index)
        let mut hmac = Hmac::<Sha512>::new(chain_key);

        hmac.write(&public_key.serialize());

        match index {
            DerivationIndex::U32(u) => {
                if (u >> 31) != 0 {
                    // hard derivation not supported
                    return Err(ThresholdEcdsaError::InvalidDerivationPath);
                }
                hmac.write(&u.to_be_bytes());
            }
            DerivationIndex::Generalized(v) => {
                hmac.write(v);
            }
        }

        let hmac_output = hmac.finish();

        // iL (first 32 bytes) becomes the scalar offset; iR (last 32 bytes)
        // becomes the child chain key.
        let key_offset = EccScalar::from_bytes_wide(public_key.curve_type(), &hmac_output[..32])?;

        let new_chain_key = hmac_output[32..].to_vec();

        let new_key = public_key.add_points(&EccPoint::mul_by_g(&key_offset)?)?;

        // If iL >= order or new_key=inf, try again with the "next" index
        // (the serialize/compare detects that iL was reduced mod the order).
        if key_offset.serialize() != hmac_output[..32] || new_key.is_infinity()? {
            Self::bip32_ckdpub(public_key, chain_key, &index.next()?)
        } else {
            Ok((new_key, new_chain_key, key_offset))
        }
    }

    /// Walk the whole path from `master_public_key`, returning the summed
    /// scalar tweak and the final chain key.
    pub fn derive_tweak(
        &self,
        master_public_key: &EccPoint,
    ) -> ThresholdEcdsaResult<(EccScalar, Vec<u8>)> {
        let curve_type = master_public_key.curve_type();

        if curve_type == EccCurveType::K256 {
            let mut derived_key = *master_public_key;
            // Initial chain key is 32 zero bytes.
            let mut derived_chain_key = vec![0; 32];
            let mut derived_offset = EccScalar::zero(curve_type);

            for idx in &self.path {
                let (next_derived_key, next_chain_key, next_offset) =
                    Self::bip32_ckdpub(&derived_key, &derived_chain_key, idx)?;

                derived_key = next_derived_key;
                derived_chain_key = next_chain_key;
                // Offsets accumulate additively across the whole path.
                derived_offset = derived_offset.add(&next_offset)?;
            }

            Ok((derived_offset, derived_chain_key))
        } else {
            // Key derivation is not currently defined for curves other than secp256k1
            Err(ThresholdEcdsaError::InvalidDerivationPath)
        }
    }
}
33.656716
98
0.595344
d74b3ceabc98ff9799f1431c2794acf97754fce5
1,486
extern crate wasmparser; use std::env; use std::fs::File; use std::io; use std::io::prelude::*; use std::str; use wasmparser::Parser; use wasmparser::ParserState; use wasmparser::WasmDecoder; fn main() { let args = env::args().collect::<Vec<_>>(); if args.len() != 2 { println!("Usage: {} in.wasm", args[0]); return; } let buf: Vec<u8> = read_wasm(&args[1]).unwrap(); let mut parser = Parser::new(&buf); loop { let state = parser.read(); match *state { ParserState::ExportSectionEntry { field, ref kind, index, } => { println!( "ExportSectionEntry {{ field: \"{}\", kind: {:?}, index: {} }}", field, kind, index ); } ParserState::ImportSectionEntry { module, field, ref ty, } => { println!( "ImportSectionEntry {{ module: \"{}\", field: \"{}\", ty: {:?} }}", module, field, ty ); } ParserState::EndWasm => break, ParserState::Error(err) => panic!("Error: {:?}", err), _ => println!("{:?}", state), } } } fn read_wasm(file: &str) -> io::Result<Vec<u8>> { let mut data = Vec::new(); let mut f = File::open(file)?; f.read_to_end(&mut data)?; Ok(data) }
26.070175
87
0.438762
cc84bbb25c75cfb07e947332b0bb348a9788b1f5
354
use crate::args::{Identifier, LoggingArgs, StreamFilter}; use crate::args::output::OutputArgs; #[derive(clap::Clap)] pub struct CheckArgs { #[clap(flatten)] pub identifier: Identifier, #[clap(flatten)] pub stream_filter: StreamFilter, #[clap(flatten)] pub logging: LoggingArgs, #[clap(flatten)] pub output: OutputArgs, }
23.6
57
0.677966
5bba78c44d9ead7bb48dab4773e5e7e5421cf94a
1,232
use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct DestinationTableProperties { /// [Optional] The description for the destination table. This will only be used if the destination table is newly created. If the table already exists and a value different than the current description is provided, the job will fail. #[serde(skip_serializing_if = "Option::is_none")] pub description: Option<String>, /// [Optional] The friendly name for the destination table. This will only be used if the destination table is newly created. If the table already exists and a value different than the current friendly name is provided, the job will fail. #[serde(skip_serializing_if = "Option::is_none")] pub friendly_name: Option<String>, /// [Optional] The labels associated with this table. You can use these to organize and group your tables. This will only be used if the destination table is newly created. If the table already exists and labels are different than the current labels are provided, the job will fail. #[serde(skip_serializing_if = "Option::is_none")] pub labels: Option<::std::collections::HashMap<String, String>>, }
77
286
0.755682
2fa1c312bc7579ea3ba9d78b23ee04a7166c45e1
6,841
//! Access secure credentials at runtime with multiple backends.
//!
//! For more information, see [the
//! homepage](https://github.com/emk/credentials).
//!
//! ```
//! use credentials;
//! use std::env;
//!
//! env::set_var("PASSWORD", "secret");
//! assert_eq!("secret", credentials::var("PASSWORD").unwrap());
//! ```

#![warn(missing_docs)]

extern crate dirs;
#[macro_use]
extern crate failure;
extern crate hyper;
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
extern crate regex;
extern crate reqwest;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;

use backend::Backend;
use std::cell::RefCell;
use std::convert::AsRef;
use std::default::Default;
use std::ops::DerefMut;
use std::path::Path;
use std::sync::{Mutex, MutexGuard};

// Be very careful not to export any more of the Secretfile API than
// strictly necessary, because we don't want to stabilize too much at this
// point.
pub use errors::{Error, Result};
pub use secretfile::{Secretfile, SecretfileKeys};

mod backend;
mod chained;
mod envvar;
mod errors;
mod secretfile;
mod vault;

/// Options which can be passed to `Client::new`.
pub struct Options {
    // Secretfile to use; `None` means fall back to `Secretfile::default()`.
    secretfile: Option<Secretfile>,
    // Whether env vars / local files may shadow Secretfile entries.
    allow_override: bool,
}

impl Default for Options {
    /// Create an `Options` object using the default values for each
    /// option.
    fn default() -> Options {
        Options {
            secretfile: None,
            allow_override: true,
        }
    }
}

impl Options {
    /// Specify a `Secretfile` for the `Client` to use. This takes `self`
    /// by value, so it consumes the `Options` structure it is called on,
    /// and returns a new one. Defaults to `Secretfile::default()`.
    pub fn secretfile(mut self, secretfile: Secretfile) -> Options {
        self.secretfile = Some(secretfile);
        self
    }

    /// Allow secrets in environment variables and local files to override
    /// the ones specified in our `Secretfile`. Defaults to true.
    pub fn allow_override(mut self, allow_override: bool) -> Options {
        self.allow_override = allow_override;
        self
    }
}

/// A client which fetches secrets. Under normal circumstances, it's
/// usually easier to use the static `credentials::var` and
/// `credentials::file` methods instead, but you may need to use this to
/// customize behavior.
pub struct Client {
    secretfile: Secretfile,
    backend: chained::Client,
}

impl Client {
    /// Create a new client using the specified options.
    pub fn new(options: Options) -> Result<Client> {
        let secretfile = match options.secretfile {
            Some(sf) => sf,
            None => Secretfile::default()?,
        };
        let over = options.allow_override;
        Ok(Client {
            secretfile: secretfile,
            backend: chained::Client::with_default_backends(over)?,
        })
    }

    /// Create a new client using the default options.
    pub fn default() -> Result<Client> {
        Client::new(Default::default())
    }

    /// Create a new client using the specified `Secretfile`.
    pub fn with_secretfile(secretfile: Secretfile) -> Result<Client> {
        Client::new(Options::default().secretfile(secretfile))
    }

    /// Provide access to a copy of the Secretfile we're using.
    pub fn secretfile(&self) -> &Secretfile {
        &self.secretfile
    }

    /// Fetch the value of an environment-variable-style credential.
    pub fn var<S: AsRef<str>>(&mut self, name: S) -> Result<String> {
        let name_ref = name.as_ref();
        trace!("getting secure credential {}", name_ref);
        // Wrap any backend failure with the credential name for context.
        self.backend
            .var(&self.secretfile, name_ref)
            .map_err(|err| Error::Credential {
                name: name_ref.to_owned(),
                cause: Box::new(err),
            })
    }

    /// Fetch the value of a file-style credential.
    pub fn file<S: AsRef<Path>>(&mut self, path: S) -> Result<String> {
        let path_ref = path.as_ref();
        // Paths must be valid Unicode; otherwise report a credential error
        // carrying the lossy display form of the path.
        let path_str = path_ref.to_str().ok_or_else(|| Error::Credential {
            name: format!("{}", path_ref.display()),
            cause: Box::new(Error::NonUnicodePath {
                path: path_ref.to_owned(),
            }),
        })?;
        trace!("getting secure credential {}", path_str);
        self.backend
            .file(&self.secretfile, path_str)
            .map_err(|err| Error::Credential {
                name: path_str.to_owned(),
                cause: Box::new(err),
            })
    }
}

lazy_static! {
    // Our shared global client, initialized by `lazy_static!` and
    // protected by a Mutex.
    //
    // Rust deliberately makes it a nuisance to use mutable global
    // variables. In this case, the `Mutex` provides thread-safe locking,
    // the `RefCell` makes this assignable, and the `Option` makes this
    // optional. This is a message from the language saying, "Really? A
    // mutable global that might be null? Have you really thought about
    // this?" But the global default client is only for convenience, so
    // we're OK with it, at least so far.
    static ref CLIENT: Mutex<RefCell<Option<Client>>> = Mutex::new(RefCell::new(None));
}

/// Call `body` with the default global client, or return an error if we
/// can't allocate a default global client.
fn with_client<F>(body: F) -> Result<String>
where
    F: FnOnce(&mut Client) -> Result<String>,
{
    let client_cell: MutexGuard<_> = CLIENT.lock().unwrap();

    // Try to set up the client if we haven't already.
    if client_cell.borrow().is_none() {
        *client_cell.borrow_mut() = Some(Client::default()?);
    }

    // Call the provided function. I have to break out `result` separately
    // for mysterious reasons related to the borrow checker and global
    // mutable state.
    let result = match client_cell.borrow_mut().deref_mut() {
        &mut Some(ref mut client) => body(client),
        // We theoretically handed this just above, and exited if we
        // failed.
        &mut None => panic!("Should have a client, but we don't"),
    };
    result
}

/// Fetch the value of an environment-variable-style credential.
pub fn var<S: AsRef<str>>(name: S) -> Result<String> {
    with_client(|client| client.var(name))
}

/// Fetch the value of a file-style credential.
pub fn file<S: AsRef<Path>>(path: S) -> Result<String> {
    with_client(|client| client.file(path))
}

#[cfg(test)]
mod test {
    use super::file;
    use std::fs;
    use std::io::Read;

    #[test]
    fn test_file() {
        // Some arbitrary file contents.
        let mut f = fs::File::open("Cargo.toml").unwrap();
        let mut expected = String::new();
        f.read_to_string(&mut expected).unwrap();

        assert_eq!(expected, file("Cargo.toml").unwrap());
        assert!(file("nosuchfile.txt").is_err());
    }
}
30.954751
75
0.630463
72c0379d47642464a03150455370ed85a26d1fca
35,801
#![allow(clippy::type_complexity)] #![allow(clippy::cognitive_complexity)] use super::methods::{ErrorMessage, RPCErrorResponse, RequestId, ResponseTermination}; use super::protocol::{RPCError, RPCProtocol, RPCRequest}; use super::RPCEvent; use crate::rpc::protocol::{InboundFramed, OutboundFramed}; use core::marker::PhantomData; use fnv::FnvHashMap; use futures::prelude::*; use libp2p::core::upgrade::{InboundUpgrade, OutboundUpgrade, UpgradeError}; use libp2p::swarm::protocols_handler::{ KeepAlive, ProtocolsHandler, ProtocolsHandlerEvent, ProtocolsHandlerUpgrErr, SubstreamProtocol, }; use slog::{crit, debug, error, warn}; use smallvec::SmallVec; use std::collections::hash_map::Entry; use std::time::{Duration, Instant}; use tokio::io::{AsyncRead, AsyncWrite}; use tokio::timer::{delay_queue, DelayQueue}; //TODO: Implement close() on the substream types to improve the poll code. //TODO: Implement check_timeout() on the substream types /// The time (in seconds) before a substream that is awaiting a response from the user times out. pub const RESPONSE_TIMEOUT: u64 = 10; /// The number of times to retry an outbound upgrade in the case of IO errors. const IO_ERROR_RETRIES: u8 = 3; /// Inbound requests are given a sequential `RequestId` to keep track of. All inbound streams are /// identified by their substream ID which is identical to the RPC Id. type InboundRequestId = RequestId; /// Outbound requests are associated with an id that is given by the application that sent the /// request. type OutboundRequestId = RequestId; /// Implementation of `ProtocolsHandler` for the RPC protocol. pub struct RPCHandler<TSubstream> where TSubstream: AsyncRead + AsyncWrite, { /// The upgrade for inbound substreams. listen_protocol: SubstreamProtocol<RPCProtocol>, /// If something bad happened and we should shut down the handler with an error. pending_error: Vec<(RequestId, ProtocolsHandlerUpgrErr<RPCError>)>, /// Queue of events to produce in `poll()`. 
events_out: SmallVec<[RPCEvent; 4]>, /// Queue of outbound substreams to open. dial_queue: SmallVec<[RPCEvent; 4]>, /// Current number of concurrent outbound substreams being opened. dial_negotiated: u32, /// Current inbound substreams awaiting processing. inbound_substreams: FnvHashMap<InboundRequestId, (InboundSubstreamState<TSubstream>, Option<delay_queue::Key>)>, /// Inbound substream `DelayQueue` which keeps track of when an inbound substream will timeout. inbound_substreams_delay: DelayQueue<InboundRequestId>, /// Map of outbound substreams that need to be driven to completion. The `RequestId` is /// maintained by the application sending the request. outbound_substreams: FnvHashMap<OutboundRequestId, (OutboundSubstreamState<TSubstream>, delay_queue::Key)>, /// Inbound substream `DelayQueue` which keeps track of when an inbound substream will timeout. outbound_substreams_delay: DelayQueue<OutboundRequestId>, /// Map of outbound items that are queued as the stream processes them. queued_outbound_items: FnvHashMap<RequestId, Vec<RPCErrorResponse>>, /// Sequential ID for waiting substreams. For inbound substreams, this is also the inbound request ID. current_inbound_substream_id: RequestId, /// Maximum number of concurrent outbound substreams being opened. Value is never modified. max_dial_negotiated: u32, /// Value to return from `connection_keep_alive`. keep_alive: KeepAlive, /// After the given duration has elapsed, an inactive connection will shutdown. inactive_timeout: Duration, /// Try to negotiate the outbound upgrade a few times if there is an IO error before reporting the request as failed. /// This keeps track of the number of attempts. outbound_io_error_retries: u8, /// Logger for handling RPC streams log: slog::Logger, /// Marker to pin the generic stream. _phantom: PhantomData<TSubstream>, } /// State of an outbound substream. Either waiting for a response, or in the process of sending. 
pub enum InboundSubstreamState<TSubstream> where TSubstream: AsyncRead + AsyncWrite, { /// A response has been sent, pending writing and flush. ResponsePendingSend { /// The substream used to send the response substream: futures::sink::Send<InboundFramed<TSubstream>>, /// Whether a stream termination is requested. If true the stream will be closed after /// this send. Otherwise it will transition to an idle state until a stream termination is /// requested or a timeout is reached. closing: bool, }, /// The response stream is idle and awaiting input from the application to send more chunked /// responses. ResponseIdle(InboundFramed<TSubstream>), /// The substream is attempting to shutdown. Closing(InboundFramed<TSubstream>), /// Temporary state during processing Poisoned, } pub enum OutboundSubstreamState<TSubstream> { /// A request has been sent, and we are awaiting a response. This future is driven in the /// handler because GOODBYE requests can be handled and responses dropped instantly. RequestPendingResponse { /// The framed negotiated substream. substream: OutboundFramed<TSubstream>, /// Keeps track of the actual request sent. request: RPCRequest, }, /// Closing an outbound substream> Closing(OutboundFramed<TSubstream>), /// Temporary state during processing Poisoned, } impl<TSubstream> InboundSubstreamState<TSubstream> where TSubstream: AsyncRead + AsyncWrite, { /// Moves the substream state to closing and informs the connected peer. The /// `queued_outbound_items` must be given as a parameter to add stream termination messages to /// the outbound queue. 
pub fn close(&mut self, outbound_queue: &mut Vec<RPCErrorResponse>) { // When terminating a stream, report the stream termination to the requesting user via // an RPC error let error = RPCErrorResponse::ServerError(ErrorMessage { error_message: b"Request timed out".to_vec(), }); // The stream termination type is irrelevant, this will terminate the // stream let stream_termination = RPCErrorResponse::StreamTermination(ResponseTermination::BlocksByRange); match std::mem::replace(self, InboundSubstreamState::Poisoned) { InboundSubstreamState::ResponsePendingSend { substream, closing } => { if !closing { outbound_queue.push(error); outbound_queue.push(stream_termination); } // if the stream is closing after the send, allow it to finish *self = InboundSubstreamState::ResponsePendingSend { substream, closing } } InboundSubstreamState::ResponseIdle(mut substream) => { // check if the stream is already closed if let Ok(Async::Ready(None)) = substream.poll() { *self = InboundSubstreamState::Closing(substream); } else { *self = InboundSubstreamState::ResponsePendingSend { substream: substream.send(error), closing: true, }; } } InboundSubstreamState::Closing(substream) => { // let the stream close *self = InboundSubstreamState::Closing(substream); } InboundSubstreamState::Poisoned => { unreachable!("Coding error: Timeout poisoned substream") } }; } } impl<TSubstream> RPCHandler<TSubstream> where TSubstream: AsyncRead + AsyncWrite, { pub fn new( listen_protocol: SubstreamProtocol<RPCProtocol>, inactive_timeout: Duration, log: &slog::Logger, ) -> Self { RPCHandler { listen_protocol, pending_error: Vec::new(), events_out: SmallVec::new(), dial_queue: SmallVec::new(), dial_negotiated: 0, queued_outbound_items: FnvHashMap::default(), inbound_substreams: FnvHashMap::default(), outbound_substreams: FnvHashMap::default(), inbound_substreams_delay: DelayQueue::new(), outbound_substreams_delay: DelayQueue::new(), current_inbound_substream_id: 1, max_dial_negotiated: 8, keep_alive: 
KeepAlive::Yes, inactive_timeout, outbound_io_error_retries: 0, log: log.clone(), _phantom: PhantomData, } } /// Returns the number of pending requests. pub fn pending_requests(&self) -> u32 { self.dial_negotiated + self.dial_queue.len() as u32 } /// Returns a reference to the listen protocol configuration. /// /// > **Note**: If you modify the protocol, modifications will only applies to future inbound /// > substreams, not the ones already being negotiated. pub fn listen_protocol_ref(&self) -> &SubstreamProtocol<RPCProtocol> { &self.listen_protocol } /// Returns a mutable reference to the listen protocol configuration. /// /// > **Note**: If you modify the protocol, modifications will only applies to future inbound /// > substreams, not the ones already being negotiated. pub fn listen_protocol_mut(&mut self) -> &mut SubstreamProtocol<RPCProtocol> { &mut self.listen_protocol } /// Opens an outbound substream with a request. pub fn send_request(&mut self, rpc_event: RPCEvent) { self.keep_alive = KeepAlive::Yes; self.dial_queue.push(rpc_event); } } impl<TSubstream> ProtocolsHandler for RPCHandler<TSubstream> where TSubstream: AsyncRead + AsyncWrite, { type InEvent = RPCEvent; type OutEvent = RPCEvent; type Error = ProtocolsHandlerUpgrErr<RPCError>; type Substream = TSubstream; type InboundProtocol = RPCProtocol; type OutboundProtocol = RPCRequest; type OutboundOpenInfo = RPCEvent; // Keep track of the id and the request fn listen_protocol(&self) -> SubstreamProtocol<Self::InboundProtocol> { self.listen_protocol.clone() } fn inject_fully_negotiated_inbound( &mut self, out: <RPCProtocol as InboundUpgrade<TSubstream>>::Output, ) { // update the keep alive timeout if there are no more remaining outbound streams if let KeepAlive::Until(_) = self.keep_alive { self.keep_alive = KeepAlive::Until(Instant::now() + self.inactive_timeout); } let (req, substream) = out; // drop the stream and return a 0 id for goodbye "requests" if let r @ RPCRequest::Goodbye(_) = req { 
self.events_out.push(RPCEvent::Request(0, r)); return; } // New inbound request. Store the stream and tag the output. let delay_key = self.inbound_substreams_delay.insert( self.current_inbound_substream_id, Duration::from_secs(RESPONSE_TIMEOUT), ); let awaiting_stream = InboundSubstreamState::ResponseIdle(substream); self.inbound_substreams.insert( self.current_inbound_substream_id, (awaiting_stream, Some(delay_key)), ); self.events_out .push(RPCEvent::Request(self.current_inbound_substream_id, req)); self.current_inbound_substream_id += 1; } fn inject_fully_negotiated_outbound( &mut self, out: <RPCRequest as OutboundUpgrade<TSubstream>>::Output, rpc_event: Self::OutboundOpenInfo, ) { self.dial_negotiated -= 1; if self.dial_negotiated == 0 && self.dial_queue.is_empty() && self.outbound_substreams.is_empty() { self.keep_alive = KeepAlive::Until(Instant::now() + self.inactive_timeout); } else { self.keep_alive = KeepAlive::Yes; } // add the stream to substreams if we expect a response, otherwise drop the stream. match rpc_event { RPCEvent::Request(id, request) if request.expect_response() => { // new outbound request. Store the stream and tag the output. let delay_key = self .outbound_substreams_delay .insert(id, Duration::from_secs(RESPONSE_TIMEOUT)); let awaiting_stream = OutboundSubstreamState::RequestPendingResponse { substream: out, request, }; if let Some(_) = self .outbound_substreams .insert(id, (awaiting_stream, delay_key)) { warn!(self.log, "Duplicate outbound substream id"; "id" => format!("{:?}", id)); } } _ => { // a response is not expected, drop the stream for all other requests } } } // Note: If the substream has closed due to inactivity, or the substream is in the // wrong state a response will fail silently. 
fn inject_event(&mut self, rpc_event: Self::InEvent) { match rpc_event { RPCEvent::Request(_, _) => self.send_request(rpc_event), RPCEvent::Response(rpc_id, response) => { // check if the stream matching the response still exists // variables indicating if the response is an error response or a multi-part // response let res_is_error = response.is_error(); let res_is_multiple = response.multiple_responses(); match self.inbound_substreams.get_mut(&rpc_id) { Some((substream_state, _)) => { match std::mem::replace(substream_state, InboundSubstreamState::Poisoned) { InboundSubstreamState::ResponseIdle(substream) => { // close the stream if there is no response if let RPCErrorResponse::StreamTermination(_) = response { //trace!(self.log, "Stream termination sent. Ending the stream"); *substream_state = InboundSubstreamState::Closing(substream); } else { // send the response // if it's a single rpc request or an error, close the stream after *substream_state = InboundSubstreamState::ResponsePendingSend { substream: substream.send(response), closing: !res_is_multiple | res_is_error, // close if an error or we are not expecting more responses }; } } InboundSubstreamState::ResponsePendingSend { substream, closing } if res_is_multiple => { // the stream is in use, add the request to a pending queue self.queued_outbound_items .entry(rpc_id) .or_insert_with(Vec::new) .push(response); // return the state *substream_state = InboundSubstreamState::ResponsePendingSend { substream, closing, }; } InboundSubstreamState::Closing(substream) => { *substream_state = InboundSubstreamState::Closing(substream); debug!(self.log, "Response not sent. Stream is closing"; "response" => format!("{}",response)); } InboundSubstreamState::ResponsePendingSend { substream, .. 
} => { *substream_state = InboundSubstreamState::ResponsePendingSend { substream, closing: true, }; error!(self.log, "Attempted sending multiple responses to a single response request"); } InboundSubstreamState::Poisoned => { crit!(self.log, "Poisoned inbound substream"); unreachable!("Coding error: Poisoned substream"); } } } None => { debug!(self.log, "Stream has expired. Response not sent"; "response" => format!("{}",response)); } }; } // We do not send errors as responses RPCEvent::Error(_, _) => {} } } fn inject_dial_upgrade_error( &mut self, request: Self::OutboundOpenInfo, error: ProtocolsHandlerUpgrErr< <Self::OutboundProtocol as OutboundUpgrade<Self::Substream>>::Error, >, ) { if let ProtocolsHandlerUpgrErr::Upgrade(UpgradeError::Apply(RPCError::IoError(_))) = error { self.outbound_io_error_retries += 1; if self.outbound_io_error_retries < IO_ERROR_RETRIES { self.send_request(request); return; } } self.outbound_io_error_retries = 0; // add the error let request_id = { if let RPCEvent::Request(id, _) = request { id } else { 0 } }; self.pending_error.push((request_id, error)); } fn connection_keep_alive(&self) -> KeepAlive { self.keep_alive } fn poll( &mut self, ) -> Poll< ProtocolsHandlerEvent<Self::OutboundProtocol, Self::OutboundOpenInfo, Self::OutEvent>, Self::Error, > { if let Some((request_id, err)) = self.pending_error.pop() { // Returning an error here will result in dropping the peer. match err { ProtocolsHandlerUpgrErr::Upgrade(UpgradeError::Apply( RPCError::InvalidProtocol(protocol_string), )) => { // Peer does not support the protocol. // TODO: We currently will not drop the peer, for maximal compatibility with // other clients testing their software. In the future, we will need to decide // which protocols are a bare minimum to support before kicking the peer. 
error!(self.log, "Peer doesn't support the RPC protocol"; "protocol" => protocol_string); return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error(request_id, RPCError::InvalidProtocol(protocol_string)), ))); } ProtocolsHandlerUpgrErr::Timeout | ProtocolsHandlerUpgrErr::Timer => { // negotiation timeout, mark the request as failed debug!(self.log, "Active substreams before timeout"; "len" => self.outbound_substreams.len()); return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error( request_id, RPCError::Custom("Protocol negotiation timeout".into()), ), ))); } ProtocolsHandlerUpgrErr::Upgrade(UpgradeError::Apply(err)) => { // IO/Decode/Custom Error, report to the application return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error(request_id, err), ))); } ProtocolsHandlerUpgrErr::Upgrade(UpgradeError::Select(err)) => { // Error during negotiation return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error(request_id, RPCError::Custom(format!("{}", err))), ))); } } } // return any events that need to be reported if !self.events_out.is_empty() { return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( self.events_out.remove(0), ))); } else { self.events_out.shrink_to_fit(); } // purge expired inbound substreams and send an error while let Async::Ready(Some(stream_id)) = self .inbound_substreams_delay .poll() .map_err(|_| ProtocolsHandlerUpgrErr::Timer)? { let rpc_id = stream_id.get_ref(); // handle a stream timeout for various states if let Some((substream_state, delay_key)) = self.inbound_substreams.get_mut(rpc_id) { // the delay has been removed *delay_key = None; let outbound_queue = self .queued_outbound_items .entry(*rpc_id) .or_insert_with(Vec::new); substream_state.close(outbound_queue); } } // purge expired outbound substreams if let Async::Ready(Some(stream_id)) = self .outbound_substreams_delay .poll() .map_err(|_| ProtocolsHandlerUpgrErr::Timer)? 
{ self.outbound_substreams.remove(stream_id.get_ref()); // notify the user return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error( *stream_id.get_ref(), RPCError::Custom("Stream timed out".into()), ), ))); } // drive inbound streams that need to be processed for request_id in self.inbound_substreams.keys().copied().collect::<Vec<_>>() { // Drain all queued items until all messages have been processed for this stream // TODO Improve this code logic let mut new_items_to_send = true; while new_items_to_send { new_items_to_send = false; match self.inbound_substreams.entry(request_id) { Entry::Occupied(mut entry) => { match std::mem::replace( &mut entry.get_mut().0, InboundSubstreamState::Poisoned, ) { InboundSubstreamState::ResponsePendingSend { mut substream, closing, } => { match substream.poll() { Ok(Async::Ready(raw_substream)) => { // completed the send // close the stream if required if closing { entry.get_mut().0 = InboundSubstreamState::Closing(raw_substream) } else { // check for queued chunks and update the stream entry.get_mut().0 = apply_queued_responses( raw_substream, &mut self .queued_outbound_items .get_mut(&request_id), &mut new_items_to_send, ); } } Ok(Async::NotReady) => { entry.get_mut().0 = InboundSubstreamState::ResponsePendingSend { substream, closing, }; } Err(e) => { if let Some(delay_key) = &entry.get().1 { self.inbound_substreams_delay.remove(delay_key); } entry.remove_entry(); return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error(0, e), ))); } }; } InboundSubstreamState::ResponseIdle(substream) => { entry.get_mut().0 = apply_queued_responses( substream, &mut self.queued_outbound_items.get_mut(&request_id), &mut new_items_to_send, ); } InboundSubstreamState::Closing(mut substream) => { match substream.close() { Ok(Async::Ready(())) | Err(_) => { //trace!(self.log, "Inbound stream dropped"); if let Some(delay_key) = &entry.get().1 { self.inbound_substreams_delay.remove(delay_key); } 
self.queued_outbound_items.remove(&request_id); entry.remove(); if self.outbound_substreams.is_empty() && self.inbound_substreams.is_empty() { self.keep_alive = KeepAlive::Until( Instant::now() + self.inactive_timeout, ); } } // drop the stream Ok(Async::NotReady) => { entry.get_mut().0 = InboundSubstreamState::Closing(substream); } } } InboundSubstreamState::Poisoned => { crit!(self.log, "Poisoned outbound substream"); unreachable!("Coding Error: Inbound Substream is poisoned"); } }; } Entry::Vacant(_) => unreachable!(), } } } // drive outbound streams that need to be processed for request_id in self.outbound_substreams.keys().copied().collect::<Vec<_>>() { match self.outbound_substreams.entry(request_id) { Entry::Occupied(mut entry) => { match std::mem::replace( &mut entry.get_mut().0, OutboundSubstreamState::Poisoned, ) { OutboundSubstreamState::RequestPendingResponse { mut substream, request, } => match substream.poll() { Ok(Async::Ready(Some(response))) => { if request.multiple_responses() && !response.is_error() { entry.get_mut().0 = OutboundSubstreamState::RequestPendingResponse { substream, request, }; let delay_key = &entry.get().1; self.outbound_substreams_delay .reset(delay_key, Duration::from_secs(RESPONSE_TIMEOUT)); } else { // either this is a single response request or we received an // error //trace!(self.log, "Closing single stream request"); // only expect a single response, close the stream entry.get_mut().0 = OutboundSubstreamState::Closing(substream); } return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Response(request_id, response), ))); } Ok(Async::Ready(None)) => { // stream closed // if we expected multiple streams send a stream termination, // else report the stream terminating only. 
//trace!(self.log, "RPC Response - stream closed by remote"); // drop the stream let delay_key = &entry.get().1; self.outbound_substreams_delay.remove(delay_key); entry.remove_entry(); // notify the application error if request.multiple_responses() { // return an end of stream result return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Response( request_id, RPCErrorResponse::StreamTermination( request.stream_termination(), ), ), ))); } // else we return an error, stream should not have closed early. return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error( request_id, RPCError::Custom( "Stream closed early. Empty response".into(), ), ), ))); } Ok(Async::NotReady) => { entry.get_mut().0 = OutboundSubstreamState::RequestPendingResponse { substream, request, } } Err(e) => { // drop the stream let delay_key = &entry.get().1; self.outbound_substreams_delay.remove(delay_key); entry.remove_entry(); return Ok(Async::Ready(ProtocolsHandlerEvent::Custom( RPCEvent::Error(request_id, e), ))); } }, OutboundSubstreamState::Closing(mut substream) => match substream.close() { Ok(Async::Ready(())) | Err(_) => { //trace!(self.log, "Outbound stream dropped"); // drop the stream let delay_key = &entry.get().1; self.outbound_substreams_delay.remove(delay_key); entry.remove_entry(); if self.outbound_substreams.is_empty() && self.inbound_substreams.is_empty() { self.keep_alive = KeepAlive::Until(Instant::now() + self.inactive_timeout); } } Ok(Async::NotReady) => { entry.get_mut().0 = OutboundSubstreamState::Closing(substream); } }, OutboundSubstreamState::Poisoned => { crit!(self.log, "Poisoned outbound substream"); unreachable!("Coding Error: Outbound substream is poisoned") } } } Entry::Vacant(_) => unreachable!(), } } // establish outbound substreams if !self.dial_queue.is_empty() && self.dial_negotiated < self.max_dial_negotiated { self.dial_negotiated += 1; let rpc_event = self.dial_queue.remove(0); self.dial_queue.shrink_to_fit(); if let 
RPCEvent::Request(id, req) = rpc_event { return Ok(Async::Ready( ProtocolsHandlerEvent::OutboundSubstreamRequest { protocol: SubstreamProtocol::new(req.clone()), info: RPCEvent::Request(id, req), }, )); } } Ok(Async::NotReady) } } // Check for new items to send to the peer and update the underlying stream fn apply_queued_responses<TSubstream: AsyncRead + AsyncWrite>( raw_substream: InboundFramed<TSubstream>, queued_outbound_items: &mut Option<&mut Vec<RPCErrorResponse>>, new_items_to_send: &mut bool, ) -> InboundSubstreamState<TSubstream> { match queued_outbound_items { Some(ref mut queue) if !queue.is_empty() => { *new_items_to_send = true; // we have queued items match queue.remove(0) { RPCErrorResponse::StreamTermination(_) => { // close the stream if this is a stream termination InboundSubstreamState::Closing(raw_substream) } chunk => InboundSubstreamState::ResponsePendingSend { substream: raw_substream.send(chunk), closing: false, }, } } _ => { // no items queued set to idle InboundSubstreamState::ResponseIdle(raw_substream) } } }
46.075933
141
0.50236
233bf51ec06bef4f56550b5a7b581355a9b85976
6,561
//! Completes lifetimes and labels. //! //! These completions work a bit differently in that they are only shown when what the user types //! has a `'` preceding it, as our fake syntax tree is invalid otherwise (due to us not inserting //! a lifetime but an ident for obvious reasons). //! Due to this all the tests for lifetimes and labels live in this module for the time being as //! there is no value in lifting these out into the outline module test since they will either not //! show up for normal completions, or they won't show completions other than lifetimes depending //! on the fixture input. use hir::ScopeDef; use syntax::ast; use crate::{completions::Completions, context::CompletionContext}; /// Completes lifetimes. pub(crate) fn complete_lifetime(acc: &mut Completions, ctx: &CompletionContext) { if !ctx.lifetime_allowed { return; } let lp_string; let param_lifetime = match (&ctx.name_syntax, ctx.lifetime_param_syntax.as_ref().and_then(|lp| lp.lifetime())) { (Some(ast::NameLike::Lifetime(lt)), Some(lp)) if lp == lt.clone() => return, (Some(_), Some(lp)) => { lp_string = lp.to_string(); Some(&*lp_string) } _ => None, }; ctx.scope.process_all_names(&mut |name, res| { if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res { if param_lifetime != Some(&*name.to_string()) { acc.add_resolution(ctx, name, &res); } } }); if param_lifetime.is_none() { acc.add_static_lifetime(ctx); } } /// Completes labels. 
pub(crate) fn complete_label(acc: &mut Completions, ctx: &CompletionContext) { if !ctx.is_label_ref { return; } ctx.scope.process_all_names(&mut |name, res| { if let ScopeDef::Label(_) = res { acc.add_resolution(ctx, name, &res); } }); } #[cfg(test)] mod tests { use expect_test::{expect, Expect}; use crate::tests::{check_edit, completion_list}; fn check(ra_fixture: &str, expect: Expect) { let actual = completion_list(ra_fixture); expect.assert_eq(&actual); } #[test] fn check_lifetime_edit() { check_edit( "'lifetime", r#" fn func<'lifetime>(foo: &'li$0) {} "#, r#" fn func<'lifetime>(foo: &'lifetime) {} "#, ); cov_mark::check!(completes_if_lifetime_without_idents); check_edit( "'lifetime", r#" fn func<'lifetime>(foo: &'$0) {} "#, r#" fn func<'lifetime>(foo: &'lifetime) {} "#, ); } #[test] fn complete_lifetime_in_ref() { check( r#" fn foo<'lifetime>(foo: &'a$0 usize) {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); } #[test] fn complete_lifetime_in_ref_missing_ty() { check( r#" fn foo<'lifetime>(foo: &'a$0) {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); } #[test] fn complete_lifetime_in_self_ref() { check( r#" struct Foo; impl<'impl> Foo { fn foo<'func>(&'a$0 self) {} } "#, expect![[r#" lt 'func lt 'impl lt 'static "#]], ); } #[test] fn complete_lifetime_in_arg_list() { check( r#" struct Foo<'lt>; fn foo<'lifetime>(_: Foo<'a$0>) {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); } #[test] fn complete_lifetime_in_where_pred() { check( r#" fn foo2<'lifetime, T>() where 'a$0 {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); } #[test] fn complete_lifetime_in_ty_bound() { check( r#" fn foo2<'lifetime, T>() where T: 'a$0 {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); check( r#" fn foo2<'lifetime, T>() where T: Trait<'a$0> {} "#, expect![[r#" lt 'lifetime lt 'static "#]], ); } #[test] fn dont_complete_lifetime_in_assoc_ty_bound() { check( r#" fn foo2<'lifetime, T>() where T: Trait<Item = 'a$0> {} "#, expect![[r#""#]], ); } #[test] fn 
complete_lifetime_in_param_list() { check( r#" fn foo<'a$0>() {} "#, expect![[r#""#]], ); check( r#" fn foo<'footime, 'lifetime: 'a$0>() {} "#, expect![[r#" lt 'footime "#]], ); } #[test] fn check_label_edit() { check_edit( "'label", r#" fn foo() { 'label: loop { break '$0 } } "#, r#" fn foo() { 'label: loop { break 'label } } "#, ); } #[test] fn complete_label_in_loop() { check( r#" fn foo() { 'foop: loop { break '$0 } } "#, expect![[r#" lb 'foop "#]], ); check( r#" fn foo() { 'foop: loop { continue '$0 } } "#, expect![[r#" lb 'foop "#]], ); } #[test] fn complete_label_in_block_nested() { check( r#" fn foo() { 'foop: { 'baap: { break '$0 } } } "#, expect![[r#" lb 'baap lb 'foop "#]], ); } #[test] fn complete_label_in_loop_with_value() { check( r#" fn foo() { 'foop: loop { break '$0 i32; } } "#, expect![[r#" lb 'foop "#]], ); } #[test] fn complete_label_in_while_cond() { check( r#" fn foo() { 'outer: while { 'inner: loop { break '$0 } } {} } "#, expect![[r#" lb 'inner lb 'outer "#]], ); } #[test] fn complete_label_in_for_iterable() { check( r#" fn foo() { 'outer: for _ in [{ 'inner: loop { break '$0 } }] {} } "#, expect![[r#" lb 'inner "#]], ); } }
20.439252
99
0.445511
e28fc67718f75db04f9385804e7ce6fa79bcc2b0
7,686
mod ffg_updates; mod no_votes; mod votes; use crate::proto_array_fork_choice::{Block, ExecutionStatus, ProtoArrayForkChoice}; use serde_derive::{Deserialize, Serialize}; use types::{AttestationShufflingId, Checkpoint, Epoch, EthSpec, Hash256, MainnetEthSpec, Slot}; pub use ffg_updates::*; pub use no_votes::*; pub use votes::*; #[derive(Debug, Clone, Serialize, Deserialize)] pub enum Operation { FindHead { justified_checkpoint: Checkpoint, finalized_checkpoint: Checkpoint, justified_state_balances: Vec<u64>, expected_head: Hash256, }, InvalidFindHead { justified_checkpoint: Checkpoint, finalized_checkpoint: Checkpoint, justified_state_balances: Vec<u64>, }, ProcessBlock { slot: Slot, root: Hash256, parent_root: Hash256, justified_checkpoint: Checkpoint, finalized_checkpoint: Checkpoint, }, ProcessAttestation { validator_index: usize, block_root: Hash256, target_epoch: Epoch, }, Prune { finalized_root: Hash256, prune_threshold: usize, expected_len: usize, }, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ForkChoiceTestDefinition { pub finalized_block_slot: Slot, pub justified_checkpoint: Checkpoint, pub finalized_checkpoint: Checkpoint, pub operations: Vec<Operation>, } impl ForkChoiceTestDefinition { pub fn run(self) { let junk_shuffling_id = AttestationShufflingId::from_components(Epoch::new(0), Hash256::zero()); let execution_status = ExecutionStatus::irrelevant(); let mut fork_choice = ProtoArrayForkChoice::new( self.finalized_block_slot, Hash256::zero(), self.justified_checkpoint, self.finalized_checkpoint, junk_shuffling_id.clone(), junk_shuffling_id, execution_status, ) .expect("should create fork choice struct"); for (op_index, op) in self.operations.into_iter().enumerate() { match op.clone() { Operation::FindHead { justified_checkpoint, finalized_checkpoint, justified_state_balances, expected_head, } => { let head = fork_choice .find_head::<MainnetEthSpec>( justified_checkpoint, finalized_checkpoint, &justified_state_balances, Hash256::zero(), 
&MainnetEthSpec::default_spec(), ) .map_err(|e| e) .unwrap_or_else(|e| { panic!("find_head op at index {} returned error {}", op_index, e) }); assert_eq!( head, expected_head, "Operation at index {} failed checks. Operation: {:?}", op_index, op ); check_bytes_round_trip(&fork_choice); } Operation::InvalidFindHead { justified_checkpoint, finalized_checkpoint, justified_state_balances, } => { let result = fork_choice.find_head::<MainnetEthSpec>( justified_checkpoint, finalized_checkpoint, &justified_state_balances, Hash256::zero(), &MainnetEthSpec::default_spec(), ); assert!( result.is_err(), "Operation at index {} . Operation: {:?}", op_index, op ); check_bytes_round_trip(&fork_choice); } Operation::ProcessBlock { slot, root, parent_root, justified_checkpoint, finalized_checkpoint, } => { let block = Block { slot, root, parent_root: Some(parent_root), state_root: Hash256::zero(), target_root: Hash256::zero(), current_epoch_shuffling_id: AttestationShufflingId::from_components( Epoch::new(0), Hash256::zero(), ), next_epoch_shuffling_id: AttestationShufflingId::from_components( Epoch::new(0), Hash256::zero(), ), justified_checkpoint, finalized_checkpoint, execution_status, }; fork_choice.process_block(block).unwrap_or_else(|e| { panic!( "process_block op at index {} returned error: {:?}", op_index, e ) }); check_bytes_round_trip(&fork_choice); } Operation::ProcessAttestation { validator_index, block_root, target_epoch, } => { fork_choice .process_attestation(validator_index, block_root, target_epoch) .unwrap_or_else(|_| { panic!( "process_attestation op at index {} returned error", op_index ) }); check_bytes_round_trip(&fork_choice); } Operation::Prune { finalized_root, prune_threshold, expected_len, } => { fork_choice.set_prune_threshold(prune_threshold); fork_choice .maybe_prune(finalized_root) .expect("update_finalized_root op at index {} returned error"); // Ensure that no pruning happened. 
assert_eq!( fork_choice.len(), expected_len, "Prune op at index {} failed with {} instead of {}", op_index, fork_choice.len(), expected_len ); } } } } } /// Gives a hash that is not the zero hash (unless i is `usize::max_value)`. fn get_hash(i: u64) -> Hash256 { Hash256::from_low_u64_be(i + 1) } /// Gives a checkpoint with a root that is not the zero hash (unless i is `usize::max_value)`. /// `Epoch` will always equal `i`. fn get_checkpoint(i: u64) -> Checkpoint { Checkpoint { epoch: Epoch::new(i), root: get_hash(i), } } fn check_bytes_round_trip(original: &ProtoArrayForkChoice) { let bytes = original.as_bytes(); let decoded = ProtoArrayForkChoice::from_bytes(&bytes).expect("fork choice should decode from bytes"); assert!( *original == decoded, "fork choice should encode and decode without change" ); }
35.915888
96
0.476711
fbeaf94b20c0ea8397c8435a6ad321ddf0890d6e
7,185
use crate::{expr::*, types::*}; pub trait OrderedStatement { #[doc(hidden)] // Implementation for the trait. fn add_order_by(&mut self, order: OrderExpr) -> &mut Self; /// Order by column. /// /// # Examples /// /// Order by ASC and DESC /// ``` /// use sea_query::{*, tests_cfg::*}; /// /// let query = Query::select() /// .column(Glyph::Aspect) /// .from(Glyph::Table) /// .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2)) /// .order_by(Glyph::Image, Order::Desc) /// .order_by((Glyph::Table, Glyph::Aspect), Order::Asc) /// .to_owned(); /// /// assert_eq!( /// query.to_string(MysqlQueryBuilder), /// r#"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `image` DESC, `glyph`.`aspect` ASC"# /// ); /// ``` /// /// Order by custom field ordering /// ``` /// use sea_query::{tests_cfg::*, *}; /// /// let query = Query::select() /// .columns(vec![Glyph::Aspect]) /// .from(Glyph::Table) /// .order_by( /// Glyph::Id, /// Order::Field(Values(vec![4.into(), 5.into(), 1.into(), 3.into()])), /// ) /// .to_owned(); /// /// assert_eq!( /// query.to_string(MysqlQueryBuilder), /// [ /// r#"SELECT `aspect`"#, /// r#"FROM `glyph`"#, /// r#"ORDER BY CASE"#, /// r#"WHEN `id`=4 THEN 0"#, /// r#"WHEN `id`=5 THEN 1"#, /// r#"WHEN `id`=1 THEN 2"#, /// r#"WHEN `id`=3 THEN 3"#, /// r#"ELSE 4 END"#, /// ] /// .join(" ") /// ); /// /// assert_eq!( /// query.to_string(PostgresQueryBuilder), /// [ /// r#"SELECT "aspect""#, /// r#"FROM "glyph""#, /// r#"ORDER BY CASE"#, /// r#"WHEN "id"=4 THEN 0"#, /// r#"WHEN "id"=5 THEN 1"#, /// r#"WHEN "id"=1 THEN 2"#, /// r#"WHEN "id"=3 THEN 3"#, /// r#"ELSE 4 END"#, /// ] /// .join(" ") /// ); /// /// assert_eq!( /// query.to_string(SqliteQueryBuilder), /// [ /// r#"SELECT "aspect""#, /// r#"FROM "glyph""#, /// r#"ORDER BY CASE"#, /// r#"WHEN "id"=4 THEN 0"#, /// r#"WHEN "id"=5 THEN 1"#, /// r#"WHEN "id"=1 THEN 2"#, /// r#"WHEN "id"=3 THEN 3"#, /// r#"ELSE 4 END"#, /// ] /// .join(" ") /// ); /// ``` fn order_by<T>(&mut self, 
col: T, order: Order) -> &mut Self where T: IntoColumnRef, { self.add_order_by(OrderExpr { expr: SimpleExpr::Column(col.into_column_ref()), order, nulls: None, }) } #[deprecated( since = "0.9.0", note = "Please use the [`OrderedStatement::order_by`] with a tuple as [`ColumnRef`]" )] fn order_by_tbl<T, C>(&mut self, table: T, col: C, order: Order) -> &mut Self where T: IntoIden, C: IntoIden, { self.order_by((table.into_iden(), col.into_iden()), order) } /// Order by [`SimpleExpr`]. fn order_by_expr(&mut self, expr: SimpleExpr, order: Order) -> &mut Self { self.add_order_by(OrderExpr { expr, order, nulls: None, }) } /// Order by custom string. fn order_by_customs<T>(&mut self, cols: Vec<(T, Order)>) -> &mut Self where T: ToString, { cols.into_iter().for_each(|(c, order)| { self.add_order_by(OrderExpr { expr: SimpleExpr::Custom(c.to_string()), order, nulls: None, }); }); self } /// Order by vector of columns. fn order_by_columns<T>(&mut self, cols: Vec<(T, Order)>) -> &mut Self where T: IntoColumnRef, { cols.into_iter().for_each(|(c, order)| { self.add_order_by(OrderExpr { expr: SimpleExpr::Column(c.into_column_ref()), order, nulls: None, }); }); self } #[deprecated( since = "0.9.0", note = "Please use the [`OrderedStatement::order_by_columns`] with a tuple as [`ColumnRef`]" )] fn order_by_table_columns<T, C>(&mut self, cols: Vec<(T, C, Order)>) -> &mut Self where T: IntoIden, C: IntoIden, { self.order_by_columns( cols.into_iter() .map(|(t, c, o)| ((t.into_iden(), c.into_iden()), o)) .collect(), ) } /// Order by column with nulls order option. 
/// /// # Examples /// /// ``` /// use sea_query::{*, tests_cfg::*}; /// /// let query = Query::select() /// .column(Glyph::Aspect) /// .from(Glyph::Table) /// .order_by_with_nulls(Glyph::Image, Order::Desc, NullOrdering::Last) /// .order_by_with_nulls((Glyph::Table, Glyph::Aspect), Order::Asc, NullOrdering::First) /// .to_owned(); /// /// assert_eq!( /// query.to_string(PostgresQueryBuilder), /// r#"SELECT "aspect" FROM "glyph" ORDER BY "image" DESC NULLS LAST, "glyph"."aspect" ASC NULLS FIRST"# /// ); /// assert_eq!( /// query.to_string(MysqlQueryBuilder), /// r#"SELECT `aspect` FROM `glyph` ORDER BY `image` IS NULL ASC, `image` DESC, `glyph`.`aspect` IS NULL DESC, `glyph`.`aspect` ASC"# /// ); /// ``` fn order_by_with_nulls<T>(&mut self, col: T, order: Order, nulls: NullOrdering) -> &mut Self where T: IntoColumnRef, { self.add_order_by(OrderExpr { expr: SimpleExpr::Column(col.into_column_ref()), order, nulls: Some(nulls), }) } /// Order by [`SimpleExpr`] with nulls order option. fn order_by_expr_with_nulls( &mut self, expr: SimpleExpr, order: Order, nulls: NullOrdering, ) -> &mut Self { self.add_order_by(OrderExpr { expr, order, nulls: Some(nulls), }) } /// Order by custom string with nulls order option. fn order_by_customs_with_nulls<T>(&mut self, cols: Vec<(T, Order, NullOrdering)>) -> &mut Self where T: ToString, { cols.into_iter().for_each(|(c, order, nulls)| { self.add_order_by(OrderExpr { expr: SimpleExpr::Custom(c.to_string()), order, nulls: Some(nulls), }); }); self } /// Order by vector of columns with nulls order option. fn order_by_columns_with_nulls<T>(&mut self, cols: Vec<(T, Order, NullOrdering)>) -> &mut Self where T: IntoColumnRef, { cols.into_iter().for_each(|(c, order, nulls)| { self.add_order_by(OrderExpr { expr: SimpleExpr::Column(c.into_column_ref()), order, nulls: Some(nulls), }); }); self } }
29.446721
141
0.470424
4aabbe7efbef456866710ed954d93109d96d219d
95,310
use crate::pp::Breaks::{Consistent, Inconsistent};
use crate::pp::{self, Breaks};
use rustc_ast::ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
use rustc_ast::ast::{Attribute, GenericArg, MacArgs};
use rustc_ast::ast::{GenericBound, SelfKind, TraitBoundModifier};
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, BinOpToken, DelimToken, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
use rustc_ast::util::parser::{self, AssocOp, Fixity};
use rustc_ast::util::{classify, comments};
use rustc_span::edition::Edition;
use rustc_span::source_map::{SourceMap, Spanned};
use rustc_span::symbol::{kw, sym, IdentPrinter};
use rustc_span::{BytePos, FileName, Span};

use std::borrow::Cow;

#[cfg(test)]
mod tests;

/// What to print at the head of a macro invocation: either the macro's
/// path (`foo!`) or a bare keyword (used for `macro_rules` / `macro`
/// definitions).
pub enum MacHeader<'a> {
    Path(&'a ast::Path),
    Keyword(&'static str),
}

/// AST node variants handed to [`PpAnn`] hooks so an annotator can react
/// to the kind of node being printed.
pub enum AnnNode<'a> {
    Ident(&'a ast::Ident),
    Name(&'a ast::Name),
    Block(&'a ast::Block),
    Item(&'a ast::Item),
    SubItem(ast::NodeId),
    Expr(&'a ast::Expr),
    Pat(&'a ast::Pat),
    Crate(&'a ast::Crate),
}

/// Annotation hooks invoked before (`pre`) and after (`post`) printing a
/// node; both default to no-ops.
pub trait PpAnn {
    fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
    fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) {}
}

/// The do-nothing annotator.
#[derive(Copy, Clone)]
pub struct NoAnn;

impl PpAnn for NoAnn {}

/// A cursor (`current`) over the comments gathered from the input text,
/// used to interleave original comments into the pretty-printed output.
pub struct Comments<'a> {
    sm: &'a SourceMap,
    comments: Vec<comments::Comment>,
    current: usize,
}

impl<'a> Comments<'a> {
    /// Gathers all comments from `input` up front.
    pub fn new(sm: &'a SourceMap, filename: FileName, input: String) -> Comments<'a> {
        let comments = comments::gather_comments(sm, filename, input);
        Comments { sm, comments, current: 0 }
    }

    /// Returns the comment at the cursor without advancing it.
    pub fn next(&self) -> Option<comments::Comment> {
        self.comments.get(self.current).cloned()
    }

    /// Returns the next comment only if it is a `Trailing` comment that
    /// sits after `span` (and before `next_pos`, when given) on the same
    /// source line — i.e. a comment that should be printed on the line
    /// just emitted. Does not advance the cursor.
    pub fn trailing_comment(
        &mut self,
        span: rustc_span::Span,
        next_pos: Option<BytePos>,
    ) -> Option<comments::Comment> {
        if let Some(cmnt) = self.next() {
            if cmnt.style != comments::Trailing {
                return None;
            }
            let span_line = self.sm.lookup_char_pos(span.hi());
            let comment_line = self.sm.lookup_char_pos(cmnt.pos);
            // If no explicit upper bound is given, only the byte right after
            // the comment itself is considered "in range".
            let next = next_pos.unwrap_or_else(|| cmnt.pos + BytePos(1));
            if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
                return Some(cmnt);
            }
        }
        None
    }
}

/// Pretty-printer state: the underlying box printer, the optional comment
/// stream to interleave, the annotation hooks, and whether we are printing
/// an already-expanded crate.
pub struct State<'a> {
    pub s: pp::Printer,
    comments: Option<Comments<'a>>,
    ann: &'a (dyn PpAnn + 'a),
    is_expanded: bool,
}

// Number of spaces per indentation level.
crate const INDENT_UNIT: usize = 4;

/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments to copy forward.
pub fn print_crate<'a>(
    sm: &'a SourceMap,
    krate: &ast::Crate,
    filename: FileName,
    input: String,
    ann: &'a dyn PpAnn,
    is_expanded: bool,
    edition: Edition,
    has_injected_crate: bool,
) -> String {
    let mut s = State {
        s: pp::mk_printer(),
        comments: Some(Comments::new(sm, filename, input)),
        ann,
        is_expanded,
    };

    if is_expanded && has_injected_crate {
        // We need to print `#![no_std]` (and its feature gate) so that
        // compiling pretty-printed source won't inject libstd again.
        // However, we don't want these attributes in the AST because
        // of the feature gate, so we fake them up here.

        // `#![feature(prelude_import)]`
        let pi_nested = attr::mk_nested_word_item(ast::Ident::with_dummy_span(sym::prelude_import));
        let list = attr::mk_list_item(ast::Ident::with_dummy_span(sym::feature), vec![pi_nested]);
        let fake_attr = attr::mk_attr_inner(list);
        s.print_attribute(&fake_attr);

        // Currently, in Rust 2018 we don't have `extern crate std;` at the crate
        // root, so this is not needed, and actually breaks things.
        // (continuation of `print_crate`: the 2015-only `#![no_std]` half of
        // the faked-up injected-crate attributes.)
        if edition == Edition::Edition2015 {
            // `#![no_std]`
            let no_std_meta = attr::mk_word_item(ast::Ident::with_dummy_span(sym::no_std));
            let fake_attr = attr::mk_attr_inner(no_std_meta);
            s.print_attribute(&fake_attr);
        }
    }

    // Print the module tree, flush any comments left over, then run the
    // crate-level `post` annotation hook and finish the printer.
    s.print_mod(&krate.module, &krate.attrs);
    s.print_remaining_comments();
    s.ann.post(&mut s, AnnNode::Crate(krate));
    s.s.eof()
}

/// Runs `f` against a fresh, comment-less printer and returns everything
/// it emitted as a `String`.
pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
    let mut printer =
        State { s: pp::mk_printer(), comments: None, ann: &NoAnn, is_expanded: false };
    f(&mut printer);
    printer.s.eof()
}

// This makes comma-separated lists look slightly nicer,
// and also addresses a specific regression described in issue #63896.
//
// Returns `false` (no leading space) before a comma, and before a
// parenthesized group that directly follows an identifier (a call-like
// shape); `true` otherwise.
fn tt_prepend_space(tt: &TokenTree, prev: &TokenTree) -> bool {
    match tt {
        TokenTree::Token(token) => match token.kind {
            token::Comma => false,
            _ => true,
        },
        TokenTree::Delimited(_, DelimToken::Paren, _) => match prev {
            TokenTree::Token(token) => match token.kind {
                token::Ident(_, _) => false,
                _ => true,
            },
            _ => true,
        },
        _ => true,
    }
}

/// Maps a binary-operator token to its surface syntax.
fn binop_to_string(op: BinOpToken) -> &'static str {
    match op {
        token::Plus => "+",
        token::Minus => "-",
        token::Star => "*",
        token::Slash => "/",
        token::Percent => "%",
        token::Caret => "^",
        token::And => "&",
        token::Or => "|",
        token::Shl => "<<",
        token::Shr => ">>",
    }
}

/// Renders a literal token back to source form, re-adding string/byte
/// quoting, raw-string `#` delimiters, and any numeric suffix.
pub fn literal_to_string(lit: token::Lit) -> String {
    let token::Lit { kind, symbol, suffix } = lit;
    let mut out = match kind {
        token::Byte => format!("b'{}'", symbol),
        token::Char => format!("'{}'", symbol),
        token::Str => format!("\"{}\"", symbol),
        token::StrRaw(n) => {
            format!("r{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = symbol)
        }
        token::ByteStr => format!("b\"{}\"", symbol),
        token::ByteStrRaw(n) => {
            format!("br{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = symbol)
        }
        token::Integer | token::Float | token::Bool | token::Err => symbol.to_string(),
    };

    if let Some(suffix) = suffix {
        out.push_str(&suffix.as_str())
    }

    out
}

/// Print the token kind precisely, without converting `$crate` into its respective crate name.
pub fn token_kind_to_string(tok: &TokenKind) -> String {
    token_kind_to_string_ext(tok, None)
}

// When `convert_dollar_crate` is `Some(span)`, identifier printing (via
// `IdentPrinter`) may resolve `$crate`; `None` prints it literally.
fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>) -> String {
    match *tok {
        token::Eq => "=".to_string(),
        token::Lt => "<".to_string(),
        token::Le => "<=".to_string(),
        token::EqEq => "==".to_string(),
        token::Ne => "!=".to_string(),
        token::Ge => ">=".to_string(),
        token::Gt => ">".to_string(),
        token::Not => "!".to_string(),
        token::Tilde => "~".to_string(),
        token::OrOr => "||".to_string(),
        token::AndAnd => "&&".to_string(),
        token::BinOp(op) => binop_to_string(op).to_string(),
        token::BinOpEq(op) => format!("{}=", binop_to_string(op)),

        /* Structural symbols */
        token::At => "@".to_string(),
        token::Dot => ".".to_string(),
        token::DotDot => "..".to_string(),
        token::DotDotDot => "...".to_string(),
        token::DotDotEq => "..=".to_string(),
        token::Comma => ",".to_string(),
        token::Semi => ";".to_string(),
        token::Colon => ":".to_string(),
        token::ModSep => "::".to_string(),
        token::RArrow => "->".to_string(),
        token::LArrow => "<-".to_string(),
        token::FatArrow => "=>".to_string(),
        token::OpenDelim(token::Paren) => "(".to_string(),
        token::CloseDelim(token::Paren) => ")".to_string(),
        token::OpenDelim(token::Bracket) => "[".to_string(),
        token::CloseDelim(token::Bracket) => "]".to_string(),
        token::OpenDelim(token::Brace) => "{".to_string(),
        token::CloseDelim(token::Brace) => "}".to_string(),
        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(),
        token::Pound => "#".to_string(),
        token::Dollar => "$".to_string(),
        token::Question => "?".to_string(),
        token::SingleQuote => "'".to_string(),

        /* Literals */
        token::Literal(lit) => literal_to_string(lit),

        /* Name components */
        token::Ident(s, is_raw) => IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string(),
        token::Lifetime(s) => s.to_string(),

        /* Other */
        token::DocComment(s) => s.to_string(),
        token::Eof => "<eof>".to_string(),
        token::Whitespace => " ".to_string(),
        token::Comment => "/* */".to_string(),
        token::Shebang(s) => format!("/* shebang: {}*/", s),
        token::Unknown(s) => s.to_string(),

        token::Interpolated(ref nt) => nonterminal_to_string(nt),
    }
}

/// Print the token precisely, without converting `$crate` into its respective crate name.
pub fn token_to_string(token: &Token) -> String {
    token_to_string_ext(token, false)
}

// Like `token_kind_to_string_ext`, but derives the `$crate`-conversion span
// from the token itself when conversion is requested.
fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
    let convert_dollar_crate = convert_dollar_crate.then_some(token.span);
    token_kind_to_string_ext(&token.kind, convert_dollar_crate)
}

/// Renders an interpolated (macro-expansion) fragment by dispatching to
/// the matching `*_to_string` helper for its AST kind.
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
    match *nt {
        token::NtExpr(ref e) => expr_to_string(e),
        token::NtMeta(ref e) => attr_item_to_string(e),
        token::NtTy(ref e) => ty_to_string(e),
        token::NtPath(ref e) => path_to_string(e),
        token::NtItem(ref e) => item_to_string(e),
        token::NtBlock(ref e) => block_to_string(e),
        token::NtStmt(ref e) => stmt_to_string(e),
        token::NtPat(ref e) => pat_to_string(e),
        token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw).to_string(),
        token::NtLifetime(e) => e.to_string(),
        token::NtLiteral(ref e) => expr_to_string(e),
        token::NtTT(ref tree) => tt_to_string(tree.clone()),
        token::NtVis(ref e) => vis_to_string(e),
    }
}

// Thin `to_string` wrappers over the corresponding `State::print_*` methods.

pub fn ty_to_string(ty: &ast::Ty) -> String {
    to_string(|s| s.print_type(ty))
}

pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
    to_string(|s| s.print_type_bounds("", bounds))
}

pub fn pat_to_string(pat: &ast::Pat) -> String {
    to_string(|s| s.print_pat(pat))
}

pub fn expr_to_string(e: &ast::Expr) -> String {
    to_string(|s| s.print_expr(e))
}

pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
    to_string(|s| s.print_tt(tt, false))
}

pub fn tts_to_string(tokens: TokenStream) -> String {
    to_string(|s| s.print_tts(tokens, false))
}

pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
    to_string(|s| s.print_stmt(stmt))
}

pub fn item_to_string(i: &ast::Item) -> String {
to_string(|s| s.print_item(i)) } pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String { to_string(|s| s.print_generic_params(generic_params)) } pub fn path_to_string(p: &ast::Path) -> String { to_string(|s| s.print_path(p, false, 0)) } pub fn path_segment_to_string(p: &ast::PathSegment) -> String { to_string(|s| s.print_path_segment(p, false)) } pub fn vis_to_string(v: &ast::Visibility) -> String { to_string(|s| s.print_visibility(v)) } fn block_to_string(blk: &ast::Block) -> String { to_string(|s| { // Containing cbox, will be closed by `print_block` at `}`. s.cbox(INDENT_UNIT); // Head-ibox, will be closed by `print_block` after `{`. s.ibox(0); s.print_block(blk) }) } pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String { to_string(|s| s.print_meta_list_item(li)) } fn attr_item_to_string(ai: &ast::AttrItem) -> String { to_string(|s| s.print_attr_item(ai, ai.path.span)) } pub fn attribute_to_string(attr: &ast::Attribute) -> String { to_string(|s| s.print_attribute(attr)) } pub fn param_to_string(arg: &ast::Param) -> String { to_string(|s| s.print_param(arg, false)) } fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String { format!("{}{}", to_string(|s| s.print_visibility(vis)), s) } impl std::ops::Deref for State<'_> { type Target = pp::Printer; fn deref(&self) -> &Self::Target { &self.s } } impl std::ops::DerefMut for State<'_> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.s } } pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::DerefMut { fn comments(&mut self) -> &mut Option<Comments<'a>>; fn print_ident(&mut self, ident: ast::Ident); fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); fn strsep<T, F>( &mut self, sep: &'static str, space_before: bool, b: Breaks, elts: &[T], mut op: F, ) where F: FnMut(&mut Self, &T), { self.rbox(0, b); if let Some((first, rest)) = elts.split_first() { op(self, first); for elt in rest { if space_before { 
self.space(); } self.word_space(sep); op(self, elt); } } self.end(); } fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], op: F) where F: FnMut(&mut Self, &T), { self.strsep(",", false, b, elts, op) } fn maybe_print_comment(&mut self, pos: BytePos) { while let Some(ref cmnt) = self.next_comment() { if cmnt.pos < pos { self.print_comment(cmnt); } else { break; } } } fn print_comment(&mut self, cmnt: &comments::Comment) { match cmnt.style { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1); self.zerobreak(); self.word(cmnt.lines[0].clone()); self.zerobreak() } comments::Isolated => { self.hardbreak_if_not_bol(); for line in &cmnt.lines { // Don't print empty lines because they will end up as trailing // whitespace. if !line.is_empty() { self.word(line.clone()); } self.hardbreak(); } } comments::Trailing => { if !self.is_beginning_of_line() { self.word(" "); } if cmnt.lines.len() == 1 { self.word(cmnt.lines[0].clone()); self.hardbreak() } else { self.ibox(0); for line in &cmnt.lines { if !line.is_empty() { self.word(line.clone()); } self.hardbreak(); } self.end(); } } comments::BlankLine => { // We need to do at least one, possibly two hardbreaks. 
let twice = match self.last_token() { pp::Token::String(s) => ";" == s, pp::Token::Begin(_) => true, pp::Token::End => true, _ => false, }; if twice { self.hardbreak(); } self.hardbreak(); } } if let Some(cmnts) = self.comments() { cmnts.current += 1; } } fn next_comment(&mut self) -> Option<comments::Comment> { self.comments().as_mut().and_then(|c| c.next()) } fn print_literal(&mut self, lit: &ast::Lit) { self.maybe_print_comment(lit.span.lo()); self.word(lit.token.to_string()) } fn print_string(&mut self, st: &str, style: ast::StrStyle) { let st = match style { ast::StrStyle::Cooked => (format!("\"{}\"", st.escape_debug())), ast::StrStyle::Raw(n) => { format!("r{delim}\"{string}\"{delim}", delim = "#".repeat(n as usize), string = st) } }; self.word(st) } fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) { self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true) } fn print_inner_attributes_no_trailing_hardbreak(&mut self, attrs: &[ast::Attribute]) { self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, false) } fn print_outer_attributes(&mut self, attrs: &[ast::Attribute]) { self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true) } fn print_inner_attributes_inline(&mut self, attrs: &[ast::Attribute]) { self.print_either_attributes(attrs, ast::AttrStyle::Inner, true, true) } fn print_outer_attributes_inline(&mut self, attrs: &[ast::Attribute]) { self.print_either_attributes(attrs, ast::AttrStyle::Outer, true, true) } fn print_either_attributes( &mut self, attrs: &[ast::Attribute], kind: ast::AttrStyle, is_inline: bool, trailing_hardbreak: bool, ) { let mut count = 0; for attr in attrs { if attr.style == kind { self.print_attribute_inline(attr, is_inline); if is_inline { self.nbsp(); } count += 1; } } if count > 0 && trailing_hardbreak && !is_inline { self.hardbreak_if_not_bol(); } } fn print_attribute(&mut self, attr: &ast::Attribute) { self.print_attribute_inline(attr, false) } fn print_attribute_inline(&mut 
self, attr: &ast::Attribute, is_inline: bool) { if !is_inline { self.hardbreak_if_not_bol(); } self.maybe_print_comment(attr.span.lo()); match attr.kind { ast::AttrKind::Normal(ref item) => { match attr.style { ast::AttrStyle::Inner => self.word("#!["), ast::AttrStyle::Outer => self.word("#["), } self.print_attr_item(&item, attr.span); self.word("]"); } ast::AttrKind::DocComment(comment) => { self.word(comment.to_string()); self.hardbreak() } } } fn print_attr_item(&mut self, item: &ast::AttrItem, span: Span) { self.ibox(0); match &item.args { MacArgs::Delimited(_, delim, tokens) => self.print_mac_common( Some(MacHeader::Path(&item.path)), false, None, delim.to_token(), tokens.clone(), true, span, ), MacArgs::Empty | MacArgs::Eq(..) => { self.print_path(&item.path, false, 0); if let MacArgs::Eq(_, tokens) = &item.args { self.space(); self.word_space("="); self.print_tts(tokens.clone(), true); } } } self.end(); } fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) { match item { ast::NestedMetaItem::MetaItem(ref mi) => self.print_meta_item(mi), ast::NestedMetaItem::Literal(ref lit) => self.print_literal(lit), } } fn print_meta_item(&mut self, item: &ast::MetaItem) { self.ibox(INDENT_UNIT); match item.kind { ast::MetaItemKind::Word => self.print_path(&item.path, false, 0), ast::MetaItemKind::NameValue(ref value) => { self.print_path(&item.path, false, 0); self.space(); self.word_space("="); self.print_literal(value); } ast::MetaItemKind::List(ref items) => { self.print_path(&item.path, false, 0); self.popen(); self.commasep(Consistent, &items[..], |s, i| s.print_meta_list_item(i)); self.pclose(); } } self.end(); } /// This doesn't deserve to be called "pretty" printing, but it should be /// meaning-preserving. A quick hack that might help would be to look at the /// spans embedded in the TTs to decide where to put spaces and newlines. 
/// But it'd be better to parse these according to the grammar of the /// appropriate macro, transcribe back into the grammar we just parsed from, /// and then pretty-print the resulting AST nodes (so, e.g., we print /// expression arguments as expressions). It can be done! I think. fn print_tt(&mut self, tt: tokenstream::TokenTree, convert_dollar_crate: bool) { match tt { TokenTree::Token(ref token) => { self.word(token_to_string_ext(&token, convert_dollar_crate)); match token.kind { token::DocComment(..) => self.hardbreak(), _ => {} } } TokenTree::Delimited(dspan, delim, tts) => { self.print_mac_common( None, false, None, delim, tts, convert_dollar_crate, dspan.entire(), ); } } } fn print_tts(&mut self, tts: tokenstream::TokenStream, convert_dollar_crate: bool) { let mut iter = tts.into_trees().peekable(); while let Some(tt) = iter.next() { let show_space = if let Some(next) = iter.peek() { tt_prepend_space(next, &tt) } else { false }; self.print_tt(tt, convert_dollar_crate); if show_space { self.space(); } } } fn print_mac_common( &mut self, header: Option<MacHeader<'_>>, has_bang: bool, ident: Option<ast::Ident>, delim: DelimToken, tts: TokenStream, convert_dollar_crate: bool, span: Span, ) { if delim == DelimToken::Brace { self.cbox(INDENT_UNIT); } match header { Some(MacHeader::Path(path)) => self.print_path(path, false, 0), Some(MacHeader::Keyword(kw)) => self.word(kw), None => {} } if has_bang { self.word("!"); } if let Some(ident) = ident { self.nbsp(); self.print_ident(ident); } match delim { DelimToken::Brace => { if header.is_some() || has_bang || ident.is_some() { self.nbsp(); } self.word("{"); if !tts.is_empty() { self.space(); } } _ => self.word(token_kind_to_string(&token::OpenDelim(delim))), } self.ibox(0); self.print_tts(tts, convert_dollar_crate); self.end(); match delim { DelimToken::Brace => self.bclose(span), _ => self.word(token_kind_to_string(&token::CloseDelim(delim))), } } fn print_path(&mut self, path: &ast::Path, colons_before_params: 
bool, depth: usize) { self.maybe_print_comment(path.span.lo()); for (i, segment) in path.segments[..path.segments.len() - depth].iter().enumerate() { if i > 0 { self.word("::") } self.print_path_segment(segment, colons_before_params); } } fn print_path_segment(&mut self, segment: &ast::PathSegment, colons_before_params: bool) { if segment.ident.name != kw::PathRoot { self.print_ident(segment.ident); if let Some(ref args) = segment.args { self.print_generic_args(args, colons_before_params); } } } fn head<S: Into<Cow<'static, str>>>(&mut self, w: S) { let w = w.into(); // Outer-box is consistent. self.cbox(INDENT_UNIT); // Head-box is inconsistent. self.ibox(w.len() + 1); // Keyword that starts the head. if !w.is_empty() { self.word_nbsp(w); } } fn bopen(&mut self) { self.word("{"); self.end(); // Close the head-box. } fn bclose_maybe_open(&mut self, span: rustc_span::Span, close_box: bool) { self.maybe_print_comment(span.hi()); self.break_offset_if_not_bol(1, -(INDENT_UNIT as isize)); self.word("}"); if close_box { self.end(); // Close the outer-box. } } fn bclose(&mut self, span: rustc_span::Span) { self.bclose_maybe_open(span, true) } fn break_offset_if_not_bol(&mut self, n: usize, off: isize) { if !self.is_beginning_of_line() { self.break_offset(n, off) } else { if off != 0 && self.last_token().is_hardbreak_tok() { // We do something pretty sketchy here: tuck the nonzero // offset-adjustment we were going to deposit along with the // break into the previous hardbreak. 
                self.replace_last_token(pp::Printer::hardbreak_tok_offset(off));
            } // end `if off != 0 && ...`
        } // end `else`
    } // end `fn break_offset_if_not_bol`
// Closing brace of the `PrintState` trait (its opening line is before the
// visible region).
}

impl<'a> PrintState<'a> for State<'a> {
    fn comments(&mut self) -> &mut Option<Comments<'a>> {
        &mut self.comments
    }

    /// Prints an identifier (guessing whether it needs `r#` via
    /// `is_raw_guess`) and fires the `post` annotation hook for it.
    fn print_ident(&mut self, ident: ast::Ident) {
        self.s.word(IdentPrinter::for_ast_ident(ident, ident.is_raw_guess()).to_string());
        self.ann.post(self, AnnNode::Ident(&ident))
    }

    /// Prints generic arguments: either the angle-bracketed form
    /// (`<A, B = C>`, optionally preceded by `::`) or the parenthesized
    /// `Fn`-sugar form (`(T, U) -> R`).
    fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool) {
        if colons_before_params {
            self.s.word("::")
        }

        match *args {
            ast::GenericArgs::AngleBracketed(ref data) => {
                self.s.word("<");
                self.commasep(Inconsistent, &data.args, |s, arg| match arg {
                    ast::AngleBracketedArg::Arg(a) => s.print_generic_arg(a),
                    ast::AngleBracketedArg::Constraint(c) => s.print_assoc_constraint(c),
                });
                self.s.word(">")
            }

            ast::GenericArgs::Parenthesized(ref data) => {
                self.s.word("(");
                self.commasep(Inconsistent, &data.inputs, |s, ty| s.print_type(ty));
                self.s.word(")");
                self.print_fn_ret_ty(&data.output);
            }
        }
    }
}

impl<'a> State<'a> {
    // Synthesizes a comment that was not textually present in the original source
    // file.
pub fn synth_comment(&mut self, text: String) { self.s.word("/*"); self.s.space(); self.s.word(text); self.s.space(); self.s.word("*/") } crate fn commasep_cmnt<T, F, G>(&mut self, b: Breaks, elts: &[T], mut op: F, mut get_span: G) where F: FnMut(&mut State<'_>, &T), G: FnMut(&T) -> rustc_span::Span, { self.rbox(0, b); let len = elts.len(); let mut i = 0; for elt in elts { self.maybe_print_comment(get_span(elt).hi()); op(self, elt); i += 1; if i < len { self.s.word(","); self.maybe_print_trailing_comment(get_span(elt), Some(get_span(&elts[i]).hi())); self.space_if_not_bol(); } } self.end(); } crate fn commasep_exprs(&mut self, b: Breaks, exprs: &[P<ast::Expr>]) { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span) } pub fn print_mod(&mut self, _mod: &ast::Mod, attrs: &[ast::Attribute]) { self.print_inner_attributes(attrs); for item in &_mod.items { self.print_item(item); } } crate fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod, attrs: &[Attribute]) { self.print_inner_attributes(attrs); for item in &nmod.items { self.print_foreign_item(item); } } pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) { if let Some(lt) = *lifetime { self.print_lifetime(lt); self.nbsp(); } } fn print_assoc_constraint(&mut self, constraint: &ast::AssocTyConstraint) { self.print_ident(constraint.ident); self.s.space(); match &constraint.kind { ast::AssocTyConstraintKind::Equality { ty } => { self.word_space("="); self.print_type(ty); } ast::AssocTyConstraintKind::Bound { bounds } => { self.print_type_bounds(":", &*bounds); } } } crate fn print_generic_arg(&mut self, generic_arg: &GenericArg) { match generic_arg { GenericArg::Lifetime(lt) => self.print_lifetime(*lt), GenericArg::Type(ty) => self.print_type(ty), GenericArg::Const(ct) => self.print_expr(&ct.value), } } pub fn print_type(&mut self, ty: &ast::Ty) { self.maybe_print_comment(ty.span.lo()); self.ibox(0); match ty.kind { ast::TyKind::Slice(ref ty) => { self.s.word("["); self.print_type(ty); 
self.s.word("]"); } ast::TyKind::Ptr(ref mt) => { self.s.word("*"); self.print_mt(mt, true); } ast::TyKind::Rptr(ref lifetime, ref mt) => { self.s.word("&"); self.print_opt_lifetime(lifetime); self.print_mt(mt, false); } ast::TyKind::Never => { self.s.word("!"); } ast::TyKind::Tup(ref elts) => { self.popen(); self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(ty)); if elts.len() == 1 { self.s.word(","); } self.pclose(); } ast::TyKind::Paren(ref typ) => { self.popen(); self.print_type(typ); self.pclose(); } ast::TyKind::BareFn(ref f) => { self.print_ty_fn(f.ext, f.unsafety, &f.decl, None, &f.generic_params); } ast::TyKind::Path(None, ref path) => { self.print_path(path, false, 0); } ast::TyKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, false), ast::TyKind::TraitObject(ref bounds, syntax) => { let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" }; self.print_type_bounds(prefix, &bounds[..]); } ast::TyKind::ImplTrait(_, ref bounds) => { self.print_type_bounds("impl", &bounds[..]); } ast::TyKind::Array(ref ty, ref length) => { self.s.word("["); self.print_type(ty); self.s.word("; "); self.print_expr(&length.value); self.s.word("]"); } ast::TyKind::Typeof(ref e) => { self.s.word("typeof("); self.print_expr(&e.value); self.s.word(")"); } ast::TyKind::Infer => { self.s.word("_"); } ast::TyKind::Err => { self.popen(); self.s.word("/*ERROR*/"); self.pclose(); } ast::TyKind::ImplicitSelf => { self.s.word("Self"); } ast::TyKind::MacCall(ref m) => { self.print_mac(m); } ast::TyKind::CVarArgs => { self.s.word("..."); } } self.end(); } crate fn print_foreign_item(&mut self, item: &ast::ForeignItem) { let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item; self.ann.pre(self, AnnNode::SubItem(id)); self.hardbreak_if_not_bol(); self.maybe_print_comment(span.lo()); self.print_outer_attributes(attrs); match kind { ast::ForeignItemKind::Fn(def, sig, gen, body) => { self.print_fn_full(sig, ident, gen, 
vis, *def, body.as_deref(), attrs); } ast::ForeignItemKind::Static(ty, mutbl, body) => { let def = ast::Defaultness::Final; self.print_item_const(ident, Some(*mutbl), ty, body.as_deref(), vis, def); } ast::ForeignItemKind::TyAlias(def, generics, bounds, ty) => { self.print_associated_type(ident, generics, bounds, ty.as_deref(), vis, *def); } ast::ForeignItemKind::MacCall(m) => { self.print_mac(m); if m.args.need_semicolon() { self.s.word(";"); } } } self.ann.post(self, AnnNode::SubItem(id)) } fn print_item_const( &mut self, ident: ast::Ident, mutbl: Option<ast::Mutability>, ty: &ast::Ty, body: Option<&ast::Expr>, vis: &ast::Visibility, defaultness: ast::Defaultness, ) { self.head(""); self.print_visibility(vis); self.print_defaultness(defaultness); let leading = match mutbl { None => "const", Some(ast::Mutability::Not) => "static", Some(ast::Mutability::Mut) => "static mut", }; self.word_space(leading); self.print_ident(ident); self.word_space(":"); self.print_type(ty); self.s.space(); self.end(); // end the head-ibox if let Some(body) = body { self.word_space("="); self.print_expr(body); } self.s.word(";"); self.end(); // end the outer cbox } fn print_associated_type( &mut self, ident: ast::Ident, generics: &ast::Generics, bounds: &ast::GenericBounds, ty: Option<&ast::Ty>, vis: &ast::Visibility, defaultness: ast::Defaultness, ) { self.head(""); self.print_visibility(vis); self.print_defaultness(defaultness); self.word_space("type"); self.print_ident(ident); self.print_generic_params(&generics.params); self.print_type_bounds(":", bounds); self.print_where_clause(&generics.where_clause); if let Some(ty) = ty { self.s.space(); self.word_space("="); self.print_type(ty); } self.s.word(";"); self.end(); // end inner head-block self.end(); // end outer head-block } /// Pretty-prints an item. 
crate fn print_item(&mut self, item: &ast::Item) { self.hardbreak_if_not_bol(); self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(&item.attrs); self.ann.pre(self, AnnNode::Item(item)); match item.kind { ast::ItemKind::ExternCrate(orig_name) => { self.head(visibility_qualified(&item.vis, "extern crate")); if let Some(orig_name) = orig_name { self.print_name(orig_name); self.s.space(); self.s.word("as"); self.s.space(); } self.print_ident(item.ident); self.s.word(";"); self.end(); // end inner head-block self.end(); // end outer head-block } ast::ItemKind::Use(ref tree) => { self.head(visibility_qualified(&item.vis, "use")); self.print_use_tree(tree); self.s.word(";"); self.end(); // end inner head-block self.end(); // end outer head-block } ast::ItemKind::Static(ref ty, mutbl, ref body) => { let def = ast::Defaultness::Final; self.print_item_const(item.ident, Some(mutbl), ty, body.as_deref(), &item.vis, def); } ast::ItemKind::Const(def, ref ty, ref body) => { self.print_item_const(item.ident, None, ty, body.as_deref(), &item.vis, def); } ast::ItemKind::Fn(def, ref sig, ref gen, ref body) => { let body = body.as_deref(); self.print_fn_full(sig, item.ident, gen, &item.vis, def, body, &item.attrs); } ast::ItemKind::Mod(ref _mod) => { self.head(visibility_qualified(&item.vis, "mod")); self.print_ident(item.ident); if _mod.inline || self.is_expanded { self.nbsp(); self.bopen(); self.print_mod(_mod, &item.attrs); self.bclose(item.span); } else { self.s.word(";"); self.end(); // end inner head-block self.end(); // end outer head-block } } ast::ItemKind::ForeignMod(ref nmod) => { self.head("extern"); if let Some(abi) = nmod.abi { self.print_literal(&abi.as_lit()); self.nbsp(); } self.bopen(); self.print_foreign_mod(nmod, &item.attrs); self.bclose(item.span); } ast::ItemKind::GlobalAsm(ref ga) => { self.head(visibility_qualified(&item.vis, "global_asm!")); self.s.word(ga.asm.to_string()); self.end(); } ast::ItemKind::TyAlias(def, ref generics, ref bounds, 
ref ty) => { let ty = ty.as_deref(); self.print_associated_type(item.ident, generics, bounds, ty, &item.vis, def); } ast::ItemKind::Enum(ref enum_definition, ref params) => { self.print_enum_def(enum_definition, params, item.ident, item.span, &item.vis); } ast::ItemKind::Struct(ref struct_def, ref generics) => { self.head(visibility_qualified(&item.vis, "struct")); self.print_struct(struct_def, generics, item.ident, item.span, true); } ast::ItemKind::Union(ref struct_def, ref generics) => { self.head(visibility_qualified(&item.vis, "union")); self.print_struct(struct_def, generics, item.ident, item.span, true); } ast::ItemKind::Impl { unsafety, polarity, defaultness, constness, ref generics, ref of_trait, ref self_ty, ref items, } => { self.head(""); self.print_visibility(&item.vis); self.print_defaultness(defaultness); self.print_unsafety(unsafety); self.word_nbsp("impl"); self.print_constness(constness); if !generics.params.is_empty() { self.print_generic_params(&generics.params); self.s.space(); } if let ast::ImplPolarity::Negative(_) = polarity { self.s.word("!"); } if let Some(ref t) = *of_trait { self.print_trait_ref(t); self.s.space(); self.word_space("for"); } self.print_type(self_ty); self.print_where_clause(&generics.where_clause); self.s.space(); self.bopen(); self.print_inner_attributes(&item.attrs); for impl_item in items { self.print_assoc_item(impl_item); } self.bclose(item.span); } ast::ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref trait_items) => { self.head(""); self.print_visibility(&item.vis); self.print_unsafety(unsafety); self.print_is_auto(is_auto); self.word_nbsp("trait"); self.print_ident(item.ident); self.print_generic_params(&generics.params); let mut real_bounds = Vec::with_capacity(bounds.len()); for b in bounds.iter() { if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b { self.s.space(); self.word_space("for ?"); self.print_trait_ref(&ptr.trait_ref); } else { real_bounds.push(b.clone()); } } 
self.print_type_bounds(":", &real_bounds[..]); self.print_where_clause(&generics.where_clause); self.s.word(" "); self.bopen(); self.print_inner_attributes(&item.attrs); for trait_item in trait_items { self.print_assoc_item(trait_item); } self.bclose(item.span); } ast::ItemKind::TraitAlias(ref generics, ref bounds) => { self.head(""); self.print_visibility(&item.vis); self.word_nbsp("trait"); self.print_ident(item.ident); self.print_generic_params(&generics.params); let mut real_bounds = Vec::with_capacity(bounds.len()); // FIXME(durka) this seems to be some quite outdated syntax for b in bounds.iter() { if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b { self.s.space(); self.word_space("for ?"); self.print_trait_ref(&ptr.trait_ref); } else { real_bounds.push(b.clone()); } } self.nbsp(); self.print_type_bounds("=", &real_bounds[..]); self.print_where_clause(&generics.where_clause); self.s.word(";"); } ast::ItemKind::MacCall(ref mac) => { self.print_mac(mac); if mac.args.need_semicolon() { self.s.word(";"); } } ast::ItemKind::MacroDef(ref macro_def) => { let (kw, has_bang) = if macro_def.macro_rules { ("macro_rules", true) } else { self.print_visibility(&item.vis); ("macro", false) }; self.print_mac_common( Some(MacHeader::Keyword(kw)), has_bang, Some(item.ident), macro_def.body.delim(), macro_def.body.inner_tokens(), true, item.span, ); } } self.ann.post(self, AnnNode::Item(item)) } fn print_trait_ref(&mut self, t: &ast::TraitRef) { self.print_path(&t.path, false, 0) } fn print_formal_generic_params(&mut self, generic_params: &[ast::GenericParam]) { if !generic_params.is_empty() { self.s.word("for"); self.print_generic_params(generic_params); self.nbsp(); } } fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) { self.print_formal_generic_params(&t.bound_generic_params); self.print_trait_ref(&t.trait_ref) } crate fn print_enum_def( &mut self, enum_definition: &ast::EnumDef, generics: &ast::Generics, ident: ast::Ident, span: 
rustc_span::Span, visibility: &ast::Visibility, ) { self.head(visibility_qualified(visibility, "enum")); self.print_ident(ident); self.print_generic_params(&generics.params); self.print_where_clause(&generics.where_clause); self.s.space(); self.print_variants(&enum_definition.variants, span) } crate fn print_variants(&mut self, variants: &[ast::Variant], span: rustc_span::Span) { self.bopen(); for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); self.print_outer_attributes(&v.attrs); self.ibox(INDENT_UNIT); self.print_variant(v); self.s.word(","); self.end(); self.maybe_print_trailing_comment(v.span, None); } self.bclose(span) } crate fn print_visibility(&mut self, vis: &ast::Visibility) { match vis.node { ast::VisibilityKind::Public => self.word_nbsp("pub"), ast::VisibilityKind::Crate(sugar) => match sugar { ast::CrateSugar::PubCrate => self.word_nbsp("pub(crate)"), ast::CrateSugar::JustCrate => self.word_nbsp("crate"), }, ast::VisibilityKind::Restricted { ref path, .. } => { let path = to_string(|s| s.print_path(path, false, 0)); if path == "self" || path == "super" { self.word_nbsp(format!("pub({})", path)) } else { self.word_nbsp(format!("pub(in {})", path)) } } ast::VisibilityKind::Inherited => {} } } crate fn print_defaultness(&mut self, defaultness: ast::Defaultness) { if let ast::Defaultness::Default(_) = defaultness { self.word_nbsp("default"); } } crate fn print_struct( &mut self, struct_def: &ast::VariantData, generics: &ast::Generics, ident: ast::Ident, span: rustc_span::Span, print_finalizer: bool, ) { self.print_ident(ident); self.print_generic_params(&generics.params); match struct_def { ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => { if let ast::VariantData::Tuple(..) 
= struct_def { self.popen(); self.commasep(Inconsistent, struct_def.fields(), |s, field| { s.maybe_print_comment(field.span.lo()); s.print_outer_attributes(&field.attrs); s.print_visibility(&field.vis); s.print_type(&field.ty) }); self.pclose(); } self.print_where_clause(&generics.where_clause); if print_finalizer { self.s.word(";"); } self.end(); self.end(); // Close the outer-box. } ast::VariantData::Struct(..) => { self.print_where_clause(&generics.where_clause); self.nbsp(); self.bopen(); self.hardbreak_if_not_bol(); for field in struct_def.fields() { self.hardbreak_if_not_bol(); self.maybe_print_comment(field.span.lo()); self.print_outer_attributes(&field.attrs); self.print_visibility(&field.vis); self.print_ident(field.ident.unwrap()); self.word_nbsp(":"); self.print_type(&field.ty); self.s.word(","); } self.bclose(span) } } } crate fn print_variant(&mut self, v: &ast::Variant) { self.head(""); self.print_visibility(&v.vis); let generics = ast::Generics::default(); self.print_struct(&v.data, &generics, v.ident, v.span, false); match v.disr_expr { Some(ref d) => { self.s.space(); self.word_space("="); self.print_expr(&d.value) } _ => {} } } crate fn print_assoc_item(&mut self, item: &ast::AssocItem) { let ast::Item { id, span, ident, ref attrs, ref kind, ref vis, tokens: _ } = *item; self.ann.pre(self, AnnNode::SubItem(id)); self.hardbreak_if_not_bol(); self.maybe_print_comment(span.lo()); self.print_outer_attributes(attrs); match kind { ast::AssocItemKind::Fn(def, sig, gen, body) => { self.print_fn_full(sig, ident, gen, vis, *def, body.as_deref(), attrs); } ast::AssocItemKind::Const(def, ty, body) => { self.print_item_const(ident, None, ty, body.as_deref(), vis, *def); } ast::AssocItemKind::TyAlias(def, generics, bounds, ty) => { self.print_associated_type(ident, generics, bounds, ty.as_deref(), vis, *def); } ast::AssocItemKind::MacCall(m) => { self.print_mac(m); if m.args.need_semicolon() { self.s.word(";"); } } } self.ann.post(self, AnnNode::SubItem(id)) } 
crate fn print_stmt(&mut self, st: &ast::Stmt) { self.maybe_print_comment(st.span.lo()); match st.kind { ast::StmtKind::Local(ref loc) => { self.print_outer_attributes(&loc.attrs); self.space_if_not_bol(); self.ibox(INDENT_UNIT); self.word_nbsp("let"); self.ibox(INDENT_UNIT); self.print_local_decl(loc); self.end(); if let Some(ref init) = loc.init { self.nbsp(); self.word_space("="); self.print_expr(init); } self.s.word(";"); self.end(); } ast::StmtKind::Item(ref item) => self.print_item(item), ast::StmtKind::Expr(ref expr) => { self.space_if_not_bol(); self.print_expr_outer_attr_style(expr, false); if classify::expr_requires_semi_to_be_stmt(expr) { self.s.word(";"); } } ast::StmtKind::Semi(ref expr) => { self.space_if_not_bol(); self.print_expr_outer_attr_style(expr, false); self.s.word(";"); } ast::StmtKind::Empty => { self.space_if_not_bol(); self.s.word(";"); } ast::StmtKind::MacCall(ref mac) => { let (ref mac, style, ref attrs) = **mac; self.space_if_not_bol(); self.print_outer_attributes(attrs); self.print_mac(mac); if style == ast::MacStmtStyle::Semicolon { self.s.word(";"); } } } self.maybe_print_trailing_comment(st.span, None) } crate fn print_block(&mut self, blk: &ast::Block) { self.print_block_with_attrs(blk, &[]) } crate fn print_block_unclosed_indent(&mut self, blk: &ast::Block) { self.print_block_maybe_unclosed(blk, &[], false) } crate fn print_block_with_attrs(&mut self, blk: &ast::Block, attrs: &[ast::Attribute]) { self.print_block_maybe_unclosed(blk, attrs, true) } crate fn print_block_maybe_unclosed( &mut self, blk: &ast::Block, attrs: &[ast::Attribute], close_box: bool, ) { match blk.rules { BlockCheckMode::Unsafe(..) 
=> self.word_space("unsafe"), BlockCheckMode::Default => (), } self.maybe_print_comment(blk.span.lo()); self.ann.pre(self, AnnNode::Block(blk)); self.bopen(); self.print_inner_attributes(attrs); for (i, st) in blk.stmts.iter().enumerate() { match st.kind { ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => { self.maybe_print_comment(st.span.lo()); self.space_if_not_bol(); self.print_expr_outer_attr_style(expr, false); self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi())); } _ => self.print_stmt(st), } } self.bclose_maybe_open(blk.span, close_box); self.ann.post(self, AnnNode::Block(blk)) } /// Print a `let pat = scrutinee` expression. crate fn print_let(&mut self, pat: &ast::Pat, scrutinee: &ast::Expr) { self.s.word("let "); self.print_pat(pat); self.s.space(); self.word_space("="); self.print_expr_cond_paren( scrutinee, Self::cond_needs_par(scrutinee) || parser::needs_par_as_let_scrutinee(scrutinee.precedence().order()), ) } fn print_else(&mut self, els: Option<&ast::Expr>) { if let Some(_else) = els { match _else.kind { // Another `else if` block. ast::ExprKind::If(ref i, ref then, ref e) => { self.cbox(INDENT_UNIT - 1); self.ibox(0); self.s.word(" else if "); self.print_expr_as_cond(i); self.s.space(); self.print_block(then); self.print_else(e.as_deref()) } // Final `else` block. ast::ExprKind::Block(ref b, _) => { self.cbox(INDENT_UNIT - 1); self.ibox(0); self.s.word(" else "); self.print_block(b) } // Constraints would be great here! 
_ => { panic!("print_if saw if with weird alternative"); } } } } crate fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block, elseopt: Option<&ast::Expr>) { self.head("if"); self.print_expr_as_cond(test); self.s.space(); self.print_block(blk); self.print_else(elseopt) } crate fn print_mac(&mut self, m: &ast::MacCall) { self.print_mac_common( Some(MacHeader::Path(&m.path)), true, None, m.args.delim(), m.args.inner_tokens(), true, m.span(), ); } fn print_call_post(&mut self, args: &[P<ast::Expr>]) { self.popen(); self.commasep_exprs(Inconsistent, args); self.pclose() } crate fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8) { self.print_expr_cond_paren(expr, expr.precedence().order() < prec) } /// Prints an expr using syntax that's acceptable in a condition position, such as the `cond` in /// `if cond { ... }`. crate fn print_expr_as_cond(&mut self, expr: &ast::Expr) { self.print_expr_cond_paren(expr, Self::cond_needs_par(expr)) } /// Does `expr` need parenthesis when printed in a condition position? fn cond_needs_par(expr: &ast::Expr) -> bool { match expr.kind { // These cases need parens due to the parse error observed in #26461: `if return {}` // parses as the erroneous construct `if (return {})`, not `if (return) {}`. ast::ExprKind::Closure(..) | ast::ExprKind::Ret(..) | ast::ExprKind::Break(..) => true, _ => parser::contains_exterior_struct_lit(expr), } } /// Prints `expr` or `(expr)` when `needs_par` holds. 
fn print_expr_cond_paren(&mut self, expr: &ast::Expr, needs_par: bool) { if needs_par { self.popen(); } self.print_expr(expr); if needs_par { self.pclose(); } } fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>], attrs: &[Attribute]) { self.ibox(INDENT_UNIT); self.s.word("["); self.print_inner_attributes_inline(attrs); self.commasep_exprs(Inconsistent, &exprs[..]); self.s.word("]"); self.end(); } fn print_expr_repeat( &mut self, element: &ast::Expr, count: &ast::AnonConst, attrs: &[Attribute], ) { self.ibox(INDENT_UNIT); self.s.word("["); self.print_inner_attributes_inline(attrs); self.print_expr(element); self.word_space(";"); self.print_expr(&count.value); self.s.word("]"); self.end(); } fn print_expr_struct( &mut self, path: &ast::Path, fields: &[ast::Field], wth: &Option<P<ast::Expr>>, attrs: &[Attribute], ) { self.print_path(path, true, 0); self.s.word("{"); self.print_inner_attributes_inline(attrs); self.commasep_cmnt( Consistent, &fields[..], |s, field| { s.print_outer_attributes(&field.attrs); s.ibox(INDENT_UNIT); if !field.is_shorthand { s.print_ident(field.ident); s.word_space(":"); } s.print_expr(&field.expr); s.end(); }, |f| f.span, ); match *wth { Some(ref expr) => { self.ibox(INDENT_UNIT); if !fields.is_empty() { self.s.word(","); self.s.space(); } self.s.word(".."); self.print_expr(expr); self.end(); } _ => { if !fields.is_empty() { self.s.word(",") } } } self.s.word("}"); } fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>], attrs: &[Attribute]) { self.popen(); self.print_inner_attributes_inline(attrs); self.commasep_exprs(Inconsistent, &exprs[..]); if exprs.len() == 1 { self.s.word(","); } self.pclose() } fn print_expr_call(&mut self, func: &ast::Expr, args: &[P<ast::Expr>]) { let prec = match func.kind { ast::ExprKind::Field(..) 
=> parser::PREC_FORCE_PAREN, _ => parser::PREC_POSTFIX, }; self.print_expr_maybe_paren(func, prec); self.print_call_post(args) } fn print_expr_method_call(&mut self, segment: &ast::PathSegment, args: &[P<ast::Expr>]) { let base_args = &args[1..]; self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX); self.s.word("."); self.print_ident(segment.ident); if let Some(ref args) = segment.args { self.print_generic_args(args, true); } self.print_call_post(base_args) } fn print_expr_binary(&mut self, op: ast::BinOp, lhs: &ast::Expr, rhs: &ast::Expr) { let assoc_op = AssocOp::from_ast_binop(op.node); let prec = assoc_op.precedence() as i8; let fixity = assoc_op.fixity(); let (left_prec, right_prec) = match fixity { Fixity::Left => (prec, prec + 1), Fixity::Right => (prec + 1, prec), Fixity::None => (prec + 1, prec + 1), }; let left_prec = match (&lhs.kind, op.node) { // These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is // the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead // of `(x as i32) < ...`. We need to convince it _not_ to do that. (&ast::ExprKind::Cast { .. }, ast::BinOpKind::Lt) | (&ast::ExprKind::Cast { .. }, ast::BinOpKind::Shl) => parser::PREC_FORCE_PAREN, // We are given `(let _ = a) OP b`. // // - When `OP <= LAnd` we should print `let _ = a OP b` to avoid redundant parens // as the parser will interpret this as `(let _ = a) OP b`. // // - Otherwise, e.g. when we have `(let a = b) < c` in AST, // parens are required since the parser would interpret `let a = b < c` as // `let a = (b < c)`. To achieve this, we force parens. (&ast::ExprKind::Let { .. 
}, _) if !parser::needs_par_as_let_scrutinee(prec) => { parser::PREC_FORCE_PAREN } _ => left_prec, }; self.print_expr_maybe_paren(lhs, left_prec); self.s.space(); self.word_space(op.node.to_string()); self.print_expr_maybe_paren(rhs, right_prec) } fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr) { self.s.word(ast::UnOp::to_string(op)); self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } fn print_expr_addr_of( &mut self, kind: ast::BorrowKind, mutability: ast::Mutability, expr: &ast::Expr, ) { self.s.word("&"); match kind { ast::BorrowKind::Ref => self.print_mutability(mutability, false), ast::BorrowKind::Raw => { self.word_nbsp("raw"); self.print_mutability(mutability, true); } } self.print_expr_maybe_paren(expr, parser::PREC_PREFIX) } pub fn print_expr(&mut self, expr: &ast::Expr) { self.print_expr_outer_attr_style(expr, true) } fn print_expr_outer_attr_style(&mut self, expr: &ast::Expr, is_inline: bool) { self.maybe_print_comment(expr.span.lo()); let attrs = &expr.attrs; if is_inline { self.print_outer_attributes_inline(attrs); } else { self.print_outer_attributes(attrs); } self.ibox(INDENT_UNIT); self.ann.pre(self, AnnNode::Expr(expr)); match expr.kind { ast::ExprKind::Box(ref expr) => { self.word_space("box"); self.print_expr_maybe_paren(expr, parser::PREC_PREFIX); } ast::ExprKind::Array(ref exprs) => { self.print_expr_vec(&exprs[..], attrs); } ast::ExprKind::Repeat(ref element, ref count) => { self.print_expr_repeat(element, count, attrs); } ast::ExprKind::Struct(ref path, ref fields, ref wth) => { self.print_expr_struct(path, &fields[..], wth, attrs); } ast::ExprKind::Tup(ref exprs) => { self.print_expr_tup(&exprs[..], attrs); } ast::ExprKind::Call(ref func, ref args) => { self.print_expr_call(func, &args[..]); } ast::ExprKind::MethodCall(ref segment, ref args) => { self.print_expr_method_call(segment, &args[..]); } ast::ExprKind::Binary(op, ref lhs, ref rhs) => { self.print_expr_binary(op, lhs, rhs); } ast::ExprKind::Unary(op, ref expr) 
=> { self.print_expr_unary(op, expr); } ast::ExprKind::AddrOf(k, m, ref expr) => { self.print_expr_addr_of(k, m, expr); } ast::ExprKind::Lit(ref lit) => { self.print_literal(lit); } ast::ExprKind::Cast(ref expr, ref ty) => { let prec = AssocOp::As.precedence() as i8; self.print_expr_maybe_paren(expr, prec); self.s.space(); self.word_space("as"); self.print_type(ty); } ast::ExprKind::Type(ref expr, ref ty) => { let prec = AssocOp::Colon.precedence() as i8; self.print_expr_maybe_paren(expr, prec); self.word_space(":"); self.print_type(ty); } ast::ExprKind::Let(ref pat, ref scrutinee) => { self.print_let(pat, scrutinee); } ast::ExprKind::If(ref test, ref blk, ref elseopt) => { self.print_if(test, blk, elseopt.as_deref()) } ast::ExprKind::While(ref test, ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } self.head("while"); self.print_expr_as_cond(test); self.s.space(); self.print_block_with_attrs(blk, attrs); } ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } self.head("for"); self.print_pat(pat); self.s.space(); self.word_space("in"); self.print_expr_as_cond(iter); self.s.space(); self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Loop(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } self.head("loop"); self.s.space(); self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Match(ref expr, ref arms) => { self.cbox(INDENT_UNIT); self.ibox(INDENT_UNIT); self.word_nbsp("match"); self.print_expr_as_cond(expr); self.s.space(); self.bopen(); self.print_inner_attributes_no_trailing_hardbreak(attrs); for arm in arms { self.print_arm(arm); } self.bclose(expr.span); } ast::ExprKind::Closure( capture_clause, asyncness, movability, ref decl, ref body, _, ) => { self.print_movability(movability); self.print_asyncness(asyncness); 
self.print_capture_clause(capture_clause); self.print_fn_params_and_ret(decl, true); self.s.space(); self.print_expr(body); self.end(); // need to close a box // a box will be closed by print_expr, but we didn't want an overall // wrapper so we closed the corresponding opening. so create an // empty box to satisfy the close. self.ibox(0); } ast::ExprKind::Block(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } // containing cbox, will be closed by print-block at } self.cbox(INDENT_UNIT); // head-box, will be closed by print-block after { self.ibox(0); self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Async(capture_clause, _, ref blk) => { self.word_nbsp("async"); self.print_capture_clause(capture_clause); self.s.space(); // cbox/ibox in analogy to the `ExprKind::Block` arm above self.cbox(INDENT_UNIT); self.ibox(0); self.print_block_with_attrs(blk, attrs); } ast::ExprKind::Await(ref expr) => { self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); self.s.word(".await"); } ast::ExprKind::Assign(ref lhs, ref rhs, _) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(lhs, prec + 1); self.s.space(); self.word_space("="); self.print_expr_maybe_paren(rhs, prec); } ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => { let prec = AssocOp::Assign.precedence() as i8; self.print_expr_maybe_paren(lhs, prec + 1); self.s.space(); self.s.word(op.node.to_string()); self.word_space("="); self.print_expr_maybe_paren(rhs, prec); } ast::ExprKind::Field(ref expr, ident) => { self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); self.s.word("."); self.print_ident(ident); } ast::ExprKind::Index(ref expr, ref index) => { self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX); self.s.word("["); self.print_expr(index); self.s.word("]"); } ast::ExprKind::Range(ref start, ref end, limits) => { // Special case for `Range`. 
`AssocOp` claims that `Range` has higher precedence // than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`. // Here we use a fake precedence value so that any child with lower precedence than // a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.) let fake_prec = AssocOp::LOr.precedence() as i8; if let Some(ref e) = *start { self.print_expr_maybe_paren(e, fake_prec); } if limits == ast::RangeLimits::HalfOpen { self.s.word(".."); } else { self.s.word("..="); } if let Some(ref e) = *end { self.print_expr_maybe_paren(e, fake_prec); } } ast::ExprKind::Path(None, ref path) => self.print_path(path, true, 0), ast::ExprKind::Path(Some(ref qself), ref path) => self.print_qpath(path, qself, true), ast::ExprKind::Break(opt_label, ref opt_expr) => { self.s.word("break"); self.s.space(); if let Some(label) = opt_label { self.print_ident(label.ident); self.s.space(); } if let Some(ref expr) = *opt_expr { self.print_expr_maybe_paren(expr, parser::PREC_JUMP); self.s.space(); } } ast::ExprKind::Continue(opt_label) => { self.s.word("continue"); self.s.space(); if let Some(label) = opt_label { self.print_ident(label.ident); self.s.space() } } ast::ExprKind::Ret(ref result) => { self.s.word("return"); if let Some(ref expr) = *result { self.s.word(" "); self.print_expr_maybe_paren(expr, parser::PREC_JUMP); } } ast::ExprKind::LlvmInlineAsm(ref a) => { self.s.word("llvm_asm!"); self.popen(); self.print_string(&a.asm.as_str(), a.asm_str_style); self.word_space(":"); self.commasep(Inconsistent, &a.outputs, |s, out| { let constraint = out.constraint.as_str(); let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked) } _ => s.print_string(&constraint, ast::StrStyle::Cooked), } s.popen(); s.print_expr(&out.expr); s.pclose(); }); self.s.space(); self.word_space(":"); self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| { s.print_string(&co.as_str(), 
ast::StrStyle::Cooked); s.popen(); s.print_expr(o); s.pclose(); }); self.s.space(); self.word_space(":"); self.commasep(Inconsistent, &a.clobbers, |s, co| { s.print_string(&co.as_str(), ast::StrStyle::Cooked); }); let mut options = vec![]; if a.volatile { options.push("volatile"); } if a.alignstack { options.push("alignstack"); } if a.dialect == ast::LlvmAsmDialect::Intel { options.push("intel"); } if !options.is_empty() { self.s.space(); self.word_space(":"); self.commasep(Inconsistent, &options, |s, &co| { s.print_string(co, ast::StrStyle::Cooked); }); } self.pclose(); } ast::ExprKind::MacCall(ref m) => self.print_mac(m), ast::ExprKind::Paren(ref e) => { self.popen(); self.print_inner_attributes_inline(attrs); self.print_expr(e); self.pclose(); } ast::ExprKind::Yield(ref e) => { self.s.word("yield"); match *e { Some(ref expr) => { self.s.space(); self.print_expr_maybe_paren(expr, parser::PREC_JUMP); } _ => (), } } ast::ExprKind::Try(ref e) => { self.print_expr_maybe_paren(e, parser::PREC_POSTFIX); self.s.word("?") } ast::ExprKind::TryBlock(ref blk) => { self.head("try"); self.s.space(); self.print_block_with_attrs(blk, attrs) } ast::ExprKind::Err => { self.popen(); self.s.word("/*ERROR*/"); self.pclose() } } self.ann.post(self, AnnNode::Expr(expr)); self.end(); } crate fn print_local_decl(&mut self, loc: &ast::Local) { self.print_pat(&loc.pat); if let Some(ref ty) = loc.ty { self.word_space(":"); self.print_type(ty); } } pub fn print_usize(&mut self, i: usize) { self.s.word(i.to_string()) } crate fn print_name(&mut self, name: ast::Name) { self.s.word(name.to_string()); self.ann.post(self, AnnNode::Name(&name)) } fn print_qpath(&mut self, path: &ast::Path, qself: &ast::QSelf, colons_before_params: bool) { self.s.word("<"); self.print_type(&qself.ty); if qself.position > 0 { self.s.space(); self.word_space("as"); let depth = path.segments.len() - qself.position; self.print_path(path, false, depth); } self.s.word(">"); self.s.word("::"); let item_segment = 
path.segments.last().unwrap(); self.print_ident(item_segment.ident); match item_segment.args { Some(ref args) => self.print_generic_args(args, colons_before_params), None => {} } } crate fn print_pat(&mut self, pat: &ast::Pat) { self.maybe_print_comment(pat.span.lo()); self.ann.pre(self, AnnNode::Pat(pat)); /* Pat isn't normalized, but the beauty of it is that it doesn't matter */ match pat.kind { PatKind::Wild => self.s.word("_"), PatKind::Ident(binding_mode, ident, ref sub) => { match binding_mode { ast::BindingMode::ByRef(mutbl) => { self.word_nbsp("ref"); self.print_mutability(mutbl, false); } ast::BindingMode::ByValue(ast::Mutability::Not) => {} ast::BindingMode::ByValue(ast::Mutability::Mut) => { self.word_nbsp("mut"); } } self.print_ident(ident); if let Some(ref p) = *sub { self.s.space(); self.s.word_space("@"); self.print_pat(p); } } PatKind::TupleStruct(ref path, ref elts) => { self.print_path(path, true, 0); self.popen(); self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.pclose(); } PatKind::Or(ref pats) => { self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(p)); } PatKind::Path(None, ref path) => { self.print_path(path, true, 0); } PatKind::Path(Some(ref qself), ref path) => { self.print_qpath(path, qself, false); } PatKind::Struct(ref path, ref fields, etc) => { self.print_path(path, true, 0); self.nbsp(); self.word_space("{"); self.commasep_cmnt( Consistent, &fields[..], |s, f| { s.cbox(INDENT_UNIT); if !f.is_shorthand { s.print_ident(f.ident); s.word_nbsp(":"); } s.print_pat(&f.pat); s.end(); }, |f| f.pat.span, ); if etc { if !fields.is_empty() { self.word_space(","); } self.s.word(".."); } self.s.space(); self.s.word("}"); } PatKind::Tuple(ref elts) => { self.popen(); self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); if elts.len() == 1 { self.s.word(","); } self.pclose(); } PatKind::Box(ref inner) => { self.s.word("box "); self.print_pat(inner); } PatKind::Ref(ref inner, mutbl) => { self.s.word("&"); 
if mutbl == ast::Mutability::Mut { self.s.word("mut "); } self.print_pat(inner); } PatKind::Lit(ref e) => self.print_expr(&**e), PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => { if let Some(e) = begin { self.print_expr(e); self.s.space(); } match *end_kind { RangeEnd::Included(RangeSyntax::DotDotDot) => self.s.word("..."), RangeEnd::Included(RangeSyntax::DotDotEq) => self.s.word("..="), RangeEnd::Excluded => self.s.word(".."), } if let Some(e) = end { self.print_expr(e); } } PatKind::Slice(ref elts) => { self.s.word("["); self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.s.word("]"); } PatKind::Rest => self.s.word(".."), PatKind::Paren(ref inner) => { self.popen(); self.print_pat(inner); self.pclose(); } PatKind::MacCall(ref m) => self.print_mac(m), } self.ann.post(self, AnnNode::Pat(pat)) } fn print_arm(&mut self, arm: &ast::Arm) { // Note, I have no idea why this check is necessary, but here it is. if arm.attrs.is_empty() { self.s.space(); } self.cbox(INDENT_UNIT); self.ibox(0); self.maybe_print_comment(arm.pat.span.lo()); self.print_outer_attributes(&arm.attrs); self.print_pat(&arm.pat); self.s.space(); if let Some(ref e) = arm.guard { self.word_space("if"); self.print_expr(e); self.s.space(); } self.word_space("=>"); match arm.body.kind { ast::ExprKind::Block(ref blk, opt_label) => { if let Some(label) = opt_label { self.print_ident(label.ident); self.word_space(":"); } // The block will close the pattern's ibox. self.print_block_unclosed_indent(blk); // If it is a user-provided unsafe block, print a comma after it. if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules { self.s.word(","); } } _ => { self.end(); // Close the ibox for the pattern. self.print_expr(&arm.body); self.s.word(","); } } self.end(); // Close enclosing cbox. 
} fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) { match explicit_self.node { SelfKind::Value(m) => { self.print_mutability(m, false); self.s.word("self") } SelfKind::Region(ref lt, m) => { self.s.word("&"); self.print_opt_lifetime(lt); self.print_mutability(m, false); self.s.word("self") } SelfKind::Explicit(ref typ, m) => { self.print_mutability(m, false); self.s.word("self"); self.word_space(":"); self.print_type(typ) } } } fn print_fn_full( &mut self, sig: &ast::FnSig, name: ast::Ident, generics: &ast::Generics, vis: &ast::Visibility, defaultness: ast::Defaultness, body: Option<&ast::Block>, attrs: &[ast::Attribute], ) { if body.is_some() { self.head(""); } self.print_visibility(vis); self.print_defaultness(defaultness); self.print_fn(&sig.decl, sig.header, Some(name), generics); if let Some(body) = body { self.nbsp(); self.print_block_with_attrs(body, attrs); } else { self.s.word(";"); } } crate fn print_fn( &mut self, decl: &ast::FnDecl, header: ast::FnHeader, name: Option<ast::Ident>, generics: &ast::Generics, ) { self.print_fn_header_info(header); if let Some(name) = name { self.nbsp(); self.print_ident(name); } self.print_generic_params(&generics.params); self.print_fn_params_and_ret(decl, false); self.print_where_clause(&generics.where_clause) } crate fn print_fn_params_and_ret(&mut self, decl: &ast::FnDecl, is_closure: bool) { let (open, close) = if is_closure { ("|", "|") } else { ("(", ")") }; self.word(open); self.commasep(Inconsistent, &decl.inputs, |s, param| s.print_param(param, is_closure)); self.word(close); self.print_fn_ret_ty(&decl.output) } crate fn print_movability(&mut self, movability: ast::Movability) { match movability { ast::Movability::Static => self.word_space("static"), ast::Movability::Movable => {} } } crate fn print_asyncness(&mut self, asyncness: ast::Async) { if asyncness.is_async() { self.word_nbsp("async"); } } crate fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy) { match capture_clause 
{ ast::CaptureBy::Value => self.word_space("move"), ast::CaptureBy::Ref => {} } } pub fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound]) { if !bounds.is_empty() { self.s.word(prefix); let mut first = true; for bound in bounds { if !(first && prefix.is_empty()) { self.nbsp(); } if first { first = false; } else { self.word_space("+"); } match bound { GenericBound::Trait(tref, modifier) => { if modifier == &TraitBoundModifier::Maybe { self.s.word("?"); } self.print_poly_trait_ref(tref); } GenericBound::Outlives(lt) => self.print_lifetime(*lt), } } } } crate fn print_lifetime(&mut self, lifetime: ast::Lifetime) { self.print_name(lifetime.ident.name) } crate fn print_lifetime_bounds( &mut self, lifetime: ast::Lifetime, bounds: &ast::GenericBounds, ) { self.print_lifetime(lifetime); if !bounds.is_empty() { self.s.word(": "); for (i, bound) in bounds.iter().enumerate() { if i != 0 { self.s.word(" + "); } match bound { ast::GenericBound::Outlives(lt) => self.print_lifetime(*lt), _ => panic!(), } } } } crate fn print_generic_params(&mut self, generic_params: &[ast::GenericParam]) { if generic_params.is_empty() { return; } self.s.word("<"); self.commasep(Inconsistent, &generic_params, |s, param| { s.print_outer_attributes_inline(&param.attrs); match param.kind { ast::GenericParamKind::Lifetime => { let lt = ast::Lifetime { id: param.id, ident: param.ident }; s.print_lifetime_bounds(lt, &param.bounds) } ast::GenericParamKind::Type { ref default } => { s.print_ident(param.ident); s.print_type_bounds(":", &param.bounds); if let Some(ref default) = default { s.s.space(); s.word_space("="); s.print_type(default) } } ast::GenericParamKind::Const { ref ty } => { s.word_space("const"); s.print_ident(param.ident); s.s.space(); s.word_space(":"); s.print_type(ty); s.print_type_bounds(":", &param.bounds) } } }); self.s.word(">"); } crate fn print_where_clause(&mut self, where_clause: &ast::WhereClause) { if where_clause.predicates.is_empty() { return; } 
self.s.space(); self.word_space("where"); for (i, predicate) in where_clause.predicates.iter().enumerate() { if i != 0 { self.word_space(","); } match *predicate { ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate { ref bound_generic_params, ref bounded_ty, ref bounds, .. }) => { self.print_formal_generic_params(bound_generic_params); self.print_type(bounded_ty); self.print_type_bounds(":", bounds); } ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate { ref lifetime, ref bounds, .. }) => { self.print_lifetime_bounds(*lifetime, bounds); } ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { ref lhs_ty, ref rhs_ty, .. }) => { self.print_type(lhs_ty); self.s.space(); self.word_space("="); self.print_type(rhs_ty); } } } } crate fn print_use_tree(&mut self, tree: &ast::UseTree) { match tree.kind { ast::UseTreeKind::Simple(rename, ..) => { self.print_path(&tree.prefix, false, 0); if let Some(rename) = rename { self.s.space(); self.word_space("as"); self.print_ident(rename); } } ast::UseTreeKind::Glob => { if !tree.prefix.segments.is_empty() { self.print_path(&tree.prefix, false, 0); self.s.word("::"); } self.s.word("*"); } ast::UseTreeKind::Nested(ref items) => { if tree.prefix.segments.is_empty() { self.s.word("{"); } else { self.print_path(&tree.prefix, false, 0); self.s.word("::{"); } self.commasep(Inconsistent, &items[..], |this, &(ref tree, _)| { this.print_use_tree(tree) }); self.s.word("}"); } } } pub fn print_mutability(&mut self, mutbl: ast::Mutability, print_const: bool) { match mutbl { ast::Mutability::Mut => self.word_nbsp("mut"), ast::Mutability::Not => { if print_const { self.word_nbsp("const"); } } } } crate fn print_mt(&mut self, mt: &ast::MutTy, print_const: bool) { self.print_mutability(mt.mutbl, print_const); self.print_type(&mt.ty) } crate fn print_param(&mut self, input: &ast::Param, is_closure: bool) { self.ibox(INDENT_UNIT); self.print_outer_attributes_inline(&input.attrs); match input.ty.kind { ast::TyKind::Infer if 
is_closure => self.print_pat(&input.pat), _ => { if let Some(eself) = input.to_self() { self.print_explicit_self(&eself); } else { let invalid = if let PatKind::Ident(_, ident, _) = input.pat.kind { ident.name == kw::Invalid } else { false }; if !invalid { self.print_pat(&input.pat); self.s.word(":"); self.s.space(); } self.print_type(&input.ty); } } } self.end(); } crate fn print_fn_ret_ty(&mut self, fn_ret_ty: &ast::FnRetTy) { if let ast::FnRetTy::Ty(ty) = fn_ret_ty { self.space_if_not_bol(); self.ibox(INDENT_UNIT); self.word_space("->"); self.print_type(ty); self.end(); self.maybe_print_comment(ty.span.lo()); } } crate fn print_ty_fn( &mut self, ext: ast::Extern, unsafety: ast::Unsafe, decl: &ast::FnDecl, name: Option<ast::Ident>, generic_params: &[ast::GenericParam], ) { self.ibox(INDENT_UNIT); if !generic_params.is_empty() { self.s.word("for"); self.print_generic_params(generic_params); } let generics = ast::Generics { params: Vec::new(), where_clause: ast::WhereClause { predicates: Vec::new(), span: rustc_span::DUMMY_SP }, span: rustc_span::DUMMY_SP, }; let header = ast::FnHeader { unsafety, ext, ..ast::FnHeader::default() }; self.print_fn(decl, header, name, &generics); self.end(); } crate fn maybe_print_trailing_comment( &mut self, span: rustc_span::Span, next_pos: Option<BytePos>, ) { if let Some(cmnts) = self.comments() { if let Some(cmnt) = cmnts.trailing_comment(span, next_pos) { self.print_comment(&cmnt); } } } crate fn print_remaining_comments(&mut self) { // If there aren't any remaining comments, then we need to manually // make sure there is a line break at the end. 
if self.next_comment().is_none() { self.s.hardbreak(); } while let Some(ref cmnt) = self.next_comment() { self.print_comment(cmnt); } } crate fn print_fn_header_info(&mut self, header: ast::FnHeader) { self.print_constness(header.constness); self.print_asyncness(header.asyncness); self.print_unsafety(header.unsafety); match header.ext { ast::Extern::None => {} ast::Extern::Implicit => { self.word_nbsp("extern"); } ast::Extern::Explicit(abi) => { self.word_nbsp("extern"); self.print_literal(&abi.as_lit()); self.nbsp(); } } self.s.word("fn") } crate fn print_unsafety(&mut self, s: ast::Unsafe) { match s { ast::Unsafe::No => {} ast::Unsafe::Yes(_) => self.word_nbsp("unsafe"), } } crate fn print_constness(&mut self, s: ast::Const) { match s { ast::Const::No => {} ast::Const::Yes(_) => self.word_nbsp("const"), } } crate fn print_is_auto(&mut self, s: ast::IsAuto) { match s { ast::IsAuto::Yes => self.word_nbsp("auto"), ast::IsAuto::No => {} } } }
35.23475
100
0.488039
231f2ae36b049835947932d3390833c05d20b96c
6,069
// Copyright 2019-2021 Tauri Programme within The Commons Conservancy // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: MIT use crate::{ helpers::{ framework::{infer_from_package_json as infer_framework, Framework}, resolve_tauri_path, template, Logger, }, VersionMetadata, }; use std::{ collections::BTreeMap, env::current_dir, fmt::Display, fs::{read_to_string, remove_dir_all}, path::PathBuf, str::FromStr, }; use crate::Result; use anyhow::Context; use clap::Parser; use dialoguer::Input; use handlebars::{to_json, Handlebars}; use include_dir::{include_dir, Dir}; use serde::Deserialize; const TEMPLATE_DIR: Dir<'_> = include_dir!("templates/app"); #[derive(Debug, Parser)] #[clap(about = "Initializes a Tauri project")] pub struct Options { /// Skip prompting for values #[clap(long)] ci: bool, /// Force init to overwrite the src-tauri folder #[clap(short, long)] force: bool, /// Enables logging #[clap(short, long)] log: bool, /// Set target directory for init #[clap(short, long)] #[clap(default_value_t = current_dir().expect("failed to read cwd").display().to_string())] directory: String, /// Path of the Tauri project to use (relative to the cwd) #[clap(short, long)] tauri_path: Option<PathBuf>, /// Name of your Tauri application #[clap(short = 'A', long)] app_name: Option<String>, /// Window title of your Tauri application #[clap(short = 'W', long)] window_title: Option<String>, /// Web assets location, relative to <project-dir>/src-tauri #[clap(short = 'D', long)] dist_dir: Option<String>, /// Url of your dev server #[clap(short = 'P', long)] dev_path: Option<String>, } #[derive(Deserialize)] struct PackageJson { name: Option<String>, product_name: Option<String>, } #[derive(Default)] struct InitDefaults { app_name: Option<String>, framework: Option<Framework>, } impl Options { fn load(mut self) -> Result<Self> { self.ci = self.ci || std::env::var("CI").is_ok(); let package_json_path = PathBuf::from(&self.directory).join("package.json"); let 
init_defaults = if package_json_path.exists() { let package_json_text = read_to_string(package_json_path)?; let package_json: PackageJson = serde_json::from_str(&package_json_text)?; let (framework, _) = infer_framework(&package_json_text); InitDefaults { app_name: package_json.product_name.or(package_json.name), framework, } } else { Default::default() }; self.app_name = self.app_name.map(|s| Ok(Some(s))).unwrap_or_else(|| { request_input( "What is your app name?", init_defaults.app_name.clone(), self.ci, ) })?; self.window_title = self.window_title.map(|s| Ok(Some(s))).unwrap_or_else(|| { request_input( "What should the window title be?", init_defaults.app_name.clone(), self.ci, ) })?; self.dist_dir = self.dist_dir.map(|s| Ok(Some(s))).unwrap_or_else(|| request_input( r#"Where are your web assets (HTML/CSS/JS) located, relative to the "<current dir>/src-tauri/tauri.conf.json" file that will be created?"#, init_defaults.framework.as_ref().map(|f| f.dist_dir()), self.ci ))?; self.dev_path = self.dev_path.map(|s| Ok(Some(s))).unwrap_or_else(|| { request_input( "What is the url of your dev server?", init_defaults.framework.map(|f| f.dev_path()), self.ci, ) })?; Ok(self) } } pub fn command(mut options: Options) -> Result<()> { options = options.load()?; let logger = Logger::new("tauri:init"); let template_target_path = PathBuf::from(&options.directory).join("src-tauri"); let metadata = serde_json::from_str::<VersionMetadata>(include_str!("../metadata.json"))?; if template_target_path.exists() && !options.force { logger.warn(format!( "Tauri dir ({:?}) not empty. 
Run `init --force` to overwrite.", template_target_path )); } else { let (tauri_dep, tauri_build_dep) = if let Some(tauri_path) = options.tauri_path { ( format!( r#"{{ path = {:?}, features = [ "api-all" ] }}"#, resolve_tauri_path(&tauri_path, "core/tauri") ), format!( "{{ path = {:?} }}", resolve_tauri_path(&tauri_path, "core/tauri-build") ), ) } else { ( format!( r#"{{ version = "{}", features = [ "api-all" ] }}"#, metadata.tauri ), format!(r#"{{ version = "{}" }}"#, metadata.tauri_build), ) }; let _ = remove_dir_all(&template_target_path); let handlebars = Handlebars::new(); let mut data = BTreeMap::new(); data.insert("tauri_dep", to_json(tauri_dep)); data.insert("tauri_build_dep", to_json(tauri_build_dep)); data.insert( "dist_dir", to_json(options.dist_dir.unwrap_or_else(|| "../dist".to_string())), ); data.insert( "dev_path", to_json( options .dev_path .unwrap_or_else(|| "http://localhost:4000".to_string()), ), ); data.insert( "app_name", to_json(options.app_name.unwrap_or_else(|| "Tauri App".to_string())), ); data.insert( "window_title", to_json(options.window_title.unwrap_or_else(|| "Tauri".to_string())), ); template::render(&handlebars, &data, &TEMPLATE_DIR, &options.directory) .with_context(|| "failed to render Tauri template")?; } Ok(()) } fn request_input<T>(prompt: &str, default: Option<T>, skip: bool) -> Result<Option<T>> where T: Clone + FromStr + Display + ToString, T::Err: Display + std::fmt::Debug, { if skip { Ok(default) } else { let theme = dialoguer::theme::ColorfulTheme::default(); let mut builder = Input::with_theme(&theme); builder.with_prompt(prompt); if let Some(v) = default { builder.default(v.clone()); builder.with_initial_text(v.to_string()); } builder.interact_text().map(Some).map_err(Into::into) } }
28.227907
145
0.625803
ab6b6b0d7a39ec592c2f7b8508fd459e354f1460
805
use ethers_core::types::{Address, Chain}; use std::env::VarError; #[derive(Debug, thiserror::Error)] pub enum EtherscanError { #[error("chain {0} not supported")] ChainNotSupported(Chain), #[error("contract execution call failed: {0}")] ExecutionFailed(String), #[error("balance failed")] BalanceFailed, #[error("tx receipt failed")] TransactionReceiptFailed, #[error("gas estimation failed")] GasEstimationFailed, #[error("bad status code {0}")] BadStatusCode(String), #[error(transparent)] EnvVarNotFound(#[from] VarError), #[error(transparent)] Reqwest(#[from] reqwest::Error), #[error(transparent)] Serde(#[from] serde_json::Error), #[error("Contract source code not verified: {0}")] ContractCodeNotVerified(Address), }
29.814815
54
0.664596
1c667ebcf37ce6133eb73c1acaa3f906b4db8ffd
2,113
use crate::Data; use std::{error::Error, io::Cursor}; use miniserde::json; use tiny_http::{Header, Method, Response, Server, StatusCode}; const INDEX_HTML: &str = include_str!(env!("INDEX_HTML")); const INDEX_JS: &str = include_str!(env!("INDEX_JS")); fn cors_headers() -> Vec<Header> { use std::str::FromStr; vec![Header::from_str("Access-Control-Allow-Origin: *").unwrap()] } fn resp_from_str(string: &str) -> Response<Cursor<&[u8]>> { let read = Cursor::new(string.as_bytes()); Response::new( StatusCode(200), cors_headers(), read, Some(string.len()), None, ) } fn resp_from_string(string: String) -> Response<Cursor<Vec<u8>>> { let str_len = string.len(); let read = Cursor::new(string.into_bytes()); Response::new(StatusCode(200), cors_headers(), read, Some(str_len), None) } pub fn launch(listen_url: &str, data: &Data) -> Result<(), Box<dyn Error + Send + Sync + 'static>> { let server = Server::http(listen_url)?; for request in server.incoming_requests() { eprintln!("handling {} {}", request.method(), request.url()); let path = (request.url().trim_matches('/').split('/')).collect::<Vec<_>>(); let response = match (request.method(), path.as_slice()) { (Method::Get, [""]) => request.respond(resp_from_str(INDEX_HTML)), (Method::Get, ["index.js"]) => request.respond(resp_from_str(INDEX_JS)), (Method::Get, ["overview"]) => { request.respond(resp_from_string(json::to_string(&data.overview()))) } (Method::Get, ["moment", moment]) => { let moment = moment.parse::<usize>()?; if let Some(moment) = data.moment(moment) { request.respond(resp_from_string(json::to_string(&moment))) } else { continue; } } (unknown, route) => { println!("unhandled route {} {:?}", unknown, route); continue; } }; response?; } Ok(()) }
32.015152
100
0.553715
16a8010177b4c4c1463fec73ce6449b87b7b1fb4
1,101
//! Player playing random actions #![allow(dead_code)] extern crate rand; use self::rand::{Rng, ThreadRng}; use super::Player; use super::super::field::Field; pub struct PlayerRandom { initialized: bool, pid: i32, //player ID rng: Box<ThreadRng>, } impl PlayerRandom { pub fn new() -> Box<PlayerRandom> { Box::new(PlayerRandom { initialized: false, pid: 0, rng: Box::new(rand::thread_rng()) }) } } impl Player for PlayerRandom { #[allow(unused_variables)] fn init(&mut self, field:&Field, p:i32) -> bool { self.initialized = true; self.pid = p; true } #[allow(unused_variables)] fn startp(&mut self, p:i32) { //nothing } fn play(&mut self, field:&mut Field) -> bool { if !self.initialized { return false; } let w = field.get_w(); let mut random = (*self.rng).gen::<u32>() % w; while !field.play(self.pid, random) { random = (*self.rng).gen::<u32>() % w; } true } #[allow(unused_variables)] fn outcome(&mut self, field:&mut Field, state:i32) { //nothing } } impl Drop for PlayerRandom { fn drop(&mut self) { //nothing to do } }
15.728571
90
0.632153
c18695bf7cdc9cec32931420a45b8eeb0cacc903
751
use futures::StreamExt; use tracing::Level; use tracing_subscriber::FmtSubscriber; #[tokio::main] async fn main() -> Result<(), failure::Error> { // Setup logging to the console let subscriber = FmtSubscriber::builder() .with_max_level(Level::TRACE) .finish(); tracing::subscriber::set_global_default(subscriber)?; // Initialize a rosty node rosty::init("subscribe_examples").await?; // Subscribe to a topic tokio::spawn( rosty::subscribe::<rosty_msg::rosgraph_msgs::Log>("/rosout", 1) .await? .for_each(|(_, message)| async move { println!("{:?}", message); }), ); // Run the node until it quits rosty::run().await; Ok(()) }
25.033333
71
0.591212
e633e438b77e2120e2da8482d6977fc0f289fe78
3,680
use yew::prelude::*; use yew::{html, Component, ComponentLink, Html, ShouldRender}; use gba_emulator::gba::GBA; use gba_emulator::cpu::cpu::{InstructionSet, OperatingMode}; use std::rc::Rc; use std::cell::RefCell; pub struct Status { props: StatusProp, link: ComponentLink<Self> } #[derive(Properties, Clone)] pub struct StatusProp { pub gba: Rc<RefCell<GBA>> } pub enum Msg { UpdateInstructionSet(InstructionSet), UpdateOperatingMode(OperatingMode) } impl Component for Status { type Message = Msg; type Properties = StatusProp; fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self { Status { props: props, link: link } } fn update(&mut self, msg: Self::Message) -> ShouldRender { match msg { Msg::UpdateInstructionSet(instr_set) => { self.props.gba.borrow_mut().cpu.set_instruction_set(instr_set); }, Msg::UpdateOperatingMode(op_mode) => { self.props.gba.borrow_mut().cpu.set_operating_mode(op_mode); } } true } fn change(&mut self, props: Self::Properties) -> ShouldRender { self.props = props; true } fn view(&self) -> Html { html! 
{ <div> <h4>{"Status"}</h4> <div class="dropdown m-2"> <button class="btn btn-outline-primary dropdown-toggle" type="button" data-toggle="dropdown"> {&format!("{:?}", self.props.gba.borrow().cpu.get_instruction_set())} </button> <div class="dropdown-menu"> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateInstructionSet(InstructionSet::Arm)})>{"Arm"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateInstructionSet(InstructionSet::Thumb)})>{"Thumb"}</button> </div> </div> <div class="dropdown m-2"> <button class="btn btn-outline-primary dropdown-toggle" type="button" data-toggle="dropdown"> {&format!("{:?}", self.props.gba.borrow().cpu.get_operating_mode())} </button> <div class="dropdown-menu"> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::System)})>{"System"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::User)})>{"User"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::FastInterrupt)})>{"Fast Interrupt"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::Supervisor)})>{"Supervisor"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::Abort)})>{"Abort"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::Interrupt)})>{"Interrupt"}</button> <button class="dropdown-item" type="button" onclick=self.link.callback(|_|{Msg::UpdateOperatingMode(OperatingMode::Undefined)})>{"Undefined"}</button> </div> </div> </div> } } }
45.432099
183
0.582609
4b93904c349bb8150876dcd21263a60b2afbdfa5
15,252
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::EP1R { #[doc = r" Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline(always)] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct EAR { bits: u8, } impl EAR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct STAT_TXR { bits: u8, } impl STAT_TXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct DTOG_TXR { bits: bool, } impl DTOG_TXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct CTR_TXR { bits: bool, } impl CTR_TXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if 
the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct EP_KINDR { bits: bool, } impl EP_KINDR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct EP_TYPER { bits: u8, } impl EP_TYPER { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct SETUPR { bits: bool, } impl SETUPR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct STAT_RXR { bits: u8, } impl STAT_RXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u8 { self.bits } } #[doc = r" Value of the field"] pub struct DTOG_RXR { bits: bool, } impl DTOG_RXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct CTR_RXR { bits: bool, } impl CTR_RXR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline(always)] pub fn 
bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline(always)] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _EAW<'a> { w: &'a mut W, } impl<'a> _EAW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _STAT_TXW<'a> { w: &'a mut W, } impl<'a> _STAT_TXW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _DTOG_TXW<'a> { w: &'a mut W, } impl<'a> _DTOG_TXW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _CTR_TXW<'a> { w: &'a mut W, } impl<'a> _CTR_TXW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 7; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _EP_KINDW<'a> { w: &'a mut W, } impl<'a> _EP_KINDW<'a> { #[doc = r" Sets the field bit"] pub 
fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _EP_TYPEW<'a> { w: &'a mut W, } impl<'a> _EP_TYPEW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _SETUPW<'a> { w: &'a mut W, } impl<'a> _SETUPW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 11; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _STAT_RXW<'a> { w: &'a mut W, } impl<'a> _STAT_RXW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 12; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _DTOG_RXW<'a> { w: &'a mut W, } impl<'a> _DTOG_RXW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const 
OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _CTR_RXW<'a> { w: &'a mut W, } impl<'a> _CTR_RXW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:3 - Endpoint address"] #[inline(always)] pub fn ea(&self) -> EAR { let bits = { const MASK: u8 = 15; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }; EAR { bits } } #[doc = "Bits 4:5 - Status bits, for transmission transfers"] #[inline(always)] pub fn stat_tx(&self) -> STAT_TXR { let bits = { const MASK: u8 = 3; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) as u8 }; STAT_TXR { bits } } #[doc = "Bit 6 - Data Toggle, for transmission transfers"] #[inline(always)] pub fn dtog_tx(&self) -> DTOG_TXR { let bits = { const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }; DTOG_TXR { bits } } #[doc = "Bit 7 - Correct Transfer for transmission"] #[inline(always)] pub fn ctr_tx(&self) -> CTR_TXR { let bits = { const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u32) != 0 }; CTR_TXR { bits } } #[doc = "Bit 8 - Endpoint kind"] #[inline(always)] pub fn ep_kind(&self) -> EP_KINDR { let bits = { const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }; EP_KINDR { bits } } #[doc = "Bits 9:10 - Endpoint type"] #[inline(always)] pub fn ep_type(&self) -> EP_TYPER { let bits = { const MASK: u8 = 3; 
const OFFSET: u8 = 9; ((self.bits >> OFFSET) & MASK as u32) as u8 }; EP_TYPER { bits } } #[doc = "Bit 11 - Setup transaction completed"] #[inline(always)] pub fn setup(&self) -> SETUPR { let bits = { const MASK: bool = true; const OFFSET: u8 = 11; ((self.bits >> OFFSET) & MASK as u32) != 0 }; SETUPR { bits } } #[doc = "Bits 12:13 - Status bits, for reception transfers"] #[inline(always)] pub fn stat_rx(&self) -> STAT_RXR { let bits = { const MASK: u8 = 3; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) as u8 }; STAT_RXR { bits } } #[doc = "Bit 14 - Data Toggle, for reception transfers"] #[inline(always)] pub fn dtog_rx(&self) -> DTOG_RXR { let bits = { const MASK: bool = true; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) != 0 }; DTOG_RXR { bits } } #[doc = "Bit 15 - Correct transfer for reception"] #[inline(always)] pub fn ctr_rx(&self) -> CTR_RXR { let bits = { const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }; CTR_RXR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline(always)] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:3 - Endpoint address"] #[inline(always)] pub fn ea(&mut self) -> _EAW { _EAW { w: self } } #[doc = "Bits 4:5 - Status bits, for transmission transfers"] #[inline(always)] pub fn stat_tx(&mut self) -> _STAT_TXW { _STAT_TXW { w: self } } #[doc = "Bit 6 - Data Toggle, for transmission transfers"] #[inline(always)] pub fn dtog_tx(&mut self) -> _DTOG_TXW { _DTOG_TXW { w: self } } #[doc = "Bit 7 - Correct Transfer for transmission"] #[inline(always)] pub fn ctr_tx(&mut self) -> _CTR_TXW { _CTR_TXW { w: self } } #[doc = "Bit 8 - Endpoint kind"] #[inline(always)] pub fn ep_kind(&mut self) -> _EP_KINDW { _EP_KINDW { w: self } } #[doc = "Bits 9:10 - Endpoint type"] #[inline(always)] pub fn ep_type(&mut self) 
-> _EP_TYPEW { _EP_TYPEW { w: self } } #[doc = "Bit 11 - Setup transaction completed"] #[inline(always)] pub fn setup(&mut self) -> _SETUPW { _SETUPW { w: self } } #[doc = "Bits 12:13 - Status bits, for reception transfers"] #[inline(always)] pub fn stat_rx(&mut self) -> _STAT_RXW { _STAT_RXW { w: self } } #[doc = "Bit 14 - Data Toggle, for reception transfers"] #[inline(always)] pub fn dtog_rx(&mut self) -> _DTOG_RXW { _DTOG_RXW { w: self } } #[doc = "Bit 15 - Correct transfer for reception"] #[inline(always)] pub fn ctr_rx(&mut self) -> _CTR_RXW { _CTR_RXW { w: self } } }
26.161235
65
0.507015
ff274442214730c27610b0e4a23b7b0063a40fad
6,969
use core::{mem, slice}; use crate::data::*; use crate::error::*; use crate::flag::*; use crate::number::*; pub trait SchemeMut { fn handle(&mut self, packet: &mut Packet) { let res = match packet.a { SYS_OPEN => self.open(unsafe { slice::from_raw_parts(packet.b as *const u8, packet.c) }, packet.d, packet.uid, packet.gid), SYS_CHMOD => self.chmod(unsafe { slice::from_raw_parts(packet.b as *const u8, packet.c) }, packet.d as u16, packet.uid, packet.gid), SYS_RMDIR => self.rmdir(unsafe { slice::from_raw_parts(packet.b as *const u8, packet.c) }, packet.uid, packet.gid), SYS_UNLINK => self.unlink(unsafe { slice::from_raw_parts(packet.b as *const u8, packet.c) }, packet.uid, packet.gid), SYS_DUP => self.dup(packet.b, unsafe { slice::from_raw_parts(packet.c as *const u8, packet.d) }), SYS_READ => self.read(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }), SYS_WRITE => self.write(packet.b, unsafe { slice::from_raw_parts(packet.c as *const u8, packet.d) }), SYS_LSEEK => self.seek(packet.b, packet.c as isize, packet.d).map(|o| o as usize), SYS_FCHMOD => self.fchmod(packet.b, packet.c as u16), SYS_FCHOWN => self.fchown(packet.b, packet.c as u32, packet.d as u32), SYS_FCNTL => self.fcntl(packet.b, packet.c, packet.d), SYS_FEVENT => self.fevent(packet.b, EventFlags::from_bits_truncate(packet.c)).map(|f| f.bits()), SYS_FMAP_OLD => if packet.d >= mem::size_of::<OldMap>() { self.fmap_old(packet.b, unsafe { &*(packet.c as *const OldMap) }) } else { Err(Error::new(EFAULT)) }, SYS_FMAP => if packet.d >= mem::size_of::<Map>() { self.fmap(packet.b, unsafe { &*(packet.c as *const Map) }) } else { Err(Error::new(EFAULT)) }, SYS_FUNMAP_OLD => self.funmap_old(packet.b), SYS_FUNMAP => self.funmap(packet.b, packet.c), SYS_FPATH => self.fpath(packet.b, unsafe { slice::from_raw_parts_mut(packet.c as *mut u8, packet.d) }), SYS_FRENAME => self.frename(packet.b, unsafe { slice::from_raw_parts(packet.c as *const u8, packet.d) }, packet.uid, packet.gid), SYS_FSTAT => 
if packet.d >= mem::size_of::<Stat>() { self.fstat(packet.b, unsafe { &mut *(packet.c as *mut Stat) }) } else { Err(Error::new(EFAULT)) }, SYS_FSTATVFS => if packet.d >= mem::size_of::<StatVfs>() { self.fstatvfs(packet.b, unsafe { &mut *(packet.c as *mut StatVfs) }) } else { Err(Error::new(EFAULT)) }, SYS_FSYNC => self.fsync(packet.b), SYS_FTRUNCATE => self.ftruncate(packet.b, packet.c), SYS_FUTIMENS => if packet.d >= mem::size_of::<TimeSpec>() { self.futimens(packet.b, unsafe { slice::from_raw_parts(packet.c as *const TimeSpec, packet.d / mem::size_of::<TimeSpec>()) }) } else { Err(Error::new(EFAULT)) }, SYS_CLOSE => self.close(packet.b), _ => Err(Error::new(ENOSYS)) }; packet.a = Error::mux(res); } /* Scheme operations */ #[allow(unused_variables)] fn open(&mut self, path: &[u8], flags: usize, uid: u32, gid: u32) -> Result<usize> { Err(Error::new(ENOENT)) } #[allow(unused_variables)] fn chmod(&mut self, path: &[u8], mode: u16, uid: u32, gid: u32) -> Result<usize> { Err(Error::new(ENOENT)) } #[allow(unused_variables)] fn rmdir(&mut self, path: &[u8], uid: u32, gid: u32) -> Result<usize> { Err(Error::new(ENOENT)) } #[allow(unused_variables)] fn unlink(&mut self, path: &[u8], uid: u32, gid: u32) -> Result<usize> { Err(Error::new(ENOENT)) } /* Resource operations */ #[allow(unused_variables)] fn dup(&mut self, old_id: usize, buf: &[u8]) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn read(&mut self, id: usize, buf: &mut [u8]) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn write(&mut self, id: usize, buf: &[u8]) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn seek(&mut self, id: usize, pos: isize, whence: usize) -> Result<isize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fchmod(&mut self, id: usize, mode: u16) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fchown(&mut self, id: usize, uid: u32, gid: u32) -> Result<usize> { Err(Error::new(EBADF)) } 
#[allow(unused_variables)] fn fcntl(&mut self, id: usize, cmd: usize, arg: usize) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fevent(&mut self, id: usize, flags: EventFlags) -> Result<EventFlags> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fmap_old(&mut self, id: usize, map: &OldMap) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fmap(&mut self, id: usize, map: &Map) -> Result<usize> { if map.flags.contains(MapFlags::MAP_FIXED) { return Err(Error::new(EINVAL)); } self.fmap_old(id, &OldMap { offset: map.offset, size: map.size, flags: map.flags, }) } #[allow(unused_variables)] fn funmap_old(&mut self, address: usize) -> Result<usize> { Ok(0) } #[allow(unused_variables)] fn funmap(&mut self, address: usize, length: usize) -> Result<usize> { Ok(0) } #[allow(unused_variables)] fn fpath(&mut self, id: usize, buf: &mut [u8]) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn frename(&mut self, id: usize, path: &[u8], uid: u32, gid: u32) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fstat(&mut self, id: usize, stat: &mut Stat) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fstatvfs(&mut self, id: usize, stat: &mut StatVfs) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn fsync(&mut self, id: usize) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn ftruncate(&mut self, id: usize, len: usize) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn futimens(&mut self, id: usize, times: &[TimeSpec]) -> Result<usize> { Err(Error::new(EBADF)) } #[allow(unused_variables)] fn close(&mut self, id: usize) -> Result<usize> { Err(Error::new(EBADF)) } }
36.486911
144
0.562348
e92566891277e8bdee9545a2ca4dd6f2ffab278a
185
mod config; mod context; mod generator; pub mod utils; pub use generator::generate_from_path; pub use config::Config; pub use context::Context; use crate::renderer::render_to_files;
15.416667
38
0.778378
48695ce40c8616f30157e15e4e13ec239723fdb8
957
// This file was generated by gir (https://github.com/gtk-rs/gir @ 8b9d0bb) // from gir-files (https://github.com/gtk-rs/gir-files @ 77d1f70) // DO NOT EDIT use Box; use Buildable; use Container; use FontChooser; use Orientable; use Widget; use ffi; use glib::object::Downcast; use glib::translate::*; use glib_ffi; use gobject_ffi; use std::mem; use std::ptr; glib_wrapper! { pub struct FontChooserWidget(Object<ffi::GtkFontChooserWidget, ffi::GtkFontChooserWidgetClass>): Box, Container, Widget, Buildable, Orientable, FontChooser; match fn { get_type => || ffi::gtk_font_chooser_widget_get_type(), } } impl FontChooserWidget { pub fn new() -> FontChooserWidget { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_font_chooser_widget_new()).downcast_unchecked() } } } impl Default for FontChooserWidget { fn default() -> Self { Self::new() } }
23.341463
160
0.684431
18197e604ddc61fea5a7b2f28651dc25f29221cc
3,184
// Auto-generated svd2rust register accessor for `se_aes_0_key_6` (one 32-bit
// word of an AES key register). Only comments are added in this review pass;
// the code itself is unchanged generated output.

#[doc = "Register `se_aes_0_key_6` reader"]
pub struct R(crate::R<SE_AES_0_KEY_6_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<SE_AES_0_KEY_6_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<SE_AES_0_KEY_6_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<SE_AES_0_KEY_6_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `se_aes_0_key_6` writer"]
pub struct W(crate::W<SE_AES_0_KEY_6_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<SE_AES_0_KEY_6_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<SE_AES_0_KEY_6_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<SE_AES_0_KEY_6_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Field `se_aes_0_key_6` reader - "]
pub struct SE_AES_0_KEY_6_R(crate::FieldReader<u32, u32>);
impl SE_AES_0_KEY_6_R {
    // Constructed only by `R::se_aes_0_key_6`; wraps the raw register bits.
    #[inline(always)]
    pub(crate) fn new(bits: u32) -> Self {
        SE_AES_0_KEY_6_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for SE_AES_0_KEY_6_R {
    type Target = crate::FieldReader<u32, u32>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `se_aes_0_key_6` writer - "]
pub struct SE_AES_0_KEY_6_W<'a> {
    w: &'a mut W,
}
impl<'a> SE_AES_0_KEY_6_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        self.w.bits = value;
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn se_aes_0_key_6(&self) -> SE_AES_0_KEY_6_R {
        SE_AES_0_KEY_6_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn se_aes_0_key_6(&mut self) -> SE_AES_0_KEY_6_W {
        SE_AES_0_KEY_6_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "se_aes_0_key_6.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [se_aes_0_key_6](index.html) module"]
pub struct SE_AES_0_KEY_6_SPEC;
impl crate::RegisterSpec for SE_AES_0_KEY_6_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [se_aes_0_key_6::R](R) reader structure"]
impl crate::Readable for SE_AES_0_KEY_6_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [se_aes_0_key_6::W](W) writer structure"]
impl crate::Writable for SE_AES_0_KEY_6_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets se_aes_0_key_6 to value 0"]
impl crate::Resettable for SE_AES_0_KEY_6_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
30.615385
410
0.627513
9c081eff888da5e88ba56ded5e3cc3c0f2ec5343
322
#![no_main]

//! Fuzz target: generate an arbitrary WebAssembly module with `wasm-smith`
//! and feed it to Wasmtime's instantiation oracle.

use libfuzzer_sys::fuzz_target;
use wasm_smith::Module;
use wasmtime::Strategy;
use wasmtime_fuzzing::oracles;

fuzz_target!(|module: Module| {
    // Rebind mutably so the generated module can be patched in place
    // (the macro's closure pattern does not accept `mut module` directly).
    let mut module = module;

    // Inject a termination bound so pathological inputs cannot run forever.
    module.ensure_termination(1000);

    // Instantiate through the oracle; any panic surfaced here is a finding.
    oracles::instantiate(&module.to_bytes(), Strategy::Auto);
});
23
54
0.726708
d5b618663dc631cd50daa98c2f9503edcef2b923
48,379
use crate::io::{Interest, PollEvented, ReadBuf, Ready}; use crate::net::unix::SocketAddr; use std::convert::TryFrom; use std::fmt; use std::io; use std::net::Shutdown; use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; use std::os::unix::net; use std::path::Path; use std::task::{Context, Poll}; cfg_io_util! { use bytes::BufMut; } cfg_net_unix! { /// An I/O object representing a Unix datagram socket. /// /// A socket can be either named (associated with a filesystem path) or /// unnamed. /// /// This type does not provide a `split` method, because this functionality /// can be achieved by wrapping the socket in an [`Arc`]. Note that you do /// not need a `Mutex` to share the `UnixDatagram` — an `Arc<UnixDatagram>` /// is enough. This is because all of the methods take `&self` instead of /// `&mut self`. /// /// **Note:** named sockets are persisted even after the object is dropped /// and the program has exited, and cannot be reconnected. It is advised /// that you either check for and unlink the existing socket if it exists, /// or use a temporary file that is guaranteed to not already exist. /// /// [`Arc`]: std::sync::Arc /// /// # Examples /// Using named sockets, associated with a filesystem path: /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. 
/// let tmp = tempdir()?; /// /// // Bind each socket to a filesystem path /// let tx_path = tmp.path().join("tx"); /// let tx = UnixDatagram::bind(&tx_path)?; /// let rx_path = tmp.path().join("rx"); /// let rx = UnixDatagram::bind(&rx_path)?; /// /// let bytes = b"hello world"; /// tx.send_to(bytes, &rx_path).await?; /// /// let mut buf = vec![0u8; 24]; /// let (size, addr) = rx.recv_from(&mut buf).await?; /// /// let dgram = &buf[..size]; /// assert_eq!(dgram, bytes); /// assert_eq!(addr.as_pathname().unwrap(), &tx_path); /// /// # Ok(()) /// # } /// ``` /// /// Using unnamed sockets, created as a pair /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create the pair of sockets /// let (sock1, sock2) = UnixDatagram::pair()?; /// /// // Since the sockets are paired, the paired send/recv /// // functions can be used /// let bytes = b"hello world"; /// sock1.send(bytes).await?; /// /// let mut buff = vec![0u8; 24]; /// let size = sock2.recv(&mut buff).await?; /// /// let dgram = &buff[..size]; /// assert_eq!(dgram, bytes); /// /// # Ok(()) /// # } /// ``` pub struct UnixDatagram { io: PollEvented<mio::net::UnixDatagram>, } } impl UnixDatagram { /// Waits for any of the requested ready states. /// /// This function is usually paired with `try_recv()` or `try_send()`. It /// can be used to concurrently recv / send to the same socket on a single /// task without splitting the socket. /// /// The function may complete without the socket being ready. This is a /// false-positive and attempting an operation will return with /// `io::ErrorKind::WouldBlock`. /// /// # Cancel safety /// /// This method is cancel safe. Once a readiness event occurs, the method /// will continue to return immediately until the readiness event is /// consumed by an attempt to read or write that fails with `WouldBlock` or /// `Poll::Pending`. 
/// /// # Examples /// /// Concurrently receive from and send to the socket on the same task /// without splitting. /// /// ```no_run /// use tokio::io::Interest; /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// let ready = socket.ready(Interest::READABLE | Interest::WRITABLE).await?; /// /// if ready.is_readable() { /// let mut data = [0; 1024]; /// match socket.try_recv(&mut data[..]) { /// Ok(n) => { /// println!("received {:?}", &data[..n]); /// } /// // False-positive, continue /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {} /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// if ready.is_writable() { /// // Write some data /// match socket.try_send(b"hello world") { /// Ok(n) => { /// println!("sent {} bytes", n); /// } /// // False-positive, continue /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {} /// Err(e) => { /// return Err(e); /// } /// } /// } /// } /// } /// ``` pub async fn ready(&self, interest: Interest) -> io::Result<Ready> { let event = self.io.registration().readiness(interest).await?; Ok(event.ready) } /// Waits for the socket to become writable. /// /// This function is equivalent to `ready(Interest::WRITABLE)` and is /// usually paired with `try_send()` or `try_send_to()`. /// /// The function may complete without the socket being writable. This is a /// false-positive and attempting a `try_send()` will return with /// `io::ErrorKind::WouldBlock`. /// /// # Cancel safety /// /// This method is cancel safe. 
Once a readiness event occurs, the method /// will continue to return immediately until the readiness event is /// consumed by an attempt to write that fails with `WouldBlock` or /// `Poll::Pending`. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// // Wait for the socket to be writable /// socket.writable().await?; /// /// // Try to send data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. /// match socket.try_send(b"hello world") { /// Ok(n) => { /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub async fn writable(&self) -> io::Result<()> { self.ready(Interest::WRITABLE).await?; Ok(()) } /// Polls for write/send readiness. /// /// If the socket is not currently ready for sending, this method will /// store a clone of the `Waker` from the provided `Context`. When the socket /// becomes ready for sending, `Waker::wake` will be called on the /// waker. /// /// Note that on multiple calls to `poll_send_ready` or `poll_send`, only /// the `Waker` from the `Context` passed to the most recent call is /// scheduled to receive a wakeup. (However, `poll_recv_ready` retains a /// second, independent waker.) /// /// This function is intended for cases where creating and pinning a future /// via [`writable`] is not feasible. Where possible, using [`writable`] is /// preferred, as this supports polling from multiple tasks at once. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not ready for writing. 
/// * `Poll::Ready(Ok(()))` if the socket is ready for writing. /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. /// /// [`writable`]: method@Self::writable pub fn poll_send_ready(&self, cx: &mut Context<'_>) -> Poll<io::Result<()>> { self.io.registration().poll_write_ready(cx).map_ok(|_| ()) } /// Waits for the socket to become readable. /// /// This function is equivalent to `ready(Interest::READABLE)` and is usually /// paired with `try_recv()`. /// /// The function may complete without the socket being readable. This is a /// false-positive and attempting a `try_recv()` will return with /// `io::ErrorKind::WouldBlock`. /// /// # Cancel safety /// /// This method is cancel safe. Once a readiness event occurs, the method /// will continue to return immediately until the readiness event is /// consumed by an attempt to read that fails with `WouldBlock` or /// `Poll::Pending`. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// // Connect to a peer /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// // Wait for the socket to be readable /// socket.readable().await?; /// /// // The buffer is **not** included in the async task and will /// // only exist on the stack. /// let mut buf = [0; 1024]; /// /// // Try to recv data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. 
/// match socket.try_recv(&mut buf) { /// Ok(n) => { /// println!("GOT {:?}", &buf[..n]); /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub async fn readable(&self) -> io::Result<()> { self.ready(Interest::READABLE).await?; Ok(()) } /// Polls for read/receive readiness. /// /// If the socket is not currently ready for receiving, this method will /// store a clone of the `Waker` from the provided `Context`. When the /// socket becomes ready for reading, `Waker::wake` will be called on the /// waker. /// /// Note that on multiple calls to `poll_recv_ready`, `poll_recv` or /// `poll_peek`, only the `Waker` from the `Context` passed to the most /// recent call is scheduled to receive a wakeup. (However, /// `poll_send_ready` retains a second, independent waker.) /// /// This function is intended for cases where creating and pinning a future /// via [`readable`] is not feasible. Where possible, using [`readable`] is /// preferred, as this supports polling from multiple tasks at once. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not ready for reading. /// * `Poll::Ready(Ok(()))` if the socket is ready for reading. /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. /// /// [`readable`]: method@Self::readable pub fn poll_recv_ready(&self, cx: &mut Context<'_>) -> Poll<io::Result<()>> { self.io.registration().poll_read_ready(cx).map_ok(|_| ()) } /// Creates a new `UnixDatagram` bound to the specified path. 
/// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. /// let tmp = tempdir()?; /// /// // Bind the socket to a filesystem path /// let socket_path = tmp.path().join("socket"); /// let socket = UnixDatagram::bind(&socket_path)?; /// /// # Ok(()) /// # } /// ``` pub fn bind<P>(path: P) -> io::Result<UnixDatagram> where P: AsRef<Path>, { let socket = mio::net::UnixDatagram::bind(path)?; UnixDatagram::new(socket) } /// Creates an unnamed pair of connected sockets. /// /// This function will create a pair of interconnected Unix sockets for /// communicating back and forth between one another. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create the pair of sockets /// let (sock1, sock2) = UnixDatagram::pair()?; /// /// // Since the sockets are paired, the paired send/recv /// // functions can be used /// let bytes = b"hail eris"; /// sock1.send(bytes).await?; /// /// let mut buff = vec![0u8; 24]; /// let size = sock2.recv(&mut buff).await?; /// /// let dgram = &buff[..size]; /// assert_eq!(dgram, bytes); /// /// # Ok(()) /// # } /// ``` pub fn pair() -> io::Result<(UnixDatagram, UnixDatagram)> { let (a, b) = mio::net::UnixDatagram::pair()?; let a = UnixDatagram::new(a)?; let b = UnixDatagram::new(b)?; Ok((a, b)) } /// Creates new `UnixDatagram` from a `std::os::unix::net::UnixDatagram`. /// /// This function is intended to be used to wrap a UnixDatagram from the /// standard library in the Tokio equivalent. The conversion assumes /// nothing about the underlying datagram; it is left up to the user to set /// it in non-blocking mode. 
/// /// # Panics /// /// This function panics if thread-local runtime is not set. /// /// The runtime is usually set implicitly when this function is called /// from a future driven by a Tokio runtime, otherwise runtime can be set /// explicitly with [`Runtime::enter`](crate::runtime::Runtime::enter) function. /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use std::os::unix::net::UnixDatagram as StdUDS; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. /// let tmp = tempdir()?; /// /// // Bind the socket to a filesystem path /// let socket_path = tmp.path().join("socket"); /// let std_socket = StdUDS::bind(&socket_path)?; /// std_socket.set_nonblocking(true)?; /// let tokio_socket = UnixDatagram::from_std(std_socket)?; /// /// # Ok(()) /// # } /// ``` pub fn from_std(datagram: net::UnixDatagram) -> io::Result<UnixDatagram> { let socket = mio::net::UnixDatagram::from_std(datagram); let io = PollEvented::new(socket)?; Ok(UnixDatagram { io }) } /// Turns a [`tokio::net::UnixDatagram`] into a [`std::os::unix::net::UnixDatagram`]. /// /// The returned [`std::os::unix::net::UnixDatagram`] will have nonblocking /// mode set as `true`. Use [`set_nonblocking`] to change the blocking mode /// if needed. 
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use std::error::Error;
    ///
    /// #[tokio::main]
    /// async fn main() -> Result<(), Box<dyn Error>> {
    ///     // We use a temporary directory so that the socket
    ///     // file left by the bound socket will get cleaned up.
    ///     // (A Unix-domain socket binds to a filesystem path, not a
    ///     // host:port address.)
    ///     let tmp = tempfile::tempdir()?;
    ///     let socket_path = tmp.path().join("socket");
    ///     let tokio_socket = tokio::net::UnixDatagram::bind(&socket_path)?;
    ///     let std_socket = tokio_socket.into_std()?;
    ///     std_socket.set_nonblocking(false)?;
    ///     Ok(())
    /// }
    /// ```
    ///
    /// [`tokio::net::UnixDatagram`]: UnixDatagram
    /// [`std::os::unix::net::UnixDatagram`]: std::os::unix::net::UnixDatagram
    /// [`set_nonblocking`]: fn@std::os::unix::net::UnixDatagram::set_nonblocking
    pub fn into_std(self) -> io::Result<std::os::unix::net::UnixDatagram> {
        // Deregister from the runtime's I/O driver, then hand the raw fd to a
        // std socket; the fd remains in non-blocking mode, as documented above.
        self.io
            .into_inner()
            .map(|io| io.into_raw_fd())
            .map(|raw_fd| unsafe { std::os::unix::net::UnixDatagram::from_raw_fd(raw_fd) })
    }

    // Internal constructor: registers the mio socket with the runtime's
    // I/O driver via `PollEvented`.
    fn new(socket: mio::net::UnixDatagram) -> io::Result<UnixDatagram> {
        let io = PollEvented::new(socket)?;
        Ok(UnixDatagram { io })
    }

    /// Creates a new `UnixDatagram` which is not bound to any address.
    ///
    /// # Examples
    /// ```
    /// # use std::error::Error;
    /// # #[tokio::main]
    /// # async fn main() -> Result<(), Box<dyn Error>> {
    /// use tokio::net::UnixDatagram;
    /// use tempfile::tempdir;
    ///
    /// // Create an unbound socket
    /// let tx = UnixDatagram::unbound()?;
    ///
    /// // Create another, bound socket
    /// let tmp = tempdir()?;
    /// let rx_path = tmp.path().join("rx");
    /// let rx = UnixDatagram::bind(&rx_path)?;
    ///
    /// // Send to the bound socket
    /// let bytes = b"hello world";
    /// tx.send_to(bytes, &rx_path).await?;
    ///
    /// let mut buf = vec![0u8; 24];
    /// let (size, addr) = rx.recv_from(&mut buf).await?;
    ///
    /// let dgram = &buf[..size];
    /// assert_eq!(dgram, bytes);
    ///
    /// # Ok(())
    /// # }
    /// ```
    pub fn unbound() -> io::Result<UnixDatagram> {
        let socket = mio::net::UnixDatagram::unbound()?;
        UnixDatagram::new(socket)
    }

    /// Connects the socket to the specified address.
    ///
    /// The `send` method may be used to send data to the specified address.
    /// `recv` and `recv_from` will only receive data from that address.
/// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // Create an unbound socket /// let tx = UnixDatagram::unbound()?; /// /// // Create another, bound socket /// let tmp = tempdir()?; /// let rx_path = tmp.path().join("rx"); /// let rx = UnixDatagram::bind(&rx_path)?; /// /// // Connect to the bound socket /// tx.connect(&rx_path)?; /// /// // Send to the bound socket /// let bytes = b"hello world"; /// tx.send(bytes).await?; /// /// let mut buf = vec![0u8; 24]; /// let (size, addr) = rx.recv_from(&mut buf).await?; /// /// let dgram = &buf[..size]; /// assert_eq!(dgram, bytes); /// /// # Ok(()) /// # } /// ``` pub fn connect<P: AsRef<Path>>(&self, path: P) -> io::Result<()> { self.io.connect(path) } /// Sends data on the socket to the socket's peer. /// /// # Cancel safety /// /// This method is cancel safe. If `send` is used as the event in a /// [`tokio::select!`](crate::select) statement and some other branch /// completes first, then it is guaranteed that the message was not sent. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create the pair of sockets /// let (sock1, sock2) = UnixDatagram::pair()?; /// /// // Since the sockets are paired, the paired send/recv /// // functions can be used /// let bytes = b"hello world"; /// sock1.send(bytes).await?; /// /// let mut buff = vec![0u8; 24]; /// let size = sock2.recv(&mut buff).await?; /// /// let dgram = &buff[..size]; /// assert_eq!(dgram, bytes); /// /// # Ok(()) /// # } /// ``` pub async fn send(&self, buf: &[u8]) -> io::Result<usize> { self.io .registration() .async_io(Interest::WRITABLE, || self.io.send(buf)) .await } /// Tries to send a datagram to the peer without waiting. 
/// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// // Wait for the socket to be writable /// socket.writable().await?; /// /// // Try to send data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. /// match socket.try_send(b"hello world") { /// Ok(n) => { /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_send(&self, buf: &[u8]) -> io::Result<usize> { self.io .registration() .try_io(Interest::WRITABLE, || self.io.send(buf)) } /// Tries to send a datagram to the peer without waiting. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// /// loop { /// // Wait for the socket to be writable /// socket.writable().await?; /// /// // Try to send data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. 
/// match socket.try_send_to(b"hello world", &server_path) { /// Ok(n) => { /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_send_to<P>(&self, buf: &[u8], target: P) -> io::Result<usize> where P: AsRef<Path>, { self.io .registration() .try_io(Interest::WRITABLE, || self.io.send_to(buf, target)) } /// Receives data from the socket. /// /// # Cancel safety /// /// This method is cancel safe. If `recv` is used as the event in a /// [`tokio::select!`](crate::select) statement and some other branch /// completes first, it is guaranteed that no messages were received on this /// socket. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create the pair of sockets /// let (sock1, sock2) = UnixDatagram::pair()?; /// /// // Since the sockets are paired, the paired send/recv /// // functions can be used /// let bytes = b"hello world"; /// sock1.send(bytes).await?; /// /// let mut buff = vec![0u8; 24]; /// let size = sock2.recv(&mut buff).await?; /// /// let dgram = &buff[..size]; /// assert_eq!(dgram, bytes); /// /// # Ok(()) /// # } /// ``` pub async fn recv(&self, buf: &mut [u8]) -> io::Result<usize> { self.io .registration() .async_io(Interest::READABLE, || self.io.recv(buf)) .await } /// Tries to receive a datagram from the peer without waiting. 
/// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// // Connect to a peer /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// // Wait for the socket to be readable /// socket.readable().await?; /// /// // The buffer is **not** included in the async task and will /// // only exist on the stack. /// let mut buf = [0; 1024]; /// /// // Try to recv data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. /// match socket.try_recv(&mut buf) { /// Ok(n) => { /// println!("GOT {:?}", &buf[..n]); /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_recv(&self, buf: &mut [u8]) -> io::Result<usize> { self.io .registration() .try_io(Interest::READABLE, || self.io.recv(buf)) } cfg_io_util! { /// Tries to receive data from the socket without waiting. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// // Connect to a peer /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// /// loop { /// // Wait for the socket to be readable /// socket.readable().await?; /// /// let mut buf = Vec::with_capacity(1024); /// /// // Try to recv data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. 
/// match socket.try_recv_buf_from(&mut buf) { /// Ok((n, _addr)) => { /// println!("GOT {:?}", &buf[..n]); /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_recv_buf_from<B: BufMut>(&self, buf: &mut B) -> io::Result<(usize, SocketAddr)> { let (n, addr) = self.io.registration().try_io(Interest::READABLE, || { let dst = buf.chunk_mut(); let dst = unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit<u8>] as *mut [u8]) }; // Safety: We trust `UnixDatagram::recv_from` to have filled up `n` bytes in the // buffer. let (n, addr) = (&*self.io).recv_from(dst)?; unsafe { buf.advance_mut(n); } Ok((n, addr)) })?; Ok((n, SocketAddr(addr))) } /// Tries to read data from the stream into the provided buffer, advancing the /// buffer's internal cursor, returning how many bytes were read. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// // Connect to a peer /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// socket.connect(&server_path)?; /// /// loop { /// // Wait for the socket to be readable /// socket.readable().await?; /// /// let mut buf = Vec::with_capacity(1024); /// /// // Try to recv data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. 
/// match socket.try_recv_buf(&mut buf) { /// Ok(n) => { /// println!("GOT {:?}", &buf[..n]); /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_recv_buf<B: BufMut>(&self, buf: &mut B) -> io::Result<usize> { self.io.registration().try_io(Interest::READABLE, || { let dst = buf.chunk_mut(); let dst = unsafe { &mut *(dst as *mut _ as *mut [std::mem::MaybeUninit<u8>] as *mut [u8]) }; // Safety: We trust `UnixDatagram::recv` to have filled up `n` bytes in the // buffer. let n = (&*self.io).recv(dst)?; unsafe { buf.advance_mut(n); } Ok(n) }) } } /// Sends data on the socket to the specified address. /// /// # Cancel safety /// /// This method is cancel safe. If `send_to` is used as the event in a /// [`tokio::select!`](crate::select) statement and some other branch /// completes first, then it is guaranteed that the message was not sent. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. 
/// let tmp = tempdir()?; /// /// // Bind each socket to a filesystem path /// let tx_path = tmp.path().join("tx"); /// let tx = UnixDatagram::bind(&tx_path)?; /// let rx_path = tmp.path().join("rx"); /// let rx = UnixDatagram::bind(&rx_path)?; /// /// let bytes = b"hello world"; /// tx.send_to(bytes, &rx_path).await?; /// /// let mut buf = vec![0u8; 24]; /// let (size, addr) = rx.recv_from(&mut buf).await?; /// /// let dgram = &buf[..size]; /// assert_eq!(dgram, bytes); /// assert_eq!(addr.as_pathname().unwrap(), &tx_path); /// /// # Ok(()) /// # } /// ``` pub async fn send_to<P>(&self, buf: &[u8], target: P) -> io::Result<usize> where P: AsRef<Path>, { self.io .registration() .async_io(Interest::WRITABLE, || self.io.send_to(buf, target.as_ref())) .await } /// Receives data from the socket. /// /// # Cancel safety /// /// This method is cancel safe. If `recv_from` is used as the event in a /// [`tokio::select!`](crate::select) statement and some other branch /// completes first, it is guaranteed that no messages were received on this /// socket. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. 
/// let tmp = tempdir()?; /// /// // Bind each socket to a filesystem path /// let tx_path = tmp.path().join("tx"); /// let tx = UnixDatagram::bind(&tx_path)?; /// let rx_path = tmp.path().join("rx"); /// let rx = UnixDatagram::bind(&rx_path)?; /// /// let bytes = b"hello world"; /// tx.send_to(bytes, &rx_path).await?; /// /// let mut buf = vec![0u8; 24]; /// let (size, addr) = rx.recv_from(&mut buf).await?; /// /// let dgram = &buf[..size]; /// assert_eq!(dgram, bytes); /// assert_eq!(addr.as_pathname().unwrap(), &tx_path); /// /// # Ok(()) /// # } /// ``` pub async fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { let (n, addr) = self .io .registration() .async_io(Interest::READABLE, || self.io.recv_from(buf)) .await?; Ok((n, SocketAddr(addr))) } /// Attempts to receive a single datagram on the specified address. /// /// Note that on multiple calls to a `poll_*` method in the recv direction, only the /// `Waker` from the `Context` passed to the most recent call will be scheduled to /// receive a wakeup. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not ready to read /// * `Poll::Ready(Ok(addr))` reads data from `addr` into `ReadBuf` if the socket is ready /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. pub fn poll_recv_from( &self, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>, ) -> Poll<io::Result<SocketAddr>> { let (n, addr) = ready!(self.io.registration().poll_read_io(cx, || { // Safety: will not read the maybe uninitialized bytes. let b = unsafe { &mut *(buf.unfilled_mut() as *mut [std::mem::MaybeUninit<u8>] as *mut [u8]) }; self.io.recv_from(b) }))?; // Safety: We trust `recv` to have filled up `n` bytes in the buffer. unsafe { buf.assume_init(n); } buf.advance(n); Poll::Ready(Ok(SocketAddr(addr))) } /// Attempts to send data to the specified address. 
/// /// Note that on multiple calls to a `poll_*` method in the send direction, only the /// `Waker` from the `Context` passed to the most recent call will be scheduled to /// receive a wakeup. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not ready to write /// * `Poll::Ready(Ok(n))` `n` is the number of bytes sent. /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. pub fn poll_send_to<P>( &self, cx: &mut Context<'_>, buf: &[u8], target: P, ) -> Poll<io::Result<usize>> where P: AsRef<Path>, { self.io .registration() .poll_write_io(cx, || self.io.send_to(buf, target.as_ref())) } /// Attempts to send data on the socket to the remote address to which it /// was previously `connect`ed. /// /// The [`connect`] method will connect this socket to a remote address. /// This method will fail if the socket is not connected. /// /// Note that on multiple calls to a `poll_*` method in the send direction, /// only the `Waker` from the `Context` passed to the most recent call will /// be scheduled to receive a wakeup. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not available to write /// * `Poll::Ready(Ok(n))` `n` is the number of bytes sent /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. /// /// [`connect`]: method@Self::connect pub fn poll_send(&self, cx: &mut Context<'_>, buf: &[u8]) -> Poll<io::Result<usize>> { self.io .registration() .poll_write_io(cx, || self.io.send(buf)) } /// Attempts to receive a single datagram message on the socket from the remote /// address to which it is `connect`ed. /// /// The [`connect`] method will connect this socket to a remote address. This method /// resolves to an error if the socket is not connected. 
/// /// Note that on multiple calls to a `poll_*` method in the recv direction, only the /// `Waker` from the `Context` passed to the most recent call will be scheduled to /// receive a wakeup. /// /// # Return value /// /// The function returns: /// /// * `Poll::Pending` if the socket is not ready to read /// * `Poll::Ready(Ok(()))` reads data `ReadBuf` if the socket is ready /// * `Poll::Ready(Err(e))` if an error is encountered. /// /// # Errors /// /// This function may encounter any standard I/O error except `WouldBlock`. /// /// [`connect`]: method@Self::connect pub fn poll_recv(&self, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>) -> Poll<io::Result<()>> { let n = ready!(self.io.registration().poll_read_io(cx, || { // Safety: will not read the maybe uninitialized bytes. let b = unsafe { &mut *(buf.unfilled_mut() as *mut [std::mem::MaybeUninit<u8>] as *mut [u8]) }; self.io.recv(b) }))?; // Safety: We trust `recv` to have filled up `n` bytes in the buffer. unsafe { buf.assume_init(n); } buf.advance(n); Poll::Ready(Ok(())) } /// Tries to receive data from the socket without waiting. /// /// # Examples /// /// ```no_run /// use tokio::net::UnixDatagram; /// use std::io; /// /// #[tokio::main] /// async fn main() -> io::Result<()> { /// // Connect to a peer /// let dir = tempfile::tempdir().unwrap(); /// let client_path = dir.path().join("client.sock"); /// let server_path = dir.path().join("server.sock"); /// let socket = UnixDatagram::bind(&client_path)?; /// /// loop { /// // Wait for the socket to be readable /// socket.readable().await?; /// /// // The buffer is **not** included in the async task and will /// // only exist on the stack. /// let mut buf = [0; 1024]; /// /// // Try to recv data, this may still fail with `WouldBlock` /// // if the readiness event is a false positive. 
/// match socket.try_recv_from(&mut buf) { /// Ok((n, _addr)) => { /// println!("GOT {:?}", &buf[..n]); /// break; /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// continue; /// } /// Err(e) => { /// return Err(e); /// } /// } /// } /// /// Ok(()) /// } /// ``` pub fn try_recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { let (n, addr) = self .io .registration() .try_io(Interest::READABLE, || self.io.recv_from(buf))?; Ok((n, SocketAddr(addr))) } /// Tries to read or write from the socket using a user-provided IO operation. /// /// If the socket is ready, the provided closure is called. The closure /// should attempt to perform IO operation from the socket by manually /// calling the appropriate syscall. If the operation fails because the /// socket is not actually ready, then the closure should return a /// `WouldBlock` error and the readiness flag is cleared. The return value /// of the closure is then returned by `try_io`. /// /// If the socket is not ready, then the closure is not called /// and a `WouldBlock` error is returned. /// /// The closure should only return a `WouldBlock` error if it has performed /// an IO operation on the socket that failed due to the socket not being /// ready. Returning a `WouldBlock` error in any other situation will /// incorrectly clear the readiness flag, which can cause the socket to /// behave incorrectly. /// /// The closure should not perform the IO operation using any of the methods /// defined on the Tokio `UnixDatagram` type, as this will mess with the /// readiness flag and can cause the socket to behave incorrectly. /// /// Usually, [`readable()`], [`writable()`] or [`ready()`] is used with this function. 
/// /// [`readable()`]: UnixDatagram::readable() /// [`writable()`]: UnixDatagram::writable() /// [`ready()`]: UnixDatagram::ready() pub fn try_io<R>( &self, interest: Interest, f: impl FnOnce() -> io::Result<R>, ) -> io::Result<R> { self.io.registration().try_io(interest, f) } /// Returns the local address that this socket is bound to. /// /// # Examples /// For a socket bound to a local path /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // We use a temporary directory so that the socket /// // files left by the bound sockets will get cleaned up. /// let tmp = tempdir()?; /// /// // Bind socket to a filesystem path /// let socket_path = tmp.path().join("socket"); /// let socket = UnixDatagram::bind(&socket_path)?; /// /// assert_eq!(socket.local_addr()?.as_pathname().unwrap(), &socket_path); /// /// # Ok(()) /// # } /// ``` /// /// For an unbound socket /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create an unbound socket /// let socket = UnixDatagram::unbound()?; /// /// assert!(socket.local_addr()?.is_unnamed()); /// /// # Ok(()) /// # } /// ``` pub fn local_addr(&self) -> io::Result<SocketAddr> { self.io.local_addr().map(SocketAddr) } /// Returns the address of this socket's peer. /// /// The `connect` method will connect the socket to a peer. 
/// /// # Examples /// For a peer with a local path /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use tempfile::tempdir; /// /// // Create an unbound socket /// let tx = UnixDatagram::unbound()?; /// /// // Create another, bound socket /// let tmp = tempdir()?; /// let rx_path = tmp.path().join("rx"); /// let rx = UnixDatagram::bind(&rx_path)?; /// /// // Connect to the bound socket /// tx.connect(&rx_path)?; /// /// assert_eq!(tx.peer_addr()?.as_pathname().unwrap(), &rx_path); /// /// # Ok(()) /// # } /// ``` /// /// For an unbound peer /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create the pair of sockets /// let (sock1, sock2) = UnixDatagram::pair()?; /// /// assert!(sock1.peer_addr()?.is_unnamed()); /// /// # Ok(()) /// # } /// ``` pub fn peer_addr(&self) -> io::Result<SocketAddr> { self.io.peer_addr().map(SocketAddr) } /// Returns the value of the `SO_ERROR` option. /// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// /// // Create an unbound socket /// let socket = UnixDatagram::unbound()?; /// /// if let Ok(Some(err)) = socket.take_error() { /// println!("Got error: {:?}", err); /// } /// /// # Ok(()) /// # } /// ``` pub fn take_error(&self) -> io::Result<Option<io::Error>> { self.io.take_error() } /// Shuts down the read, write, or both halves of this connection. /// /// This function will cause all pending and future I/O calls on the /// specified portions to immediately return with an appropriate value /// (see the documentation of `Shutdown`). 
/// /// # Examples /// ``` /// # use std::error::Error; /// # #[tokio::main] /// # async fn main() -> Result<(), Box<dyn Error>> { /// use tokio::net::UnixDatagram; /// use std::net::Shutdown; /// /// // Create an unbound socket /// let (socket, other) = UnixDatagram::pair()?; /// /// socket.shutdown(Shutdown::Both)?; /// /// // NOTE: the following commented out code does NOT work as expected. /// // Due to an underlying issue, the recv call will block indefinitely. /// // See: https://github.com/tokio-rs/tokio/issues/1679 /// //let mut buff = vec![0u8; 24]; /// //let size = socket.recv(&mut buff).await?; /// //assert_eq!(size, 0); /// /// let send_result = socket.send(b"hello world").await; /// assert!(send_result.is_err()); /// /// # Ok(()) /// # } /// ``` pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { self.io.shutdown(how) } } impl TryFrom<std::os::unix::net::UnixDatagram> for UnixDatagram { type Error = io::Error; /// Consumes stream, returning the Tokio I/O object. /// /// This is equivalent to /// [`UnixDatagram::from_std(stream)`](UnixDatagram::from_std). fn try_from(stream: std::os::unix::net::UnixDatagram) -> Result<Self, Self::Error> { Self::from_std(stream) } } impl fmt::Debug for UnixDatagram { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.io.fmt(f) } } impl AsRawFd for UnixDatagram { fn as_raw_fd(&self) -> RawFd { self.io.as_raw_fd() } }
33.997892
102
0.505033
db32a35fc7894ca61ca318ac4e89a81acaa771c9
844
use bevy::prelude::*; use bevy_mod_picking::{PickingCamera, PickingPlugin}; use board::BoardPlugin; use camera::CameraPlugin; use pieces::PiecesPlugin; use ui::UIPlugin; mod board; mod camera; mod pieces; mod ui; fn main() { App::build() // Set antialiasing to use 4 samples .insert_resource(Msaa { samples: 4 }) // Set WindowDescriptor Resource to change title and size .insert_resource(WindowDescriptor { title: "Chess!".to_string(), width: 600., height: 600., ..Default::default() }) .add_plugins(DefaultPlugins) .init_resource::<PickingCamera>() .add_plugin(PickingPlugin) .add_plugin(BoardPlugin) .add_plugin(PiecesPlugin) .add_plugin(CameraPlugin) .add_plugin(UIPlugin) .run(); }
25.575758
65
0.617299
4a93f93533a6611c19c4564c07a6fc87c69cae58
8,305
//! A simple example demonstrating how one might implement a custom //! subscriber. //! //! This subscriber implements a tree-structured logger similar to //! the "compact" formatter in [`slog-term`]. The demo mimicks the //! example output in the screenshot in the [`slog` README]. //! //! Note that this logger isn't ready for actual production use. //! Several corners were cut to make the example simple. //! //! [`slog-term`]: https://docs.rs/slog-term/2.4.0/slog_term/ //! [`slog` README]: https://github.com/slog-rs/slog#terminal-output-example extern crate ansi_term; extern crate humantime; use self::ansi_term::{Color, Style}; use super::tokio_trace::{ self, field::{Field, Record}, Id, Level, Subscriber, }; use std::{ cell::RefCell, collections::HashMap, fmt, io::{self, Write}, sync::{ atomic::{AtomicUsize, Ordering}, Mutex, }, thread, time::SystemTime, }; /// Tracks the currently executing span on a per-thread basis. #[derive(Clone)] pub struct CurrentSpanPerThread { current: &'static thread::LocalKey<RefCell<Vec<Id>>>, } impl CurrentSpanPerThread { pub fn new() -> Self { thread_local! { static CURRENT: RefCell<Vec<Id>> = RefCell::new(vec![]); }; Self { current: &CURRENT } } /// Returns the [`Id`](::Id) of the span in which the current thread is /// executing, or `None` if it is not inside of a span. pub fn id(&self) -> Option<Id> { self.current .with(|current| current.borrow().last().cloned()) } pub fn enter(&self, span: Id) { self.current.with(|current| { current.borrow_mut().push(span); }) } pub fn exit(&self) { self.current.with(|current| { let _ = current.borrow_mut().pop(); }) } } pub struct SloggishSubscriber { // TODO: this can probably be unified with the "stack" that's used for // printing? 
current: CurrentSpanPerThread, indent_amount: usize, stderr: io::Stderr, stack: Mutex<Vec<Id>>, spans: Mutex<HashMap<Id, Span>>, ids: AtomicUsize, } struct Span { parent: Option<Id>, kvs: Vec<(&'static str, String)>, } struct Event<'a> { stderr: io::StderrLock<'a>, comma: bool, } struct ColorLevel<'a>(&'a Level); impl<'a> fmt::Display for ColorLevel<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self.0 { &Level::TRACE => Color::Purple.paint("TRACE"), &Level::DEBUG => Color::Blue.paint("DEBUG"), &Level::INFO => Color::Green.paint("INFO "), &Level::WARN => Color::Yellow.paint("WARN "), &Level::ERROR => Color::Red.paint("ERROR"), } .fmt(f) } } impl Span { fn new( parent: Option<Id>, _meta: &tokio_trace::Metadata, values: &tokio_trace::field::ValueSet, ) -> Self { let mut span = Self { parent, kvs: Vec::new(), }; values.record(&mut span); span } } impl Record for Span { fn record_debug(&mut self, field: &Field, value: &fmt::Debug) { self.kvs.push((field.name(), format!("{:?}", value))) } } impl<'a> Record for Event<'a> { fn record_debug(&mut self, field: &Field, value: &fmt::Debug) { write!( &mut self.stderr, "{comma} ", comma = if self.comma { "," } else { "" }, ) .unwrap(); let name = field.name(); if name == "message" { write!( &mut self.stderr, "{}", // Have to alloc here due to `ansi_term`'s API... 
Style::new().bold().paint(format!("{:?}", value)) ) .unwrap(); self.comma = true; } else { write!( &mut self.stderr, "{}: {:?}", Style::new().bold().paint(name), value ) .unwrap(); self.comma = true; } } } impl SloggishSubscriber { pub fn new(indent_amount: usize) -> Self { Self { current: CurrentSpanPerThread::new(), indent_amount, stderr: io::stderr(), stack: Mutex::new(vec![]), spans: Mutex::new(HashMap::new()), ids: AtomicUsize::new(0), } } fn print_kvs<'a, I, K, V>( &self, writer: &mut impl Write, kvs: I, leading: &str, ) -> io::Result<()> where I: IntoIterator<Item = (K, V)>, K: AsRef<str> + 'a, V: fmt::Display + 'a, { let mut kvs = kvs.into_iter(); if let Some((k, v)) = kvs.next() { write!( writer, "{}{}: {}", leading, Style::new().bold().paint(k.as_ref()), v )?; } for (k, v) in kvs { write!(writer, ", {}: {}", Style::new().bold().paint(k.as_ref()), v)?; } Ok(()) } fn print_indent(&self, writer: &mut impl Write, indent: usize) -> io::Result<()> { for _ in 0..(indent * self.indent_amount) { write!(writer, " ")?; } Ok(()) } } impl Subscriber for SloggishSubscriber { fn enabled(&self, _metadata: &tokio_trace::Metadata) -> bool { true } fn new_span( &self, span: &tokio_trace::Metadata, values: &tokio_trace::field::ValueSet, ) -> tokio_trace::Id { let next = self.ids.fetch_add(1, Ordering::SeqCst) as u64; let id = tokio_trace::Id::from_u64(next); let span = Span::new(self.current.id(), span, values); self.spans.lock().unwrap().insert(id.clone(), span); id } fn record(&self, span: &tokio_trace::Id, values: &tokio_trace::field::ValueSet) { let mut spans = self.spans.lock().expect("mutex poisoned!"); if let Some(span) = spans.get_mut(span) { values.record(span); } } fn record_follows_from(&self, _span: &tokio_trace::Id, _follows: &tokio_trace::Id) { // unimplemented } fn enter(&self, span_id: &tokio_trace::Id) { self.current.enter(span_id.clone()); let mut stderr = self.stderr.lock(); let mut stack = self.stack.lock().unwrap(); let spans = 
self.spans.lock().unwrap(); let data = spans.get(span_id); let parent = data.and_then(|span| span.parent.as_ref()); if stack.iter().any(|id| id == span_id) { // We are already in this span, do nothing. return; } else { let indent = if let Some(idx) = stack .iter() .position(|id| parent.map(|p| id == p).unwrap_or(false)) { let idx = idx + 1; stack.truncate(idx); idx } else { stack.clear(); 0 }; self.print_indent(&mut stderr, indent).unwrap(); stack.push(span_id.clone()); if let Some(data) = data { self.print_kvs(&mut stderr, data.kvs.iter().map(|(k, v)| (k, v)), "") .unwrap(); } write!(&mut stderr, "\n").unwrap(); } } fn event(&self, event: &tokio_trace::Event) { let mut stderr = self.stderr.lock(); let indent = self.stack.lock().unwrap().len(); self.print_indent(&mut stderr, indent).unwrap(); write!( &mut stderr, "{timestamp} {level} {target}", timestamp = humantime::format_rfc3339_seconds(SystemTime::now()), level = ColorLevel(event.metadata().level()), target = &event.metadata().target(), ) .unwrap(); let mut recorder = Event { stderr, comma: false, }; event.record(&mut recorder); write!(&mut recorder.stderr, "\n").unwrap(); } #[inline] fn exit(&self, _span: &tokio_trace::Id) { // TODO: unify stack with current span self.current.exit(); } fn drop_span(&self, _id: tokio_trace::Id) { // TODO: GC unneeded spans. } }
28.248299
88
0.510777
0e4872d7c2f00a97ea6472f254954929ca6cd005
171
use crate::serde::serializable_enum; serializable_enum! { pub enum LiquidType: u8 { None = 0, Water = 1, Lava = 2, Honey = 3, } }
15.545455
36
0.51462
28579b42d82a58377ab7ddaf942abc1078e810be
355
extern crate noise; use noise::{utils::*, Blend, Fbm, Perlin, RidgedMulti}; fn main() { let perlin = Perlin::default(); let ridged = RidgedMulti::<Perlin>::default(); let fbm = Fbm::<Perlin>::default(); let blend = Blend::new(perlin, ridged, fbm); PlaneMapBuilder::new(blend) .build() .write_to_file("blend.png"); }
23.666667
55
0.611268
f4ecc47f8fed1885514dd8880c64cd09177b243b
1,040
use crate::ShardManagerContainer; use log::{error, info}; use serenity::client::Context; use serenity::framework::standard::macros::command; use serenity::framework::standard::CommandResult; use serenity::model::prelude::Message; #[command] #[owners_only] #[description = "Gracefully shutdowns the bot."] pub fn shutdown(context: &mut Context, message: &Message) -> CommandResult { message.channel_id.send_message(&context, |message| message.content("Shutting down..."))?; let data = context.data.write(); let manager = match data.get::<ShardManagerContainer>() { Some(shard) => shard, None => { error!("Unable to get the shard manager...killing the bot ungracefully."); std::process::exit(0); } }; if let Some(mut shards) = manager.try_lock() { info!("Shutting down all shards..."); shards.shutdown_all(); } else { error!("Unable to get shard manager...killing the bot ungracefully."); std::process::exit(0); } Ok(()) }
28.888889
94
0.639423
bbe05bbdac186952e94f27919e5f468fd2fb757c
54,034
#![allow(non_snake_case)] extern crate ndarray; extern crate defmac; extern crate itertools; use ndarray::{Slice, SliceInfo, SliceOrIndex}; use ndarray::prelude::*; use ndarray::{ rcarr2, arr3, multislice, }; use ndarray::indices; use defmac::defmac; use itertools::{enumerate, zip}; macro_rules! assert_panics { ($body:expr) => { if let Ok(v) = ::std::panic::catch_unwind(|| $body) { panic!("assertion failed: should_panic; \ non-panicking result: {:?}", v); } }; ($body:expr, $($arg:tt)*) => { if let Ok(_) = ::std::panic::catch_unwind(|| $body) { panic!($($arg)*); } }; } #[test] fn test_matmul_arcarray() { let mut A = ArcArray::<usize, _>::zeros((2, 3)); for (i, elt) in A.iter_mut().enumerate() { *elt = i; } let mut B = ArcArray::<usize, _>::zeros((3, 4)); for (i, elt) in B.iter_mut().enumerate() { *elt = i; } let c = A.dot(&B); println!("A = \n{:?}", A); println!("B = \n{:?}", B); println!("A x B = \n{:?}", c); unsafe { let result = ArcArray::from_shape_vec_unchecked((2, 4), vec![20, 23, 26, 29, 56, 68, 80, 92]); assert_eq!(c.shape(), result.shape()); assert!(c.iter().zip(result.iter()).all(|(a,b)| a == b)); assert!(c == result); } } #[allow(unused)] fn arrayview_shrink_lifetime<'a, 'b: 'a>(view: ArrayView1<'b, f64>) -> ArrayView1<'a, f64> { view.reborrow() } #[allow(unused)] fn arrayviewmut_shrink_lifetime<'a, 'b: 'a>(view: ArrayViewMut1<'b, f64>) -> ArrayViewMut1<'a, f64> { view.reborrow() } #[test] fn test_mat_mul() { // smoke test, a big matrix multiplication of uneven size let (n, m) = (45, 33); let a = ArcArray::linspace(0., ((n * m) - 1) as f32, n as usize * m as usize ).reshape((n, m)); let b = ArcArray::eye(m); assert_eq!(a.dot(&b), a); let c = ArcArray::eye(n); assert_eq!(c.dot(&a), a); } #[deny(unsafe_code)] #[test] fn test_slice() { let mut A = ArcArray::<usize, _>::zeros((3, 4, 5)); for (i, elt) in A.iter_mut().enumerate() { *elt = i; } let vi = A.slice(s![1.., ..;2, Slice::new(0, None, 2)]); assert_eq!(vi.shape(), &[2, 2, 3]); let vi = A.slice(s![.., 
.., ..]); assert_eq!(vi.shape(), A.shape()); assert!(vi.iter().zip(A.iter()).all(|(a, b)| a == b)); } #[test] fn test_slice_inclusive_range() { let arr = array![[1, 2, 3], [4, 5, 6]]; assert_eq!(arr.slice(s![1..=1, 1..=2]), array![[5, 6]]); assert_eq!(arr.slice(s![1..=-1, -2..=2;-1]), array![[6, 5]]); assert_eq!(arr.slice(s![0..=-1, 0..=2;2]), array![[1, 3], [4, 6]]); } /// Test that the compiler can infer a type for a sliced array from the /// arguments to `s![]`. /// /// This test relies on the fact that `.dot()` is implemented for both /// `ArrayView1` and `ArrayView2`, so the compiler needs to determine which /// type is the correct result for the `.slice()` call. #[test] fn test_slice_infer() { let a = array![1., 2.]; let b = array![[3., 4.], [5., 6.]]; b.slice(s![..-1, ..]).dot(&a); // b.slice(s![0, ..]).dot(&a); } #[test] fn test_slice_with_many_dim() { let mut A = ArcArray::<usize, _>::zeros(&[3, 1, 4, 1, 3, 2, 1][..]); for (i, elt) in A.iter_mut().enumerate() { *elt = i; } let vi = A.slice(s![..2, .., ..;2, ..1, ..1, 1.., ..]); let new_shape = &[2, 1, 2, 1, 1, 1, 1][..]; assert_eq!(vi.shape(), new_shape); let correct = array![ [A[&[0, 0, 0, 0, 0, 1, 0][..]], A[&[0, 0, 2, 0, 0, 1, 0][..]]], [A[&[1, 0, 0, 0, 0, 1, 0][..]], A[&[1, 0, 2, 0, 0, 1, 0][..]]] ].into_shape(new_shape) .unwrap(); assert_eq!(vi, correct); let vi = A.slice(s![..2, 0, ..;2, 0, 0, 1, 0]); assert_eq!(vi.shape(), &[2, 2][..]); let correct = array![ [A[&[0, 0, 0, 0, 0, 1, 0][..]], A[&[0, 0, 2, 0, 0, 1, 0][..]]], [A[&[1, 0, 0, 0, 0, 1, 0][..]], A[&[1, 0, 2, 0, 0, 1, 0][..]]] ]; assert_eq!(vi, correct); } #[test] fn test_slice_range_variable() { let range = 1..4; let arr = array![0, 1, 2, 3, 4]; assert_eq!(arr.slice(s![range]), array![1, 2, 3]); } #[test] fn test_slice_args_eval_range_once() { let mut eval_count = 0; { let mut range = || { eval_count += 1; 1..4 }; let arr = array![0, 1, 2, 3, 4]; assert_eq!(arr.slice(s![range()]), array![1, 2, 3]); } assert_eq!(eval_count, 1); } #[test] fn 
test_slice_args_eval_step_once() { let mut eval_count = 0; { let mut step = || { eval_count += 1; -1 }; let arr = array![0, 1, 2, 3, 4]; assert_eq!(arr.slice(s![1..4;step()]), array![3, 2, 1]); } assert_eq!(eval_count, 1); } #[test] fn test_slice_array_fixed() { let mut arr = Array3::<f64>::zeros((5, 2, 5)); let info = s![1.., 1, ..;2]; arr.slice(info); arr.slice_mut(info); arr.view().slice_move(info); arr.view().slice_collapse(info); } #[test] fn test_slice_dyninput_array_fixed() { let mut arr = Array3::<f64>::zeros((5, 2, 5)).into_dyn(); let info = s![1.., 1, ..;2]; arr.slice(info); arr.slice_mut(info); arr.view().slice_move(info); arr.view().slice_collapse(info.as_ref()); } #[test] fn test_slice_array_dyn() { let mut arr = Array3::<f64>::zeros((5, 2, 5)); let info = &SliceInfo::<_, IxDyn>::new([ SliceOrIndex::from(1..), SliceOrIndex::from(1), SliceOrIndex::from(..).step_by(2), ]).unwrap(); arr.slice(info); arr.slice_mut(info); arr.view().slice_move(info); arr.view().slice_collapse(info); } #[test] fn test_slice_dyninput_array_dyn() { let mut arr = Array3::<f64>::zeros((5, 2, 5)).into_dyn(); let info = &SliceInfo::<_, IxDyn>::new([ SliceOrIndex::from(1..), SliceOrIndex::from(1), SliceOrIndex::from(..).step_by(2), ]).unwrap(); arr.slice(info); arr.slice_mut(info); arr.view().slice_move(info); arr.view().slice_collapse(info.as_ref()); } #[test] fn test_slice_dyninput_vec_fixed() { let mut arr = Array3::<f64>::zeros((5, 2, 5)).into_dyn(); let info = &SliceInfo::<_, Ix2>::new(vec![ SliceOrIndex::from(1..), SliceOrIndex::from(1), SliceOrIndex::from(..).step_by(2), ]).unwrap(); arr.slice(info.as_ref()); arr.slice_mut(info.as_ref()); arr.view().slice_move(info.as_ref()); arr.view().slice_collapse(info.as_ref()); } #[test] fn test_slice_dyninput_vec_dyn() { let mut arr = Array3::<f64>::zeros((5, 2, 5)).into_dyn(); let info = &SliceInfo::<_, IxDyn>::new(vec![ SliceOrIndex::from(1..), SliceOrIndex::from(1), SliceOrIndex::from(..).step_by(2), ]).unwrap(); 
arr.slice(info.as_ref()); arr.slice_mut(info.as_ref()); arr.view().slice_move(info.as_ref()); arr.view().slice_collapse(info.as_ref()); } #[test] fn test_slice_with_subview() { let mut arr = ArcArray::<usize, _>::zeros((3, 5, 4)); for (i, elt) in arr.iter_mut().enumerate() { *elt = i; } let vi = arr.slice(s![1.., 2, ..;2]); assert_eq!(vi.shape(), &[2, 2]); assert!( vi.iter() .zip(arr.index_axis(Axis(1), 2).slice(s![1.., ..;2]).iter()) .all(|(a, b)| a == b) ); let vi = arr.slice(s![1, 2, ..;2]); assert_eq!(vi.shape(), &[2]); assert!( vi.iter() .zip( arr.index_axis(Axis(0), 1) .index_axis(Axis(0), 2) .slice(s![..;2]) .iter() ) .all(|(a, b)| a == b) ); let vi = arr.slice(s![1, 2, 3]); assert_eq!(vi.shape(), &[]); assert_eq!(vi, Array0::from_elem((), arr[(1, 2, 3)])); } #[test] fn test_slice_collapse_with_indices() { let mut arr = ArcArray::<usize, _>::zeros((3, 5, 4)); for (i, elt) in arr.iter_mut().enumerate() { *elt = i; } { let mut vi = arr.view(); vi.slice_collapse(s![1.., 2, ..;2]); assert_eq!(vi.shape(), &[2, 1, 2]); assert!( vi.iter() .zip(arr.slice(s![1.., 2..3, ..;2]).iter()) .all(|(a, b)| a == b) ); let mut vi = arr.view(); vi.slice_collapse(s![1, 2, ..;2]); assert_eq!(vi.shape(), &[1, 1, 2]); assert!( vi.iter() .zip(arr.slice(s![1..2, 2..3, ..;2]).iter()) .all(|(a, b)| a == b) ); let mut vi = arr.view(); vi.slice_collapse(s![1, 2, 3]); assert_eq!(vi.shape(), &[1, 1, 1]); assert_eq!(vi, Array3::from_elem((1, 1, 1), arr[(1, 2, 3)])); } // Do it to the ArcArray itself let elem = arr[(1, 2, 3)]; let mut vi = arr; vi.slice_collapse(s![1, 2, 3]); assert_eq!(vi.shape(), &[1, 1, 1]); assert_eq!(vi, Array3::from_elem((1, 1, 1), elem)); } #[test] fn test_multislice() { defmac!(test_multislice mut arr, s1, s2 => { { let copy = arr.clone(); assert_eq!( multislice!(arr, mut s1, mut s2,), (copy.clone().slice_mut(s1), copy.clone().slice_mut(s2)) ); } { let copy = arr.clone(); assert_eq!( multislice!(arr, mut s1, s2,), (copy.clone().slice_mut(s1), copy.clone().slice(s2)) 
); } { let copy = arr.clone(); assert_eq!( multislice!(arr, s1, mut s2), (copy.clone().slice(s1), copy.clone().slice_mut(s2)) ); } { let copy = arr.clone(); assert_eq!( multislice!(arr, s1, s2), (copy.clone().slice(s1), copy.clone().slice(s2)) ); } }); let mut arr = Array1::from_iter(0..48).into_shape((8, 6)).unwrap(); assert_eq!((arr.clone().view(),), multislice!(arr, [.., ..])); test_multislice!(&mut arr, s![0, ..], s![1, ..]); test_multislice!(&mut arr, s![0, ..], s![-1, ..]); test_multislice!(&mut arr, s![0, ..], s![1.., ..]); test_multislice!(&mut arr, s![1, ..], s![..;2, ..]); test_multislice!(&mut arr, s![..2, ..], s![2.., ..]); test_multislice!(&mut arr, s![1..;2, ..], s![..;2, ..]); test_multislice!(&mut arr, s![..;-2, ..], s![..;2, ..]); test_multislice!(&mut arr, s![..;12, ..], s![3..;3, ..]); } #[test] fn test_multislice_intersecting() { assert_panics!({ let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, mut [3, ..], [3, ..]); }); assert_panics!({ let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, mut [3, ..], [3.., ..]); }); assert_panics!({ let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, mut [3, ..], [..;3, ..]); }); assert_panics!({ let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, mut [..;6, ..], [3..;3, ..]); }); assert_panics!({ let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, mut [2, ..], mut [..-1;-2, ..]); }); { let mut arr = Array2::<u8>::zeros((8, 6)); multislice!(arr, [3, ..], [-1..;-2, ..]); } } #[test] fn test_multislice_eval_args_only_once() { let mut arr = Array1::<u8>::zeros(10); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; multislice!(arr, mut &slice(), [3..4], [5..6]); } assert_eq!(eval_count, 1); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; multislice!(arr, [3..4], mut &slice(), [5..6]); } assert_eq!(eval_count, 1); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; 
multislice!(arr, [3..4], [5..6], mut &slice()); } assert_eq!(eval_count, 1); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; multislice!(arr, &slice(), mut [3..4], [5..6]); } assert_eq!(eval_count, 1); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; multislice!(arr, mut [3..4], &slice(), [5..6]); } assert_eq!(eval_count, 1); let mut eval_count = 0; { let mut slice = || { eval_count += 1; s![1..2].clone() }; multislice!(arr, mut [3..4], [5..6], &slice()); } assert_eq!(eval_count, 1); } #[should_panic] #[test] fn index_out_of_bounds() { let mut a = Array::<i32, _>::zeros((3, 4)); a[[3, 2]] = 1; } #[should_panic] #[test] fn slice_oob() { let a = ArcArray::<i32, _>::zeros((3, 4)); let _vi = a.slice(s![..10, ..]); } #[should_panic] #[test] fn slice_axis_oob() { let a = ArcArray::<i32, _>::zeros((3, 4)); let _vi = a.slice_axis(Axis(0), Slice::new(0, Some(10), 1)); } #[should_panic] #[test] fn slice_wrong_dim() { let a = ArcArray::<i32, _>::zeros(vec![3, 4, 5]); let _vi = a.slice(s![.., ..]); } #[test] fn test_index() { let mut A = ArcArray::<usize, _>::zeros((2, 3)); for (i, elt) in A.iter_mut().enumerate() { *elt = i; } for ((i, j), a) in zip(indices((2, 3)), &A) { assert_eq!(*a, A[[i, j]]); } let vi = A.slice(s![1.., ..;2]); let mut it = vi.iter(); for ((i, j), x) in zip(indices((1, 2)), &mut it) { assert_eq!(*x, vi[[i, j]]); } assert!(it.next().is_none()); } #[test] fn test_index_arrays() { let a = Array1::from_iter(0..12); assert_eq!(a[1], a[[1]]); let v = a.view().into_shape((3, 4)).unwrap(); assert_eq!(a[1], v[[0, 1]]); let w = v.into_shape((2, 2, 3)).unwrap(); assert_eq!(a[1], w[[0, 0, 1]]); } #[test] fn test_add() { let mut A = ArcArray::<usize, _>::zeros((2, 2)); for (i, elt) in A.iter_mut().enumerate() { *elt = i; } let B = A.clone(); A = A + &B; assert_eq!(A[[0, 0]], 0); assert_eq!(A[[0, 1]], 2); assert_eq!(A[[1, 0]], 4); assert_eq!(A[[1, 1]], 6); } #[test] fn test_multidim() { let mut mat 
= ArcArray::zeros(2*3*4*5*6).reshape((2,3,4,5,6)); mat[(0,0,0,0,0)] = 22u8; { for (i, elt) in mat.iter_mut().enumerate() { *elt = i as u8; } } assert_eq!(mat.shape(), &[2,3,4,5,6]); } /* array([[[ 7, 6], [ 5, 4], [ 3, 2], [ 1, 0]], [[15, 14], [13, 12], [11, 10], [ 9, 8]]]) */ #[test] fn test_negative_stride_arcarray() { let mut mat = ArcArray::zeros((2, 4, 2)); mat[[0, 0, 0]] = 1.0f32; for (i, elt) in mat.iter_mut().enumerate() { *elt = i as f32; } { let vi = mat.slice(s![.., ..;-1, ..;-1]); assert_eq!(vi.shape(), &[2, 4, 2]); // Test against sequential iterator let seq = [7f32,6., 5.,4.,3.,2.,1.,0.,15.,14.,13., 12.,11., 10., 9., 8.]; for (a, b) in vi.clone().iter().zip(seq.iter()) { assert_eq!(*a, *b); } } { let vi = mat.slice(s![.., ..;-5, ..]); let seq = [6., 7., 14., 15.]; for (a, b) in vi.iter().zip(seq.iter()) { assert_eq!(*a, *b); } } } #[test] fn test_cow() { let mut mat = ArcArray::zeros((2,2)); mat[[0, 0]] = 1; let n = mat.clone(); mat[[0, 1]] = 2; mat[[1, 0]] = 3; mat[[1, 1]] = 4; assert_eq!(mat[[0, 0]], 1); assert_eq!(mat[[0, 1]], 2); assert_eq!(n[[0, 0]], 1); assert_eq!(n[[0, 1]], 0); assert_eq!(n.get((0, 1)), Some(&0)); let mut rev = mat.reshape(4); rev.slice_collapse(s![..;-1]); assert_eq!(rev[0], 4); assert_eq!(rev[1], 3); assert_eq!(rev[2], 2); assert_eq!(rev[3], 1); let before = rev.clone(); // mutation rev[0] = 5; assert_eq!(rev[0], 5); assert_eq!(rev[1], 3); assert_eq!(rev[2], 2); assert_eq!(rev[3], 1); assert_eq!(before[0], 4); assert_eq!(before[1], 3); assert_eq!(before[2], 2); assert_eq!(before[3], 1); } #[test] fn test_cow_shrink() { // A test for clone-on-write in the case that // mutation shrinks the array and gives it different strides // let mut mat = ArcArray::zeros((2, 3)); //mat.slice_collapse(s![.., ..;2]); mat[[0, 0]] = 1; let n = mat.clone(); mat[[0, 1]] = 2; mat[[0, 2]] = 3; mat[[1, 0]] = 4; mat[[1, 1]] = 5; mat[[1, 2]] = 6; assert_eq!(mat[[0, 0]], 1); assert_eq!(mat[[0, 1]], 2); assert_eq!(n[[0, 0]], 1); assert_eq!(n[[0, 1]], 0); 
assert_eq!(n.get((0, 1)), Some(&0)); // small has non-C strides this way let mut small = mat.reshape(6); small.slice_collapse(s![4..;-1]); assert_eq!(small[0], 6); assert_eq!(small[1], 5); let before = small.clone(); // mutation // small gets back C strides in CoW. small[1] = 9; assert_eq!(small[0], 6); assert_eq!(small[1], 9); assert_eq!(before[0], 6); assert_eq!(before[1], 5); } #[test] fn test_sub() { let mat = ArcArray::linspace(0., 15., 16).reshape((2, 4, 2)); let s1 = mat.index_axis(Axis(0), 0); let s2 = mat.index_axis(Axis(0), 1); assert_eq!(s1.shape(), &[4, 2]); assert_eq!(s2.shape(), &[4, 2]); let n = ArcArray::linspace(8., 15., 8).reshape((4,2)); assert_eq!(n, s2); let m = ArcArray::from_vec(vec![2., 3., 10., 11.]).reshape((2, 2)); assert_eq!(m, mat.index_axis(Axis(1), 1)); } #[should_panic] #[test] fn test_sub_oob_1() { let mat = ArcArray::linspace(0., 15., 16).reshape((2, 4, 2)); mat.index_axis(Axis(0), 2); } #[test] fn test_select(){ // test for 2-d array let x = arr2(&[[0., 1.], [1.,0.],[1.,0.],[1.,0.],[1.,0.],[0., 1.],[0., 1.]]); let r = x.select(Axis(0),&[1,3,5]); let c = x.select(Axis(1),&[1]); let r_target = arr2(&[[1.,0.],[1.,0.],[0., 1.]]); let c_target = arr2(&[[1.,0.,0.,0.,0., 1., 1.]]); assert!(r.all_close(&r_target,1e-8)); assert!(c.all_close(&c_target.t(),1e-8)); // test for 3-d array let y = arr3(&[[[1., 2., 3.], [1.5, 1.5, 3.]], [[1., 2., 8.], [1., 2.5, 3.]]]); let r = y.select(Axis(1),&[1]); let c = y.select(Axis(2),&[1]); let r_target = arr3(&[[[1.5, 1.5, 3.]], [[1., 2.5, 3.]]]); let c_target = arr3(&[[[2.],[1.5]],[[2.],[2.5]]]); assert!(r.all_close(&r_target,1e-8)); assert!(c.all_close(&c_target,1e-8)); } #[test] fn diag() { let d = arr2(&[[1., 2., 3.0f32]]).into_diag(); assert_eq!(d.dim(), 1); let a = arr2(&[[1., 2., 3.0f32], [0., 0., 0.]]); let d = a.view().into_diag(); assert_eq!(d.dim(), 2); let d = arr2::<f32, _>(&[[]]).into_diag(); assert_eq!(d.dim(), 0); let d = ArcArray::<f32, _>::zeros(()).into_diag(); assert_eq!(d.dim(), 1); 
} /// Check that the merged shape is correct. /// /// Note that this does not check the strides in the "merged" case! #[test] fn merge_axes() { macro_rules! assert_merged { ($arr:expr, $slice:expr, $take:expr, $into:expr) => { let mut v = $arr.slice($slice); let merged_len = v.len_of(Axis($take)) * v.len_of(Axis($into)); assert!(v.merge_axes(Axis($take), Axis($into))); assert_eq!(v.len_of(Axis($take)), if merged_len == 0 { 0 } else { 1 }); assert_eq!(v.len_of(Axis($into)), merged_len); } } macro_rules! assert_not_merged { ($arr:expr, $slice:expr, $take:expr, $into:expr) => { let mut v = $arr.slice($slice); let old_dim = v.raw_dim(); let old_strides = v.strides().to_owned(); assert!(!v.merge_axes(Axis($take), Axis($into))); assert_eq!(v.raw_dim(), old_dim); assert_eq!(v.strides(), &old_strides[..]); } } let a = Array4::<u8>::zeros((3, 4, 5, 4)); assert_not_merged!(a, s![.., .., .., ..], 0, 0); assert_merged!(a, s![.., .., .., ..], 0, 1); assert_not_merged!(a, s![.., .., .., ..], 0, 2); assert_not_merged!(a, s![.., .., .., ..], 0, 3); assert_not_merged!(a, s![.., .., .., ..], 1, 0); assert_not_merged!(a, s![.., .., .., ..], 1, 1); assert_merged!(a, s![.., .., .., ..], 1, 2); assert_not_merged!(a, s![.., .., .., ..], 1, 3); assert_not_merged!(a, s![.., .., .., ..], 2, 1); assert_not_merged!(a, s![.., .., .., ..], 2, 2); assert_merged!(a, s![.., .., .., ..], 2, 3); assert_not_merged!(a, s![.., .., .., ..], 3, 0); assert_not_merged!(a, s![.., .., .., ..], 3, 1); assert_not_merged!(a, s![.., .., .., ..], 3, 2); assert_not_merged!(a, s![.., .., .., ..], 3, 3); assert_merged!(a, s![.., .., .., ..;2], 0, 1); assert_not_merged!(a, s![.., .., .., ..;2], 1, 0); assert_merged!(a, s![.., .., .., ..;2], 1, 2); assert_not_merged!(a, s![.., .., .., ..;2], 2, 1); assert_merged!(a, s![.., .., .., ..;2], 2, 3); assert_not_merged!(a, s![.., .., .., ..;2], 3, 2); assert_merged!(a, s![.., .., .., ..3], 0, 1); assert_not_merged!(a, s![.., .., .., ..3], 1, 0); assert_merged!(a, s![.., .., 
.., ..3], 1, 2); assert_not_merged!(a, s![.., .., .., ..3], 2, 1); assert_not_merged!(a, s![.., .., .., ..3], 2, 3); assert_merged!(a, s![.., .., ..;2, ..], 0, 1); assert_not_merged!(a, s![.., .., ..;2, ..], 1, 0); assert_not_merged!(a, s![.., .., ..;2, ..], 1, 2); assert_not_merged!(a, s![.., .., ..;2, ..], 2, 3); assert_merged!(a, s![.., ..;2, .., ..], 0, 1); assert_not_merged!(a, s![.., ..;2, .., ..], 1, 0); assert_not_merged!(a, s![.., ..;2, .., ..], 1, 2); assert_merged!(a, s![.., ..;2, .., ..], 2, 3); assert_not_merged!(a, s![.., ..;2, .., ..], 3, 2); let a = Array4::<u8>::zeros((3, 1, 5, 1).f()); assert_merged!(a, s![.., .., ..;2, ..], 0, 1); assert_merged!(a, s![.., .., ..;2, ..], 0, 3); assert_merged!(a, s![.., .., ..;2, ..], 1, 0); assert_merged!(a, s![.., .., ..;2, ..], 1, 1); assert_merged!(a, s![.., .., ..;2, ..], 1, 2); assert_merged!(a, s![.., .., ..;2, ..], 1, 3); assert_merged!(a, s![.., .., ..;2, ..], 2, 1); assert_merged!(a, s![.., .., ..;2, ..], 2, 3); assert_merged!(a, s![.., .., ..;2, ..], 3, 0); assert_merged!(a, s![.., .., ..;2, ..], 3, 1); assert_merged!(a, s![.., .., ..;2, ..], 3, 2); assert_merged!(a, s![.., .., ..;2, ..], 3, 3); let a = Array4::<u8>::zeros((3, 0, 5, 1)); assert_merged!(a, s![.., .., ..;2, ..], 0, 1); assert_merged!(a, s![.., .., ..;2, ..], 1, 1); assert_merged!(a, s![.., .., ..;2, ..], 2, 1); assert_merged!(a, s![.., .., ..;2, ..], 3, 1); assert_merged!(a, s![.., .., ..;2, ..], 1, 0); assert_merged!(a, s![.., .., ..;2, ..], 1, 2); assert_merged!(a, s![.., .., ..;2, ..], 1, 3); } #[test] fn swapaxes() { let mut a = arr2(&[[1., 2.], [3., 4.0f32]]); let b = arr2(&[[1., 3.], [2., 4.0f32]]); assert!(a != b); a.swap_axes(0, 1); assert_eq!(a, b); a.swap_axes(1, 1); assert_eq!(a, b); assert_eq!(a.as_slice_memory_order(), Some(&[1., 2., 3., 4.][..])); assert_eq!(b.as_slice_memory_order(), Some(&[1., 3., 2., 4.][..])); } #[test] fn permuted_axes() { let a = array![1].index_axis_move(Axis(0), 0); let permuted = 
a.view().permuted_axes([]); assert_eq!(a, permuted); let a = array![1]; let permuted = a.view().permuted_axes([0]); assert_eq!(a, permuted); let a = Array::from_iter(0..24).into_shape((2, 3, 4)).unwrap(); let permuted = a.view().permuted_axes([2, 1, 0]); for ((i0, i1, i2), elem) in a.indexed_iter() { assert_eq!(*elem, permuted[(i2, i1, i0)]); } let permuted = a.view().into_dyn().permuted_axes(&[0, 2, 1][..]); for ((i0, i1, i2), elem) in a.indexed_iter() { assert_eq!(*elem, permuted[&[i0, i2, i1][..]]); } let a = Array::from_iter(0..120).into_shape((2, 3, 4, 5)).unwrap(); let permuted = a.view().permuted_axes([1, 0, 3, 2]); for ((i0, i1, i2, i3), elem) in a.indexed_iter() { assert_eq!(*elem, permuted[(i1, i0, i3, i2)]); } let permuted = a.view().into_dyn().permuted_axes(&[1, 2, 3, 0][..]); for ((i0, i1, i2, i3), elem) in a.indexed_iter() { assert_eq!(*elem, permuted[&[i1, i2, i3, i0][..]]); } } #[should_panic] #[test] fn permuted_axes_repeated_axis() { let a = Array::from_iter(0..24).into_shape((2, 3, 4)).unwrap(); a.view().permuted_axes([1, 0, 1]); } #[should_panic] #[test] fn permuted_axes_missing_axis() { let a = Array::from_iter(0..24).into_shape((2, 3, 4)).unwrap().into_dyn(); a.view().permuted_axes(&[2, 0][..]); } #[should_panic] #[test] fn permuted_axes_oob() { let a = Array::from_iter(0..24).into_shape((2, 3, 4)).unwrap(); a.view().permuted_axes([1, 0, 3]); } #[test] fn standard_layout() { let mut a = arr2(&[[1., 2.], [3., 4.0]]); assert!(a.is_standard_layout()); a.swap_axes(0, 1); assert!(!a.is_standard_layout()); a.swap_axes(0, 1); assert!(a.is_standard_layout()); let x1 = a.index_axis(Axis(0), 0); assert!(x1.is_standard_layout()); let x2 = a.index_axis(Axis(1), 0); assert!(!x2.is_standard_layout()); let x3 = ArrayView1::from_shape(1.strides(2), &[1]).unwrap(); assert!(x3.is_standard_layout()); let x4 = ArrayView2::from_shape((0, 2).strides((0, 1)), &[1, 2]).unwrap(); assert!(x4.is_standard_layout()); } #[test] fn assign() { let mut a = arr2(&[[1., 2.], 
[3., 4.]]); let b = arr2(&[[1., 3.], [2., 4.]]); a.assign(&b); assert_eq!(a, b); /* Test broadcasting */ a.assign(&ArcArray::zeros(1)); assert_eq!(a, ArcArray::zeros((2, 2))); /* Test other type */ a.assign(&Array::from_elem((2, 2), 3.)); assert_eq!(a, ArcArray::from_elem((2, 2), 3.)); /* Test mut view */ let mut a = arr2(&[[1, 2], [3, 4]]); { let mut v = a.view_mut(); v.slice_collapse(s![..1, ..]); v.fill(0); } assert_eq!(a, arr2(&[[0, 0], [3, 4]])); } #[test] fn iter_size_hint() { let mut a = arr2(&[[1., 2.], [3., 4.]]); { let mut it = a.iter(); assert_eq!(it.size_hint(), (4, Some(4))); it.next(); assert_eq!(it.size_hint().0, 3); it.next(); assert_eq!(it.size_hint().0, 2); it.next(); assert_eq!(it.size_hint().0, 1); it.next(); assert_eq!(it.size_hint().0, 0); assert!(it.next().is_none()); assert_eq!(it.size_hint().0, 0); } a.swap_axes(0, 1); { let mut it = a.iter(); assert_eq!(it.size_hint(), (4, Some(4))); it.next(); assert_eq!(it.size_hint().0, 3); it.next(); assert_eq!(it.size_hint().0, 2); it.next(); assert_eq!(it.size_hint().0, 1); it.next(); assert_eq!(it.size_hint().0, 0); assert!(it.next().is_none()); assert_eq!(it.size_hint().0, 0); } } #[test] fn zero_axes() { let mut a = arr1::<f32>(&[]); for _ in a.iter() { assert!(false); } a.map(|_| assert!(false)); a.map_inplace(|_| assert!(false)); a.visit(|_| assert!(false)); println!("{:?}", a); let b = arr2::<f32, _>(&[[], [], [], []]); println!("{:?}\n{:?}", b.shape(), b); // we can even get a subarray of b let bsub = b.index_axis(Axis(0), 2); assert_eq!(bsub.dim(), 0); } #[test] fn equality() { let a = arr2(&[[1., 2.], [3., 4.]]); let mut b = arr2(&[[1., 2.], [2., 4.]]); assert!(a != b); b[(1, 0)] = 3.; assert!(a == b); // make sure we can compare different shapes without failure. let c = arr2(&[[1., 2.]]); assert!(a != c); } #[test] fn map1() { let a = arr2(&[[1., 2.], [3., 4.]]); let b = a.map(|&x| (x / 3.) as isize); assert_eq!(b, arr2(&[[0, 0], [1, 1]])); // test map to reference with array's lifetime. 
let c = a.map(|x| x); assert_eq!(a[(0, 0)], *c[(0, 0)]); } #[test] fn as_slice_memory_order() { // test that mutation breaks sharing let a = rcarr2(&[[1., 2.], [3., 4.0f32]]); let mut b = a.clone(); for elt in b.as_slice_memory_order_mut().unwrap() { *elt = 0.; } assert!(a != b, "{:?} != {:?}", a, b); } #[test] fn array0_into_scalar() { // With this kind of setup, the `Array`'s pointer is not the same as the // underlying `Vec`'s pointer. let a: Array0<i32> = array![4, 5, 6, 7].index_axis_move(Axis(0), 2); assert_ne!(a.as_ptr(), a.into_raw_vec().as_ptr()); // `.into_scalar()` should still work correctly. let a: Array0<i32> = array![4, 5, 6, 7].index_axis_move(Axis(0), 2); assert_eq!(a.into_scalar(), 6); // It should work for zero-size elements too. let a: Array0<()> = array![(), (), (), ()].index_axis_move(Axis(0), 2); assert_eq!(a.into_scalar(), ()); } #[test] fn owned_array1() { let mut a = Array::from_vec(vec![1, 2, 3, 4]); for elt in a.iter_mut() { *elt = 2; } for elt in a.iter() { assert_eq!(*elt, 2); } assert_eq!(a.shape(), &[4]); let mut a = Array::zeros((2, 2)); let mut b = ArcArray::zeros((2, 2)); a[(1, 1)] = 3; b[(1, 1)] = 3; assert_eq!(a, b); let c = a.clone(); let d1 = &a + &b; let d2 = a + b; assert!(c != d1); assert_eq!(d1, d2); } #[test] fn owned_array_with_stride() { let v: Vec<_> = (0..12).collect(); let dim = (2, 3, 2); let strides = (1, 4, 2); let a = Array::from_shape_vec(dim.strides(strides), v).unwrap(); assert_eq!(a.strides(), &[1, 4, 2]); } #[test] fn owned_array_discontiguous() { use ::std::iter::repeat; let v: Vec<_> = (0..12).flat_map(|x| repeat(x).take(2)).collect(); let dim = (3, 2, 2); let strides = (8, 4, 2); let a = Array::from_shape_vec(dim.strides(strides), v).unwrap(); assert_eq!(a.strides(), &[8, 4, 2]); println!("{:?}", a.iter().cloned().collect::<Vec<_>>()); itertools::assert_equal(a.iter().cloned(), 0..12); } #[test] fn owned_array_discontiguous_drop() { use ::std::rc::Rc; use ::std::cell::RefCell; use 
::std::collections::BTreeSet; struct InsertOnDrop<T: Ord>(Rc<RefCell<BTreeSet<T>>>, Option<T>); impl<T: Ord> Drop for InsertOnDrop<T> { fn drop(&mut self) { let InsertOnDrop(ref set, ref mut value) = *self; set.borrow_mut().insert(value.take().expect("double drop!")); } } let set = Rc::new(RefCell::new(BTreeSet::new())); { let v: Vec<_> = (0..12).map(|x| InsertOnDrop(set.clone(), Some(x))).collect(); let mut a = Array::from_shape_vec((2, 6), v).unwrap(); // discontiguous and non-zero offset a.slice_collapse(s![.., 1..]); } // each item was dropped exactly once itertools::assert_equal(set.borrow().iter().cloned(), 0..12); } macro_rules! assert_matches { ($value:expr, $pat:pat) => { match $value { $pat => {} ref err => panic!("assertion failed: `{}` matches `{}` found: {:?}", stringify!($value), stringify!($pat), err), } } } #[test] fn from_vec_dim_stride_empty_1d() { let empty: [f32; 0] = []; assert_matches!(Array::from_shape_vec(0.strides(1), empty.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_0d() { let empty: [f32; 0] = []; let one = [1.]; let two = [1., 2.]; // too few elements assert_matches!(Array::from_shape_vec(().strides(()), empty.to_vec()), Err(_)); // exact number of elements assert_matches!(Array::from_shape_vec(().strides(()), one.to_vec()), Ok(_)); // too many are ok assert_matches!(Array::from_shape_vec(().strides(()), two.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_1() { let two = [1., 2.]; let d = Ix2(2, 1); let s = d.default_strides(); assert_matches!(Array::from_shape_vec(d.strides(s), two.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_2() { let two = [1., 2.]; let d = Ix2(1, 2); let s = d.default_strides(); assert_matches!(Array::from_shape_vec(d.strides(s), two.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_3() { let a = arr3(&[[[1]], [[2]], [[3]]]); let d = a.raw_dim(); let s = d.default_strides(); assert_matches!(Array::from_shape_vec(d.strides(s), a.as_slice().unwrap().to_vec()), Ok(_)); } #[test] fn 
from_vec_dim_stride_2d_4() { let a = arr3(&[[[1]], [[2]], [[3]]]); let d = a.raw_dim(); let s = d.fortran_strides(); assert_matches!(Array::from_shape_vec(d.strides(s), a.as_slice().unwrap().to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_5() { let a = arr3(&[[[1, 2, 3]]]); let d = a.raw_dim(); let s = d.fortran_strides(); assert_matches!(Array::from_shape_vec(d.strides(s), a.as_slice().unwrap().to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_6() { let a = [1., 2., 3., 4., 5., 6.]; let d = (2, 1, 1); let s = (2, 2, 1); assert_matches!(Array::from_shape_vec(d.strides(s), a.to_vec()), Ok(_)); let d = (1, 2, 1); let s = (2, 2, 1); assert_matches!(Array::from_shape_vec(d.strides(s), a.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_7() { // empty arrays can have 0 strides let a: [f32; 0] = []; // [[]] shape=[4, 0], strides=[0, 1] let d = (4, 0); let s = (0, 1); assert_matches!(Array::from_shape_vec(d.strides(s), a.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_8() { // strides of length 1 axes can be zero let a = [1.]; let d = (1, 1); let s = (0, 1); assert_matches!(Array::from_shape_vec(d.strides(s), a.to_vec()), Ok(_)); } #[test] fn from_vec_dim_stride_2d_rejects() { let two = [1., 2.]; let d = (2, 2); let s = (1, 0); assert_matches!(Array::from_shape_vec(d.strides(s), two.to_vec()), Err(_)); let d = (2, 2); let s = (0, 1); assert_matches!(Array::from_shape_vec(d.strides(s), two.to_vec()), Err(_)); } #[test] fn views() { let a = ArcArray::from_vec(vec![1, 2, 3, 4]).reshape((2, 2)); let b = a.view(); assert_eq!(a, b); assert_eq!(a.shape(), b.shape()); assert_eq!(a.clone() + a.clone(), &b + &b); assert_eq!(a.clone() + b, &b + &b); a.clone()[(0, 0)] = 99; assert_eq!(b[(0, 0)], 1); assert_eq!(a.view().into_iter().cloned().collect::<Vec<_>>(), vec![1, 2, 3, 4]); } #[test] fn view_mut() { let mut a = ArcArray::from_vec(vec![1, 2, 3, 4]).reshape((2, 2)); for elt in &mut a.view_mut() { *elt = 0; } assert_eq!(a, Array::zeros((2, 2))); { let mut b 
= a.view_mut(); b[(0, 0)] = 7; } assert_eq!(a[(0, 0)], 7); for elt in a.view_mut() { *elt = 2; } assert_eq!(a, ArcArray::from_elem((2, 2), 2)); } #[test] fn slice_mut() { let mut a = ArcArray::from_vec(vec![1, 2, 3, 4]).reshape((2, 2)); for elt in a.slice_mut(s![.., ..]) { *elt = 0; } assert_eq!(a, aview2(&[[0, 0], [0, 0]])); let mut b = arr2(&[[1, 2, 3], [4, 5, 6]]); let c = b.clone(); // make sure we can mutate b even if it has to be unshared first for elt in b.slice_mut(s![.., ..1]) { *elt = 0; } assert_eq!(b, aview2(&[[0, 2, 3], [0, 5, 6]])); assert!(c != b); for elt in b.slice_mut(s![.., ..;2]) { *elt = 99; } assert_eq!(b, aview2(&[[99, 2, 99], [99, 5, 99]])); } #[test] fn assign_ops() { let mut a = arr2(&[[1., 2.], [3., 4.]]); let b = arr2(&[[1., 3.], [2., 4.]]); (*&mut a.view_mut()) += &b; assert_eq!(a, arr2(&[[2., 5.], [5., 8.]])); a -= &b; a -= &b; assert_eq!(a, arr2(&[[0., -1.,], [1., 0.]])); a += 1.; assert_eq!(a, arr2(&[[1., 0.,], [2., 1.]])); a *= 10.; a /= 5.; assert_eq!(a, arr2(&[[2., 0.,], [4., 2.]])); } #[test] fn aview() { let a = arr2(&[[1., 2., 3.], [4., 5., 6.]]); let data = [[1., 2., 3.], [4., 5., 6.]]; let b = aview2(&data); assert_eq!(a, b); assert_eq!(b.shape(), &[2, 3]); } #[test] fn aview_mut() { let mut data = [0; 16]; { let mut a = aview_mut1(&mut data).into_shape((4, 4)).unwrap(); { let mut slc = a.slice_mut(s![..2, ..;2]); slc += 1; } } assert_eq!(data, [1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0]); } #[test] fn transpose_view() { let a = arr2(&[[1, 2], [3, 4]]); let at = a.view().reversed_axes(); assert_eq!(at, arr2(&[[1, 3], [2, 4]])); let a = arr2(&[[1, 2, 3], [4, 5, 6]]); let at = a.view().reversed_axes(); assert_eq!(at, arr2(&[[1, 4], [2, 5], [3, 6]])); } #[test] fn transpose_view_mut() { let mut a = arr2(&[[1, 2], [3, 4]]); let mut at = a.view_mut().reversed_axes(); at[[0, 1]] = 5; assert_eq!(at, arr2(&[[1, 5], [2, 4]])); let mut a = arr2(&[[1, 2, 3], [4, 5, 6]]); let mut at = a.view_mut().reversed_axes(); at[[2, 1]] = 7; 
assert_eq!(at, arr2(&[[1, 4], [2, 5], [3, 7]])); } #[test] fn reshape() { let data = [1, 2, 3, 4, 5, 6, 7, 8]; let v = aview1(&data); let u = v.into_shape((3, 3)); assert!(u.is_err()); let u = v.into_shape((2, 2, 2)); assert!(u.is_ok()); let u = u.unwrap(); assert_eq!(u.shape(), &[2, 2, 2]); let s = u.into_shape((4, 2)).unwrap(); assert_eq!(s.shape(), &[4, 2]); assert_eq!(s, aview2(&[[1, 2], [3, 4], [5, 6], [7, 8]])); } #[test] #[should_panic(expected = "IncompatibleShape")] fn reshape_error1() { let data = [1, 2, 3, 4, 5, 6, 7, 8]; let v = aview1(&data); let _u = v.into_shape((2, 5)).unwrap(); } #[test] #[should_panic(expected = "IncompatibleLayout")] fn reshape_error2() { let data = [1, 2, 3, 4, 5, 6, 7, 8]; let v = aview1(&data); let mut u = v.into_shape((2, 2, 2)).unwrap(); u.swap_axes(0, 1); let _s = u.into_shape((2, 4)).unwrap(); } #[test] fn reshape_f() { let mut u = Array::zeros((3, 4).f()); for (i, elt) in enumerate(u.as_slice_memory_order_mut().unwrap()) { *elt = i as i32; } let v = u.view(); println!("{:?}", v); // noop ok let v2 = v.into_shape((3, 4)); assert!(v2.is_ok()); assert_eq!(v, v2.unwrap()); let u = v.into_shape((3, 2, 2)); assert!(u.is_ok()); let u = u.unwrap(); println!("{:?}", u); assert_eq!(u.shape(), &[3, 2, 2]); let s = u.into_shape((4, 3)).unwrap(); println!("{:?}", s); assert_eq!(s.shape(), &[4, 3]); assert_eq!(s, aview2(&[[0, 4, 8], [1, 5, 9], [2, 6,10], [3, 7,11]])); } #[test] fn insert_axis() { defmac!(test_insert orig, index, new => { let res = orig.insert_axis(Axis(index)); assert_eq!(res, new); assert!(res.is_standard_layout()); }); let v = 1; test_insert!(aview0(&v), 0, arr1(&[1])); assert!(::std::panic::catch_unwind(|| aview0(&v).insert_axis(Axis(1))).is_err()); test_insert!(arr1(&[1, 2, 3]), 0, arr2(&[[1, 2, 3]])); test_insert!(arr1(&[1, 2, 3]), 1, arr2(&[[1], [2], [3]])); assert!(::std::panic::catch_unwind(|| arr1(&[1, 2, 3]).insert_axis(Axis(2))).is_err()); test_insert!(arr2(&[[1, 2, 3], [4, 5, 6]]), 0, arr3(&[[[1, 2, 3], [4, 
5, 6]]])); test_insert!(arr2(&[[1, 2, 3], [4, 5, 6]]), 1, arr3(&[[[1, 2, 3]], [[4, 5, 6]]])); test_insert!(arr2(&[[1, 2, 3], [4, 5, 6]]), 2, arr3(&[[[1], [2], [3]], [[4], [5], [6]]])); assert!(::std::panic::catch_unwind( || arr2(&[[1, 2, 3], [4, 5, 6]]).insert_axis(Axis(3))).is_err()); test_insert!(Array3::<u8>::zeros((3, 4, 5)), 0, Array4::<u8>::zeros((1, 3, 4, 5))); test_insert!(Array3::<u8>::zeros((3, 4, 5)), 1, Array4::<u8>::zeros((3, 1, 4, 5))); test_insert!(Array3::<u8>::zeros((3, 4, 5)), 3, Array4::<u8>::zeros((3, 4, 5, 1))); assert!(::std::panic::catch_unwind( || Array3::<u8>::zeros((3, 4, 5)).insert_axis(Axis(4))).is_err()); test_insert!(Array6::<u8>::zeros((2, 3, 4, 3, 2, 3)), 0, ArrayD::<u8>::zeros(vec![1, 2, 3, 4, 3, 2, 3])); test_insert!(Array6::<u8>::zeros((2, 3, 4, 3, 2, 3)), 3, ArrayD::<u8>::zeros(vec![2, 3, 4, 1, 3, 2, 3])); test_insert!(Array6::<u8>::zeros((2, 3, 4, 3, 2, 3)), 6, ArrayD::<u8>::zeros(vec![2, 3, 4, 3, 2, 3, 1])); assert!(::std::panic::catch_unwind( || Array6::<u8>::zeros((2, 3, 4, 3, 2, 3)).insert_axis(Axis(7))).is_err()); test_insert!(ArrayD::<u8>::zeros(vec![3, 4, 5]), 0, ArrayD::<u8>::zeros(vec![1, 3, 4, 5])); test_insert!(ArrayD::<u8>::zeros(vec![3, 4, 5]), 1, ArrayD::<u8>::zeros(vec![3, 1, 4, 5])); test_insert!(ArrayD::<u8>::zeros(vec![3, 4, 5]), 3, ArrayD::<u8>::zeros(vec![3, 4, 5, 1])); assert!(::std::panic::catch_unwind( || ArrayD::<u8>::zeros(vec![3, 4, 5]).insert_axis(Axis(4))).is_err()); } #[test] fn insert_axis_f() { defmac!(test_insert_f orig, index, new => { let res = orig.insert_axis(Axis(index)); assert_eq!(res, new); assert!(res.t().is_standard_layout()); }); test_insert_f!(Array0::from_shape_vec(().f(), vec![1]).unwrap(), 0, arr1(&[1])); assert!(::std::panic::catch_unwind( || Array0::from_shape_vec(().f(), vec![1]).unwrap().insert_axis(Axis(1))).is_err()); test_insert_f!(Array1::<u8>::zeros((3).f()), 0, Array2::<u8>::zeros((1, 3))); test_insert_f!(Array1::<u8>::zeros((3).f()), 1, Array2::<u8>::zeros((3, 1))); 
assert!(::std::panic::catch_unwind( || Array1::<u8>::zeros((3).f()).insert_axis(Axis(2))).is_err()); test_insert_f!(Array3::<u8>::zeros((3, 4, 5).f()), 1, Array4::<u8>::zeros((3, 1, 4, 5))); assert!(::std::panic::catch_unwind( || Array3::<u8>::zeros((3, 4, 5).f()).insert_axis(Axis(4))).is_err()); test_insert_f!(ArrayD::<u8>::zeros(vec![3, 4, 5].f()), 1, ArrayD::<u8>::zeros(vec![3, 1, 4, 5])); assert!(::std::panic::catch_unwind( || ArrayD::<u8>::zeros(vec![3, 4, 5].f()).insert_axis(Axis(4))).is_err()); } #[test] fn insert_axis_view() { let a = array![[[1, 2], [3, 4]], [[5, 6], [7, 8]], [[9, 10], [11, 12]]]; assert_eq!(a.index_axis(Axis(1), 0).insert_axis(Axis(0)), array![[[1, 2], [5, 6], [9, 10]]]); assert_eq!(a.index_axis(Axis(1), 0).insert_axis(Axis(1)), array![[[1, 2]], [[5, 6]], [[9, 10]]]); assert_eq!(a.index_axis(Axis(1), 0).insert_axis(Axis(2)), array![[[1], [2]], [[5], [6]], [[9], [10]]]); } #[test] fn arithmetic_broadcast() { let mut a = arr2(&[[1., 2.], [3., 4.]]); let b = a.clone() * aview0(&1.); assert_eq!(a, b); a.swap_axes(0, 1); let b = a.clone() / aview0(&1.); assert_eq!(a, b); } #[test] fn char_array() { // test compilation & basics of non-numerical array let cc = ArcArray::from_iter("alphabet".chars()).reshape((4, 2)); assert!(cc.index_axis(Axis(1), 0) == ArcArray::from_iter("apae".chars())); } #[test] fn scalar_ops() { let a = Array::<i32, _>::zeros((5, 5)); let b = &a + 1; let c = (&a + &a + 2) - 3; println!("{:?}", b); println!("{:?}", c); let a = Array::<f32, _>::zeros((2, 2)); let b = (1. + a) * 3.; assert_eq!(b, arr2(&[[3., 3.], [3., 3.]])); let a = arr1(&[false, true, true]); let b = &a ^ true; let c = true ^ &a; assert_eq!(b, c); assert_eq!(true & &a, a); assert_eq!(b, arr1(&[true, false, false])); assert_eq!(true ^ &a, !a); let zero = Array::<f32, _>::zeros((2, 2)); let one = &zero + 1.; assert_eq!(0. 
* &one, zero); assert_eq!(&one * 0., zero); assert_eq!((&one + &one).sum(), 8.); assert_eq!(&one / 2., 0.5 * &one); assert_eq!(&one % 1., zero); let zero = Array::<i32, _>::zeros((2, 2)); let one = &zero + 1; assert_eq!(one.clone() << 3, 8 * &one); assert_eq!(3 << one.clone() , 6 * &one); assert_eq!(&one << 3, 8 * &one); assert_eq!(3 << &one , 6 * &one); } #[test] fn split_at() { let mut a = arr2(&[[1., 2.], [3., 4.]]); { let (c0, c1) = a.view().split_at(Axis(1), 1); assert_eq!(c0, arr2(&[[1.], [3.]])); assert_eq!(c1, arr2(&[[2.], [4.]])); } { let (mut r0, mut r1) = a.view_mut().split_at(Axis(0), 1); r0[[0, 1]] = 5.; r1[[0, 0]] = 8.; } assert_eq!(a, arr2(&[[1., 5.], [8., 4.]])); let b = ArcArray::linspace(0., 59., 60).reshape((3, 4, 5)); let (left, right) = b.view().split_at(Axis(2), 2); assert_eq!(left.shape(), [3, 4, 2]); assert_eq!(right.shape(), [3, 4, 3]); assert_eq!(left, arr3(&[[[0., 1.], [5., 6.], [10., 11.], [15., 16.]], [[20., 21.], [25., 26.], [30., 31.], [35., 36.]], [[40., 41.], [45., 46.], [50., 51.], [55., 56.]]])); // we allow for an empty right view when index == dim[axis] let (_, right) = b.view().split_at(Axis(1), 4); assert_eq!(right.shape(), [3, 0, 5]); } #[test] #[should_panic] fn deny_split_at_axis_out_of_bounds() { let a = arr2(&[[1., 2.], [3., 4.]]); a.view().split_at(Axis(2), 0); } #[test] #[should_panic] fn deny_split_at_index_out_of_bounds() { let a = arr2(&[[1., 2.], [3., 4.]]); a.view().split_at(Axis(1), 3); } #[test] fn test_range() { let a = Array::range(0., 5., 1.); assert_eq!(a.len(), 5); assert_eq!(a[0], 0.); assert_eq!(a[4], 4.); let b = Array::range(0., 2.2, 1.); assert_eq!(b.len(), 3); assert_eq!(b[0], 0.); assert_eq!(b[2], 2.); let c = Array::range(0., 5., 2.); assert_eq!(c.len(), 3); assert_eq!(c[0], 0.); assert_eq!(c[1], 2.); assert_eq!(c[2], 4.); let d = Array::range(1.0, 2.2, 0.1); assert_eq!(d.len(), 13); assert_eq!(d[0], 1.); assert_eq!(d[10], 2.); assert_eq!(d[12], 2.2); let e = Array::range(1., 1., 1.); 
assert_eq!(e.len(), 0); assert!(e.is_empty()); } #[test] fn test_f_order() { // Test that arrays are logically equal in every way, // even if the underlying memory order is different let c = arr2(&[[1, 2, 3], [4, 5, 6]]); let mut f = Array::zeros(c.dim().f()); f.assign(&c); assert_eq!(f, c); assert_eq!(f.shape(), c.shape()); assert_eq!(c.strides(), &[3, 1]); assert_eq!(f.strides(), &[1, 2]); itertools::assert_equal(f.iter(), c.iter()); itertools::assert_equal(f.genrows(), c.genrows()); itertools::assert_equal(f.outer_iter(), c.outer_iter()); itertools::assert_equal(f.axis_iter(Axis(0)), c.axis_iter(Axis(0))); itertools::assert_equal(f.axis_iter(Axis(1)), c.axis_iter(Axis(1))); let dupc = &c + &c; let dupf = &f + &f; assert_eq!(dupc, dupf); } #[test] fn to_owned_memory_order() { // check that .to_owned() makes f-contiguous arrays out of f-contiguous // input. let c = arr2(&[[1, 2, 3], [4, 5, 6]]); let mut f = c.view(); f.swap_axes(0, 1); let fo = f.to_owned(); assert_eq!(f, fo); assert_eq!(f.strides(), fo.strides()); } #[test] fn to_owned_neg_stride() { let mut c = arr2(&[[1, 2, 3], [4, 5, 6]]); c.slice_collapse(s![.., ..;-1]); let co = c.to_owned(); assert_eq!(c, co); } #[test] fn discontiguous_owned_to_owned() { let mut c = arr2(&[[1, 2, 3], [4, 5, 6]]); c.slice_collapse(s![.., ..;2]); let co = c.to_owned(); assert_eq!(c.strides(), &[3, 2]); assert_eq!(co.strides(), &[2, 1]); assert_eq!(c, co); } #[test] fn map_memory_order() { let a = arr3(&[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [0, -1, -2]]]); let mut v = a.view(); v.swap_axes(0, 1); let amap = v.map(|x| *x >= 3); assert_eq!(amap.dim(), v.dim()); assert_eq!(amap.strides(), v.strides()); } #[test] fn test_contiguous() { let c = arr3(&[[[1, 2, 3], [4, 5, 6]], [[4, 5, 6], [7, 7, 7]]]); assert!(c.is_standard_layout()); assert!(c.as_slice_memory_order().is_some()); let v = c.slice(s![.., 0..1, ..]); assert!(!v.is_standard_layout()); assert!(!v.as_slice_memory_order().is_some()); let v = c.slice(s![1..2, .., ..]); 
assert!(v.is_standard_layout()); assert!(v.as_slice_memory_order().is_some()); let v = v.reversed_axes(); assert!(!v.is_standard_layout()); assert!(v.as_slice_memory_order().is_some()); let mut v = v.reversed_axes(); v.swap_axes(1, 2); assert!(!v.is_standard_layout()); assert!(v.as_slice_memory_order().is_some()); let a = Array::<f32, _>::zeros((20, 1)); let b = Array::<f32, _>::zeros((20, 1).f()); assert!(a.as_slice().is_some()); assert!(b.as_slice().is_some()); assert!(a.as_slice_memory_order().is_some()); assert!(b.as_slice_memory_order().is_some()); let a = a.t(); let b = b.t(); assert!(a.as_slice().is_some()); assert!(b.as_slice().is_some()); assert!(a.as_slice_memory_order().is_some()); assert!(b.as_slice_memory_order().is_some()); } #[test] fn test_all_close() { let c = arr3(&[[[1., 2., 3.], [1.5, 1.5, 3.]], [[1., 2., 3.], [1., 2.5, 3.]]]); assert!(c.all_close(&aview1(&[1., 2., 3.]), 1.)); assert!(!c.all_close(&aview1(&[1., 2., 3.]), 0.1)); } #[test] fn test_swap() { let mut a = arr2(&[[1, 2, 3], [4, 5, 6], [7, 8, 9]]); let b = a.clone(); for i in 0..a.rows() { for j in i + 1..a.cols() { a.swap((i, j), (j, i)); } } assert_eq!(a, b.t()); } #[test] fn test_uswap() { let mut a = arr2(&[[1, 2, 3], [4, 5, 6], [7, 8, 9]]); let b = a.clone(); for i in 0..a.rows() { for j in i + 1..a.cols() { unsafe { a.uswap((i, j), (j, i)) }; } } assert_eq!(a, b.t()); } #[test] fn test_shape() { let data = [0, 1, 2, 3, 4, 5]; let a = Array::from_shape_vec((1, 2, 3), data.to_vec()).unwrap(); let b = Array::from_shape_vec((1, 2, 3).f(), data.to_vec()).unwrap(); let c = Array::from_shape_vec((1, 2, 3).strides((1, 3, 1)), data.to_vec()).unwrap(); println!("{:?}", a); println!("{:?}", b); println!("{:?}", c); assert_eq!(a.strides(), &[6, 3, 1]); assert_eq!(b.strides(), &[1, 1, 2]); assert_eq!(c.strides(), &[1, 3, 1]); } #[test] fn test_view_from_shape_ptr() { let data = [0, 1, 2, 3, 4, 5]; let view = unsafe { ArrayView::from_shape_ptr((2, 3), data.as_ptr()) }; assert_eq!(view, 
aview2(&[[0, 1, 2], [3, 4, 5]])); let mut data = data; let mut view = unsafe { ArrayViewMut::from_shape_ptr((2, 3), data.as_mut_ptr()) }; view[[1, 2]] = 6; assert_eq!(view, aview2(&[[0, 1, 2], [3, 4, 6]])); view[[0, 1]] = 0; assert_eq!(view, aview2(&[[0, 0, 2], [3, 4, 6]])); } #[test] fn test_default() { let a = <Array<f32, Ix2> as Default>::default(); assert_eq!(a, aview2(&[[0.0; 0]; 0])); #[derive(Default, Debug, PartialEq)] struct Foo(i32); let b = <Array<Foo, Ix0> as Default>::default(); assert_eq!(b, arr0(Foo::default())); } #[test] fn test_default_ixdyn() { let a = <Array<f32, IxDyn> as Default>::default(); let b = <Array<f32, _>>::zeros(IxDyn(&[0])); assert_eq!(a, b); } #[test] fn test_map_axis() { let a = arr2(&[[1, 2, 3], [4, 5, 6], [7, 8, 9], [10,11,12]]); let b = a.map_axis(Axis(0), |view| view.sum()); let answer1 = arr1(&[22, 26, 30]); assert_eq!(b, answer1); let c = a.map_axis(Axis(1), |view| view.sum()); let answer2 = arr1(&[6, 15, 24, 33]); assert_eq!(c, answer2); } #[test] fn test_to_vec() { let mut a = arr2(&[[1, 2, 3], [4, 5, 6], [7, 8, 9], [10,11,12]]); a.slice_collapse(s![..;-1, ..]); assert_eq!(a.row(3).to_vec(), vec![1, 2, 3]); assert_eq!(a.column(2).to_vec(), vec![12, 9, 6, 3]); a.slice_collapse(s![.., ..;-1]); assert_eq!(a.row(3).to_vec(), vec![3, 2, 1]); } #[test] fn test_array_clone_unalias() { let a = Array::<i32, _>::zeros((3, 3)); let mut b = a.clone(); b.fill(1); assert!(a != b); assert_eq!(a, Array::<_, _>::zeros((3, 3))); } #[test] fn test_array_clone_same_view() { let mut a = Array::from_iter(0..9).into_shape((3, 3)).unwrap(); a.slice_collapse(s![..;-1, ..;-1]); let b = a.clone(); assert_eq!(a, b); } #[test] fn array_macros() { // array let a1 = array![1, 2, 3]; assert_eq!(a1, arr1(&[1, 2, 3])); let a2 = array![[1, 2], [3, 4], [5, 6]]; assert_eq!(a2, arr2(&[[1, 2], [3, 4], [5, 6]])); let a3 = array![[[1, 2], [3, 4]], [[5, 6], [7, 8]]]; assert_eq!(a3, arr3(&[[[1, 2], [3, 4]], [[5, 6], [7, 8]]])); let a4 = array![[[1, 2,], [3, 4,]], 
[[5, 6,], [7, 8,],],]; // trailing commas assert_eq!(a4, arr3(&[[[1, 2], [3, 4]], [[5, 6], [7, 8]]])); let s = String::from("abc"); let a2s = array![[String::from("w"), s], [String::from("x"), String::from("y")]]; assert_eq!(a2s[[0, 0]], "w"); assert_eq!(a2s[[0, 1]], "abc"); assert_eq!(a2s[[1, 0]], "x"); assert_eq!(a2s[[1, 1]], "y"); let empty1: Array<f32, Ix1> = array![]; assert_eq!(empty1, array![]); let empty2: Array<f32, Ix2> = array![[]]; assert_eq!(empty2, array![[]]); }
28.529039
107
0.482752
22e6e9010757a0a7a018d15001f85e481e4de32a
289
use crate::ecs::*; use shrinkwraprs::Shrinkwrap; #[derive(Shrinkwrap, Debug, Copy, Clone, Eq, PartialEq)] #[shrinkwrap(mutable)] pub struct Parent(pub Entity); #[derive(Shrinkwrap, Debug, Copy, Clone, Eq, PartialEq)] #[shrinkwrap(mutable)] pub struct PreviousParent(pub Option<Entity>);
26.272727
56
0.740484
d60059ffac452add8f7480af51cf9c6daecd429c
2,123
mod agents; mod post; mod text_input; use agents::posts::{PostId, PostStore, Request}; use post::Post; use text_input::TextInput; use weblog::console_log; use yew::prelude::*; use yew_agent::utils::store::{Bridgeable, ReadOnly, StoreWrapper}; use yew_agent::Bridge; pub enum Msg { CreatePost(String), PostStoreMsg(ReadOnly<PostStore>), } pub struct Model { link: ComponentLink<Self>, post_ids: Vec<PostId>, post_store: Box<dyn Bridge<StoreWrapper<PostStore>>>, } impl Component for Model { type Message = Msg; type Properties = (); fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self { let callback = link.callback(Msg::PostStoreMsg); Self { link, post_ids: Vec::new(), post_store: PostStore::bridge(callback), } } fn update(&mut self, msg: Self::Message) -> ShouldRender { match msg { Msg::CreatePost(text) => { self.post_store.send(Request::CreatePost(text)); false } Msg::PostStoreMsg(state) => { // We can see this is logged once before we click any button. // The state of the store is sent when we open a bridge. console_log!("Received update"); let state = state.borrow(); if state.posts.len() != self.post_ids.len() { self.post_ids = state.posts.keys().copied().collect(); self.post_ids.sort_unstable(); true } else { false } } } } fn change(&mut self, _props: Self::Properties) -> ShouldRender { false } fn view(&self) -> Html { html! { <> <TextInput value="New post" onsubmit={self.link.callback(Msg::CreatePost)} /> <div> { for self.post_ids.iter().map(|&id| html!{ <Post key={id} id={id} /> }) } </div> </> } } } fn main() { yew::start_app::<Model>(); }
26.873418
94
0.523787
9b5f311cf29808d5911518886bd097ea308ab769
795
use core::num::Wrapping; pub(crate) const RNG: XorShiftRng = XorShiftRng { x: Wrapping(0x0787_3B4A), y: Wrapping(0xFAAB_8FFE), z: Wrapping(0x1745_980F), w: Wrapping(0xB0AD_B4F3), }; pub(crate) struct XorShiftRng { x: Wrapping<u32>, y: Wrapping<u32>, z: Wrapping<u32>, w: Wrapping<u32>, } impl XorShiftRng { pub(crate) fn fill(&mut self, buf: &mut [u8; 1024]) { for chunk in buf.chunks_exact_mut(4) { chunk.copy_from_slice(&self.next_u32().to_le_bytes()); } } fn next_u32(&mut self) -> u32 { let x = self.x; let t = x ^ (x << 11); self.x = self.y; self.y = self.z; self.z = self.w; let w = self.w; self.w = w ^ (w >> 19) ^ (t ^ (t >> 8)); self.w.0 } }
22.714286
66
0.533333
23af470c5e99bac21eab2bef7991e14c719de873
107,394
#![doc = "generated by AutoRust 0.1.0"] #![allow(non_camel_case_types)] #![allow(unused_imports)] use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountsListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<DatabaseAccountGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GraphResourcesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<GraphResourceGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDatabaseListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlDatabaseGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlContainerListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlContainerGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlStoredProcedureListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlStoredProcedureGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlUserDefinedFunctionListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlUserDefinedFunctionGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlTriggerListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlTriggerGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbDatabaseListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<MongoDbDatabaseGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbCollectionListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<MongoDbCollectionGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub 
struct TableListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<TableGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraKeyspaceListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<CassandraKeyspaceGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraTableListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<CassandraTableGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinDatabaseListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<GremlinDatabaseGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinGraphListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<GremlinGraphGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraViewListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<CassandraViewGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ErrorResponse { #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub message: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CloudError { #[serde(default, skip_serializing_if = "Option::is_none")] pub error: Option<ErrorResponse>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FailoverPolicies { #[serde(rename = "failoverPolicies")] pub failover_policies: Vec<FailoverPolicy>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct FailoverPolicy { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "locationName", default, skip_serializing_if = "Option::is_none")] pub location_name: 
Option<String>, #[serde(rename = "failoverPriority", default, skip_serializing_if = "Option::is_none")] pub failover_priority: Option<i32>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RegionForOnlineOffline { pub region: String, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Location { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(rename = "locationName", default, skip_serializing_if = "Option::is_none")] pub location_name: Option<String>, #[serde(rename = "documentEndpoint", default, skip_serializing_if = "Option::is_none")] pub document_endpoint: Option<String>, #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<ProvisioningState>, #[serde(rename = "failoverPriority", default, skip_serializing_if = "Option::is_none")] pub failover_priority: Option<i32>, #[serde(rename = "isZoneRedundant", default, skip_serializing_if = "Option::is_none")] pub is_zone_redundant: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ArmResourceProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<Tags>, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ArmProxyResource { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, 
skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: Option<database_account_get_results::Kind>, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<DatabaseAccountGetProperties>, #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")] pub system_data: Option<SystemData>, } pub mod database_account_get_results { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Kind { #[serde(rename = "GlobalDocumentDB")] GlobalDocumentDb, #[serde(rename = "MongoDB")] MongoDb, Parse, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ExtendedResourceProperties { #[serde(rename = "_rid", default, skip_serializing_if = "Option::is_none")] pub rid: Option<String>, #[serde(rename = "_ts", default, skip_serializing_if = "Option::is_none")] pub ts: Option<f64>, #[serde(rename = "_etag", default, skip_serializing_if = "Option::is_none")] pub etag: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ThroughputSettingsGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ThroughputSettingsGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ThroughputSettingsGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDatabaseGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, 
#[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlDatabaseGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GraphResourceGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GraphResourceGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<GraphResourceGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDatabaseGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<sql_database_get_properties::Resource>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } pub mod sql_database_get_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(flatten)] pub sql_database_resource: SqlDatabaseResource, #[serde(flatten)] pub extended_resource_properties: ExtendedResourceProperties, #[serde(rename = "_colls", default, skip_serializing_if = "Option::is_none")] pub colls: Option<String>, #[serde(rename = "_users", default, skip_serializing_if = "Option::is_none")] pub users: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlContainerGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlContainerGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlContainerGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, 
skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlStoredProcedureGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlStoredProcedureGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlStoredProcedureGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlUserDefinedFunctionGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlUserDefinedFunctionGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlUserDefinedFunctionGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlTriggerGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlTriggerGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlTriggerGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbDatabaseGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<MongoDbDatabaseGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbDatabaseGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] 
pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbCollectionGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<MongoDbCollectionGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct MongoDbCollectionGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TableGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<TableGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct TableGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraKeyspaceGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<CassandraKeyspaceGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraKeyspaceGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraTableGetResults { #[serde(flatten)] pub 
arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<CassandraTableGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraTableGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinDatabaseGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<GremlinDatabaseGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinDatabaseGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinGraphGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<GremlinGraphGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GremlinGraphGetProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraViewGetResults { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<CassandraViewGetProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CassandraViewGetProperties 
{ #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<serde_json::Value>, #[serde(default, skip_serializing_if = "Option::is_none")] pub options: Option<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ConsistencyPolicy { #[serde(rename = "defaultConsistencyLevel")] pub default_consistency_level: consistency_policy::DefaultConsistencyLevel, #[serde(rename = "maxStalenessPrefix", default, skip_serializing_if = "Option::is_none")] pub max_staleness_prefix: Option<i64>, #[serde(rename = "maxIntervalInSeconds", default, skip_serializing_if = "Option::is_none")] pub max_interval_in_seconds: Option<i32>, } pub mod consistency_policy { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum DefaultConsistencyLevel { Eventual, Session, BoundedStaleness, Strong, ConsistentPrefix, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct CorsPolicy { #[serde(rename = "allowedOrigins")] pub allowed_origins: String, #[serde(rename = "allowedMethods", default, skip_serializing_if = "Option::is_none")] pub allowed_methods: Option<String>, #[serde(rename = "allowedHeaders", default, skip_serializing_if = "Option::is_none")] pub allowed_headers: Option<String>, #[serde(rename = "exposedHeaders", default, skip_serializing_if = "Option::is_none")] pub exposed_headers: Option<String>, #[serde(rename = "maxAgeInSeconds", default, skip_serializing_if = "Option::is_none")] pub max_age_in_seconds: Option<i64>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountGetProperties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<ProvisioningState>, #[serde(rename = "documentEndpoint", default, skip_serializing_if = "Option::is_none")] pub document_endpoint: Option<String>, #[serde(rename = "databaseAccountOfferType", default, skip_serializing_if = "Option::is_none")] pub 
database_account_offer_type: Option<DatabaseAccountOfferType>, #[serde(rename = "ipRules", default, skip_serializing_if = "Option::is_none")] pub ip_rules: Option<IpRules>, #[serde(rename = "isVirtualNetworkFilterEnabled", default, skip_serializing_if = "Option::is_none")] pub is_virtual_network_filter_enabled: Option<bool>, #[serde(rename = "enableAutomaticFailover", default, skip_serializing_if = "Option::is_none")] pub enable_automatic_failover: Option<bool>, #[serde(rename = "consistencyPolicy", default, skip_serializing_if = "Option::is_none")] pub consistency_policy: Option<ConsistencyPolicy>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub capabilities: Vec<Capability>, #[serde(rename = "writeLocations", default, skip_serializing_if = "Vec::is_empty")] pub write_locations: Vec<Location>, #[serde(rename = "readLocations", default, skip_serializing_if = "Vec::is_empty")] pub read_locations: Vec<Location>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub locations: Vec<Location>, #[serde(rename = "failoverPolicies", default, skip_serializing_if = "Vec::is_empty")] pub failover_policies: Vec<FailoverPolicy>, #[serde(rename = "virtualNetworkRules", default, skip_serializing_if = "Vec::is_empty")] pub virtual_network_rules: Vec<VirtualNetworkRule>, #[serde(rename = "privateEndpointConnections", default, skip_serializing_if = "Vec::is_empty")] pub private_endpoint_connections: Vec<PrivateEndpointConnection>, #[serde(rename = "enableMultipleWriteLocations", default, skip_serializing_if = "Option::is_none")] pub enable_multiple_write_locations: Option<bool>, #[serde(rename = "enableCassandraConnector", default, skip_serializing_if = "Option::is_none")] pub enable_cassandra_connector: Option<bool>, #[serde(rename = "connectorOffer", default, skip_serializing_if = "Option::is_none")] pub connector_offer: Option<ConnectorOffer>, #[serde(rename = "disableKeyBasedMetadataWriteAccess", default, skip_serializing_if = "Option::is_none")] pub 
disable_key_based_metadata_write_access: Option<bool>, #[serde(rename = "keyVaultKeyUri", default, skip_serializing_if = "Option::is_none")] pub key_vault_key_uri: Option<String>, #[serde(rename = "defaultIdentity", default, skip_serializing_if = "Option::is_none")] pub default_identity: Option<String>, #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")] pub public_network_access: Option<PublicNetworkAccess>, #[serde(rename = "enableFreeTier", default, skip_serializing_if = "Option::is_none")] pub enable_free_tier: Option<bool>, #[serde(rename = "apiProperties", default, skip_serializing_if = "Option::is_none")] pub api_properties: Option<ApiProperties>, #[serde(rename = "enableAnalyticalStorage", default, skip_serializing_if = "Option::is_none")] pub enable_analytical_storage: Option<bool>, #[serde(rename = "analyticalStorageConfiguration", default, skip_serializing_if = "Option::is_none")] pub analytical_storage_configuration: Option<AnalyticalStorageConfiguration>, #[serde(rename = "instanceId", default, skip_serializing_if = "Option::is_none")] pub instance_id: Option<String>, #[serde(rename = "createMode", default, skip_serializing_if = "Option::is_none")] pub create_mode: Option<CreateMode>, #[serde(rename = "restoreParameters", default, skip_serializing_if = "Option::is_none")] pub restore_parameters: Option<RestoreParameters>, #[serde(rename = "backupPolicy", default, skip_serializing_if = "Option::is_none")] pub backup_policy: Option<BackupPolicy>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub cors: Vec<CorsPolicy>, #[serde(rename = "networkAclBypass", default, skip_serializing_if = "Option::is_none")] pub network_acl_bypass: Option<NetworkAclBypass>, #[serde(rename = "networkAclBypassResourceIds", default, skip_serializing_if = "Vec::is_empty")] pub network_acl_bypass_resource_ids: Vec<String>, #[serde(rename = "diagnosticLogSettings", default, skip_serializing_if = "Option::is_none")] pub 
diagnostic_log_settings: Option<DiagnosticLogSettings>, #[serde(rename = "disableLocalAuth", default, skip_serializing_if = "Option::is_none")] pub disable_local_auth: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountCreateUpdateProperties { #[serde(rename = "consistencyPolicy", default, skip_serializing_if = "Option::is_none")] pub consistency_policy: Option<ConsistencyPolicy>, pub locations: Vec<Location>, #[serde(rename = "databaseAccountOfferType")] pub database_account_offer_type: DatabaseAccountOfferType, #[serde(rename = "ipRules", default, skip_serializing_if = "Option::is_none")] pub ip_rules: Option<IpRules>, #[serde(rename = "isVirtualNetworkFilterEnabled", default, skip_serializing_if = "Option::is_none")] pub is_virtual_network_filter_enabled: Option<bool>, #[serde(rename = "enableAutomaticFailover", default, skip_serializing_if = "Option::is_none")] pub enable_automatic_failover: Option<bool>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub capabilities: Vec<Capability>, #[serde(rename = "virtualNetworkRules", default, skip_serializing_if = "Vec::is_empty")] pub virtual_network_rules: Vec<VirtualNetworkRule>, #[serde(rename = "enableMultipleWriteLocations", default, skip_serializing_if = "Option::is_none")] pub enable_multiple_write_locations: Option<bool>, #[serde(rename = "enableCassandraConnector", default, skip_serializing_if = "Option::is_none")] pub enable_cassandra_connector: Option<bool>, #[serde(rename = "connectorOffer", default, skip_serializing_if = "Option::is_none")] pub connector_offer: Option<ConnectorOffer>, #[serde(rename = "disableKeyBasedMetadataWriteAccess", default, skip_serializing_if = "Option::is_none")] pub disable_key_based_metadata_write_access: Option<bool>, #[serde(rename = "keyVaultKeyUri", default, skip_serializing_if = "Option::is_none")] pub key_vault_key_uri: Option<String>, #[serde(rename = "defaultIdentity", default, skip_serializing_if = 
"Option::is_none")] pub default_identity: Option<String>, #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")] pub public_network_access: Option<PublicNetworkAccess>, #[serde(rename = "enableFreeTier", default, skip_serializing_if = "Option::is_none")] pub enable_free_tier: Option<bool>, #[serde(rename = "apiProperties", default, skip_serializing_if = "Option::is_none")] pub api_properties: Option<ApiProperties>, #[serde(rename = "enableAnalyticalStorage", default, skip_serializing_if = "Option::is_none")] pub enable_analytical_storage: Option<bool>, #[serde(rename = "analyticalStorageConfiguration", default, skip_serializing_if = "Option::is_none")] pub analytical_storage_configuration: Option<AnalyticalStorageConfiguration>, #[serde(rename = "createMode")] pub create_mode: CreateMode, #[serde(rename = "backupPolicy", default, skip_serializing_if = "Option::is_none")] pub backup_policy: Option<BackupPolicy>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub cors: Vec<CorsPolicy>, #[serde(rename = "networkAclBypass", default, skip_serializing_if = "Option::is_none")] pub network_acl_bypass: Option<NetworkAclBypass>, #[serde(rename = "networkAclBypassResourceIds", default, skip_serializing_if = "Vec::is_empty")] pub network_acl_bypass_resource_ids: Vec<String>, #[serde(rename = "diagnosticLogSettings", default, skip_serializing_if = "Option::is_none")] pub diagnostic_log_settings: Option<DiagnosticLogSettings>, #[serde(rename = "disableLocalAuth", default, skip_serializing_if = "Option::is_none")] pub disable_local_auth: Option<bool>, #[serde(rename = "restoreParameters", default, skip_serializing_if = "Option::is_none")] pub restore_parameters: Option<RestoreParameters>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountCreateUpdateParameters { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub kind: 
Option<database_account_create_update_parameters::Kind>, #[serde(default, skip_serializing_if = "Option::is_none")] pub identity: Option<ManagedServiceIdentity>, pub properties: DatabaseAccountCreateUpdateProperties, } pub mod database_account_create_update_parameters { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Kind { #[serde(rename = "GlobalDocumentDB")] GlobalDocumentDb, #[serde(rename = "MongoDB")] MongoDb, Parse, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountUpdateProperties { #[serde(rename = "consistencyPolicy", default, skip_serializing_if = "Option::is_none")] pub consistency_policy: Option<ConsistencyPolicy>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub locations: Vec<Location>, #[serde(rename = "ipRules", default, skip_serializing_if = "Option::is_none")] pub ip_rules: Option<IpRules>, #[serde(rename = "isVirtualNetworkFilterEnabled", default, skip_serializing_if = "Option::is_none")] pub is_virtual_network_filter_enabled: Option<bool>, #[serde(rename = "enableAutomaticFailover", default, skip_serializing_if = "Option::is_none")] pub enable_automatic_failover: Option<bool>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub capabilities: Vec<Capability>, #[serde(rename = "virtualNetworkRules", default, skip_serializing_if = "Vec::is_empty")] pub virtual_network_rules: Vec<VirtualNetworkRule>, #[serde(rename = "enableMultipleWriteLocations", default, skip_serializing_if = "Option::is_none")] pub enable_multiple_write_locations: Option<bool>, #[serde(rename = "enableCassandraConnector", default, skip_serializing_if = "Option::is_none")] pub enable_cassandra_connector: Option<bool>, #[serde(rename = "connectorOffer", default, skip_serializing_if = "Option::is_none")] pub connector_offer: Option<ConnectorOffer>, #[serde(rename = "disableKeyBasedMetadataWriteAccess", default, skip_serializing_if = "Option::is_none")] pub 
disable_key_based_metadata_write_access: Option<bool>, #[serde(rename = "keyVaultKeyUri", default, skip_serializing_if = "Option::is_none")] pub key_vault_key_uri: Option<String>, #[serde(rename = "defaultIdentity", default, skip_serializing_if = "Option::is_none")] pub default_identity: Option<String>, #[serde(rename = "publicNetworkAccess", default, skip_serializing_if = "Option::is_none")] pub public_network_access: Option<PublicNetworkAccess>, #[serde(rename = "enableFreeTier", default, skip_serializing_if = "Option::is_none")] pub enable_free_tier: Option<bool>, #[serde(rename = "apiProperties", default, skip_serializing_if = "Option::is_none")] pub api_properties: Option<ApiProperties>, #[serde(rename = "enableAnalyticalStorage", default, skip_serializing_if = "Option::is_none")] pub enable_analytical_storage: Option<bool>, #[serde(rename = "analyticalStorageConfiguration", default, skip_serializing_if = "Option::is_none")] pub analytical_storage_configuration: Option<AnalyticalStorageConfiguration>, #[serde(rename = "backupPolicy", default, skip_serializing_if = "Option::is_none")] pub backup_policy: Option<BackupPolicy>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub cors: Vec<CorsPolicy>, #[serde(rename = "networkAclBypass", default, skip_serializing_if = "Option::is_none")] pub network_acl_bypass: Option<NetworkAclBypass>, #[serde(rename = "networkAclBypassResourceIds", default, skip_serializing_if = "Vec::is_empty")] pub network_acl_bypass_resource_ids: Vec<String>, #[serde(rename = "diagnosticLogSettings", default, skip_serializing_if = "Option::is_none")] pub diagnostic_log_settings: Option<DiagnosticLogSettings>, #[serde(rename = "disableLocalAuth", default, skip_serializing_if = "Option::is_none")] pub disable_local_auth: Option<bool>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseAccountUpdateParameters { #[serde(default, skip_serializing_if = "Option::is_none")] pub tags: Option<Tags>, 
#[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<ManagedServiceIdentity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<DatabaseAccountUpdateProperties>,
}
/// Read-only key pair (primary/secondary) for a database account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseAccountListReadOnlyKeysResult {
    #[serde(rename = "primaryReadonlyMasterKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_readonly_master_key: Option<String>,
    #[serde(rename = "secondaryReadonlyMasterKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_readonly_master_key: Option<String>,
}
/// Full key listing: the read-only keys are flattened into this object
/// alongside the two read-write master keys.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseAccountListKeysResult {
    #[serde(flatten)]
    pub database_account_list_read_only_keys_result: DatabaseAccountListReadOnlyKeysResult,
    #[serde(rename = "primaryMasterKey", default, skip_serializing_if = "Option::is_none")]
    pub primary_master_key: Option<String>,
    #[serde(rename = "secondaryMasterKey", default, skip_serializing_if = "Option::is_none")]
    pub secondary_master_key: Option<String>,
}
/// A single connection string plus its human-readable description.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseAccountConnectionString {
    #[serde(rename = "connectionString", default, skip_serializing_if = "Option::is_none")]
    pub connection_string: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// List wrapper returned by the connection-strings operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseAccountListConnectionStringsResult {
    #[serde(rename = "connectionStrings", default, skip_serializing_if = "Vec::is_empty")]
    pub connection_strings: Vec<DatabaseAccountConnectionString>,
}
/// Request body naming which account key to regenerate; `keyKind` is required
/// (no `default`/skip attributes).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DatabaseAccountRegenerateKeyParameters {
    #[serde(rename = "keyKind")]
    pub key_kind: database_account_regenerate_key_parameters::KeyKind,
}
pub mod
database_account_regenerate_key_parameters {
    use super::*;
    /// Which of the four account keys to regenerate (wire values are
    /// lowerCamelCase, per the renames).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KeyKind {
        #[serde(rename = "primary")]
        Primary,
        #[serde(rename = "secondary")]
        Secondary,
        #[serde(rename = "primaryReadonly")]
        PrimaryReadonly,
        #[serde(rename = "secondaryReadonly")]
        SecondaryReadonly,
    }
}
/// Offer type of the account; only `Standard` is defined.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DatabaseAccountOfferType {
    Standard,
}
/// ARM envelope for a throughput-settings update: common ARM fields are
/// flattened in; `properties` is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThroughputSettingsUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: ThroughputSettingsUpdateProperties,
}
/// Properties of a throughput-settings update; the `resource` is mandatory.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThroughputSettingsUpdateProperties {
    pub resource: ThroughputSettingsResource,
}
/// ARM create/update envelope for a graph resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphResourceCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: GraphResourceCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphResourceCreateUpdateProperties {
    pub resource: GraphResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// A graph resource is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphResource {
    pub id: String,
}
/// ARM create/update envelope for a SQL database.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlDatabaseCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: SqlDatabaseCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlDatabaseCreateUpdateProperties {
    pub resource: SqlDatabaseResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a SQL container.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlContainerCreateUpdateParameters {
    #[serde(flatten)]
pub arm_resource_properties: ArmResourceProperties,
    pub properties: SqlContainerCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlContainerCreateUpdateProperties {
    pub resource: SqlContainerResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a SQL stored procedure.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlStoredProcedureCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: SqlStoredProcedureCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlStoredProcedureCreateUpdateProperties {
    pub resource: SqlStoredProcedureResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a SQL user-defined function.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlUserDefinedFunctionCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: SqlUserDefinedFunctionCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlUserDefinedFunctionCreateUpdateProperties {
    pub resource: SqlUserDefinedFunctionResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a SQL trigger.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlTriggerCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: SqlTriggerCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlTriggerCreateUpdateProperties {
    pub resource: SqlTriggerResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a MongoDB database.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbDatabaseCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties:
ArmResourceProperties,
    pub properties: MongoDbDatabaseCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbDatabaseCreateUpdateProperties {
    pub resource: MongoDbDatabaseResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a MongoDB collection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbCollectionCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: MongoDbCollectionCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbCollectionCreateUpdateProperties {
    pub resource: MongoDbCollectionResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a table.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: TableCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableCreateUpdateProperties {
    pub resource: TableResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a Cassandra keyspace.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraKeyspaceCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: CassandraKeyspaceCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraKeyspaceCreateUpdateProperties {
    pub resource: CassandraKeyspaceResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a Cassandra table.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraTableCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties:
CassandraTableCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraTableCreateUpdateProperties {
    pub resource: CassandraTableResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a Gremlin database.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinDatabaseCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: GremlinDatabaseCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinDatabaseCreateUpdateProperties {
    pub resource: GremlinDatabaseResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a Gremlin graph.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinGraphCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: GremlinGraphCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinGraphCreateUpdateProperties {
    pub resource: GremlinGraphResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// ARM create/update envelope for a Cassandra view.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraViewCreateUpdateParameters {
    #[serde(flatten)]
    pub arm_resource_properties: ArmResourceProperties,
    pub properties: CassandraViewCreateUpdateProperties,
}
/// Required `resource` definition plus optional provisioning `options`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraViewCreateUpdateProperties {
    pub resource: CassandraViewResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<CreateUpdateOptions>,
}
/// Throughput settings: manual `throughput` or `autoscaleSettings` (service
/// behavior regarding mutual exclusivity is not visible here — confirm
/// against the REST API docs).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThroughputSettingsResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub throughput: Option<i64>,
    #[serde(rename = "autoscaleSettings", default, skip_serializing_if =
"Option::is_none")]
    pub autoscale_settings: Option<AutoscaleSettingsResource>,
    #[serde(rename = "minimumThroughput", default, skip_serializing_if = "Option::is_none")]
    pub minimum_throughput: Option<String>,
    #[serde(rename = "offerReplacePending", default, skip_serializing_if = "Option::is_none")]
    pub offer_replace_pending: Option<String>,
}
/// Autoscale settings with a mandatory `maxThroughput`; the upgrade policy
/// and target are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutoscaleSettingsResource {
    #[serde(rename = "maxThroughput")]
    pub max_throughput: i64,
    #[serde(rename = "autoUpgradePolicy", default, skip_serializing_if = "Option::is_none")]
    pub auto_upgrade_policy: Option<AutoUpgradePolicyResource>,
    #[serde(rename = "targetMaxThroughput", default, skip_serializing_if = "Option::is_none")]
    pub target_max_throughput: Option<i64>,
}
/// Wrapper holding an optional throughput auto-upgrade policy.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutoUpgradePolicyResource {
    #[serde(rename = "throughputPolicy", default, skip_serializing_if = "Option::is_none")]
    pub throughput_policy: Option<ThroughputPolicyResource>,
}
/// Throughput auto-upgrade policy: enabled flag plus increment percentage.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThroughputPolicyResource {
    #[serde(rename = "isEnabled", default, skip_serializing_if = "Option::is_none")]
    pub is_enabled: Option<bool>,
    #[serde(rename = "incrementPercent", default, skip_serializing_if = "Option::is_none")]
    pub increment_percent: Option<i64>,
}
/// Options read back from the service (manual throughput or autoscale).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OptionsResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub throughput: Option<i64>,
    #[serde(rename = "autoscaleSettings", default, skip_serializing_if = "Option::is_none")]
    pub autoscale_settings: Option<AutoscaleSettings>,
}
/// A SQL database is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlDatabaseResource {
    pub id: String,
}
/// SQL container definition: required `id` plus optional indexing, partition
/// key, TTL and policy settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlContainerResource {
    pub id: String,
    #[serde(rename = "indexingPolicy", default, skip_serializing_if = "Option::is_none")]
    pub
indexing_policy: Option<IndexingPolicy>,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<ContainerPartitionKey>,
    #[serde(rename = "defaultTtl", default, skip_serializing_if = "Option::is_none")]
    pub default_ttl: Option<i64>,
    #[serde(rename = "uniqueKeyPolicy", default, skip_serializing_if = "Option::is_none")]
    pub unique_key_policy: Option<UniqueKeyPolicy>,
    #[serde(rename = "conflictResolutionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub conflict_resolution_policy: Option<ConflictResolutionPolicy>,
    #[serde(rename = "analyticalStorageTtl", default, skip_serializing_if = "Option::is_none")]
    pub analytical_storage_ttl: Option<i64>,
}
/// Container indexing policy: mode plus included/excluded paths and
/// composite/spatial index definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IndexingPolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub automatic: Option<bool>,
    #[serde(rename = "indexingMode", default, skip_serializing_if = "Option::is_none")]
    pub indexing_mode: Option<indexing_policy::IndexingMode>,
    #[serde(rename = "includedPaths", default, skip_serializing_if = "Vec::is_empty")]
    pub included_paths: Vec<IncludedPath>,
    #[serde(rename = "excludedPaths", default, skip_serializing_if = "Vec::is_empty")]
    pub excluded_paths: Vec<ExcludedPath>,
    #[serde(rename = "compositeIndexes", default, skip_serializing_if = "Vec::is_empty")]
    pub composite_indexes: Vec<CompositePathList>,
    #[serde(rename = "spatialIndexes", default, skip_serializing_if = "Vec::is_empty")]
    pub spatial_indexes: Vec<SpatialSpec>,
}
/// Indexing modes; wire values are lowercase per the renames.
pub mod indexing_policy {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum IndexingMode {
        #[serde(rename = "consistent")]
        Consistent,
        #[serde(rename = "lazy")]
        Lazy,
        #[serde(rename = "none")]
        None,
    }
}
/// A path excluded from indexing.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExcludedPath {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Serialize,
Deserialize)]
pub struct IncludedPath {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub indexes: Vec<Indexes>,
}
/// A single index: data type, optional precision, and index kind.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Indexes {
    #[serde(rename = "dataType", default, skip_serializing_if = "Option::is_none")]
    pub data_type: Option<indexes::DataType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub precision: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<indexes::Kind>,
}
/// Enumerations used by `Indexes`; variants serialize as-is (no renames).
pub mod indexes {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum DataType {
        String,
        Number,
        Point,
        Polygon,
        LineString,
        MultiPolygon,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        Hash,
        Range,
        Spatial,
    }
}
/// One composite index is a list of paths with sort orders.
pub type CompositePathList = Vec<CompositePath>;
/// A path plus its sort order inside a composite index.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CompositePath {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub order: Option<composite_path::Order>,
}
/// Sort orders; wire values are lowercase per the renames.
pub mod composite_path {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Order {
        #[serde(rename = "ascending")]
        Ascending,
        #[serde(rename = "descending")]
        Descending,
    }
}
/// Spatial index definition: a path and the geometry types indexed there.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SpatialSpec {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub path: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub types: Vec<SpatialType>,
}
/// Geometry types supported by spatial indexes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum SpatialType {
    Point,
    LineString,
    Polygon,
    MultiPolygon,
}
/// Container partition key: the paths hashed/ranged over, plus kind/version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ContainerPartitionKey {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub paths: Vec<Path>,
    #[serde(default, skip_serializing_if =
"Option::is_none")]
    pub kind: Option<container_partition_key::Kind>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<i32>,
    #[serde(rename = "systemKey", default, skip_serializing_if = "Option::is_none")]
    pub system_key: Option<bool>,
}
/// Partition-key kinds; variants serialize as-is.
pub mod container_partition_key {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        Hash,
        Range,
        MultiHash,
    }
}
/// A JSON path within a document.
pub type Path = String;
/// Unique-key policy: the set of unique key definitions for a container.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UniqueKeyPolicy {
    #[serde(rename = "uniqueKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub unique_keys: Vec<UniqueKey>,
}
/// One unique key: the paths whose combined values must be unique.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UniqueKey {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub paths: Vec<Path>,
}
/// Conflict resolution policy: mode plus the path or stored procedure used
/// when `mode` requires one.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ConflictResolutionPolicy {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub mode: Option<conflict_resolution_policy::Mode>,
    #[serde(rename = "conflictResolutionPath", default, skip_serializing_if = "Option::is_none")]
    pub conflict_resolution_path: Option<String>,
    #[serde(rename = "conflictResolutionProcedure", default, skip_serializing_if = "Option::is_none")]
    pub conflict_resolution_procedure: Option<String>,
}
/// Conflict resolution modes; variants serialize as-is.
pub mod conflict_resolution_policy {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Mode {
        LastWriterWins,
        Custom,
    }
}
/// Stored procedure: `id` plus the (optional) JavaScript `body`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlStoredProcedureResource {
    pub id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
}
/// User-defined function: `id` plus the (optional) `body`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlUserDefinedFunctionResource {
    pub id: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
}
/// Trigger: `id`, `body`, and when/on-what it fires.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SqlTriggerResource {
    pub id: String,
    #[serde(default,
skip_serializing_if = "Option::is_none")]
    pub body: Option<String>,
    #[serde(rename = "triggerType", default, skip_serializing_if = "Option::is_none")]
    pub trigger_type: Option<sql_trigger_resource::TriggerType>,
    #[serde(rename = "triggerOperation", default, skip_serializing_if = "Option::is_none")]
    pub trigger_operation: Option<sql_trigger_resource::TriggerOperation>,
}
/// Trigger timing (`Pre`/`Post`) and the operations a trigger applies to.
pub mod sql_trigger_resource {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum TriggerType {
        Pre,
        Post,
    }
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum TriggerOperation {
        All,
        Create,
        Update,
        Delete,
        Replace,
    }
}
/// A MongoDB database is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbDatabaseResource {
    pub id: String,
}
/// MongoDB collection: `id` plus optional shard key, indexes and TTL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoDbCollectionResource {
    pub id: String,
    #[serde(rename = "shardKey", default, skip_serializing_if = "Option::is_none")]
    pub shard_key: Option<ShardKeys>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub indexes: Vec<MongoIndex>,
    #[serde(rename = "analyticalStorageTtl", default, skip_serializing_if = "Option::is_none")]
    pub analytical_storage_ttl: Option<i64>,
}
/// Shard-key map; modeled as an empty struct here (free-form on the wire —
/// confirm against the REST API spec).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ShardKeys {}
/// A MongoDB index: the keys it covers plus index options.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoIndex {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key: Option<MongoIndexKeys>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub options: Option<MongoIndexOptions>,
}
/// The list of key paths covered by a MongoDB index.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoIndexKeys {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub keys: Vec<Key>,
}
/// A single MongoDB index key path.
pub type Key = String;
/// MongoDB index options: TTL expiry and uniqueness.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MongoIndexOptions {
    #[serde(rename = "expireAfterSeconds", default, skip_serializing_if = "Option::is_none")]
    pub expire_after_seconds: Option<i64>,
    #[serde(default,
skip_serializing_if = "Option::is_none")]
    pub unique: Option<bool>,
}
/// A table is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableResource {
    pub id: String,
}
/// A Cassandra keyspace is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraKeyspaceResource {
    pub id: String,
}
/// Cassandra table: `id` plus optional TTLs and column schema.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraTableResource {
    pub id: String,
    #[serde(rename = "defaultTtl", default, skip_serializing_if = "Option::is_none")]
    pub default_ttl: Option<i64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub schema: Option<CassandraSchema>,
    #[serde(rename = "analyticalStorageTtl", default, skip_serializing_if = "Option::is_none")]
    pub analytical_storage_ttl: Option<i64>,
}
/// Cassandra table schema: columns, partition keys and clustering keys.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraSchema {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub columns: Vec<Column>,
    #[serde(rename = "partitionKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub partition_keys: Vec<CassandraPartitionKey>,
    #[serde(rename = "clusterKeys", default, skip_serializing_if = "Vec::is_empty")]
    pub cluster_keys: Vec<ClusterKey>,
}
/// A Cassandra column; `type_` maps to the JSON field "type".
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Column {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// A Cassandra partition-key column reference.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraPartitionKey {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// A Cassandra clustering key: column name plus sort order.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ClusterKey {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "orderBy", default, skip_serializing_if = "Option::is_none")]
    pub order_by: Option<String>,
}
/// A Gremlin database is identified solely by its `id`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinDatabaseResource {
    pub id:
String,
}
/// Gremlin graph: `id` plus the same optional policies as a SQL container.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GremlinGraphResource {
    pub id: String,
    #[serde(rename = "indexingPolicy", default, skip_serializing_if = "Option::is_none")]
    pub indexing_policy: Option<IndexingPolicy>,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<ContainerPartitionKey>,
    #[serde(rename = "defaultTtl", default, skip_serializing_if = "Option::is_none")]
    pub default_ttl: Option<i64>,
    #[serde(rename = "uniqueKeyPolicy", default, skip_serializing_if = "Option::is_none")]
    pub unique_key_policy: Option<UniqueKeyPolicy>,
    #[serde(rename = "conflictResolutionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub conflict_resolution_policy: Option<ConflictResolutionPolicy>,
}
/// Cassandra view: both `id` and the `viewDefinition` are required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CassandraViewResource {
    pub id: String,
    #[serde(rename = "viewDefinition")]
    pub view_definition: String,
}
/// Provisioning options sent on create/update: manual throughput or
/// autoscale settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateUpdateOptions {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub throughput: Option<i64>,
    #[serde(rename = "autoscaleSettings", default, skip_serializing_if = "Option::is_none")]
    pub autoscale_settings: Option<AutoscaleSettings>,
}
/// Client-supplied autoscale settings (just the maximum throughput).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AutoscaleSettings {
    #[serde(rename = "maxThroughput", default, skip_serializing_if = "Option::is_none")]
    pub max_throughput: Option<i64>,
}
/// A named account capability.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Capability {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// Resource tags; modeled as an empty struct here (free-form map on the
/// wire — confirm against the REST API spec).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Tags {}
/// Managed service identity attached to the account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagedServiceIdentity {
    #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")]
    pub principal_id: Option<String>,
    #[serde(rename = "tenantId",
default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<managed_service_identity::Type>,
    #[serde(rename = "userAssignedIdentities", default, skip_serializing_if = "Option::is_none")]
    pub user_assigned_identities: Option<serde_json::Value>,
}
/// Identity types; the combined variant serializes as the comma-joined
/// string "SystemAssigned,UserAssigned".
pub mod managed_service_identity {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
        UserAssigned,
        #[serde(rename = "SystemAssigned,UserAssigned")]
        SystemAssignedUserAssigned,
        None,
    }
}
/// Provisioning state reported by the service.
pub type ProvisioningState = String;
/// The account's IP firewall rule list.
pub type IpRules = Vec<IpAddressOrRange>;
/// A single IP address or CIDR range entry.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct IpAddressOrRange {
    #[serde(rename = "ipAddressOrRange", default, skip_serializing_if = "Option::is_none")]
    pub ip_address_or_range: Option<String>,
}
/// A virtual-network rule referencing a subnet resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkRule {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "ignoreMissingVNetServiceEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub ignore_missing_v_net_service_endpoint: Option<bool>,
}
/// Network ACL bypass setting.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum NetworkAclBypass {
    None,
    AzureServices,
}
/// Diagnostic log settings (full-text query logging level).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiagnosticLogSettings {
    #[serde(rename = "enableFullTextQuery", default, skip_serializing_if = "Option::is_none")]
    pub enable_full_text_query: Option<diagnostic_log_settings::EnableFullTextQuery>,
}
/// Full-text query logging levels; variants serialize as-is.
pub mod diagnostic_log_settings {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum EnableFullTextQuery {
        None,
        True,
        False,
    }
}
/// A REST operation exposed by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if =
"Option::is_none")]
    pub display: Option<operation::Display>,
}
/// Display metadata for an operation; the wire field names are
/// UpperCamelCase ("Provider", "Resource", ...), per the renames.
pub mod operation {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(rename = "Provider", default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(rename = "Resource", default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(rename = "Operation", default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(rename = "Description", default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// Paged list of operations (`value` plus optional `nextLink`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// List wrapper for usage entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsagesResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Usage>,
}
/// A usage/quota entry: unit, metric name, quota period, limit and current
/// value.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Usage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<UnitType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<MetricName>,
    #[serde(rename = "quotaPeriod", default, skip_serializing_if = "Option::is_none")]
    pub quota_period: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub limit: Option<i64>,
    #[serde(rename = "currentValue", default, skip_serializing_if = "Option::is_none")]
    pub current_value: Option<i64>,
}
/// List wrapper for per-partition usage entries.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartitionUsagesResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PartitionUsage>,
}
/// A usage entry scoped to a partition: flattens `Usage` and adds the
/// partition identifiers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartitionUsage {
    #[serde(flatten)]
    pub usage: Usage,
    #[serde(rename = "partitionId",
default, skip_serializing_if = "Option::is_none")]
    pub partition_id: Option<String>,
    #[serde(rename = "partitionKeyRangeId", default, skip_serializing_if = "Option::is_none")]
    pub partition_key_range_id: Option<String>,
}
/// List wrapper for metric definitions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDefinitionsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<MetricDefinition>,
}
/// Definition of one metric: availabilities, aggregation, unit, source
/// resource and name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDefinition {
    #[serde(rename = "metricAvailabilities", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_availabilities: Vec<MetricAvailability>,
    #[serde(rename = "primaryAggregationType", default, skip_serializing_if = "Option::is_none")]
    pub primary_aggregation_type: Option<metric_definition::PrimaryAggregationType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<UnitType>,
    #[serde(rename = "resourceUri", default, skip_serializing_if = "Option::is_none")]
    pub resource_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<MetricName>,
}
/// Aggregation types; variants serialize as-is.
pub mod metric_definition {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PrimaryAggregationType {
        None,
        Average,
        Total,
        Minimum,
        Maximum,
        Last,
    }
}
/// Granularity/retention pair describing when a metric is available.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricAvailability {
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")]
    pub time_grain: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub retention: Option<String>,
}
/// List wrapper for metrics.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Metric>,
}
/// A metric time series: window, grain, unit, name and data points.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Metric {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default,
skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")]
    pub time_grain: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<UnitType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<MetricName>,
    #[serde(rename = "metricValues", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_values: Vec<MetricValue>,
}
/// Metric name plus its localized display form.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricName {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<String>,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")]
    pub localized_value: Option<String>,
}
/// One metric data point; note `count` maps to the JSON field "_count".
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricValue {
    #[serde(rename = "_count", default, skip_serializing_if = "Option::is_none")]
    pub count: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub average: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub maximum: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub minimum: Option<f64>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub total: Option<f64>,
}
/// List wrapper for percentile metrics.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PercentileMetricListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PercentileMetric>,
}
/// A percentile metric time series (same shape as `Metric`, percentile
/// values per point).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PercentileMetric {
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")]
    pub time_grain:
Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<UnitType>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<MetricName>,
    #[serde(rename = "metricValues", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_values: Vec<PercentileMetricValue>,
}
/// A metric data point extended with P10–P99 percentile values; the base
/// `MetricValue` fields are flattened in.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PercentileMetricValue {
    #[serde(flatten)]
    pub metric_value: MetricValue,
    #[serde(rename = "P10", default, skip_serializing_if = "Option::is_none")]
    pub p10: Option<f64>,
    #[serde(rename = "P25", default, skip_serializing_if = "Option::is_none")]
    pub p25: Option<f64>,
    #[serde(rename = "P50", default, skip_serializing_if = "Option::is_none")]
    pub p50: Option<f64>,
    #[serde(rename = "P75", default, skip_serializing_if = "Option::is_none")]
    pub p75: Option<f64>,
    #[serde(rename = "P90", default, skip_serializing_if = "Option::is_none")]
    pub p90: Option<f64>,
    #[serde(rename = "P95", default, skip_serializing_if = "Option::is_none")]
    pub p95: Option<f64>,
    #[serde(rename = "P99", default, skip_serializing_if = "Option::is_none")]
    pub p99: Option<f64>,
}
/// List wrapper for per-partition metrics.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartitionMetricListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PartitionMetric>,
}
/// A metric scoped to a partition: flattens `Metric` and adds the partition
/// identifiers.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PartitionMetric {
    #[serde(flatten)]
    pub metric: Metric,
    #[serde(rename = "partitionId", default, skip_serializing_if = "Option::is_none")]
    pub partition_id: Option<String>,
    #[serde(rename = "partitionKeyRangeId", default, skip_serializing_if = "Option::is_none")]
    pub partition_key_range_id: Option<String>,
}
/// Units a metric/usage value can be expressed in.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum UnitType {
    Count,
    Bytes,
    Seconds,
    Percent,
    CountPerSecond,
    BytesPerSecond,
    Milliseconds,
}
/// Cassandra connector offer size; only `Small` is defined.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConnectorOffer {
    Small,
}
#[derive(Clone, Debug,
PartialEq, Serialize, Deserialize)] pub enum PublicNetworkAccess { Enabled, Disabled, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ApiProperties { #[serde(rename = "serverVersion", default, skip_serializing_if = "Option::is_none")] pub server_version: Option<api_properties::ServerVersion>, } pub mod api_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ServerVersion { #[serde(rename = "3.2")] _3_2, #[serde(rename = "3.6")] _3_6, #[serde(rename = "4.0")] _4_0, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct AnalyticalStorageConfiguration { #[serde(rename = "schemaType", default, skip_serializing_if = "Option::is_none")] pub schema_type: Option<AnalyticalStorageSchemaType>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum AnalyticalStorageSchemaType { WellDefined, FullFidelity, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum CreateMode { Default, Restore, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestoreParameters { #[serde(rename = "restoreMode", default, skip_serializing_if = "Option::is_none")] pub restore_mode: Option<restore_parameters::RestoreMode>, #[serde(rename = "restoreSource", default, skip_serializing_if = "Option::is_none")] pub restore_source: Option<String>, #[serde(rename = "restoreTimestampInUtc", default, skip_serializing_if = "Option::is_none")] pub restore_timestamp_in_utc: Option<String>, #[serde(rename = "databasesToRestore", default, skip_serializing_if = "Vec::is_empty")] pub databases_to_restore: Vec<DatabaseRestoreResource>, } pub mod restore_parameters { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum RestoreMode { PointInTime, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DatabaseRestoreResource { #[serde(rename = "databaseName", default, skip_serializing_if = "Option::is_none")] pub database_name: 
Option<String>, #[serde(rename = "collectionNames", default, skip_serializing_if = "Vec::is_empty")] pub collection_names: Vec<CollectionName>, } pub type CollectionName = String; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BackupPolicy { #[serde(rename = "type")] pub type_: BackupPolicyType, #[serde(rename = "migrationState", default, skip_serializing_if = "Option::is_none")] pub migration_state: Option<BackupPolicyMigrationState>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum BackupPolicyType { Periodic, Continuous, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BackupPolicyMigrationState { #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<BackupPolicyMigrationStatus>, #[serde(rename = "targetType", default, skip_serializing_if = "Option::is_none")] pub target_type: Option<BackupPolicyType>, #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")] pub start_time: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum BackupPolicyMigrationStatus { Invalid, InProgress, Completed, Failed, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PeriodicModeBackupPolicy { #[serde(flatten)] pub backup_policy: BackupPolicy, #[serde(rename = "periodicModeProperties", default, skip_serializing_if = "Option::is_none")] pub periodic_mode_properties: Option<PeriodicModeProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ContinuousModeBackupPolicy { #[serde(flatten)] pub backup_policy: BackupPolicy, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PeriodicModeProperties { #[serde(rename = "backupIntervalInMinutes", default, skip_serializing_if = "Option::is_none")] pub backup_interval_in_minutes: Option<i32>, #[serde(rename = "backupRetentionIntervalInHours", default, skip_serializing_if = "Option::is_none")] pub backup_retention_interval_in_hours: 
Option<i32>, #[serde(rename = "backupStorageRedundancy", default, skip_serializing_if = "Option::is_none")] pub backup_storage_redundancy: Option<BackupStorageRedundancy>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct LocationListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<LocationGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct LocationGetResult { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<LocationProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct LocationProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<String>, #[serde(rename = "supportsAvailabilityZone", default, skip_serializing_if = "Option::is_none")] pub supports_availability_zone: Option<bool>, #[serde(rename = "isResidencyRestricted", default, skip_serializing_if = "Option::is_none")] pub is_residency_restricted: Option<bool>, #[serde(rename = "backupStorageRedundancies", default, skip_serializing_if = "Vec::is_empty")] pub backup_storage_redundancies: Vec<BackupStorageRedundancy>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum BackupStorageRedundancy { Geo, Local, Zone, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointConnectionListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<PrivateEndpointConnection>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointConnection { #[serde(flatten)] pub proxy_resource: ProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<PrivateEndpointConnectionProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointConnectionProperties { #[serde(rename = "privateEndpoint", default, skip_serializing_if 
= "Option::is_none")] pub private_endpoint: Option<PrivateEndpointProperty>, #[serde(rename = "privateLinkServiceConnectionState", default, skip_serializing_if = "Option::is_none")] pub private_link_service_connection_state: Option<PrivateLinkServiceConnectionStateProperty>, #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")] pub group_id: Option<String>, #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateEndpointProperty { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkServiceConnectionStateProperty { #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub description: Option<String>, #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")] pub actions_required: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NotebookWorkspaceCreateUpdateParameters { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NotebookWorkspaceListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<NotebookWorkspace>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NotebookWorkspace { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<NotebookWorkspaceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NotebookWorkspaceProperties { #[serde(rename = "notebookServerEndpoint", default, skip_serializing_if = "Option::is_none")] pub notebook_server_endpoint: Option<String>, 
#[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct NotebookWorkspaceConnectionInfoResult { #[serde(rename = "authToken", default, skip_serializing_if = "Option::is_none")] pub auth_token: Option<String>, #[serde(rename = "notebookServerEndpoint", default, skip_serializing_if = "Option::is_none")] pub notebook_server_endpoint: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkResourceListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<PrivateLinkResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkResource { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<PrivateLinkResourceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct PrivateLinkResourceProperties { #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")] pub group_id: Option<String>, #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")] pub required_members: Vec<String>, #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")] pub required_zone_names: Vec<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleDefinitionResource { #[serde(rename = "roleName", default, skip_serializing_if = "Option::is_none")] pub role_name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<sql_role_definition_resource::Type>, #[serde(rename = "assignableScopes", default, skip_serializing_if = "Vec::is_empty")] pub assignable_scopes: Vec<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub permissions: Vec<Permission>, } pub mod sql_role_definition_resource { use super::*; 
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum Type { BuiltInRole, CustomRole, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleDefinitionCreateUpdateParameters { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlRoleDefinitionResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleDefinitionGetResults { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlRoleDefinitionResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Permission { #[serde(rename = "dataActions", default, skip_serializing_if = "Vec::is_empty")] pub data_actions: Vec<String>, #[serde(rename = "notDataActions", default, skip_serializing_if = "Vec::is_empty")] pub not_data_actions: Vec<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleDefinitionListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlRoleDefinitionGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleAssignmentResource { #[serde(rename = "roleDefinitionId", default, skip_serializing_if = "Option::is_none")] pub role_definition_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub scope: Option<String>, #[serde(rename = "principalId", default, skip_serializing_if = "Option::is_none")] pub principal_id: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleAssignmentCreateUpdateParameters { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<SqlRoleAssignmentResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleAssignmentGetResults { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub 
properties: Option<SqlRoleAssignmentResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlRoleAssignmentListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<SqlRoleAssignmentGetResults>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableDatabaseAccountsListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RestorableDatabaseAccountGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableDatabaseAccountGetResult { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RestorableDatabaseAccountProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableDatabaseAccountProperties { #[serde(rename = "accountName", default, skip_serializing_if = "Option::is_none")] pub account_name: Option<String>, #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")] pub creation_time: Option<String>, #[serde(rename = "deletionTime", default, skip_serializing_if = "Option::is_none")] pub deletion_time: Option<String>, #[serde(rename = "apiType", default, skip_serializing_if = "Option::is_none")] pub api_type: Option<ApiType>, #[serde(rename = "restorableLocations", default, skip_serializing_if = "Vec::is_empty")] pub restorable_locations: Vec<RestorableLocationResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ApiType { #[serde(rename = "MongoDB")] MongoDb, Gremlin, Cassandra, Table, Sql, GremlinV2, } #[derive(Clone, Debug, PartialEq, Serialize, 
Deserialize)] pub struct RestorableLocationResource { #[serde(rename = "locationName", default, skip_serializing_if = "Option::is_none")] pub location_name: Option<String>, #[serde(rename = "regionalDatabaseAccountInstanceId", default, skip_serializing_if = "Option::is_none")] pub regional_database_account_instance_id: Option<String>, #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")] pub creation_time: Option<String>, #[serde(rename = "deletionTime", default, skip_serializing_if = "Option::is_none")] pub deletion_time: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlDatabasesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RestorableSqlDatabaseGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlDatabaseGetResult { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RestorableSqlDatabaseProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlDatabaseProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<restorable_sql_database_properties::Resource>, } pub mod restorable_sql_database_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(rename = "_rid", default, skip_serializing_if = "Option::is_none")] pub rid: Option<String>, #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")] pub operation_type: Option<OperationType>, #[serde(rename = "eventTimestamp", default, skip_serializing_if = "Option::is_none")] pub event_timestamp: 
Option<String>, #[serde(rename = "ownerId", default, skip_serializing_if = "Option::is_none")] pub owner_id: Option<String>, #[serde(rename = "ownerResourceId", default, skip_serializing_if = "Option::is_none")] pub owner_resource_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub database: Option<resource::Database>, } pub mod resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Database { #[serde(flatten)] pub sql_database_resource: SqlDatabaseResource, #[serde(flatten)] pub extended_resource_properties: ExtendedResourceProperties, #[serde(rename = "_colls", default, skip_serializing_if = "Option::is_none")] pub colls: Option<String>, #[serde(rename = "_users", default, skip_serializing_if = "Option::is_none")] pub users: Option<String>, #[serde(rename = "_self", default, skip_serializing_if = "Option::is_none")] pub self_: Option<String>, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlContainersListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RestorableSqlContainerGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlContainerGetResult { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RestorableSqlContainerProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlContainerProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<restorable_sql_container_properties::Resource>, } pub mod restorable_sql_container_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub 
struct Resource { #[serde(rename = "_rid", default, skip_serializing_if = "Option::is_none")] pub rid: Option<String>, #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")] pub operation_type: Option<OperationType>, #[serde(rename = "eventTimestamp", default, skip_serializing_if = "Option::is_none")] pub event_timestamp: Option<String>, #[serde(rename = "ownerId", default, skip_serializing_if = "Option::is_none")] pub owner_id: Option<String>, #[serde(rename = "ownerResourceId", default, skip_serializing_if = "Option::is_none")] pub owner_resource_id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub container: Option<resource::Container>, } pub mod resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Container { #[serde(flatten)] pub sql_container_resource: SqlContainerResource, #[serde(flatten)] pub extended_resource_properties: ExtendedResourceProperties, #[serde(rename = "_self", default, skip_serializing_if = "Option::is_none")] pub self_: Option<String>, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableSqlResourcesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<DatabaseRestoreResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbDatabasesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RestorableMongodbDatabaseGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbDatabaseGetResult { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RestorableMongodbDatabaseProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: 
Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbDatabaseProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<restorable_mongodb_database_properties::Resource>, } pub mod restorable_mongodb_database_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(rename = "_rid", default, skip_serializing_if = "Option::is_none")] pub rid: Option<String>, #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")] pub operation_type: Option<OperationType>, #[serde(rename = "eventTimestamp", default, skip_serializing_if = "Option::is_none")] pub event_timestamp: Option<String>, #[serde(rename = "ownerId", default, skip_serializing_if = "Option::is_none")] pub owner_id: Option<String>, #[serde(rename = "ownerResourceId", default, skip_serializing_if = "Option::is_none")] pub owner_resource_id: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbCollectionsListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<RestorableMongodbCollectionGetResult>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbCollectionGetResult { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<RestorableMongodbCollectionProperties>, #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbCollectionProperties { #[serde(default, skip_serializing_if = "Option::is_none")] pub resource: Option<restorable_mongodb_collection_properties::Resource>, } pub mod 
restorable_mongodb_collection_properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(rename = "_rid", default, skip_serializing_if = "Option::is_none")] pub rid: Option<String>, #[serde(rename = "operationType", default, skip_serializing_if = "Option::is_none")] pub operation_type: Option<OperationType>, #[serde(rename = "eventTimestamp", default, skip_serializing_if = "Option::is_none")] pub event_timestamp: Option<String>, #[serde(rename = "ownerId", default, skip_serializing_if = "Option::is_none")] pub owner_id: Option<String>, #[serde(rename = "ownerResourceId", default, skip_serializing_if = "Option::is_none")] pub owner_resource_id: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RestorableMongodbResourcesListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<DatabaseRestoreResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum OperationType { Create, Replace, Delete, SystemOperation, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ContinuousBackupRestoreLocation { #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BackupInformation { #[serde(rename = "continuousBackupInformation", default, skip_serializing_if = "Option::is_none")] pub continuous_backup_information: Option<ContinuousBackupInformation>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ContinuousBackupInformation { #[serde(rename = "latestRestorableTimestamp", default, skip_serializing_if = "Option::is_none")] pub latest_restorable_timestamp: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ListClusters { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<ClusterResource>, } #[derive(Clone, Debug, PartialEq, Serialize, 
Deserialize)] pub enum ManagedCassandraProvisioningState { Creating, Updating, Deleting, Succeeded, Failed, Canceled, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterResource { #[serde(flatten)] pub arm_resource_properties: ArmResourceProperties, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<cluster_resource::Properties>, } pub mod cluster_resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<ManagedCassandraProvisioningState>, #[serde(rename = "restoreFromBackupId", default, skip_serializing_if = "Option::is_none")] pub restore_from_backup_id: Option<String>, #[serde(rename = "delegatedManagementSubnetId", default, skip_serializing_if = "Option::is_none")] pub delegated_management_subnet_id: Option<String>, #[serde(rename = "cassandraVersion", default, skip_serializing_if = "Option::is_none")] pub cassandra_version: Option<String>, #[serde(rename = "clusterNameOverride", default, skip_serializing_if = "Option::is_none")] pub cluster_name_override: Option<String>, #[serde(rename = "authenticationMethod", default, skip_serializing_if = "Option::is_none")] pub authentication_method: Option<properties::AuthenticationMethod>, #[serde(rename = "initialCassandraAdminPassword", default, skip_serializing_if = "Option::is_none")] pub initial_cassandra_admin_password: Option<String>, #[serde(rename = "hoursBetweenBackups", default, skip_serializing_if = "Option::is_none")] pub hours_between_backups: Option<i32>, #[serde(rename = "prometheusEndpoint", default, skip_serializing_if = "Option::is_none")] pub prometheus_endpoint: Option<SeedNode>, #[serde(rename = "repairEnabled", default, skip_serializing_if = "Option::is_none")] pub repair_enabled: Option<bool>, #[serde(rename = "clientCertificates", default, skip_serializing_if = 
"Vec::is_empty")] pub client_certificates: Vec<Certificate>, #[serde(rename = "externalGossipCertificates", default, skip_serializing_if = "Vec::is_empty")] pub external_gossip_certificates: Vec<Certificate>, #[serde(rename = "gossipCertificates", default, skip_serializing_if = "Vec::is_empty")] pub gossip_certificates: Vec<Certificate>, #[serde(rename = "externalSeedNodes", default, skip_serializing_if = "Vec::is_empty")] pub external_seed_nodes: Vec<SeedNode>, #[serde(rename = "seedNodes", default, skip_serializing_if = "Vec::is_empty")] pub seed_nodes: Vec<SeedNode>, } pub mod properties { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum AuthenticationMethod { None, Cassandra, } } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RepairPostBody { pub keyspace: String, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub tables: Vec<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ClusterNodeStatus { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub nodes: Vec<serde_json::Value>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct BackupResource { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<backup_resource::Properties>, } pub mod backup_resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(default, skip_serializing_if = "Option::is_none")] pub timestamp: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ListBackups { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<BackupResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ListDataCenters { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<DataCenterResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 
pub struct SeedNode { #[serde(rename = "ipAddress", default, skip_serializing_if = "Option::is_none")] pub ip_address: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Certificate { #[serde(default, skip_serializing_if = "Option::is_none")] pub pem: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DataCenterResource { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<data_center_resource::Properties>, } pub mod data_center_resource { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Properties { #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")] pub provisioning_state: Option<ManagedCassandraProvisioningState>, #[serde(rename = "dataCenterLocation", default, skip_serializing_if = "Option::is_none")] pub data_center_location: Option<String>, #[serde(rename = "delegatedSubnetId", default, skip_serializing_if = "Option::is_none")] pub delegated_subnet_id: Option<String>, #[serde(rename = "nodeCount", default, skip_serializing_if = "Option::is_none")] pub node_count: Option<i32>, #[serde(rename = "seedNodes", default, skip_serializing_if = "Vec::is_empty")] pub seed_nodes: Vec<SeedNode>, #[serde(rename = "base64EncodedCassandraYamlFragment", default, skip_serializing_if = "Option::is_none")] pub base64_encoded_cassandra_yaml_fragment: Option<String>, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ServiceResourceProperties { #[serde(rename = "creationTime", default, skip_serializing_if = "Option::is_none")] pub creation_time: Option<String>, #[serde(rename = "instanceSize", default, skip_serializing_if = "Option::is_none")] pub instance_size: Option<ServiceResourceInstanceSize>, #[serde(rename = "instanceCount", default, skip_serializing_if = "Option::is_none")] pub instance_count: Option<i32>, 
#[serde(rename = "serviceType")] pub service_type: ServiceType, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<ServiceResourceStatus>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ServiceResourceCreateUpdateParameters { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ServiceResourceCreateUpdateProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ServiceResourceCreateUpdateProperties { #[serde(rename = "instanceSize", default, skip_serializing_if = "Option::is_none")] pub instance_size: Option<ServiceResourceInstanceSize>, #[serde(rename = "instanceCount", default, skip_serializing_if = "Option::is_none")] pub instance_count: Option<i32>, #[serde(rename = "serviceType", default, skip_serializing_if = "Option::is_none")] pub service_type: Option<ServiceType>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ServiceResource { #[serde(flatten)] pub arm_proxy_resource: ArmProxyResource, #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<ServiceResourceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ServiceResourceListResult { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub value: Vec<ServiceResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DataTransferServiceResource { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<DataTransferServiceResourceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DataTransferServiceResourceProperties { #[serde(flatten)] pub service_resource_properties: ServiceResourceProperties, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub locations: Vec<DataTransferRegionalServiceResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDedicatedGatewayServiceResource { #[serde(default, 
skip_serializing_if = "Option::is_none")] pub properties: Option<SqlDedicatedGatewayServiceResourceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDedicatedGatewayServiceResourceProperties { #[serde(flatten)] pub service_resource_properties: ServiceResourceProperties, #[serde(rename = "sqlDedicatedGatewayEndpoint", default, skip_serializing_if = "Option::is_none")] pub sql_dedicated_gateway_endpoint: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub locations: Vec<SqlDedicatedGatewayRegionalServiceResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GraphApiComputeServiceResource { #[serde(default, skip_serializing_if = "Option::is_none")] pub properties: Option<GraphApiComputeServiceResourceProperties>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct GraphApiComputeServiceResourceProperties { #[serde(flatten)] pub service_resource_properties: ServiceResourceProperties, #[serde(rename = "graphApiComputeEndpoint", default, skip_serializing_if = "Option::is_none")] pub graph_api_compute_endpoint: Option<String>, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub locations: Vec<GraphApiComputeRegionalServiceResource>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct RegionalServiceResource { #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub location: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub status: Option<ServiceResourceStatus>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SqlDedicatedGatewayRegionalServiceResource { #[serde(flatten)] pub regional_service_resource: RegionalServiceResource, #[serde(rename = "sqlDedicatedGatewayEndpoint", default, skip_serializing_if = "Option::is_none")] pub sql_dedicated_gateway_endpoint: Option<String>, } #[derive(Clone, 
Debug, PartialEq, Serialize, Deserialize)] pub struct GraphApiComputeRegionalServiceResource { #[serde(flatten)] pub regional_service_resource: RegionalServiceResource, #[serde(rename = "graphApiComputeEndpoint", default, skip_serializing_if = "Option::is_none")] pub graph_api_compute_endpoint: Option<String>, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DataTransferRegionalServiceResource { #[serde(flatten)] pub regional_service_resource: RegionalServiceResource, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ServiceResourceStatus { Creating, Running, Updating, Deleting, Error, Stopped, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ServiceResourceInstanceSize { #[serde(rename = "Cosmos.D4s")] CosmosD4s, #[serde(rename = "Cosmos.D8s")] CosmosD8s, #[serde(rename = "Cosmos.D16s")] CosmosD16s, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum ServiceType { SqlDedicatedGateway, DataTransfer, #[serde(rename = "GraphAPICompute")] GraphApiCompute, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct SystemData { #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")] pub created_by: Option<String>, #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")] pub created_by_type: Option<system_data::CreatedByType>, #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")] pub created_at: Option<String>, #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")] pub last_modified_by: Option<String>, #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")] pub last_modified_by_type: Option<system_data::LastModifiedByType>, #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")] pub last_modified_at: Option<String>, } pub mod system_data { use super::*; #[derive(Clone, Debug, PartialEq, Serialize, 
Deserialize)] pub enum CreatedByType { User, Application, ManagedIdentity, Key, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum LastModifiedByType { User, Application, ManagedIdentity, Key, } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProxyResource { #[serde(flatten)] pub resource: Resource, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Resource { #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option<String>, #[serde(default, skip_serializing_if = "Option::is_none")] pub name: Option<String>, #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")] pub type_: Option<String>, }
45.123529
113
0.727955
4ab2ca3f68a68cee0caa0e4cebda77118f39c39e
1,133
use crate::client_settings::ClientSettings; impl ClientSettings { /// Passes a user configured reqwest client for the Google Maps client to /// use. This allows the you to have more control over the how the Google /// Maps client connects to the Google Maps server. /// /// [Mause](https://github.com/Mause) mentioned that this feature could be /// useful for writing tests. Thanks for the suggestion! /// /// ## Arguments /// /// * `reqwest_client` ‧ A reqwest client built using the /// `reqwest::Client::builder()` function. /// /// ## Examples: /// /// ```rust /// let client = reqwest::Client::builder() /// .user_agent("My Cool App v1.0") /// .build().unwrap(); /// /// let mut google_maps_client = ClientSettings::new("YOUR_API_KEY_HERE") /// .with_reqwest_client(reqwest_client) /// .finalize(); /// ``` pub fn with_reqwest_client( &mut self, reqwest_client: reqwest::Client, ) -> &mut ClientSettings { self.reqwest_client = Some(reqwest_client); self } // fn } // impl
30.621622
78
0.597529
1dcf1cb11ffcc17e14670fa1064c1d7b85a34fb8
32
pub mod basics; pub mod grid2d;
16
16
0.75